126 files changed, 1683 insertions, 706 deletions
diff --git a/.github/workflows/ci_frameworks.yml b/.github/workflows/ci_frameworks.yml new file mode 100644 index 0000000..682f5b9 --- /dev/null +++ b/.github/workflows/ci_frameworks.yml @@ -0,0 +1,61 @@ +name: ci_frameworks + +on: + push: + paths: + - "mesonbuild/dependencies/**" + - "test cases/frameworks/**" + - ".github/workflows/ci_frameworks.yml" + pull_request: + paths: + - "mesonbuild/dependencies/**" + - "test cases/frameworks/**" + - ".github/workflows/ci_frameworks.yml" + +jobs: + + scalapackMacOS: + runs-on: macos-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + with: + python-version: '3.x' + - run: python -m pip install -e . + - run: brew install pkg-config ninja gcc openmpi lapack scalapack + - run: meson setup "test cases/frameworks/30 scalapack" build + - run: ninja -C build + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: Scalapack_Mac_build + path: build/meson-logs/meson-log.txt + - run: meson test -C build -v + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: Scalapack_Mac_test + path: build/meson-logs/testlog.txt + + HDF5macos: + runs-on: macos-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + with: + python-version: '3.x' + - run: python -m pip install -e . + - run: brew install pkg-config ninja gcc hdf5 + - run: meson setup "test cases/frameworks/25 hdf5" build + - run: ninja -C build + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: HDF5_Mac_build + path: build/meson-logs/meson-log.txt + - run: meson test -C build -v + - uses: actions/upload-artifact@v1 + if: failure() + with: + name: HDF5_Mac_test + path: build/meson-logs/testlog.txt diff --git a/.github/workflows/frameworks.yml b/.github/workflows/frameworks.yml deleted file mode 100644 index da57514..0000000 --- a/.github/workflows/frameworks.yml +++ /dev/null @@ -1,39 +0,0 @@ -# at first, we demo HDF5 framework. More can be added. -name: ci_frameworks - -on: - push: - paths: - - "mesonbuild/dependencies/**" - - "test cases/frameworks/25 hdf5" - - ".github/workflows/frameworks.yml" - pull_request: - paths: - - "mesonbuild/dependencies/**" - - "test cases/frameworks/25 hdf5" - - ".github/workflows/frameworks.yml" - -jobs: - - macosHDF5: - runs-on: macos-latest - steps: - - uses: actions/checkout@v1 - - uses: actions/setup-python@v1 - with: - python-version: '3.x' - - run: python -m pip install -e . 
- - run: brew install pkg-config ninja gcc hdf5 - - run: meson setup "test cases/frameworks/25 hdf5" build - - run: ninja -C build - - uses: actions/upload-artifact@v1 - if: failure() - with: - name: Mac_Log - path: build/meson-logs/meson-log.txt - - run: meson test -C build -v - - uses: actions/upload-artifact@v1 - if: failure() - with: - name: Mac_Test - path: build/meson-logs/testlog.txt diff --git a/.github/workflows/lint_mypy.yml b/.github/workflows/lint_mypy.yml index d2564e0..957e6c3 100644 --- a/.github/workflows/lint_mypy.yml +++ b/.github/workflows/lint_mypy.yml @@ -30,4 +30,4 @@ jobs: with: python-version: '3.x' - run: python -m pip install mypy - - run: mypy --follow-imports=skip mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/msetup.py mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py + - run: mypy --follow-imports=skip mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/msetup.py mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py diff --git a/.github/workflows/unusedargs_missingreturn.yml b/.github/workflows/unusedargs_missingreturn.yml index a75a6d5..c190af0 100644 --- a/.github/workflows/unusedargs_missingreturn.yml +++ b/.github/workflows/unusedargs_missingreturn.yml @@ -12,14 +12,21 @@ on: - "test cases/cmake/**" - "test cases/common/**" - "test cases/fortran/**" - - "test cases/platform-linux/**" + - "test cases/linuxlike/**" + - "test cases/objc/**" + - "test cases/objcpp/**" + - "test caes/windows/**" + pull_request: paths: - ".github/workflows/unusedargs_missingreturn.yml" - "test cases/cmake/**" - "test cases/common/**" - "test cases/fortran/**" - - "test cases/platform-linux/**" + - "test cases/linuxlike/**" + - "test cases/objc/**" + - "test cases/objcpp/**" + - "test caes/windows/**" jobs: @@ -33,9 +40,26 @@ jobs: - name: Install Compilers run: | sudo apt update -yq - sudo apt install -yq --no-install-recommends g++ gfortran ninja-build - - run: python run_project_tests.py --only cmake common fortran platform-linux + sudo apt install -yq --no-install-recommends g++ gfortran ninja-build gobjc gobjc++ + - run: python run_project_tests.py --only cmake common fortran platform-linux "objective c" "objective c++" + env: + CFLAGS: "-Werror=unused-parameter -Werror=return-type -Werror=strict-prototypes" + CPPFLAGS: "-Werror=unused-parameter -Werror=return-type" + FFLAGS: "-fimplicit-none" + + windows: + runs-on: windows-latest + steps: + - uses: actions/checkout@v1 + - uses: actions/setup-python@v1 + with: + python-version: '3.x' + - run: pip install ninja + - run: python run_project_tests.py --only platform-windows env: CFLAGS: "-Werror=unused-parameter -Werror=return-type -Werror=strict-prototypes" CPPFLAGS: "-Werror=unused-parameter -Werror=return-type" FFLAGS: "-fimplicit-none" + CC: gcc + CXX: g++ + FC: gfortran diff --git a/azure-pipelines.yml b/azure-pipelines.yml index e62dde5..b7f2a77 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -28,6 +28,12 @@ jobs: backend: vs2015 steps: + - powershell: 'Invoke-WebRequest https://www.python.org/ftp/python/3.7.4/python-3.7.4.exe -OutFile c:\py3-setup.exe' + - script: | + c:\py3-setup.exe /quiet PrependPath=1 InstallAllUsers=1 Include_doc=0 Include_dev=0 Include_debug=0 TargetDir=c:\Python3 + - script: | + @echo ##vso[task.prependpath]C:\Python3 + @echo ##vso[task.prependpath]C:\Python3\Scripts - template: ci/azure-steps.yml - job: vs2017 diff --git a/ci/azure-steps.yml 
b/ci/azure-steps.yml index e33f152..ff34c45 100644 --- a/ci/azure-steps.yml +++ b/ci/azure-steps.yml @@ -51,16 +51,6 @@ steps: } if ($env:compiler -eq 'msvc2015') { - if ($env:arch -eq 'x86') { - $forcex86 = "--forcex86" - } - - # download and install python3 and add to path (since it's not installed in vs2015 image!) - Set-ExecutionPolicy Bypass -Scope Process -Force - iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) - choco install python3 -y --no-progress $forcex86 --params "/InstallDir:C:\Python3" - $env:Path = "C:\Python3;$env:Path" - # add JDK installed in vs2015 image to PATH $env:Path = "C:\java\jdk\jdk1.8.0_102\bin\;$env:Path" } diff --git a/ciimage/Dockerfile b/ciimage/Dockerfile index 6d95500..a98662c 100644 --- a/ciimage/Dockerfile +++ b/ciimage/Dockerfile @@ -27,6 +27,7 @@ RUN sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list" \ && eatmydata apt-get -y install libboost-python-dev \ && eatmydata apt-get -y install libblocksruntime-dev \ && eatmydata apt-get -y install libperl-dev \ +&& eatmydata apt-get -y install liblapack-dev libscalapack-mpi-dev \ && eatmydata dub fetch urld && dub build urld --compiler=gdc \ && eatmydata dub fetch dubtestproject \ && eatmydata dub build dubtestproject:test1 --compiler=ldc2 \ diff --git a/docs/genrelnotes.py b/docs/genrelnotes.py index e5ff432..70d8915 100755 --- a/docs/genrelnotes.py +++ b/docs/genrelnotes.py @@ -13,13 +13,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -import sys, os, subprocess +''' + Generates release notes for new releases of Meson build system +''' +import subprocess +import sys +import os from glob import glob -relnote_template = '''--- -title: Release %s -short-description: Release notes for %s +RELNOTE_TEMPLATE = '''--- +title: Release {} +short-description: Release notes for {} ... # New features @@ -28,21 +32,27 @@ short-description: Release notes for %s def add_to_sitemap(from_version, to_version): + ''' + Adds release note entry to sitemap.txt. + ''' sitemapfile = '../sitemap.txt' - sf = open(sitemapfile) - lines = sf.readlines() - sf.close() - with open(sitemapfile, 'w') as sf: + s_f = open(sitemapfile) + lines = s_f.readlines() + s_f.close() + with open(sitemapfile, 'w') as s_f: for line in lines: if 'Release-notes' in line and from_version in line: new_line = line.replace(from_version, to_version) - sf.write(new_line) - sf.write(line) + s_f.write(new_line) + s_f.write(line) def generate(from_version, to_version): - ofilename = 'Release-notes-for-%s.md' % to_version + ''' + Generate notes for Meson build next release. 
+ ''' + ofilename = 'Release-notes-for-{}.md'.format(to_version) with open(ofilename, 'w') as ofile: - ofile.write(relnote_template % (to_version, to_version)) + ofile.write(RELNOTE_TEMPLATE.format(to_version, to_version)) for snippetfile in glob('snippets/*.md'): snippet = open(snippetfile).read() ofile.write(snippet) @@ -57,7 +67,7 @@ if __name__ == '__main__': if len(sys.argv) != 3: print(sys.argv[0], 'from_version to_version') sys.exit(1) - from_version = sys.argv[1] - to_version = sys.argv[2] + FROM_VERSION = sys.argv[1] + TO_VERSION = sys.argv[2] os.chdir('markdown') - generate(from_version, to_version) + generate(FROM_VERSION, TO_VERSION) diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md index d94d487..b3cdc81 100644 --- a/docs/markdown/Cross-compilation.md +++ b/docs/markdown/Cross-compilation.md @@ -35,14 +35,24 @@ three machines are the same. Simple so far. Let's next look at the most common cross-compilation setup. Let's suppose you are on a 64 bit OSX machine and you are cross compiling a binary that will run on a 32 bit ARM Linux board. In this case your -*build machine* is 64 bit OSX and both your *host* and *target -machines* are 32 bit ARM Linux. This should be quite understandable as -well. - -It gets a bit trickier when we think about how the cross compiler was -generated. It was built and it runs on a specific platform but the -output it generates is for a different platform. In this case *build* -and *host machines* are the same, but *target machine* is different. +*build machine* is 64 bit OSX, your *host machine* is 32 bit ARM Linux +and your *target machine* is irrelevant (but defaults to the same value +as the *host machine*). This should be quite understandable as well. + +The usual mistake in this case is to call the OSX system the *host* and +the ARM Linux board the *target*. That's because these were their actual +names when the cross-compiler itself was compiled! Let's assume the +cross-compiler was created on OSX too. When that happened the *build* +and *host machines* were the same OSX and different from the ARM Linux +*target machine*. + +In a nutshell, the typical mistake assumes that the terms *build*, +*host* and *target* refer to some fixed positions whereas they're +actually relative to where the current compiler is running. Think of +*host* as a *child* of the current compiler and *target* as an optional +*grand-child*. Compilers don't change their terminology when they're +creating another compiler, that would at the very least make their user +interface much more complex. The most complicated case is when you cross-compile a cross compiler. As an example you can, on a Linux machine, generate a cross @@ -56,8 +66,8 @@ Wikipedia or the net in general. It is very common for them to get build, host and target mixed up, even in consecutive sentences, which can leave you puzzled until you figure it out. -A lot of confusion stems from the fact that when you cross-compile -something, the 3 systems (*build*, *host*, and *target*) used when +Again note that when you cross-compile something, +the 3 systems (*build*, *host*, and *target*) used when building the cross compiler don't align with the ones used when building something with that newly-built cross compiler. To take our Canadian Cross scenario from above (for full generality), since its @@ -67,8 +77,8 @@ Linux, the *host machine* of anything we build with it is *MIPS Linux*. 
Only the *target machine* of whatever we build with it can be freely chosen by us, say if we want to build another cross compiler that runs on MIPS Linux and targets Aarch64 iOS. As this example -hopefully makes clear to you, the platforms are shifted over to the -left by one position. +hopefully makes clear to you, the machine names are relative and +shifted over to the left by one position. If you did not understand all of the details, don't worry. For most people it takes a while to wrap their head around these diff --git a/docs/markdown/D.md b/docs/markdown/D.md index ed8986b..39aebc8 100644 --- a/docs/markdown/D.md +++ b/docs/markdown/D.md @@ -74,6 +74,8 @@ my_lib = library('mylib', soversion: project_soversion, d_module_versions: ['FeatureA', 'featureB'] ) + +pkgc = import('pkgconfig') pkgc.generate(name: 'mylib', libraries: my_lib, subdirs: 'd/mylib', diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index 88043d3..1c66d53 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -207,6 +207,7 @@ The output format is as follows: { "name": "The name of the dependency", "required": true, + "version": [">=1.2.3"], "conditional": false, "has_fallback": false } @@ -219,7 +220,10 @@ in the `meson.build` (all dependencies are required by default). The inside a conditional block. In a real meson run these dependencies might not be used, thus they _may_ not be required, even if the `required` key is set. The `has_fallback` key just indicates whether a fallback was directly set in the -`dependency()` function. +`dependency()` function. The `version` key always contains a list of version +requirements from the `meson.build` and **not** the actual version of the +dependency on disc. The version list is empty if no version was specified +in the `meson.build`. ## Tests diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index dd0b3f4..4f98025 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -144,6 +144,9 @@ build target (e.g. return value of [executable()](#executable), custom_target(), Abort with an error message if `condition` evaluates to `false`. +*Since 0.53.0* `message` argument is optional and defaults to print the condition +statement instead. + ### benchmark() ``` meson @@ -151,9 +154,10 @@ Abort with an error message if `condition` evaluates to `false`. ``` Creates a benchmark item that will be run when the benchmark target is -run. The behavior of this function is identical to `test` with the -exception that there is no `is_parallel` keyword, because benchmarks -are never run in parallel. +run. The behavior of this function is identical to [`test()`](#test) except for: + +* benchmark() has no `is_parallel` keyword because benchmarks are not run in parallel +* benchmark() does not automatically add the `MALLOC_PERTURB_` environment variable *Note:* Prior to 0.52.0 benchmark would warn that `depends` and `priority` were unsupported, this is incorrect @@ -701,6 +705,9 @@ Keyword arguments are the following: If the output is more complicated than that, the version checking will have to be done manually using [`run_command()`](#run_command). +- `dirs` *(since 0.53.0)* Extra list of absolute paths where to look for program + names. 
+ Meson will also autodetect scripts with a shebang line and run them with the executable/interpreter specified in it both on Windows (because the command invocator will reject the command otherwise) and @@ -1509,7 +1516,25 @@ object](#build-target-object) returned by object](#external-program-object) returned by [`find_program()`](#find_program). -Keyword arguments are the following: +By default, environment variable +[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) +is automatically set by `meson test` to a random value between 1..255. +This can help find memory leaks on configurations using glibc, +including with non-GCC compilers. However, this can have a performance impact, +and may fail a test due to external libraries whose internals are out of the +user's control. To check if this feature is causing an expected runtime crash, +disable the feature by temporarily setting environment variable +`MALLOC_PERTURB_=0`. While it's preferable to only temporarily disable this +check, if a project requires permanent disabling of this check +in meson.build do like: + +```meson +nomalloc = environment({'MALLOC_PERTURB_': '0'}) + +test(..., env: nomalloc, ...) +``` + +#### test() Keyword arguments - `args` arguments to pass to the executable diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index b96e6e1..cf0516c 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -324,8 +324,8 @@ Dictionaries -- Dictionaries are delimited by curly braces. A dictionary can contain an -arbitrary number of key value pairs. Keys are required to be literal -strings, values can be objects of any type. +arbitrary number of key value pairs. Keys are required to be strings, values can +be objects of any type. Prior to *0.53.0* keys were required to be literal strings. ```meson my_dict = {'foo': 42, 'bar': 'baz'} @@ -359,6 +359,14 @@ if 'foo' not in my_dict endif ``` +*Since 0.53.0* Keys can be any expression evaluating to a string value, not limited +to string literals any more. +```meson +d = {'a' + 'b' : 42} +k = 'cd' +d += {k : 43} +``` + Function calls -- diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index 4b21699..066b57e 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -15,8 +15,7 @@ You can add as many tests as you want. They are run with the command `ninja test Meson captures the output of all tests and writes it in the log file `meson-logs/testlog.txt`. -Test parameters --- +## Test parameters Some tests require the use of command line arguments or environment variables. These are simple to define. @@ -27,15 +26,21 @@ test('envvar test', exe2, env : ['key1=value1', 'key2=value2']) Note how you need to specify multiple values as an array. -Coverage --- +### MALLOC_PERTURB_ + +By default, environment variable +[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) +is set to a random value between 1..255. This can help find memory +leaks on configurations using glibc, including with non-GCC compilers. +This feature can be disabled as discussed in [test()](./Reference-manual#test). + +## Coverage If you enable coverage measurements by giving Meson the command line flag `-Db_coverage=true`, you can generate coverage reports. Meson will autodetect what coverage generator tools you have installed and will generate the corresponding targets. 
These targets are `coverage-xml` and `coverage-text` which are both provided by [Gcovr](http://gcovr.com) (version 3.3 or higher) and `coverage-html`, which requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and [GenHTML](https://linux.die.net/man/1/genhtml) or [Gcovr](http://gcovr.com). As a convenience, a high-level `coverage` target is also generated which will produce all 3 coverage report types, if possible. The output of these commands is written to the log directory `meson-logs` in your build directory. -Parallelism --- +## Parallelism To reduce test times, Meson will by default run multiple unit tests in parallel. It is common to have some tests which can not be run in parallel because they require unique hold on some resource such as a file or a D-Bus name. You have to specify these tests with a keyword argument. @@ -51,8 +56,7 @@ By default Meson uses as many concurrent processes as there are cores on the tes $ MESON_TESTTHREADS=5 ninja test ``` -Priorities --- +## Priorities *(added in version 0.52.0)* diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index 5cbef8a..7290a5b 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -65,6 +65,7 @@ listed in the [`meson` GitHub topic](https://github.com/topics/meson). - [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management - [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface - [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2 + - [libglvnd](https://gitlab.freedesktop.org/glvnd/libglvnd), Vendor neutral OpenGL dispatch library for Unix-like OSes - [Libhttpseverywhere](https://git.gnome.org/browse/libhttpseverywhere), a library to enable httpseverywhere on any desktop app - [libmodulemd](https://github.com/fedora-modularity/libmodulemd), a GObject Introspected library for managing [Fedora Project](https://getfedora.org/) module metadata. - [Libosmscout](https://github.com/Framstag/libosmscout), a C++ library for offline map rendering, routing and location @@ -77,6 +78,7 @@ format files - [Marker](https://github.com/fabiocolacio/Marker), a GTK-3 markdown editor - [Mesa](https://gitlab.freedesktop.org/mesa/mesa/), an open source graphics driver project - [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your system via Wifi-Display specification aka Miracast + - [mrsh](https://github.com/emersion/mrsh), a minimal POSIX shell - [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the GNOME file manager - [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment - [oomd](https://github.com/facebookincubator/oomd), a userspace Out-Of-Memory (OOM) killer for Linux systems @@ -85,9 +87,11 @@ format files - [Orc](http://cgit.freedesktop.org/gstreamer/orc/), the Optimized Inner Loop Runtime Compiler (not the default yet) - [OTS](https://github.com/khaledhosny/ots), the OpenType Sanitizer, parses and serializes OpenType files (OTF, TTF) and WOFF and WOFF2 font files, validating and sanitizing them as it goes. 
Used by Chromium and Firefox - [Outlier](https://github.com/kerolasa/outlier), a small Hello World style meson example project + - [Pacman](https://git.archlinux.org/pacman.git/tree/), a package manager for Arch Linux - [Pango](https://git.gnome.org/browse/pango/), an Internationalized text layout and rendering library (not the default yet) - [Parzip](https://github.com/jpakkane/parzip), a multithreaded reimplementation of Zip - [Peek](https://github.com/phw/peek), simple animated GIF screen recorder with an easy to use interface + - [PicoLibc](https://github.com/keith-packard/picolibc), a standard C library for small embedded systems with limited RAM - [PipeWire](https://github.com/PipeWire/pipewire), a framework for video and audio for containerized applications - [Pithos](https://github.com/pithos/pithos), a Pandora Radio client - [Pitivi](https://github.com/pitivi/pitivi/), a nonlinear video editor diff --git a/docs/markdown/Vala.md b/docs/markdown/Vala.md index 7081f8e..cbb58a9 100644 --- a/docs/markdown/Vala.md +++ b/docs/markdown/Vala.md @@ -251,7 +251,12 @@ The `find_library()` method of the Vala compiler object needs to have the `dir` keyword added to include the project VAPI directory. This is not added automatically by `add_project_arguments()`. +### Working with the Vala Preprocessor +Passing arguments to [Vala's preprocessor](https://wiki.gnome.org/Projects/Vala/Manual/Preprocessor) requires specifying the language as `c`. For example, the following statement sets the preprocessor symbol `FUSE_USE_VERSION` to the value `26`: +```meson +add_project_arguments('-DFUSE_USE_VERSION=26', language: 'c') +``` ## Building libraries diff --git a/docs/markdown/snippets/add_dictionary_variable_key.md b/docs/markdown/snippets/add_dictionary_variable_key.md index 373ce04..72294ae 100644 --- a/docs/markdown/snippets/add_dictionary_variable_key.md +++ b/docs/markdown/snippets/add_dictionary_variable_key.md @@ -1,17 +1,9 @@ -## Adding dictionary entry using string variable as key - -New dictionary entry can now be added using string variable as key, -in addition to using string literal as key. +## Dictionary entry using string variable as key +Keys can now be any expression evaluating to a string value, not limited +to string literals any more. ```meson -dict = {} - -# A variable to be used as a key -key = 'myKey' - -# Add new entry using the variable -dict += {key : 'myValue'} - -# Test that the stored value is correct -assert(dict[key] == 'myValue', 'Incorrect value retrieved from dictionary') +d = {'a' + 'b' : 42} +k = 'cd' +d += {k : 43} ``` diff --git a/docs/markdown/snippets/dist_subprojects.md b/docs/markdown/snippets/dist_subprojects.md new file mode 100644 index 0000000..cdfa070 --- /dev/null +++ b/docs/markdown/snippets/dist_subprojects.md @@ -0,0 +1,6 @@ +## meson dist --include-subprojects + +`meson dist` command line now gained `--include-subprojects` command line option. +When enabled, the source tree of all subprojects used by the current build will +also be included in the final tarball. This is useful to distribute self contained +tarball that can be built offline (i.e. `--wrap-mode=nodownload`). 
diff --git a/docs/markdown/snippets/find_program.md b/docs/markdown/snippets/find_program.md new file mode 100644 index 0000000..2bef824 --- /dev/null +++ b/docs/markdown/snippets/find_program.md @@ -0,0 +1,9 @@ +## Search directories for `find_program()` + +It is now possible to give a list of absolute paths where `find_program()` should +also search, using the `dirs` keyword argument. + +For example on Linux `/sbin` and `/usr/sbin` are not always in the `$PATH`: +```meson +prog = find_program('mytool', dirs : ['/usr/sbin', '/sbin']) +``` diff --git a/docs/markdown/snippets/introspect.md b/docs/markdown/snippets/introspect.md index 4d9fab2..097fd17 100644 --- a/docs/markdown/snippets/introspect.md +++ b/docs/markdown/snippets/introspect.md @@ -2,3 +2,6 @@ dependencies (--dependencies, intro-dependencies.json): - added the `version` key + +scanning dependencies (--scan-dependencies): +- added the `version` key containing the required dependency version diff --git a/docs/markdown/snippets/scalapack.md b/docs/markdown/snippets/scalapack.md new file mode 100644 index 0000000..03ddbd4 --- /dev/null +++ b/docs/markdown/snippets/scalapack.md @@ -0,0 +1,14 @@ +## Scalapack + +added in **0.53.0**: + +```meson +scalapack = dependency('scalapack') +``` + +Historically and through today, typical Scalapack setups have broken and incomplete pkg-config or +FindScalapack.cmake. Meson handles finding Scalapack on setups including: + +* Linux: Intel MKL or OpenMPI + Netlib +* MacOS: Intel MKL or OpenMPI + Netlib +* Windows: Intel MKL (OpenMPI not available on Windows)
\ No newline at end of file @@ -40,7 +40,7 @@ def list_projects(): return 0 def unpack(sproj, branch, outdir): - subprocess.check_call(['git', 'clone', '-b', branch, 'https://github.com/mesonbuild/%s.git' % sproj, outdir]) + subprocess.check_call(['git', 'clone', '-b', branch, 'https://github.com/mesonbuild/{}.git'.format(sproj), outdir]) usfile = os.path.join(outdir, 'upstream.wrap') assert(os.path.isfile(usfile)) config = configparser.ConfigParser() @@ -82,7 +82,7 @@ def install(sproj): if os.path.isdir(sproj_dir): print('Subproject is already there. To update, nuke the dir and reinstall.') return 1 - blist = gh_get('https://api.github.com/repos/mesonbuild/%s/branches' % sproj) + blist = gh_get('https://api.github.com/repos/mesonbuild/{}/branches'.format(sproj)) blist = [b['name'] for b in blist] blist = [b for b in blist if b != 'master'] blist.sort() diff --git a/manual tests/1 wrap/main.c b/manual tests/1 wrap/main.c index 39d3a9a..df6abe4 100644 --- a/manual tests/1 wrap/main.c +++ b/manual tests/1 wrap/main.c @@ -1,7 +1,7 @@ #include<sqlite3.h> #include<stdio.h> -int main(int argc, char **argv) { +int main(void) { sqlite3 *db; if(sqlite3_open(":memory:", &db) != SQLITE_OK) { printf("Sqlite failed.\n"); diff --git a/manual tests/10 svn wrap/prog.c b/manual tests/10 svn wrap/prog.c index df38000..6e2c4d8 100644 --- a/manual tests/10 svn wrap/prog.c +++ b/manual tests/10 svn wrap/prog.c @@ -1,6 +1,6 @@ #include"subproj.h" -int main(int argc, char **argv) { +int main(void) { subproj_function(); return 0; } diff --git a/manual tests/11 wrap imposter/meson.build b/manual tests/11 wrap imposter/meson.build new file mode 100644 index 0000000..d0575ac --- /dev/null +++ b/manual tests/11 wrap imposter/meson.build @@ -0,0 +1,8 @@ +project('evil URL') +# showing that new Meson wrap.py code tries to stop imposter WrapDB URLs +# a WrapException is raised. +# +# ERROR: https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip may be a WrapDB-impersonating URL +# + +subproject('zlib')
\ No newline at end of file diff --git a/manual tests/11 wrap imposter/subprojects/zlib.wrap b/manual tests/11 wrap imposter/subprojects/zlib.wrap new file mode 100644 index 0000000..b88f8f2 --- /dev/null +++ b/manual tests/11 wrap imposter/subprojects/zlib.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = zlib-1.2.8 + +source_url = https://zlib.net/zlib-1.2.11.tar.gz +source_filename = zlib-1.2.11.tar.gz +source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1 + +patch_url = https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip +patch_filename = zlib-1.2.11-4-wrap.zip +patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4
\ No newline at end of file diff --git a/manual tests/12 wrap mirror/meson.build b/manual tests/12 wrap mirror/meson.build new file mode 100644 index 0000000..6645bdf --- /dev/null +++ b/manual tests/12 wrap mirror/meson.build @@ -0,0 +1,4 @@ +project('downloader') +# this test will timeout, showing that a subdomain isn't caught as masquarading url + +subproject('zlib') diff --git a/manual tests/12 wrap mirror/subprojects/zlib.wrap b/manual tests/12 wrap mirror/subprojects/zlib.wrap new file mode 100644 index 0000000..de0b9ad --- /dev/null +++ b/manual tests/12 wrap mirror/subprojects/zlib.wrap @@ -0,0 +1,10 @@ +[wrap-file] +directory = zlib-1.2.8 + +source_url = https://zlib.net/zlib-1.2.11.tar.gz +source_filename = zlib-1.2.11.tar.gz +source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1 + +patch_url = https://mirror1.wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip +patch_filename = zlib-1.2.11-4-wrap.zip +patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4
\ No newline at end of file diff --git a/manual tests/3 git wrap/prog.c b/manual tests/3 git wrap/prog.c index df38000..6e2c4d8 100644 --- a/manual tests/3 git wrap/prog.c +++ b/manual tests/3 git wrap/prog.c @@ -1,6 +1,6 @@ #include"subproj.h" -int main(int argc, char **argv) { +int main(void) { subproj_function(); return 0; } diff --git a/manual tests/4 standalone binaries/myapp.cpp b/manual tests/4 standalone binaries/myapp.cpp index 5acde46..8ddff27 100644 --- a/manual tests/4 standalone binaries/myapp.cpp +++ b/manual tests/4 standalone binaries/myapp.cpp @@ -3,7 +3,7 @@ #include<iostream> #include<string> -int main(int argc, char *argv[]) { +int main(void) { SDL_Surface *screenSurface; SDL_Event e; int keepGoing = 1; diff --git a/manual tests/5 rpm/main.c b/manual tests/5 rpm/main.c index 5c46721..8b1d193 100644 --- a/manual tests/5 rpm/main.c +++ b/manual tests/5 rpm/main.c @@ -1,6 +1,6 @@ #include<lib.h> #include<stdio.h> -int main(int argc, char **argv) +int main(void) { char *t = meson_print(); printf("%s", t); diff --git a/manual tests/6 hg wrap/prog.c b/manual tests/6 hg wrap/prog.c index df38000..6e2c4d8 100644 --- a/manual tests/6 hg wrap/prog.c +++ b/manual tests/6 hg wrap/prog.c @@ -1,6 +1,6 @@ #include"subproj.h" -int main(int argc, char **argv) { +int main(void) { subproj_function(); return 0; } diff --git a/manual tests/8 timeout/sleepprog.c b/manual tests/8 timeout/sleepprog.c index e371482..8875e12 100644 --- a/manual tests/8 timeout/sleepprog.c +++ b/manual tests/8 timeout/sleepprog.c @@ -1,6 +1,6 @@ #include<unistd.h> -int main(int argc, char **argv) { +int main(void) { sleep(1000); return 0; } diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py index eb9517c..709dbac 100644 --- a/mesonbuild/ast/introspection.py +++ b/mesonbuild/ast/introspection.py @@ -136,11 +136,15 @@ class IntrospectionInterpreter(AstInterpreter): def func_dependency(self, node, args, kwargs): args = self.flatten_args(args) + kwargs = self.flatten_kwargs(kwargs) if not args: return name = args[0] has_fallback = 'fallback' in kwargs required = kwargs.get('required', True) + version = kwargs.get('version', []) + if not isinstance(version, list): + version = [version] condition_level = node.condition_level if hasattr(node, 'condition_level') else 0 if isinstance(required, ElementaryNode): required = required.value @@ -149,9 +153,10 @@ class IntrospectionInterpreter(AstInterpreter): self.dependencies += [{ 'name': name, 'required': required, + 'version': version, 'has_fallback': has_fallback, 'conditional': condition_level > 0, - 'node': node + 'node': node, }] def build_target(self, node, args, kwargs, targetclass): diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py index c6fb91a..f245a36 100644 --- a/mesonbuild/ast/printer.py +++ b/mesonbuild/ast/printer.py @@ -100,7 +100,7 @@ class AstPrinter(AstVisitor): def visit_ComparisonNode(self, node: mparser.ComparisonNode): node.left.accept(self) - self.append_padded(mparser.comparison_map[node.ctype], node) + self.append_padded(node.ctype, node) node.right.accept(self) def visit_ArithmeticNode(self, node: mparser.ArithmeticNode): @@ -192,7 +192,10 @@ class AstPrinter(AstVisitor): if break_args: self.newline() for key, val in node.kwargs.items(): - self.append(key, node) + if isinstance(key, str): + self.append(key, node) + else: + key.accept(self) self.append_padded(':', node) val.accept(self) self.append(', ', node) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 
5630212..824b958 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -769,7 +769,6 @@ int dummy; elem = NinjaBuildElement(self.all_outputs, target_name, 'phony', []) elem.add_dep(deps) - cmd = self.replace_paths(target, cmd) self.add_build(elem) self.processed_targets[target.get_id()] = True @@ -1018,7 +1017,6 @@ int dummy; generated_sources = self.get_target_generated_sources(target) generated_rel_srcs = [] for rel_src in generated_sources.keys(): - dirpart, fnamepart = os.path.split(rel_src) if rel_src.lower().endswith('.cs'): generated_rel_srcs.append(os.path.normpath(rel_src)) deps.append(os.path.normpath(rel_src)) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 35bc450..645db24 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -355,6 +355,26 @@ a hard error in the future.''' % name) if not hasattr(self, 'typename'): raise RuntimeError('Target type is not set for target class "{}". This is a bug'.format(type(self).__name__)) + def __lt__(self, other: typing.Any) -> typing.Union[bool, 'NotImplemented']: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() < other.get_id() + + def __le__(self, other: typing.Any) -> typing.Union[bool, 'NotImplemented']: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() <= other.get_id() + + def __gt__(self, other: typing.Any) -> typing.Union[bool, 'NotImplemented']: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() > other.get_id() + + def __ge__(self, other: typing.Any) -> typing.Union[bool, 'NotImplemented']: + if not hasattr(other, 'get_id') and not callable(other.get_id): + return NotImplemented + return self.get_id() >= other.get_id() + def get_install_dir(self, environment): # Find the installation directory. default_install_dir = self.get_default_install_dir(environment) @@ -411,7 +431,7 @@ a hard error in the future.''' % name) return self.construct_id_from_path( self.subdir, self.name, self.type_suffix()) - def process_kwargs(self, kwargs): + def process_kwargs_base(self, kwargs): if 'build_by_default' in kwargs: self.build_by_default = kwargs['build_by_default'] if not isinstance(self.build_by_default, bool): @@ -488,9 +508,6 @@ class BuildTarget(Target): self.validate_install(environment) self.check_module_linking() - def __lt__(self, other): - return self.get_id() < other.get_id() - def __repr__(self): repr_str = "<{0} {1}: {2}>" return repr_str.format(self.__class__.__name__, self.get_id(), self.filename) @@ -789,7 +806,7 @@ class BuildTarget(Target): return self.install_mode def process_kwargs(self, kwargs, environment): - super().process_kwargs(kwargs) + self.process_kwargs_base(kwargs) self.copy_kwargs(kwargs) kwargs.get('modules', []) self.need_install = kwargs.get('install', self.need_install) @@ -1106,7 +1123,7 @@ You probably should put it in link_with instead.''') msg += "Use the 'pic' option to static_library to build with PIC." 
raise InvalidArguments(msg) if self.for_machine is not t.for_machine: - msg = 'Tried to mix libraries for machines {1} and {2} in target {!r}'.format(self.name, self.for_machine, t.for_machine) + msg = 'Tried to mix libraries for machines {1} and {2} in target {0!r}'.format(self.name, self.for_machine, t.for_machine) if self.environment.is_cross_build(): raise InvalidArguments(msg + ' This is not possible in a cross build.') else: @@ -2005,9 +2022,6 @@ class CustomTarget(Target): def get_default_install_dir(self, environment): return None - def __lt__(self, other): - return self.get_id() < other.get_id() - def __repr__(self): repr_str = "<{0} {1}: {2}>" return repr_str.format(self.__class__.__name__, self.get_id(), self.command) @@ -2068,7 +2082,7 @@ class CustomTarget(Target): return final_cmd def process_kwargs(self, kwargs, backend): - super().process_kwargs(kwargs) + self.process_kwargs_base(kwargs) self.sources = extract_as_list(kwargs, 'input', unholder=True) if 'output' not in kwargs: raise InvalidArguments('Missing keyword argument "output".') @@ -2245,13 +2259,13 @@ class RunTarget(Target): self.args = args self.dependencies = dependencies - def __lt__(self, other): - return self.get_id() < other.get_id() - def __repr__(self): repr_str = "<{0} {1}: {2}>" return repr_str.format(self.__class__.__name__, self.get_id(), self.command) + def process_kwargs(self, kwargs): + return self.process_kwargs_base(kwargs) + def get_dependencies(self): return self.dependencies diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index b7e341d..4300656 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -15,16 +15,21 @@ # This class contains the basic functionality needed to run any interpreter # or an interpreter-based tool. +import subprocess +from pathlib import Path +from typing import List, Tuple, Optional, TYPE_CHECKING +import re +import os +import shutil +import ctypes + from .. import mlog, mesonlib from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice from ..environment import Environment -from typing import List, Tuple, Optional, TYPE_CHECKING - if TYPE_CHECKING: from ..dependencies.base import ExternalProgram -import re, os, shutil, ctypes class CMakeExecutor: # The class's copy of the CMake path. 
Avoids having to search for it @@ -133,8 +138,12 @@ class CMakeExecutor: def _call_real(self, args: List[str], build_dir: str, env) -> Tuple[int, str, str]: os.makedirs(build_dir, exist_ok=True) cmd = self.cmakebin.get_command() + args - p, out, err = Popen_safe(cmd, env=env, cwd=build_dir) - rc = p.returncode + ret = subprocess.run(cmd, env=env, cwd=build_dir, close_fds=False, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + universal_newlines=False) + rc = ret.returncode + out = ret.stdout.decode(errors='ignore') + err = ret.stderr.decode(errors='ignore') call = ' '.join(cmd) mlog.debug("Called `{}` in {} -> {}".format(call, build_dir, rc)) return rc, out, err @@ -167,7 +176,7 @@ class CMakeExecutor: fallback = os.path.realpath(__file__) # A file used as a fallback wehen everything else fails compilers = self.environment.coredata.compilers[MachineChoice.BUILD] - def make_abs(exe: str, lang: str): + def make_abs(exe: str, lang: str) -> str: if os.path.isabs(exe): return exe @@ -177,7 +186,7 @@ class CMakeExecutor: p = fallback return p - def choose_compiler(lang: str): + def choose_compiler(lang: str) -> Tuple[str, str]: exe_list = [] if lang in compilers: exe_list = compilers[lang].get_exelist() @@ -196,27 +205,32 @@ class CMakeExecutor: c_comp, c_launcher = choose_compiler('c') cxx_comp, cxx_launcher = choose_compiler('cpp') + try: + fortran_comp, fortran_launcher = choose_compiler('fortran') + except Exception: + fortran_comp = fortran_launcher = '' # on Windows, choose_compiler returns path with \ as separator - replace by / before writing to CMAKE file c_comp = c_comp.replace('\\', '/') c_launcher = c_launcher.replace('\\', '/') cxx_comp = cxx_comp.replace('\\', '/') cxx_launcher = cxx_launcher.replace('\\', '/') + fortran_comp = fortran_comp.replace('\\', '/') + fortran_launcher = fortran_launcher.replace('\\', '/') # Reset the CMake cache - with open('{}/CMakeCache.txt'.format(build_dir), 'w') as fp: - fp.write('CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1\n') + (Path(build_dir) / 'CMakeCache.txt').write_text('CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1\n') # Fake the compiler files - comp_dir = '{}/CMakeFiles/{}'.format(build_dir, self.cmakevers) - os.makedirs(comp_dir, exist_ok=True) + comp_dir = Path(build_dir) / 'CMakeFiles' / self.cmakevers + comp_dir.mkdir(parents=True, exist_ok=True) - c_comp_file = '{}/CMakeCCompiler.cmake'.format(comp_dir) - cxx_comp_file = '{}/CMakeCXXCompiler.cmake'.format(comp_dir) + c_comp_file = comp_dir / 'CMakeCCompiler.cmake' + cxx_comp_file = comp_dir / 'CMakeCXXCompiler.cmake' + fortran_comp_file = comp_dir / 'CMakeFortranCompiler.cmake' - if not os.path.exists(c_comp_file): - with open(c_comp_file, 'w') as fp: - fp.write('''# Fake CMake file to skip the boring and slow stuff + if not c_comp_file.is_file(): + c_comp_file.write_text('''# Fake CMake file to skip the boring and slow stuff set(CMAKE_C_COMPILER "{}") # Should be a valid compiler for try_compile, etc. 
set(CMAKE_C_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) set(CMAKE_C_COMPILER_ID "GNU") # Pretend we have found GCC @@ -229,9 +243,8 @@ set(CMAKE_C_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC) set(CMAKE_SIZEOF_VOID_P "{}") '''.format(c_comp, c_launcher, ctypes.sizeof(ctypes.c_voidp))) - if not os.path.exists(cxx_comp_file): - with open(cxx_comp_file, 'w') as fp: - fp.write('''# Fake CMake file to skip the boring and slow stuff + if not cxx_comp_file.is_file(): + cxx_comp_file.write_text('''# Fake CMake file to skip the boring and slow stuff set(CMAKE_CXX_COMPILER "{}") # Should be a valid compiler for try_compile, etc. set(CMAKE_CXX_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) set(CMAKE_CXX_COMPILER_ID "GNU") # Pretend we have found GCC @@ -244,6 +257,20 @@ set(CMAKE_CXX_SOURCE_FILE_EXTENSIONS C;M;c++;cc;cpp;cxx;mm;CPP) set(CMAKE_SIZEOF_VOID_P "{}") '''.format(cxx_comp, cxx_launcher, ctypes.sizeof(ctypes.c_voidp))) + if fortran_comp and not fortran_comp_file.is_file(): + fortran_comp_file.write_text('''# Fake CMake file to skip the boring and slow stuff +set(CMAKE_Fortran_COMPILER "{}") # Should be a valid compiler for try_compile, etc. +set(CMAKE_Fortran_COMPILER_LAUNCHER "{}") # The compiler launcher (if presentt) +set(CMAKE_Fortran_COMPILER_ID "GNU") # Pretend we have found GCC +set(CMAKE_COMPILER_IS_GNUG77 1) +set(CMAKE_Fortran_COMPILER_LOADED 1) +set(CMAKE_Fortran_COMPILER_WORKS TRUE) +set(CMAKE_Fortran_ABI_COMPILED TRUE) +set(CMAKE_Fortran_IGNORE_EXTENSIONS h;H;o;O;obj;OBJ;def;DEF;rc;RC) +set(CMAKE_Fortran_SOURCE_FILE_EXTENSIONS f;F;fpp;FPP;f77;F77;f90;F90;for;For;FOR;f95;F95) +set(CMAKE_SIZEOF_VOID_P "{}") +'''.format(fortran_comp, fortran_launcher, ctypes.sizeof(ctypes.c_voidp))) + return self.call(args, build_dir, env) def found(self) -> bool: diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index cb0416d..d9f1e18 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -199,8 +199,6 @@ class OutputTargetMap: return '__art_{}__'.format(os.path.basename(fname)) class ConverterTarget: - lang_cmake_to_meson = {val.lower(): key for key, val in language_map.items()} - def __init__(self, target: CMakeTarget, env: Environment): self.env = env self.artifacts = target.artifacts @@ -240,7 +238,8 @@ class ConverterTarget: for i in target.files: # Determine the meson language - lang = ConverterTarget.lang_cmake_to_meson.get(i.language.lower(), 'c') + lang_cmake_to_meson = {val.lower(): key for key, val in language_map.items()} + lang = lang_cmake_to_meson.get(i.language.lower(), 'c') if lang not in self.languages: self.languages += [lang] if lang not in self.compile_opts: diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 818c62c..eedd4cf 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -13,8 +13,8 @@ # limitations under the License. import contextlib, os.path, re, tempfile +import collections.abc import typing -from typing import Optional, Tuple, List from ..linkers import StaticLinker, GnuLikeDynamicLinkerMixin, SolarisDynamicLinker from .. import coredata @@ -386,11 +386,12 @@ class RunResult: self.stdout = stdout self.stderr = stderr -class CompilerArgs(list): +class CompilerArgs(typing.MutableSequence[str]): ''' - Class derived from list() that manages a list of compiler arguments. Should - be used while constructing compiler arguments from various sources. 
Can be - operated with ordinary lists, so this does not need to be used everywhere. + List-like class that manages a list of compiler arguments. Should be used + while constructing compiler arguments from various sources. Can be + operated with ordinary lists, so this does not need to be used + everywhere. All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc) and can converted to the native type of each compiler by using the @@ -439,37 +440,45 @@ class CompilerArgs(list): # In generate_link() we add external libs without de-dup, but we must # *always* de-dup these because they're special arguments to the linker always_dedup_args = tuple('-l' + lib for lib in unixy_compiler_internal_libs) - compiler = None - - def _check_args(self, args): - cargs = [] - if len(args) > 2: - raise TypeError("CompilerArgs() only accepts at most 2 arguments: " - "The compiler, and optionally an initial list") - elif not args: - return cargs - elif len(args) == 1: - if isinstance(args[0], (Compiler, StaticLinker)): - self.compiler = args[0] - else: - raise TypeError("you must pass a Compiler instance as one of " - "the arguments") - elif len(args) == 2: - if isinstance(args[0], (Compiler, StaticLinker)): - self.compiler = args[0] - cargs = args[1] - elif isinstance(args[1], (Compiler, StaticLinker)): - cargs = args[0] - self.compiler = args[1] - else: - raise TypeError("you must pass a Compiler instance as one of " - "the two arguments") - else: - raise AssertionError('Not reached') - return cargs - def __init__(self, *args): - super().__init__(self._check_args(args)) + def __init__(self, compiler: typing.Union['Compiler', StaticLinker], + iterable: typing.Optional[typing.Iterable[str]] = None): + self.compiler = compiler + self.__container = list(iterable) if iterable is not None else [] # type: typing.List[str] + + @typing.overload + def __getitem__(self, index: int) -> str: + pass + + @typing.overload + def __getitem__(self, index: slice) -> typing.List[str]: + pass + + def __getitem__(self, index): + return self.__container[index] + + @typing.overload + def __setitem__(self, index: int, value: str) -> None: + pass + + @typing.overload + def __setitem__(self, index: slice, value: typing.List[str]) -> None: + pass + + def __setitem__(self, index, value) -> None: + self.__container[index] = value + + def __delitem__(self, index: typing.Union[int, slice]) -> None: + del self.__container[index] + + def __len__(self) -> int: + return len(self.__container) + + def insert(self, index: int, value: str) -> None: + self.__container.insert(index, value) + + def copy(self) -> 'CompilerArgs': + return CompilerArgs(self.compiler, self.__container.copy()) @classmethod def _can_dedup(cls, arg): @@ -534,7 +543,7 @@ class CompilerArgs(list): # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which # all act like (or are) gnu ld # TODO: this could probably be added to the DynamicLinker instead - if (hasattr(self.compiler, 'linker') and + if (isinstance(self.compiler, Compiler) and self.compiler.linker is not None and isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))): group_start = -1 @@ -554,7 +563,7 @@ class CompilerArgs(list): # Remove system/default include paths added with -isystem if hasattr(self.compiler, 'get_default_include_dirs'): default_dirs = self.compiler.get_default_include_dirs() - bad_idx_list = [] + bad_idx_list = [] # type: typing.List[int] for i, each in enumerate(new): # Remove the -isystem and the path if the path is a default path if (each 
== '-isystem' and @@ -567,9 +576,9 @@ class CompilerArgs(list): bad_idx_list += [i] for i in reversed(bad_idx_list): new.pop(i) - return self.compiler.unix_args_to_native(new) + return self.compiler.unix_args_to_native(new.__container) - def append_direct(self, arg): + def append_direct(self, arg: str) -> None: ''' Append the specified argument without any reordering or de-dup except for absolute paths to libraries, etc, which can always be de-duped @@ -578,9 +587,9 @@ class CompilerArgs(list): if os.path.isabs(arg): self.append(arg) else: - super().append(arg) + self.__container.append(arg) - def extend_direct(self, iterable): + def extend_direct(self, iterable: typing.Iterable[str]) -> None: ''' Extend using the elements in the specified iterable without any reordering or de-dup except for absolute paths where the order of @@ -589,7 +598,7 @@ class CompilerArgs(list): for elem in iterable: self.append_direct(elem) - def extend_preserving_lflags(self, iterable): + def extend_preserving_lflags(self, iterable: typing.Iterable[str]) -> None: normal_flags = [] lflags = [] for i in iterable: @@ -600,20 +609,20 @@ class CompilerArgs(list): self.extend(normal_flags) self.extend_direct(lflags) - def __add__(self, args): - new = CompilerArgs(self, self.compiler) + def __add__(self, args: typing.Iterable[str]) -> 'CompilerArgs': + new = self.copy() new += args return new - def __iadd__(self, args): + def __iadd__(self, args: typing.Iterable[str]) -> 'CompilerArgs': ''' Add two CompilerArgs while taking into account overriding of arguments and while preserving the order of arguments as much as possible ''' - pre = [] - post = [] - if not isinstance(args, list): - raise TypeError('can only concatenate list (not "{}") to list'.format(args)) + pre = [] # type: typing.List[str] + post = [] # type: typing.List[str] + if not isinstance(args, collections.abc.Iterable): + raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) for arg in args: # If the argument can be de-duped, do it either by removing the # previous occurrence of it and adding a new one, or not adding the @@ -638,29 +647,31 @@ class CompilerArgs(list): # Insert at the beginning self[:0] = pre # Append to the end - super().__iadd__(post) + self.__container += post return self - def __radd__(self, args): - new = CompilerArgs(args, self.compiler) + def __radd__(self, args: typing.Iterable[str]): + new = CompilerArgs(self.compiler, args) new += self return new - def __mul__(self, args): - raise TypeError("can't multiply compiler arguments") - - def __imul__(self, args): - raise TypeError("can't multiply compiler arguments") + def __eq__(self, other: typing.Any) -> typing.Union[bool, 'NotImplemented']: + # Only allow equality checks against other CompilerArgs and lists instances + if isinstance(other, CompilerArgs): + return self.compiler == other.compiler and self.__container == other.__container + elif isinstance(other, list): + return self.__container == other + return NotImplemented - def __rmul__(self, args): - raise TypeError("can't multiply compiler arguments") - - def append(self, arg): + def append(self, arg: str) -> None: self.__iadd__([arg]) - def extend(self, args): + def extend(self, args: typing.Iterable[str]) -> None: self.__iadd__(args) + def __repr__(self) -> str: + return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container) + class Compiler: # Libraries to ignore in find_library() since they are provided by the # compiler or the C library. Currently only used for MSVC. 
@@ -726,7 +737,7 @@ class Compiler: def get_default_suffix(self) -> str: return self.default_suffix - def get_define(self, dname, prefix, env, extra_args, dependencies) -> Tuple[str, bool]: + def get_define(self, dname, prefix, env, extra_args, dependencies) -> typing.Tuple[str, bool]: raise EnvironmentException('%s does not support get_define ' % self.get_id()) def compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies) -> int: @@ -735,10 +746,12 @@ class Compiler: def compute_parameters_with_absolute_paths(self, parameter_list, build_dir): raise EnvironmentException('%s does not support compute_parameters_with_absolute_paths ' % self.get_id()) - def has_members(self, typename, membernames, prefix, env, *, extra_args=None, dependencies=None) -> Tuple[bool, bool]: + def has_members(self, typename, membernames, prefix, env, *, + extra_args=None, dependencies=None) -> typing.Tuple[bool, bool]: raise EnvironmentException('%s does not support has_member(s) ' % self.get_id()) - def has_type(self, typename, prefix, env, extra_args, *, dependencies=None) -> Tuple[bool, bool]: + def has_type(self, typename, prefix, env, extra_args, *, + dependencies=None) -> typing.Tuple[bool, bool]: raise EnvironmentException('%s does not support has_type ' % self.get_id()) def symbols_have_underscore_prefix(self, env) -> bool: @@ -801,7 +814,7 @@ class Compiler: Returns a tuple of (compile_flags, link_flags) for the specified language from the inherited environment """ - def log_var(var, val: Optional[str]): + def log_var(var, val: typing.Optional[str]): if val: mlog.log('Appending {} from environment: {!r}'.format(var, val)) else: @@ -888,19 +901,19 @@ class Compiler: def get_option_link_args(self, options: 'OptionDictType') -> typing.List[str]: return self.linker.get_option_args(options) - def check_header(self, *args, **kwargs) -> Tuple[bool, bool]: + def check_header(self, *args, **kwargs) -> typing.Tuple[bool, bool]: raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language()) - def has_header(self, *args, **kwargs) -> Tuple[bool, bool]: + def has_header(self, *args, **kwargs) -> typing.Tuple[bool, bool]: raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language()) - def has_header_symbol(self, *args, **kwargs) -> Tuple[bool, bool]: + def has_header_symbol(self, *args, **kwargs) -> typing.Tuple[bool, bool]: raise EnvironmentException('Language %s does not support header symbol checks.' % self.get_display_language()) - def compiles(self, *args, **kwargs) -> Tuple[bool, bool]: + def compiles(self, *args, **kwargs) -> typing.Tuple[bool, bool]: raise EnvironmentException('Language %s does not support compile checks.' % self.get_display_language()) - def links(self, *args, **kwargs) -> Tuple[bool, bool]: + def links(self, *args, **kwargs) -> typing.Tuple[bool, bool]: raise EnvironmentException('Language %s does not support link checks.' % self.get_display_language()) def run(self, *args, **kwargs) -> RunResult: @@ -912,7 +925,7 @@ class Compiler: def alignment(self, *args, **kwargs) -> int: raise EnvironmentException('Language %s does not support alignment checks.' % self.get_display_language()) - def has_function(self, *args, **kwargs) -> Tuple[bool, bool]: + def has_function(self, *args, **kwargs) -> typing.Tuple[bool, bool]: raise EnvironmentException('Language %s does not support function checks.' 
% self.get_display_language()) @classmethod @@ -934,12 +947,12 @@ class Compiler: def get_program_dirs(self, *args, **kwargs): return [] - def has_multi_arguments(self, args, env) -> Tuple[bool, bool]: + def has_multi_arguments(self, args, env) -> typing.Tuple[bool, bool]: raise EnvironmentException( 'Language {} does not support has_multi_arguments.'.format( self.get_display_language())) - def has_multi_link_arguments(self, args: typing.List[str], env: 'Environment') -> Tuple[bool, bool]: + def has_multi_link_arguments(self, args: typing.List[str], env: 'Environment') -> typing.Tuple[bool, bool]: return self.linker.has_multi_arguments(args, env) def _get_compile_output(self, dirname, mode): @@ -1138,16 +1151,16 @@ class Compiler: def remove_linkerlike_args(self, args): return [x for x in args if not x.startswith('-Wl')] - def get_lto_compile_args(self) -> List[str]: + def get_lto_compile_args(self) -> typing.List[str]: return [] - def get_lto_link_args(self) -> List[str]: + def get_lto_link_args(self) -> typing.List[str]: return self.linker.get_lto_args() - def sanitizer_compile_args(self, value: str) -> List[str]: + def sanitizer_compile_args(self, value: str) -> typing.List[str]: return [] - def sanitizer_link_args(self, value: str) -> List[str]: + def sanitizer_link_args(self, value: str) -> typing.List[str]: return self.linker.sanitizer_args(value) def get_asneeded_args(self) -> typing.List[str]: @@ -1163,7 +1176,8 @@ class Compiler: return self.linker.get_buildtype_args(buildtype) def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, - suffix: str, soversion: str, darwin_versions: typing.Tuple[str, str], + suffix: str, soversion: str, + darwin_versions: typing.Tuple[str, str], is_shared_module: bool) -> typing.List[str]: return self.linker.get_soname_args( env, prefix, shlib_name, suffix, soversion, diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index 907aeec..e84a18f 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -613,6 +613,7 @@ class GnuDCompiler(DCompiler, GnuCompiler): def __init__(self, exelist, version, for_machine: MachineChoice, info: 'MachineInfo', is_cross, exe_wrapper, arch, **kwargs): DCompiler.__init__(self, exelist, version, for_machine, info, is_cross, exe_wrapper, arch, **kwargs) + GnuCompiler.__init__(self, {}) self.id = 'gcc' default_warn_args = ['-Wall', '-Wdeprecated'] self.warn_args = {'0': [], diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index 3003fcd..f66ff38 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -13,9 +13,8 @@ # limitations under the License. from pathlib import Path -from typing import List +from typing import TYPE_CHECKING, List import subprocess, os -import typing from .. 
import coredata from .compilers import ( @@ -36,7 +35,7 @@ from mesonbuild.mesonlib import ( version_compare, EnvironmentException, MesonException, MachineChoice, LibType ) -if typing.TYPE_CHECKING: +if TYPE_CHECKING: from ..envconfig import MachineInfo @@ -192,7 +191,7 @@ class GnuFortranCompiler(GnuCompiler, FortranCompiler): 'none')}) return opts - def get_option_compile_args(self, options) -> typing.List[str]: + def get_option_compile_args(self, options) -> List[str]: args = [] std = options['fortran_std'] if std.value != 'none': @@ -273,6 +272,7 @@ class SunFortranCompiler(FortranCompiler): class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler): + def __init__(self, exelist, version, for_machine: MachineChoice, is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): @@ -297,7 +297,7 @@ class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler): 'none')}) return opts - def get_option_compile_args(self, options) -> typing.List[str]: + def get_option_compile_args(self, options) -> List[str]: args = [] std = options['fortran_std'] stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} @@ -317,7 +317,7 @@ class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler): def language_stdlib_only_link_flags(self): return ['-lifcore', '-limf'] - def get_dependency_gen_args(self, outtarget: str, outfile: str) -> typing.List[str]: + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> List[str]: return ['-gen-dep=' + outtarget, '-gen-depformat=make'] @@ -326,15 +326,6 @@ class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): file_suffixes = ['f90', 'f', 'for', 'ftn', 'fpp'] always_args = ['/nologo'] - BUILD_ARGS = { - 'plain': [], - 'debug': ["/Zi", "/Od"], - 'debugoptimized': ["/Zi", "/O1"], - 'release': ["/O2"], - 'minsize': ["/Os"], - 'custom': [], - } - def __init__(self, exelist, version, for_machine: MachineChoice, is_cross, target: str, info: 'MachineInfo', exe_wrapper=None, **kwargs): @@ -356,7 +347,7 @@ class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): 'none')}) return opts - def get_option_compile_args(self, options) -> typing.List[str]: + def get_option_compile_args(self, options) -> List[str]: args = [] std = options['fortran_std'] stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} @@ -367,9 +358,6 @@ class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): def get_module_outdir_args(self, path) -> List[str]: return ['/module:' + path] - def get_buildtype_args(self, buildtype: str) -> List[str]: - return self.BUILD_ARGS[buildtype] - class PathScaleFortranCompiler(FortranCompiler): def __init__(self, exelist, version, for_machine: MachineChoice, diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py index 3a7e597..28708d5 100644 --- a/mesonbuild/compilers/mixins/arm.py +++ b/mesonbuild/compilers/mixins/arm.py @@ -143,7 +143,7 @@ class ArmclangCompiler: if ver_str: ver_str = ver_str.group(0) else: - mesonlib.EnvironmentException('armlink version string not found') + raise mesonlib.EnvironmentException('armlink version string not found') assert ver_str # makes mypy happy # Using the regular expression from environment.search_version, # which is used for searching compiler version diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index b5992ef..b5516b0 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py 
@@ -167,7 +167,6 @@ class CLikeCompiler: retval.append(d) # at this point, it's an ELF file which doesn't match the # appropriate elf_class, so skip this one - pass return tuple(retval) @functools.lru_cache() diff --git a/mesonbuild/compilers/mixins/elbrus.py b/mesonbuild/compilers/mixins/elbrus.py index 387c5b8..e157d87 100644 --- a/mesonbuild/compilers/mixins/elbrus.py +++ b/mesonbuild/compilers/mixins/elbrus.py @@ -19,18 +19,18 @@ import typing import subprocess import re -from .gnu import GnuCompiler +from .gnu import GnuLikeCompiler from ...mesonlib import Popen_safe if typing.TYPE_CHECKING: from ...environment import Environment -class ElbrusCompiler(GnuCompiler): +class ElbrusCompiler(GnuLikeCompiler): # Elbrus compiler is nearly like GCC, but does not support # PCH, LTO, sanitizers and color output as of version 1.21.x. def __init__(self, defines: typing.Dict[str, str]): - GnuCompiler.__init__(self, defines) + super().__init__() self.id = 'lcc' self.base_options = ['b_pgo', 'b_coverage', 'b_ndebug', 'b_staticpic', diff --git a/mesonbuild/compilers/mixins/intel.py b/mesonbuild/compilers/mixins/intel.py index 72c6fdf..cf26a14 100644 --- a/mesonbuild/compilers/mixins/intel.py +++ b/mesonbuild/compilers/mixins/intel.py @@ -14,8 +14,10 @@ """Abstractions for the Intel Compiler families. -Intel provides both a posix/gcc-like compiler (ICC) and an msvc-like compiler -(ICL). +Intel provides both a posix/gcc-like compiler (ICC) for MacOS and Linux, +with Meson mixin IntelGnuLikeCompiler. +For Windows, the Intel msvc-like compiler (ICL) Meson mixin +is IntelVisualStudioLikeCompiler. """ import os @@ -30,9 +32,18 @@ if typing.TYPE_CHECKING: # XXX: avoid circular dependencies # TODO: this belongs in a posix compiler class +# NOTE: the default Intel optimization is -O2, unlike GNU which defaults to -O0. +# this can be surprising, particularly for debug builds, so we specify the +# default as -O0. +# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-o +# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-g +# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-o +# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-g +# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-traceback +# https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html clike_optimization_args = { - '0': [], - 'g': [], + '0': ['-O0'], + 'g': ['-O0'], '1': ['-O1'], '2': ['-O2'], '3': ['-O3'], @@ -43,6 +54,15 @@ clike_optimization_args = { # Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1, 19.0.0 class IntelGnuLikeCompiler(GnuLikeCompiler): + BUILD_ARGS = { + 'plain': [], + 'debug': ["-g", "-O0", "-traceback"], + 'debugoptimized': ["-g", "-O1", "-traceback"], + 'release': ["-O2"], + 'minsize': ["-Os"], + 'custom': [], + } + def __init__(self): super().__init__() # As of 19.0.0 ICC doesn't have sanitizer, color, or lto support. 
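The BUILD_ARGS table above maps Meson buildtype names to Intel-specific flags; it is wired up by the get_buildtype_args() override added in the next hunk. A minimal self-contained sketch of the intended lookup, with values copied from the table above (simplified, not the actual mixin class):

    import typing

    # ICC implicitly optimizes at -O2, so the debug-style buildtypes spell out -O0,
    # and -traceback is added for usable backtraces.
    BUILD_ARGS = {
        'plain': [],
        'debug': ['-g', '-O0', '-traceback'],
        'debugoptimized': ['-g', '-O1', '-traceback'],
        'release': ['-O2'],
        'minsize': ['-Os'],
        'custom': [],
    }

    def get_buildtype_args(buildtype: str) -> typing.List[str]:
        return BUILD_ARGS[buildtype]

    assert get_buildtype_args('debug') == ['-g', '-O0', '-traceback']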
@@ -96,11 +116,23 @@ class IntelGnuLikeCompiler(GnuLikeCompiler): def get_profile_use_args(self) -> typing.List[str]: return ['-prof-use'] + def get_buildtype_args(self, buildtype: str) -> typing.List[str]: + return self.BUILD_ARGS[buildtype] + class IntelVisualStudioLikeCompiler(VisualStudioLikeCompiler): """Abstractions for ICL, the Intel compiler on Windows.""" + BUILD_ARGS = { + 'plain': [], + 'debug': ["/Zi", "/Od", "/traceback"], + 'debugoptimized': ["/Zi", "/O1", "/traceback"], + 'release': ["/O2"], + 'minsize': ["/Os"], + 'custom': [], + } + def __init__(self, target: str): super().__init__(target) self.id = 'intel-cl' @@ -133,3 +165,6 @@ class IntelVisualStudioLikeCompiler(VisualStudioLikeCompiler): def openmp_flags(self) -> typing.List[str]: return ['/Qopenmp'] + + def get_buildtype_args(self, buildtype: str) -> typing.List[str]: + return self.BUILD_ARGS[buildtype] diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index a5898f1..bb1d277 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -27,16 +27,16 @@ import ast import argparse import configparser from typing import ( - Any, Dict, Generic, Iterable, List, Optional, Type, TypeVar, Union + Any, Dict, Generic, Iterable, Iterator, List, MutableMapping, Optional, Tuple, Type, TypeVar, Union, + TYPE_CHECKING, ) -import typing import enum import shlex -if typing.TYPE_CHECKING: +if TYPE_CHECKING: from . import dependencies - OptionDictType = typing.Dict[str, 'UserOption[Any]'] + OptionDictType = Dict[str, 'UserOption[Any]'] version = '0.52.999' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode'] @@ -170,7 +170,7 @@ class UserArrayOption(UserOption[List[str]]): self.allow_dups = allow_dups self.value = self.validate_value(value, user_input=user_input) - def validate_value(self, value, user_input=True) -> List[str]: + def validate_value(self, value, user_input: bool = True) -> List[str]: # User input is for options defined on the command line (via -D # options). Users can put their input in as a comma separated # string, but for defining options in meson_options.txt the format @@ -180,7 +180,10 @@ class UserArrayOption(UserOption[List[str]]): if isinstance(value, str): if value.startswith('['): - newvalue = ast.literal_eval(value) + try: + newvalue = ast.literal_eval(value) + except ValueError: + raise MesonException('malformed option {}'.format(value)) elif value == '': newvalue = [] else: @@ -191,7 +194,7 @@ class UserArrayOption(UserOption[List[str]]): elif isinstance(value, list): newvalue = value else: - raise MesonException('"{0}" should be a string array, but it is not'.format(str(newvalue))) + raise MesonException('"{}" should be a string array, but it is not'.format(newvalue)) if not self.allow_dups and len(set(newvalue)) != len(newvalue): msg = 'Duplicated values in array option is deprecated. ' \ @@ -231,9 +234,9 @@ def load_configs(filenames: List[str]) -> configparser.ConfigParser: return config -if typing.TYPE_CHECKING: - CacheKeyType = typing.Tuple[typing.Tuple[typing.Any, ...], ...] - SubCacheKeyType = typing.Tuple[typing.Any, ...] +if TYPE_CHECKING: + CacheKeyType = Tuple[Tuple[Any, ...], ...] + SubCacheKeyType = Tuple[Any, ...] 
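For context on the try/except added around ast.literal_eval() in UserArrayOption.validate_value above: literal_eval() raises ValueError when a '['-prefixed option value contains anything that is not a Python literal, and the change reports that as a MesonException instead of an unhandled traceback. A small illustrative snippet with a hypothetical option string:

    import ast

    value = "[foo, bar]"  # looks like a list, but foo and bar are not literals
    try:
        newvalue = ast.literal_eval(value)
    except ValueError:
        print('malformed option {}'.format(value))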
class DependencyCacheType(enum.Enum): @@ -257,7 +260,7 @@ class DependencySubCache: def __init__(self, type_: DependencyCacheType): self.types = [type_] - self.__cache = {} # type: typing.Dict[SubCacheKeyType, dependencies.Dependency] + self.__cache = {} # type: Dict[SubCacheKeyType, dependencies.Dependency] def __getitem__(self, key: 'SubCacheKeyType') -> 'dependencies.Dependency': return self.__cache[key] @@ -268,7 +271,7 @@ class DependencySubCache: def __contains__(self, key: 'SubCacheKeyType') -> bool: return key in self.__cache - def values(self) -> typing.Iterable['dependencies.Dependency']: + def values(self) -> Iterable['dependencies.Dependency']: return self.__cache.values() @@ -280,12 +283,12 @@ class DependencyCache: successfully lookup by providing a simple get/put interface. """ - def __init__(self, builtins_per_machine: PerMachine[typing.Dict[str, UserOption[typing.Any]]], for_machine: MachineChoice): - self.__cache = OrderedDict() # type: typing.MutableMapping[CacheKeyType, DependencySubCache] + def __init__(self, builtins_per_machine: PerMachine[Dict[str, UserOption[Any]]], for_machine: MachineChoice): + self.__cache = OrderedDict() # type: MutableMapping[CacheKeyType, DependencySubCache] self.__builtins_per_machine = builtins_per_machine self.__for_machine = for_machine - def __calculate_subkey(self, type_: DependencyCacheType) -> typing.Tuple[typing.Any, ...]: + def __calculate_subkey(self, type_: DependencyCacheType) -> Tuple[Any, ...]: if type_ is DependencyCacheType.PKG_CONFIG: return tuple(self.__builtins_per_machine[self.__for_machine]['pkg_config_path'].value) elif type_ is DependencyCacheType.CMAKE: @@ -293,7 +296,7 @@ class DependencyCache: assert type_ is DependencyCacheType.OTHER, 'Someone forgot to update subkey calculations for a new type' return tuple() - def __iter__(self) -> typing.Iterator['CacheKeyType']: + def __iter__(self) -> Iterator['CacheKeyType']: return self.keys() def put(self, key: 'CacheKeyType', dep: 'dependencies.Dependency') -> None: @@ -303,7 +306,7 @@ class DependencyCache: subkey = self.__calculate_subkey(t) self.__cache[key][subkey] = dep - def get(self, key: 'CacheKeyType') -> typing.Optional['dependencies.Dependency']: + def get(self, key: 'CacheKeyType') -> Optional['dependencies.Dependency']: """Get a value from the cache. If there is no cache entry then None will be returned. 
@@ -321,14 +324,14 @@ class DependencyCache: pass return None - def values(self) -> typing.Iterator['dependencies.Dependency']: + def values(self) -> Iterator['dependencies.Dependency']: for c in self.__cache.values(): yield from c.values() - def keys(self) -> typing.Iterator['CacheKeyType']: + def keys(self) -> Iterator['CacheKeyType']: return iter(self.__cache.keys()) - def items(self) -> typing.Iterator[typing.Tuple['CacheKeyType', typing.List['dependencies.Dependency']]]: + def items(self) -> Iterator[Tuple['CacheKeyType', List['dependencies.Dependency']]]: for k, v in self.__cache.items(): vs = [] for t in v.types: @@ -390,9 +393,9 @@ class CoreData: if not filenames: return [] - found_invalid = [] # type: typing.List[str] - missing = [] # type: typing.List[str] - real = [] # type: typing.List[str] + found_invalid = [] # type: List[str] + missing = [] # type: List[str] + real = [] # type: List[str] for i, f in enumerate(filenames): f = os.path.expanduser(os.path.expandvars(f)) if os.path.exists(f): @@ -618,8 +621,7 @@ class CoreData: except MesonException as e: raise type(e)(('Validation failed for option %s: ' % option_name) + str(e)) \ .with_traceback(sys.exc_info()[2]) - else: - raise MesonException('Tried to validate unknown option %s.' % option_name) + raise MesonException('Tried to validate unknown option %s.' % option_name) def get_external_args(self, for_machine: MachineChoice, lang): return self.compiler_options[for_machine][lang + '_args'].value @@ -659,7 +661,6 @@ class CoreData: if not self.is_cross_build(): options = self.strip_build_option_names(options) # Set prefix first because it's needed to sanitize other options - prefix = self.builtins['prefix'].value if 'prefix' in options: prefix = self.sanitize_prefix(options['prefix']) self.builtins['prefix'].set_value(prefix) diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py index 1b7c03f..889a610 100644 --- a/mesonbuild/dependencies/__init__.py +++ b/mesonbuild/dependencies/__init__.py @@ -22,6 +22,7 @@ from .base import ( # noqa: F401 from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency from .coarrays import CoarrayDependency from .mpi import MPIDependency +from .scalapack import ScalapackDependency from .misc import (BlocksDependency, NetCDFDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency, GpgmeDependency, ShadercDependency) from .platform import AppleFrameworks from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency @@ -37,11 +38,14 @@ packages.update({ 'boost': BoostDependency, 'cuda': CudaDependency, - # From misc: - 'blocks': BlocksDependency, + # per-file 'coarray': CoarrayDependency, - 'mpi': MPIDependency, 'hdf5': HDF5Dependency, + 'mpi': MPIDependency, + 'scalapack': ScalapackDependency, + + # From misc: + 'blocks': BlocksDependency, 'netcdf': NetCDFDependency, 'openmp': OpenMPDependency, 'python3': Python3Dependency, diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 06ea1c3..9170400 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -21,10 +21,10 @@ import re import json import shlex import shutil +import stat import textwrap import platform -import typing -from typing import Any, Dict, List, Tuple +from typing import Any, Dict, List, Optional, Tuple, Type, Union from enum import Enum from pathlib import Path, PurePath @@ 
-152,7 +152,7 @@ class Dependency: return converted return self.compile_args - def get_link_args(self, raw=False): + def get_link_args(self, raw: bool = False) -> List[str]: if raw and self.raw_link_args is not None: return self.raw_link_args return self.link_args @@ -208,8 +208,8 @@ class Dependency: """ raise RuntimeError('Unreachable code in partial_dependency called') - def _add_sub_dependency(self, dep_type: typing.Type['Dependency'], env: Environment, - kwargs: typing.Dict[str, typing.Any], *, + def _add_sub_dependency(self, dep_type: Type['Dependency'], env: Environment, + kwargs: Dict[str, Any], *, method: DependencyMethods = DependencyMethods.AUTO) -> None: """Add an internal dependency of of the given type. @@ -222,14 +222,14 @@ class Dependency: kwargs['method'] = method self.ext_deps.append(dep_type(env, kwargs)) - def get_variable(self, *, cmake: typing.Optional[str] = None, pkgconfig: typing.Optional[str] = None, - configtool: typing.Optional[str] = None, default_value: typing.Optional[str] = None, - pkgconfig_define: typing.Optional[typing.List[str]] = None) -> typing.Union[str, typing.List[str]]: + def get_variable(self, *, cmake: Optional[str] = None, pkgconfig: Optional[str] = None, + configtool: Optional[str] = None, default_value: Optional[str] = None, + pkgconfig_define: Optional[List[str]] = None) -> Union[str, List[str]]: if default_value is not None: return default_value raise DependencyException('No default provided for dependency {!r}, which is not pkg-config, cmake, or config-tool based.'.format(self)) - def generate_system_dependency(self, include_type: str) -> typing.Type['Dependency']: + def generate_system_dependency(self, include_type: str) -> Type['Dependency']: new_dep = copy.deepcopy(self) new_dep.include_type = self._process_include_type_kw({'include_type': include_type}) return new_dep @@ -274,7 +274,10 @@ class InternalDependency(Dependency): class HasNativeKwarg: def __init__(self, kwargs): - self.for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST + self.for_machine = self.get_for_machine_from_kwargs(kwargs) + + def get_for_machine_from_kwargs(self, kwargs): + return MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST class ExternalDependency(Dependency, HasNativeKwarg): def __init__(self, type_name, environment, language, kwargs): @@ -550,9 +553,9 @@ class ConfigToolDependency(ExternalDependency): def log_tried(self): return self.type_name - def get_variable(self, *, cmake: typing.Optional[str] = None, pkgconfig: typing.Optional[str] = None, - configtool: typing.Optional[str] = None, default_value: typing.Optional[str] = None, - pkgconfig_define: typing.Optional[typing.List[str]] = None) -> typing.Union[str, typing.List[str]]: + def get_variable(self, *, cmake: Optional[str] = None, pkgconfig: Optional[str] = None, + configtool: Optional[str] = None, default_value: Optional[str] = None, + pkgconfig_define: Optional[List[str]] = None) -> Union[str, List[str]]: if configtool: # In the not required case '' (empty string) will be returned if the # variable is not found. 
Since '' is a valid value to return we @@ -724,6 +727,9 @@ class PkgConfigDependency(ExternalDependency): elif arg.startswith('/'): pargs = PurePath(arg).parts tmpl = '{}:/{}' + elif arg.startswith(('-L', '-I')) or arg[1] == ':': + # clean out improper '\\ ' as comes from some Windows pkg-config files + arg = arg.replace('\\ ', ' ') if len(pargs) > 1 and len(pargs[1]) == 1: arg = tmpl.format(pargs[1], '/'.join(pargs[2:])) converted.append(arg) @@ -984,9 +990,9 @@ class PkgConfigDependency(ExternalDependency): def log_tried(self): return self.type_name - def get_variable(self, *, cmake: typing.Optional[str] = None, pkgconfig: typing.Optional[str] = None, - configtool: typing.Optional[str] = None, default_value: typing.Optional[str] = None, - pkgconfig_define: typing.Optional[typing.List[str]] = None) -> typing.Union[str, typing.List[str]]: + def get_variable(self, *, cmake: Optional[str] = None, pkgconfig: Optional[str] = None, + configtool: Optional[str] = None, default_value: Optional[str] = None, + pkgconfig_define: Optional[List[str]] = None) -> Union[str, List[str]]: if pkgconfig: kwargs = {} if default_value is not None: @@ -1032,7 +1038,23 @@ class CMakeDependency(ExternalDependency): # one module return module - def __init__(self, name: str, environment: Environment, kwargs, language=None): + def __init__(self, name: str, environment: Environment, kwargs, language: str = None): + if language is None: + if kwargs.get('native', False): + if 'c' in environment.coredata.compilers.build.keys(): + language = 'c' + elif 'cpp' in environment.coredata.compilers.build.keys(): + language = 'cpp' + elif 'fortran' in environment.coredata.compilers.build.keys(): + language = 'fortran' + else: + if 'c' in environment.coredata.compilers.host.keys(): + language = 'c' + elif 'cpp' in environment.coredata.compilers.host.keys(): + language = 'cpp' + elif 'fortran' in environment.coredata.compilers.host.keys(): + language = 'fortran' + super().__init__('cmake', environment, language, kwargs) self.name = name self.is_libtool = False @@ -1455,9 +1477,40 @@ class CMakeDependency(ExternalDependency): build_dir = Path(self.cmake_root_dir) / 'cmake_{}'.format(self.name) build_dir.mkdir(parents=True, exist_ok=True) - # Copy the CMakeLists.txt + # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt src_cmake = Path(__file__).parent / 'data' / cmake_file - shutil.copyfile(str(src_cmake), str(build_dir / 'CMakeLists.txt')) # str() is for Python 3.5 + cmake_txt = src_cmake.read_text() + + # In general, some Fortran CMake find_package() also require C language enabled, + # even if nothing from C is directly used. An easy Fortran example that fails + # without C language is + # find_package(Threads) + # To make this general to + # any other language that might need this, we use a list for all + # languages and expand in the cmake Project(... LANGUAGES ...) statement. 
+ if self.language is None: + cmake_language = ['NONE'] + elif self.language == 'c': + cmake_language = ['C'] + elif self.language == 'cpp': + cmake_language = ['CXX'] + elif self.language == 'cs': + cmake_language = ['CSharp'] + elif self.language == 'cuda': + cmake_language = ['CUDA'] + elif self.language == 'fortran': + cmake_language = ['C', 'Fortran'] + elif self.language == 'objc': + cmake_language = ['OBJC'] + elif self.language == 'objcpp': + cmake_language = ['OBJCXX'] + + cmake_txt = """ +cmake_minimum_required(VERSION ${{CMAKE_VERSION}}) +project(MesonTemp LANGUAGES {}) +""".format(' '.join(cmake_language)) + cmake_txt + + (build_dir / 'CMakeLists.txt').write_text(cmake_txt) return str(build_dir) @@ -1479,9 +1532,9 @@ class CMakeDependency(ExternalDependency): return 'modules: ' + ', '.join(modules) return '' - def get_variable(self, *, cmake: typing.Optional[str] = None, pkgconfig: typing.Optional[str] = None, - configtool: typing.Optional[str] = None, default_value: typing.Optional[str] = None, - pkgconfig_define: typing.Optional[typing.List[str]] = None) -> typing.Union[str, typing.List[str]]: + def get_variable(self, *, cmake: Optional[str] = None, pkgconfig: Optional[str] = None, + configtool: Optional[str] = None, default_value: Optional[str] = None, + pkgconfig_define: Optional[List[str]] = None) -> Union[str, List[str]]: if cmake: try: v = self.traceparser.vars[cmake] @@ -1696,13 +1749,20 @@ class ExternalProgram: # An 'ExternalProgram' always runs on the build machine for_machine = MachineChoice.BUILD - def __init__(self, name: str, command: typing.Optional[typing.List[str]] = None, - silent: bool = False, search_dir: typing.Optional[str] = None): + def __init__(self, name: str, command: Optional[List[str]] = None, + silent: bool = False, search_dir: Optional[str] = None, + extra_search_dirs: Optional[List[str]] = None): self.name = name if command is not None: self.command = listify(command) else: - self.command = self._search(name, search_dir) + all_search_dirs = [search_dir] + if extra_search_dirs: + all_search_dirs += extra_search_dirs + for d in all_search_dirs: + self.command = self._search(name, d) + if self.found(): + break # Set path to be the last item that is actually a file (in order to # skip options in something like ['python', '-u', 'file.py']. 
If we @@ -1715,7 +1775,9 @@ class ExternalProgram: break if not silent: - if self.found(): + # ignore the warning because derived classes never call this __init__ + # method, and thus only the found() method of this class is ever executed + if self.found(): # lgtm [py/init-calls-subclass] mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'), '(%s)' % ' '.join(self.command)) else: @@ -1793,16 +1855,16 @@ class ExternalProgram: return commands + [script] except Exception as e: mlog.debug(e) - pass mlog.debug('Unusable script {!r}'.format(script)) return False def _is_executable(self, path): suffix = os.path.splitext(path)[-1].lower()[1:] + execmask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH if mesonlib.is_windows(): if suffix in self.windows_exts: return True - elif os.access(path, os.X_OK): + elif os.stat(path).st_mode & execmask: return not os.path.isdir(path) return False @@ -1897,7 +1959,7 @@ class ExternalProgram: return self.name -class NonExistingExternalProgram(ExternalProgram): +class NonExistingExternalProgram(ExternalProgram): # lgtm [py/missing-call-to-init] "A program that will never exist" def __init__(self, name='nonexistingprogram'): @@ -1913,7 +1975,7 @@ class NonExistingExternalProgram(ExternalProgram): return False -class EmptyExternalProgram(ExternalProgram): +class EmptyExternalProgram(ExternalProgram): # lgtm [py/missing-call-to-init] ''' A program object that returns an empty list of commands. Used for cases such as a cross file exe_wrapper to represent that it's not required. @@ -2234,13 +2296,15 @@ def _build_external_dependency_list(name, env: Environment, kwargs: Dict[str, An # Otherwise, just use the pkgconfig and cmake dependency detector if 'auto' == kwargs.get('method', 'auto'): candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs)) - candidates.append(functools.partial(CMakeDependency, name, env, kwargs)) # On OSX, also try framework dependency detector if mesonlib.is_osx(): candidates.append(functools.partial(ExtraFrameworkDependency, name, False, None, env, None, kwargs)) + # Only use CMake as a last resort, since it might not work 100% (see #6113) + candidates.append(functools.partial(CMakeDependency, name, env, kwargs)) + return candidates diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py index 67b361f..7048e81 100644 --- a/mesonbuild/dependencies/cuda.py +++ b/mesonbuild/dependencies/cuda.py @@ -20,7 +20,7 @@ from .. import mlog from .. import mesonlib from ..environment import detect_cpu_family -from .base import (DependencyException, ExternalDependency, HasNativeKwarg) +from .base import (DependencyException, ExternalDependency) class CudaDependency(ExternalDependency): @@ -28,8 +28,7 @@ class CudaDependency(ExternalDependency): supported_languages = ['cuda', 'cpp', 'c'] # see also _default_language def __init__(self, environment, kwargs): - HasNativeKwarg.__init__(self, kwargs) # initialize self.for_machine - compilers = environment.coredata.compilers[self.for_machine] + compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)] language = self._detect_language(compilers) if language not in self.supported_languages: raise DependencyException('Language \'{}\' is not supported by the CUDA Toolkit. 
Supported languages are {}.'.format(language, self.supported_languages)) @@ -94,7 +93,7 @@ class CudaDependency(ExternalDependency): defaults = [(path, version) for (path, version, default) in paths if default] if defaults: - return (*defaults[0], True) + return (defaults[0][0], defaults[0][1], True) platform_msg = 'set the CUDA_PATH environment variable' if self._is_windows() \ else 'set the CUDA_PATH environment variable/create the \'/usr/local/cuda\' symbolic link' diff --git a/mesonbuild/dependencies/data/CMakeLists.txt b/mesonbuild/dependencies/data/CMakeLists.txt index 64f5b23..b52a69a 100644 --- a/mesonbuild/dependencies/data/CMakeLists.txt +++ b/mesonbuild/dependencies/data/CMakeLists.txt @@ -1,4 +1,8 @@ -cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} ) +# fail noisily if attempt to use this file without setting: +# cmake_minimum_required(VERSION ${CMAKE_VERSION}) +# project(... LANGUAGES ...) + +cmake_policy(SET CMP0000 NEW) set(PACKAGE_FOUND FALSE) set(_packageName "${NAME}") diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py index 894bfdc..15907d4 100644 --- a/mesonbuild/dependencies/dev.py +++ b/mesonbuild/dependencies/dev.py @@ -25,7 +25,7 @@ from ..mesonlib import version_compare, stringlistify, extract_as_list, MachineC from ..environment import get_llvm_tool_names from .base import ( DependencyException, DependencyMethods, ExternalDependency, PkgConfigDependency, - strip_system_libdirs, ConfigToolDependency, CMakeDependency, HasNativeKwarg + strip_system_libdirs, ConfigToolDependency, CMakeDependency ) from .misc import ThreadDependency @@ -205,17 +205,13 @@ class LLVMDependencyConfigTool(ConfigToolDependency): __cpp_blacklist = {'-DNDEBUG'} def __init__(self, environment, kwargs): - # Already called by `super().__init__`, but need `self.for_machine` - # before `super().__init__` is called. - HasNativeKwarg.__init__(self, kwargs) - self.tools = get_llvm_tool_names('llvm-config') # Fedora starting with Fedora 30 adds a suffix of the number # of bits in the isa that llvm targets, for example, on x86_64 # and aarch64 the name will be llvm-config-64, on x86 and arm # it will be llvm-config-32. - if environment.machines[self.for_machine].is_64_bit: + if environment.machines[self.get_for_machine_from_kwargs(kwargs)].is_64_bit: self.tools.append('llvm-config-64') else: self.tools.append('llvm-config-32') diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index bfd450c..d773eb7 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -22,6 +22,7 @@ import sysconfig from .. import mlog from .. import mesonlib from ..environment import detect_cpu_family +from ..mesonlib import listify from .base import ( DependencyException, DependencyMethods, ExternalDependency, @@ -109,15 +110,34 @@ class ThreadDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('threads', environment, None, kwargs) self.name = 'threads' - self.is_found = True - # Happens if you are using a language with threads - # concept without C, such as plain Cuda. 
- if self.clib_compiler is None: - self.compile_args = [] - self.link_args = [] - else: - self.compile_args = self.clib_compiler.thread_flags(environment) - self.link_args = self.clib_compiler.thread_link_flags(environment) + self.is_found = False + methods = listify(self.methods) + if DependencyMethods.AUTO in methods: + self.is_found = True + # Happens if you are using a language with threads + # concept without C, such as plain Cuda. + if self.clib_compiler is None: + self.compile_args = [] + self.link_args = [] + else: + self.compile_args = self.clib_compiler.thread_flags(environment) + self.link_args = self.clib_compiler.thread_link_flags(environment) + return + + if DependencyMethods.CMAKE in methods: + # for unit tests and for those who simply want + # dependency('threads', method: 'cmake') + cmakedep = CMakeDependency('Threads', environment, kwargs) + if cmakedep.found(): + self.compile_args = cmakedep.get_compile_args() + self.link_args = cmakedep.get_link_args() + self.version = cmakedep.get_version() + self.is_found = True + return + + @staticmethod + def get_methods(): + return [DependencyMethods.AUTO, DependencyMethods.CMAKE] class BlocksDependency(ExternalDependency): diff --git a/mesonbuild/dependencies/scalapack.py b/mesonbuild/dependencies/scalapack.py new file mode 100644 index 0000000..36bfd66 --- /dev/null +++ b/mesonbuild/dependencies/scalapack.py @@ -0,0 +1,120 @@ +# Copyright 2013-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pathlib import Path +import os + +from .. 
import mesonlib +from .base import CMakeDependency, DependencyMethods, ExternalDependency, PkgConfigDependency + + +class ScalapackDependency(ExternalDependency): + def __init__(self, environment, kwargs: dict): + methods = mesonlib.listify(kwargs.get('method', 'auto')) + super().__init__('scalapack', environment, None, kwargs) + kwargs['required'] = False + kwargs['silent'] = True + self.is_found = False + self.static = kwargs.get('static', False) + + if set(methods).intersection(['auto', 'pkg-config']): + pkgconfig_files = [] + mklroot = None + is_gcc = self.clib_compiler.get_id() == 'gcc' + # Intel MKL works with non-Intel compilers too -- but not gcc on windows + if 'MKLROOT' in os.environ and not (mesonlib.is_windows() and is_gcc): + try: + mklroot = Path(os.environ['MKLROOT']).resolve() + except Exception: + pass + if mklroot is not None: + # MKL pkg-config is a start, but you have to add / change stuff + # https://software.intel.com/en-us/articles/intel-math-kernel-library-intel-mkl-and-pkg-config-tool + pkgconfig_files = ( + ['mkl-static-lp64-iomp'] if self.static else ['mkl-dynamic-lp64-iomp'] + ) + if mesonlib.is_windows(): + suffix = '.lib' + elif self.static: + suffix = '.a' + else: + suffix = '' + libdir = mklroot / 'lib/intel64' + # Intel compiler might not have Parallel Suite + pkgconfig_files += ['scalapack-openmpi', 'scalapack'] + + for pkg in pkgconfig_files: + pkgdep = PkgConfigDependency( + pkg, environment, kwargs, language=self.language + ) + if pkgdep.found(): + self.compile_args = pkgdep.get_compile_args() + if mklroot: + link_args = pkgdep.get_link_args() + if is_gcc: + for i, a in enumerate(link_args): + if 'mkl_intel_lp64' in a: + link_args[i] = a.replace('intel', 'gf') + break + # MKL pkg-config omits scalapack + # be sure "-L" and "-Wl" are first if present + i = 0 + for j, a in enumerate(link_args): + if a.startswith(('-L', '-Wl')): + i = j + 1 + elif j > 3: + break + if mesonlib.is_windows() or self.static: + link_args.insert( + i, str(libdir / ('mkl_scalapack_lp64' + suffix)) + ) + link_args.insert( + i + 1, str(libdir / ('mkl_blacs_intelmpi_lp64' + suffix)) + ) + else: + link_args.insert(i, '-lmkl_scalapack_lp64') + link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64') + else: + link_args = pkgdep.get_link_args() + self.link_args = link_args + + self.version = pkgdep.get_version() + if self.version == 'unknown' and mklroot: + try: + v = ( + mklroot.as_posix() + .split('compilers_and_libraries_')[1] + .split('/', 1)[0] + ) + if v: + self.version = v + except IndexError: + pass + + self.is_found = True + self.pcdep = pkgdep + return + + if set(methods).intersection(['auto', 'cmake']): + cmakedep = CMakeDependency('Scalapack', environment, kwargs) + if cmakedep.found(): + self.compile_args = cmakedep.get_compile_args() + self.link_args = cmakedep.get_link_args() + self.version = cmakedep.get_version() + self.is_found = True + return + + @staticmethod + def get_methods(): + return [DependencyMethods.AUTO, DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE] diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 638bc68..bdcc4a7 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -605,16 +605,14 @@ class VulkanDependency(ExternalDependency): # TODO: this config might not work on some platforms, fix bugs as reported # we should at least detect other 64-bit platforms (e.g. 
armv8) lib_name = 'vulkan' + lib_dir = 'lib' + inc_dir = 'include' if mesonlib.is_windows(): lib_name = 'vulkan-1' lib_dir = 'Lib32' inc_dir = 'Include' if detect_cpu_family(self.env.coredata.compilers.host) == 'x86_64': lib_dir = 'Lib' - else: - lib_name = 'vulkan' - lib_dir = 'lib' - inc_dir = 'include' # make sure header and lib are valid inc_path = os.path.join(self.vulkan_sdk, inc_dir) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index d4c1498..8443a47 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -1442,9 +1442,17 @@ class Environment: compilers.LLVMDCompiler, for_machine, use_linker_prefix=False) else: with tempfile.NamedTemporaryFile(suffix='.d') as f: + # LDC writes an object file to the current working directory. + # Clean it up. + objectfile = os.path.basename(f.name)[:-1] + 'o' linker = self._guess_nix_linker( exelist, compilers.LLVMDCompiler, for_machine, extra_args=[f.name]) + try: + os.unlink(objectfile) + except Exception: + # Thank you Windows file system semantics and virus scanners. + pass return compilers.LLVMDCompiler( exelist, version, for_machine, info, arch, full_version=full_version, linker=linker) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 7388e91..d7f826c 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -119,6 +119,18 @@ def extract_required_kwarg(kwargs, subproject, feature_check=None, default=True) return disabled, required, feature +def extract_search_dirs(kwargs): + search_dirs = mesonlib.stringlistify(kwargs.get('dirs', [])) + search_dirs = [Path(d).expanduser() for d in search_dirs] + for d in search_dirs: + if mesonlib.is_windows() and d.root.startswith('\\'): + # a Unix-path starting with `/` that is not absolute on Windows. + # discard without failing for end-user ease of cross-platform directory arrays + continue + if not d.is_absolute(): + raise InvalidCode('Search directory {} is not an absolute path.'.format(d)) + return list(map(str, search_dirs)) + class TryRunResultHolder(InterpreterObject): def __init__(self, res): super().__init__() @@ -882,10 +894,10 @@ class CustomTargetHolder(TargetHolder): def __getitem__(self, index): return CustomTargetIndexHolder(self.held_object[index]) - def __setitem__(self, index, value): + def __setitem__(self, index, value): # lgtm[py/unexpected-raise-in-special-method] raise InterpreterException('Cannot set a member of a CustomTarget') - def __delitem__(self, index): + def __delitem__(self, index): # lgtm[py/unexpected-raise-in-special-method] raise InterpreterException('Cannot delete a member of a CustomTarget') def outdir_include(self): @@ -1554,16 +1566,7 @@ class CompilerHolder(InterpreterObject): if not self.has_header_method([h], has_header_kwargs): return self.notfound_library(libname) - search_dirs = mesonlib.stringlistify(kwargs.get('dirs', [])) - search_dirs = [Path(d).expanduser() for d in search_dirs] - for d in search_dirs: - if mesonlib.is_windows() and d.root.startswith('\\'): - # a Unix-path starting with `/` that is not absolute on Windows. 
- # discard without failing for end-user ease of cross-platform directory arrays - continue - if not d.is_absolute(): - raise InvalidCode('Search directory {} is not an absolute path.'.format(d)) - search_dirs = list(map(str, search_dirs)) + search_dirs = extract_search_dirs(kwargs) libtype = mesonlib.LibType.PREFER_SHARED if 'static' in kwargs: @@ -2036,7 +2039,7 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'}, 'version', }, 'executable': build.known_exe_kwargs, - 'find_program': {'required', 'native', 'version'}, + 'find_program': {'required', 'native', 'version', 'dirs'}, 'generator': {'arguments', 'output', 'depends', @@ -2272,7 +2275,7 @@ class Interpreter(InterpreterBase): if f.is_built: return f = os.path.normpath(f.relative_name()) - elif os.path.isfile(f): + elif os.path.isfile(f) and not f.startswith('/dev'): srcdir = self.environment.get_source_dir() builddir = self.environment.get_build_dir() f = os.path.normpath(f) @@ -2363,14 +2366,24 @@ external dependencies (including libraries) must go to "dependencies".''') @noKwargs def func_assert(self, node, args, kwargs): - if len(args) != 2: - raise InterpreterException('Assert takes exactly two arguments') - value, message = args + if len(args) == 1: + FeatureNew('assert function without message argument', '0.53.0').use(self.subproject) + value = args[0] + message = None + elif len(args) == 2: + value, message = args + if not isinstance(message, str): + raise InterpreterException('Assert message not a string.') + else: + raise InterpreterException('Assert takes between one and two arguments') if not isinstance(value, bool): raise InterpreterException('Assert value not bool.') - if not isinstance(message, str): - raise InterpreterException('Assert message not a string.') if not value: + if message is None: + from .ast import AstPrinter + printer = AstPrinter() + node.args.arguments[0].accept(printer) + message = printer.result raise InterpreterException('Assert failed: ' + message) def validate_arguments(self, args, argcount, arg_types): @@ -2896,7 +2909,7 @@ external dependencies (including libraries) must go to "dependencies".''') return ExternalProgramHolder(prog) return None - def program_from_system(self, args, silent=False): + def program_from_system(self, args, search_dirs, silent=False): # Search for scripts relative to current subdir. # Do not cache found programs because find_program('foobar') # might give different results when run from different source dirs. @@ -2910,12 +2923,15 @@ external dependencies (including libraries) must go to "dependencies".''') search_dir = os.path.join(self.environment.get_source_dir(), exename.subdir) exename = exename.fname + extra_search_dirs = [] elif isinstance(exename, str): search_dir = source_dir + extra_search_dirs = search_dirs else: raise InvalidArguments('find_program only accepts strings and ' 'files, not {!r}'.format(exename)) extprog = dependencies.ExternalProgram(exename, search_dir=search_dir, + extra_search_dirs=extra_search_dirs, silent=silent) progobj = ExternalProgramHolder(extprog) if progobj.found(): @@ -2949,7 +2965,8 @@ external dependencies (including libraries) must go to "dependencies".''') # TODO update modules to always pass `for_machine`. It is bad-form to assume # the host machine. 
- def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST, required=True, silent=True, wanted=''): + def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST, + required=True, silent=True, wanted='', search_dirs=None): if not isinstance(args, list): args = [args] @@ -2957,7 +2974,7 @@ external dependencies (including libraries) must go to "dependencies".''') if progobj is None: progobj = self.program_from_file_for(for_machine, args, silent=silent) if progobj is None: - progobj = self.program_from_system(args, silent=silent) + progobj = self.program_from_system(args, search_dirs, silent=silent) if progobj is None and args[0].endswith('python3'): prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True) progobj = ExternalProgramHolder(prog) @@ -2980,6 +2997,7 @@ external dependencies (including libraries) must go to "dependencies".''') return ExternalProgramHolder(dependencies.NonExistingExternalProgram()) return progobj + @FeatureNewKwargs('find_program', '0.53.0', ['dirs']) @FeatureNewKwargs('find_program', '0.52.0', ['version']) @FeatureNewKwargs('find_program', '0.49.0', ['disabler']) @disablerIfNotFound @@ -2993,11 +3011,12 @@ external dependencies (including libraries) must go to "dependencies".''') mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled') return ExternalProgramHolder(dependencies.NonExistingExternalProgram()) - if not isinstance(required, bool): - raise InvalidArguments('"required" argument must be a boolean.') + search_dirs = extract_search_dirs(kwargs) wanted = mesonlib.stringlistify(kwargs.get('version', [])) for_machine = self.machine_from_native_kwarg(kwargs) - return self.find_program_impl(args, for_machine, required=required, silent=False, wanted=wanted) + return self.find_program_impl(args, for_machine, required=required, + silent=False, wanted=wanted, + search_dirs=search_dirs) def func_find_library(self, node, args, kwargs): raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n' diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 46f578e..2a976d3 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -496,7 +496,19 @@ class InterpreterBase: def evaluate_dictstatement(self, cur): (arguments, kwargs) = self.reduce_arguments(cur.args) assert (not arguments) - return kwargs + result = {} + self.argument_depth += 1 + for key, value in kwargs.items(): + if not isinstance(key, mparser.StringNode): + FeatureNew('Dictionary entry using non literal key', '0.53.0').use(self.subproject) + key = self.evaluate_statement(key) + if not isinstance(key, str): + raise InvalidArguments('Key must be a string') + if key in result: + raise InvalidArguments('Duplicate dictionary key: {}'.format(key)) + result[key] = value + self.argument_depth -= 1 + return result def evaluate_notstatement(self, cur): v = self.evaluate_statement(cur.value) @@ -731,16 +743,7 @@ The result of this is undefined and will become a hard error in a future Meson r elif isinstance(old_variable, dict): if not isinstance(addition, dict): raise InvalidArguments('The += operator requires a dict on the right hand side if the variable on the left is a dict') - new_addition = {} - for (key, value) in addition.items(): - if isinstance(key, str): - new_addition[key] = value - elif isinstance(key, mparser.IdNode) and isinstance(self.get_variable(key.value), str): - FeatureNew('Adding dictionary 
entry using string variable as key', '0.53.0').use(self.subproject) - new_addition[self.get_variable(key.value)] = value - else: - raise InvalidArguments('Dictionary key must be a string or string variable') - new_value = {**old_variable, **new_addition} + new_value = {**old_variable, **addition} # Add other data types here. else: raise InvalidArguments('The += operator currently only works with arrays, dicts, strings or ints ') diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index d69f688..ab532a4 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -153,7 +153,7 @@ class ArLinker(StaticLinker): return [target] -class ArmarLinker(ArLinker): +class ArmarLinker(ArLinker): # lgtm [py/missing-call-to-init] def __init__(self, exelist: typing.List[str]): StaticLinker.__init__(self, exelist) diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py index 28e2fce..3cfb39a 100644 --- a/mesonbuild/mdist.py +++ b/mesonbuild/mdist.py @@ -24,6 +24,7 @@ from glob import glob from pathlib import Path from mesonbuild.environment import detect_ninja from mesonbuild.mesonlib import windows_proof_rmtree, MesonException +from mesonbuild.wrap import wrap from mesonbuild import mlog, build archive_choices = ['gztar', 'xztar', 'zip'] @@ -36,6 +37,8 @@ def add_arguments(parser): help='directory to cd into before running') parser.add_argument('--formats', default='xztar', help='Comma separated list of archive types to create.') + parser.add_argument('--include-subprojects', action='store_true', + help='Include source code of subprojects that have been used for the build.') def create_hash(fname): @@ -87,22 +90,37 @@ def run_dist_scripts(dist_root, dist_scripts): print('Failed to run dist script {!r}'.format(name)) sys.exit(1) +def is_git(src_root): + _git = os.path.join(src_root, '.git') + return os.path.isdir(_git) or os.path.isfile(_git) def git_have_dirty_index(src_root): '''Check whether there are uncommitted changes in git''' ret = subprocess.call(['git', '-C', src_root, 'diff-index', '--quiet', 'HEAD']) return ret == 1 -def create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, dist_scripts): +def git_clone(src_root, distdir): if git_have_dirty_index(src_root): mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball') - distdir = os.path.join(dist_sub, dist_name) if os.path.exists(distdir): shutil.rmtree(distdir) os.makedirs(distdir) subprocess.check_call(['git', 'clone', '--shared', src_root, distdir]) process_submodules(distdir) del_gitfiles(distdir) + +def create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, dist_scripts, subprojects): + distdir = os.path.join(dist_sub, dist_name) + git_clone(src_root, distdir) + for path in subprojects: + sub_src_root = os.path.join(src_root, path) + sub_distdir = os.path.join(distdir, path) + if os.path.exists(sub_distdir): + continue + if is_git(sub_src_root): + git_clone(sub_src_root, sub_distdir) + else: + shutil.copytree(sub_src_root, sub_distdir) run_dist_scripts(distdir, dist_scripts) output_names = [] for a in archives: @@ -112,6 +130,8 @@ def create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, dist_scri shutil.rmtree(distdir) return output_names +def is_hg(src_root): + return os.path.isdir(os.path.join(src_root, '.hg')) def hg_have_dirty_index(src_root): '''Check whether there are uncommitted changes in hg''' @@ -147,7 +167,7 @@ def create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, dist_scrip return output_names -def check_dist(packagename, 
meson_command, bld_root, privdir): +def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir): print('Testing distribution package %s' % packagename) unpackdir = os.path.join(privdir, 'dist-unpack') builddir = os.path.join(privdir, 'dist-build') @@ -165,6 +185,7 @@ def check_dist(packagename, meson_command, bld_root, privdir): with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions: meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions) if o['name'] not in ['backend', 'install_umask']] + meson_command += extra_meson_args if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0: print('Running Meson on distribution package failed') return 1 @@ -214,10 +235,21 @@ def run(options): archives = determine_archives_to_generate(options) - _git = os.path.join(src_root, '.git') - if os.path.isdir(_git) or os.path.isfile(_git): - names = create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, b.dist_scripts) - elif os.path.isdir(os.path.join(src_root, '.hg')): + subprojects = [] + extra_meson_args = [] + if options.include_subprojects: + subproject_dir = os.path.join(src_root, b.subproject_dir) + for sub in b.subprojects: + _, directory = wrap.get_directory(subproject_dir, sub) + subprojects.append(os.path.join(b.subproject_dir, directory)) + extra_meson_args.append('-Dwrap_mode=nodownload') + + if is_git(src_root): + names = create_dist_git(dist_name, archives, src_root, bld_root, dist_sub, b.dist_scripts, subprojects) + elif is_hg(src_root): + if subprojects: + print('--include-subprojects option currently not supported with Mercurial') + return 1 names = create_dist_hg(dist_name, archives, src_root, bld_root, dist_sub, b.dist_scripts) else: print('Dist currently only works with Git or Mercurial repos') @@ -225,7 +257,7 @@ def run(options): if names is None: return 1 # Check only one. - rc = check_dist(names[0], meson_command, bld_root, priv_dir) + rc = check_dist(names[0], meson_command, extra_meson_args, bld_root, priv_dir) if rc == 0: for name in names: create_hash(name) diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index a19b2cf..5c3fc45 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -976,7 +976,9 @@ def replace_if_different(dst, dst_tmp): else: os.unlink(dst_tmp) -def listify(item, flatten=True, unholder=False): +def listify(item: typing.Any, + flatten: bool = True, + unholder: bool = False) -> typing.List[typing.Any]: ''' Returns a list with all args embedded in a list if they are not a list. This function preserves order. @@ -1416,8 +1418,14 @@ class LibType(Enum): PREFER_STATIC = 3 -class ProgressBarFallback: - '''Fallback progress bar implementation when tqdm is not found''' +class ProgressBarFallback: # lgtm [py/iter-returns-non-self] + ''' + Fallback progress bar implementation when tqdm is not found + + Since this class is not an actual iterator, but only provides a minimal + fallback, it is safe to ignore the 'Iterator does not return self from + __iter__ method' warning. + ''' def __init__(self, iterable=None, total=None, bar_type=None, desc=None): if iterable is not None: self.iterable = iter(iterable) diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index 3550abc..b1a55e5 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -503,7 +503,6 @@ def run(opts): log_dir = os.path.join(private_dir, '../meson-logs') if not os.path.exists(os.path.join(opts.wd, datafilename)): sys.exit('Install data not found. 
Run this command in build directory root.') - log_dir = os.path.join(private_dir, '../meson-logs') if not opts.no_rebuild: if not rebuild_all(opts.wd): sys.exit(-1) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index fb34b5a..1c9c542 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -26,23 +26,32 @@ from .ast import IntrospectionInterpreter, build_target_functions, AstConditionL from . import mlog from .backend import backends from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode -from typing import Dict, List, Optional +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import os import pathlib -def get_meson_info_file(info_dir: str): +def get_meson_info_file(info_dir: str) -> str: return os.path.join(info_dir, 'meson-info.json') -def get_meson_introspection_version(): +def get_meson_introspection_version() -> str: return '1.0.0' -def get_meson_introspection_required_version(): +def get_meson_introspection_required_version() -> List[str]: return ['>=1.0', '<2.0'] +class IntroCommand: + def __init__(self, + desc: str, + func: Optional[Callable[[], Union[dict, list]]] = None, + no_bd: Optional[Callable[[IntrospectionInterpreter], Union[dict, list]]] = None) -> None: + self.desc = desc + '.' + self.func = func + self.no_bd = no_bd + def get_meson_introspection_types(coredata: Optional[cdata.CoreData] = None, builddata: Optional[build.Build] = None, backend: Optional[backends.Backend] = None, - sourcedir: Optional[str] = None): + sourcedir: Optional[str] = None) -> Dict[str, IntroCommand]: if backend and builddata: benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) testdata = backend.create_test_serialisation(builddata.get_tests()) @@ -51,55 +60,22 @@ def get_meson_introspection_types(coredata: Optional[cdata.CoreData] = None, benchmarkdata = testdata = installdata = None return { - 'benchmarks': { - 'func': lambda: list_benchmarks(benchmarkdata), - 'desc': 'List all benchmarks.', - }, - 'buildoptions': { - 'func': lambda: list_buildoptions(coredata), - 'no_bd': lambda intr: list_buildoptions_from_source(intr), - 'desc': 'List all build options.', - }, - 'buildsystem_files': { - 'func': lambda: list_buildsystem_files(builddata), - 'desc': 'List files that make up the build system.', - 'key': 'buildsystem-files', - }, - 'dependencies': { - 'func': lambda: list_deps(coredata), - 'no_bd': lambda intr: list_deps_from_source(intr), - 'desc': 'List external dependencies.', - }, - 'scan_dependencies': { - 'no_bd': lambda intr: list_deps_from_source(intr), - 'desc': 'Scan for dependencies used in the meson.build file.', - 'key': 'scan-dependencies', - }, - 'installed': { - 'func': lambda: list_installed(installdata), - 'desc': 'List all installed files and directories.', - }, - 'projectinfo': { - 'func': lambda: list_projinfo(builddata), - 'no_bd': lambda intr: list_projinfo_from_source(sourcedir, intr), - 'desc': 'Information about projects.', - }, - 'targets': { - 'func': lambda: list_targets(builddata, installdata, backend), - 'no_bd': lambda intr: list_targets_from_source(intr), - 'desc': 'List top level targets.', - }, - 'tests': { - 'func': lambda: list_tests(testdata), - 'desc': 'List all unit tests.', - } + 'benchmarks': IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata)), + 'buildoptions': IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source), + 'buildsystem_files': IntroCommand('List files that make up 
the build system', func=lambda: list_buildsystem_files(builddata)), + 'dependencies': IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source), + 'scan_dependencies': IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source), + 'installed': IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata)), + 'projectinfo': IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source), + 'targets': IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source), + 'tests': IntroCommand('List all unit tests', func=lambda: list_tests(testdata)), } def add_arguments(parser): intro_types = get_meson_introspection_types() for key, val in intro_types.items(): - flag = '--' + val.get('key', key) - parser.add_argument(flag, action='store_true', dest=key, default=False, help=val['desc']) + flag = '--' + key.replace('_', '-') + parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc) parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja', help='The backend to use for the --buildoptions introspection.') @@ -127,12 +103,12 @@ def list_installed(installdata): res[path] = os.path.join(installdata.prefix, installpath) return res -def list_targets_from_source(intr: IntrospectionInterpreter): - tlist = [] +def list_targets_from_source(intr: IntrospectionInterpreter) -> List[Dict[str, Union[bool, str, List[Union[str, Dict[str, Union[str, List[str], bool]]]]]]]: + tlist = [] # type: List[Dict[str, Union[bool, str, List[Union[str, Dict[str, Union[str, List[str], bool]]]]]]] for i in intr.targets: - sources = [] + sources = [] # type: List[str] for n in i['sources']: - args = [] + args = [] # type: List[Union[str, StringNode]] if isinstance(n, FunctionNode): args = list(n.args.arguments) if n.func_name in build_target_functions: @@ -167,8 +143,8 @@ def list_targets_from_source(intr: IntrospectionInterpreter): return tlist -def list_targets(builddata: build.Build, installdata, backend: backends.Backend): - tlist = [] +def list_targets(builddata: build.Build, installdata, backend: backends.Backend) -> List[Dict[str, Union[bool, str, List[Union[str, Dict[str, Union[str, List[str], bool]]]]]]]: + tlist = [] # type: List[Dict[str, Union[bool, str, List[Union[str, Dict[str, Union[str, List[str], bool]]]]]]] build_dir = builddata.environment.get_build_dir() src_dir = builddata.environment.get_source_dir() @@ -201,11 +177,11 @@ def list_targets(builddata: build.Build, installdata, backend: backends.Backend) tlist.append(t) return tlist -def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> List[dict]: +def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> List[Dict[str, Union[str, bool, int, List[str]]]]: return list_buildoptions(intr.coredata) -def list_buildoptions(coredata: cdata.CoreData) -> List[dict]: - optlist = [] +def list_buildoptions(coredata: cdata.CoreData) -> List[Dict[str, Union[str, bool, int, List[str]]]]: + optlist = [] # type: List[Dict[str, Union[str, bool, int, List[str]]]] dir_option_names = ['bindir', 'datadir', @@ -228,74 +204,77 @@ def list_buildoptions(coredata: cdata.CoreData) -> List[dict]: test_options = {k: o for k, o in coredata.builtins.items() if k in test_option_names} core_options = {k: o for k, o in coredata.builtins.items() if k in core_option_names} - 
add_keys(optlist, core_options, 'core') - add_keys(optlist, coredata.builtins_per_machine.host, 'core', machine='host') + def add_keys(options: Dict[str, cdata.UserOption], section: str, machine: str = 'any') -> None: + for key in sorted(options.keys()): + opt = options[key] + optdict = {'name': key, 'value': opt.value, 'section': section, 'machine': machine} + if isinstance(opt, cdata.UserStringOption): + typestr = 'string' + elif isinstance(opt, cdata.UserBooleanOption): + typestr = 'boolean' + elif isinstance(opt, cdata.UserComboOption): + optdict['choices'] = opt.choices + typestr = 'combo' + elif isinstance(opt, cdata.UserIntegerOption): + typestr = 'integer' + elif isinstance(opt, cdata.UserArrayOption): + typestr = 'array' + else: + raise RuntimeError("Unknown option type") + optdict['type'] = typestr + optdict['description'] = opt.description + optlist.append(optdict) + + add_keys(core_options, 'core') + add_keys(coredata.builtins_per_machine.host, 'core', machine='host') add_keys( - optlist, {'build.' + k: o for k, o in coredata.builtins_per_machine.build.items()}, 'core', machine='build', ) - add_keys(optlist, coredata.backend_options, 'backend') - add_keys(optlist, coredata.base_options, 'base') - add_keys(optlist, coredata.compiler_options.host, 'compiler', machine='host') + add_keys(coredata.backend_options, 'backend') + add_keys(coredata.base_options, 'base') + add_keys(coredata.compiler_options.host, 'compiler', machine='host') add_keys( - optlist, {'build.' + k: o for k, o in coredata.compiler_options.build.items()}, 'compiler', machine='build', ) - add_keys(optlist, dir_options, 'directory') - add_keys(optlist, coredata.user_options, 'user') - add_keys(optlist, test_options, 'test') + add_keys(dir_options, 'directory') + add_keys(coredata.user_options, 'user') + add_keys(test_options, 'test') return optlist -def add_keys(optlist, options: Dict[str, cdata.UserOption], section: str, machine: str = 'any'): - keys = list(options.keys()) - keys.sort() - for key in keys: - opt = options[key] - optdict = {'name': key, 'value': opt.value, 'section': section, 'machine': machine} - if isinstance(opt, cdata.UserStringOption): - typestr = 'string' - elif isinstance(opt, cdata.UserBooleanOption): - typestr = 'boolean' - elif isinstance(opt, cdata.UserComboOption): - optdict['choices'] = opt.choices - typestr = 'combo' - elif isinstance(opt, cdata.UserIntegerOption): - typestr = 'integer' - elif isinstance(opt, cdata.UserArrayOption): - typestr = 'array' - else: - raise RuntimeError("Unknown option type") - optdict['type'] = typestr - optdict['description'] = opt.description - optlist.append(optdict) - -def find_buildsystem_files_list(src_dir): +def find_buildsystem_files_list(src_dir) -> List[str]: # I feel dirty about this. But only slightly. 
- filelist = [] + filelist = [] # type: List[str] for root, _, files in os.walk(src_dir): for f in files: if f == 'meson.build' or f == 'meson_options.txt': filelist.append(os.path.relpath(os.path.join(root, f), src_dir)) return filelist -def list_buildsystem_files(builddata: build.Build): +def list_buildsystem_files(builddata: build.Build) -> List[str]: src_dir = builddata.environment.get_source_dir() filelist = find_buildsystem_files_list(src_dir) filelist = [os.path.join(src_dir, x) for x in filelist] return filelist -def list_deps_from_source(intr: IntrospectionInterpreter): - result = [] +def list_deps_from_source(intr: IntrospectionInterpreter) -> List[Dict[str, Union[str, bool]]]: + result = [] # type: List[Dict[str, Union[str, bool]]] for i in intr.dependencies: - result += [{k: v for k, v in i.items() if k in ['name', 'required', 'has_fallback', 'conditional']}] + keys = [ + 'name', + 'required', + 'version', + 'has_fallback', + 'conditional', + ] + result += [{k: v for k, v in i.items() if k in keys}] return result -def list_deps(coredata: cdata.CoreData): - result = [] +def list_deps(coredata: cdata.CoreData) -> List[Dict[str, Union[str, List[str]]]]: + result = [] # type: List[Dict[str, Union[str, List[str]]]] for d in coredata.deps.host.values(): if d.found(): result += [{'name': d.name, @@ -304,8 +283,8 @@ def list_deps(coredata: cdata.CoreData): 'link_args': d.get_link_args()}] return result -def get_test_list(testdata): - result = [] +def get_test_list(testdata) -> List[Dict[str, Union[str, int, List[str], Dict[str, str]]]]: + result = [] # type: List[Dict[str, Union[str, int, List[str], Dict[str, str]]]] for t in testdata: to = {} if isinstance(t.fname, str): @@ -326,13 +305,13 @@ def get_test_list(testdata): result.append(to) return result -def list_tests(testdata): +def list_tests(testdata) -> List[Dict[str, Union[str, int, List[str], Dict[str, str]]]]: return get_test_list(testdata) -def list_benchmarks(benchdata): +def list_benchmarks(benchdata) -> List[Dict[str, Union[str, int, List[str], Dict[str, str]]]]: return get_test_list(benchdata) -def list_projinfo(builddata: build.Build): +def list_projinfo(builddata: build.Build) -> Dict[str, Union[str, List[Dict[str, str]]]]: result = {'version': builddata.project_version, 'descriptive_name': builddata.project_name, 'subproject_dir': builddata.subproject_dir} @@ -345,7 +324,8 @@ def list_projinfo(builddata: build.Build): result['subprojects'] = subprojects return result -def list_projinfo_from_source(sourcedir: str, intr: IntrospectionInterpreter): +def list_projinfo_from_source(intr: IntrospectionInterpreter) -> Dict[str, Union[str, List[Dict[str, str]]]]: + sourcedir = intr.source_root files = find_buildsystem_files_list(sourcedir) files = [os.path.normpath(x) for x in files] @@ -358,7 +338,7 @@ def list_projinfo_from_source(sourcedir: str, intr: IntrospectionInterpreter): intr.project_data['subproject_dir'] = intr.subproject_dir return intr.project_data -def print_results(options, results, indent): +def print_results(options, results: Sequence[Tuple[str, Union[dict, List[Any]]]], indent: int) -> int: if not results and not options.force_dict: print('No command specified') return 1 @@ -372,14 +352,14 @@ def print_results(options, results, indent): print(json.dumps(out, indent=indent)) return 0 -def run(options): +def run(options) -> int: datadir = 'meson-private' infodir = 'meson-info' if options.builddir is not None: datadir = os.path.join(options.builddir, datadir) infodir = os.path.join(options.builddir, infodir) 
indent = 4 if options.indent else None - results = [] + results = [] # type: List[Tuple[str, Union[dict, List[Any]]]] sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11] intro_types = get_meson_introspection_types(sourcedir=sourcedir) @@ -392,9 +372,9 @@ def run(options): # Re-enable logging just in case mlog.enable() for key, val in intro_types.items(): - if (not options.all and not getattr(options, key, False)) or 'no_bd' not in val: + if (not options.all and not getattr(options, key, False)) or not val.no_bd: continue - results += [(key, val['no_bd'](intr))] + results += [(key, val.no_bd(intr))] return print_results(options, results, indent) infofile = get_meson_info_file(infodir) @@ -405,7 +385,6 @@ def run(options): 'meson version. Please regenerate it in this case.') return 1 - intro_vers = '0.0.0' with open(infofile, 'r') as fp: raw = json.load(fp) intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0') @@ -420,7 +399,7 @@ def run(options): # Extract introspection information from JSON for i in intro_types.keys(): - if 'func' not in intro_types[i]: + if not intro_types[i].func: continue if not options.all and not getattr(options, i, False): continue @@ -433,9 +412,9 @@ def run(options): return print_results(options, results, indent) -updated_introspection_files = [] +updated_introspection_files = [] # type: List[str] -def write_intro_info(intro_info, info_dir): +def write_intro_info(intro_info: Sequence[Tuple[str, Union[dict, List[Any]]]], info_dir: str) -> None: global updated_introspection_files for i in intro_info: out_file = os.path.join(info_dir, 'intro-{}.json'.format(i[0])) @@ -446,26 +425,26 @@ def write_intro_info(intro_info, info_dir): os.replace(tmp_file, out_file) updated_introspection_files += [i[0]] -def generate_introspection_file(builddata: build.Build, backend: backends.Backend): +def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None: coredata = builddata.environment.get_coredata() intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend) - intro_info = [] + intro_info = [] # type: List[Tuple[str, Union[dict, List[Any]]]] for key, val in intro_types.items(): - if 'func' not in val: + if not val.func: continue - intro_info += [(key, val['func']())] + intro_info += [(key, val.func())] write_intro_info(intro_info, builddata.environment.info_dir) -def update_build_options(coredata: cdata.CoreData, info_dir): +def update_build_options(coredata: cdata.CoreData, info_dir) -> None: intro_info = [ ('buildoptions', list_buildoptions(coredata)) ] write_intro_info(intro_info, info_dir) -def split_version_string(version: str): +def split_version_string(version: str) -> Dict[str, Union[str, int]]: vers_list = version.split('.') return { 'full': version, @@ -474,7 +453,7 @@ def split_version_string(version: str): 'patch': int(vers_list[2] if len(vers_list) > 2 else 0) } -def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False): +def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False) -> None: global updated_introspection_files info_dir = builddata.environment.info_dir info_file = get_meson_info_file(info_dir) @@ -482,7 +461,7 @@ def write_meson_info_file(builddata: build.Build, errors: list, build_files_upda intro_info = {} for i in intro_types.keys(): - if 'func' not in intro_types[i]: + if not intro_types[i].func: continue intro_info[i] = { 'file': 
'intro-{}.json'.format(i), diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index ace47f4..c779a58 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -18,8 +18,7 @@ import sys import time import platform from contextlib import contextmanager -import typing -from typing import Any, Generator, List, Optional, Sequence, TextIO, Union +from typing import Any, Generator, List, Optional, Sequence, TextIO, Union, cast from pathlib import Path """This is (mostly) a standalone module used to write logging @@ -232,7 +231,7 @@ def _log_error(severity: str, *rargs: Union[str, AnsiDecorator], **kwargs: Any) location_str = get_error_location_string(location_file, location.lineno) # Unions are frankly awful, and we have to cast here to get mypy # to understand that the list concatenation is safe - location_list = typing.cast(List[Union[str, AnsiDecorator]], [location_str]) + location_list = cast(List[Union[str, AnsiDecorator]], [location_str]) args = location_list + args log(*args, **kwargs) diff --git a/mesonbuild/modules/fs.py b/mesonbuild/modules/fs.py index 6e1f5f7..3307cab 100644 --- a/mesonbuild/modules/fs.py +++ b/mesonbuild/modules/fs.py @@ -40,7 +40,7 @@ class FSModule(ExtensionModule): def _check(self, check: str, state: 'ModuleState', args: typing.Sequence[str]) -> ModuleReturnValue: if len(args) != 1: - MesonException('fs.{} takes exactly one argument.'.format(check)) + raise MesonException('fs.{} takes exactly one argument.'.format(check)) test_file = self._resolve_dir(state, args[0]) return ModuleReturnValue(getattr(test_file, check)(), []) @@ -68,7 +68,7 @@ class FSModule(ExtensionModule): @noKwargs def hash(self, state: 'ModuleState', args: typing.Sequence[str], kwargs: dict) -> ModuleReturnValue: if len(args) != 2: - MesonException('method takes exactly two arguments.') + raise MesonException('method takes exactly two arguments.') file = self._resolve_dir(state, args[0]) if not file.is_file(): raise MesonException('{} is not a file and therefore cannot be hashed'.format(file)) @@ -84,7 +84,7 @@ class FSModule(ExtensionModule): @noKwargs def size(self, state: 'ModuleState', args: typing.Sequence[str], kwargs: dict) -> ModuleReturnValue: if len(args) != 1: - MesonException('method takes exactly one argument.') + raise MesonException('method takes exactly one argument.') file = self._resolve_dir(state, args[0]) if not file.is_file(): raise MesonException('{} is not a file and therefore cannot be sized'.format(file)) @@ -97,7 +97,7 @@ class FSModule(ExtensionModule): @noKwargs def samefile(self, state: 'ModuleState', args: typing.Sequence[str], kwargs: dict) -> ModuleReturnValue: if len(args) != 2: - MesonException('method takes exactly two arguments.') + raise MesonException('method takes exactly two arguments.') file1 = self._resolve_dir(state, args[0]) file2 = self._resolve_dir(state, args[1]) if not file1.exists(): @@ -113,7 +113,7 @@ class FSModule(ExtensionModule): @noKwargs def replace_suffix(self, state: 'ModuleState', args: typing.Sequence[str], kwargs: dict) -> ModuleReturnValue: if len(args) != 2: - MesonException('method takes exactly two arguments.') + raise MesonException('method takes exactly two arguments.') original = PurePath(args[0]) new = original.with_suffix(args[1]) return ModuleReturnValue(str(new), []) @@ -122,7 +122,7 @@ class FSModule(ExtensionModule): @noKwargs def parent(self, state: 'ModuleState', args: typing.Sequence[str], kwargs: dict) -> ModuleReturnValue: if len(args) != 1: - MesonException('method takes exactly one argument.') + raise 
MesonException('method takes exactly one argument.') original = PurePath(args[0]) new = original.parent return ModuleReturnValue(str(new), []) @@ -131,7 +131,7 @@ class FSModule(ExtensionModule): @noKwargs def name(self, state: 'ModuleState', args: typing.Sequence[str], kwargs: dict) -> ModuleReturnValue: if len(args) != 1: - MesonException('method takes exactly one argument.') + raise MesonException('method takes exactly one argument.') original = PurePath(args[0]) new = original.name return ModuleReturnValue(str(new), []) diff --git a/mesonbuild/modules/rpm.py b/mesonbuild/modules/rpm.py index b99ae8d..073e338 100644 --- a/mesonbuild/modules/rpm.py +++ b/mesonbuild/modules/rpm.py @@ -151,7 +151,7 @@ class RPMModule(ExtensionModule): def __get_required_compilers(self): required_compilers = set() - for compiler in self.coredata.compilers.values(): + for compiler in self.coredata.environment.coredata.compilers.host.values(): # Elbrus has one 'lcc' package for every compiler if isinstance(compiler, compilers.GnuCCompiler): required_compilers.add('gcc') diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index 1baf051..2b503f1 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -674,21 +674,8 @@ class Parser: a = ArgumentNode(s) while not isinstance(s, EmptyNode): - potential = self.current if self.accept('colon'): - key_value = self.statement() - if isinstance(s, StringNode): - if s.value in a.kwargs: - # + 1 to colno to point to the actual string, not the opening quote - raise ParseException('Duplicate dictionary key: {}'.format(s.value), self.getline(), s.lineno, s.colno + 1) - a.set_kwarg(s.value, key_value) - elif isinstance(s, IdNode) and isinstance(key_value, StringNode): - for key in a.kwargs: - if s.value == key.value: - raise ParseException('Duplicate dictionary variable key: {}'.format(s.value), self.getline(), s.lineno, s.colno) - a.set_kwarg(s, key_value) - else: - raise ParseException('Key must be a string or string variable', self.getline(), s.lineno, s.colno) + a.set_kwarg(s, self.statement()) potential = self.current if not self.accept('comma'): return a diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py index eeeb9c6..39fb4ef 100755 --- a/mesonbuild/msubprojects.py +++ b/mesonbuild/msubprojects.py @@ -178,11 +178,17 @@ def foreach(wrap, repo_dir, options): mlog.log(' -> Not downloaded yet') return try: - subprocess.check_call([options.command] + options.args, cwd=repo_dir) - mlog.log('') + out = subprocess.check_output([options.command] + options.args, + stderr=subprocess.STDOUT, + cwd=repo_dir).decode() + mlog.log(out, end='') except subprocess.CalledProcessError as e: - out = e.output.decode().strip() - mlog.log(' -> ', mlog.red(out)) + err_message = "Command '%s' returned non-zero exit status %d." 
% (" ".join(e.cmd), e.returncode) + out = e.output.decode() + mlog.log(' -> ', mlog.red(err_message)) + mlog.log(out, end='') + except Exception as e: + mlog.log(' -> ', mlog.red(str(e))) def add_common_arguments(p): p.add_argument('--sourcedir', default='.', diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 9bb98cf..5078d3c 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -103,11 +103,11 @@ class RequiredKeys: class MTypeBase: def __init__(self, node: Optional[BaseNode] = None): if node is None: - self.node = self._new_node() + self.node = self._new_node() # lgtm [py/init-calls-subclass] (node creation does not depend on base class state) else: self.node = node self.node_type = None - for i in self.supported_nodes(): + for i in self.supported_nodes(): # lgtm [py/init-calls-subclass] (listing nodes does not depend on base class state) if isinstance(self.node, i): self.node_type = i @@ -854,7 +854,7 @@ class Rewriter: while raw[end] in [' ', '\n', '\t']: end += 1 - raw = files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:] + files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:] for i in str_list: if i['action'] in ['modify', 'rm']: diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index aa764b8..3ea1f1e 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -16,6 +16,7 @@ from .. import mlog import contextlib import urllib.request import urllib.error +import urllib.parse import os import hashlib import shutil @@ -33,7 +34,9 @@ if typing.TYPE_CHECKING: import http.client try: - import ssl + # Importing is just done to check if SSL exists, so all warnings + # regarding 'imported but unused' can be safely ignored + import ssl # noqa has_ssl = True API_ROOT = 'https://wrapdb.mesonbuild.com/v1/' except ImportError: @@ -42,45 +45,49 @@ except ImportError: req_timeout = 600.0 ssl_warning_printed = False +whitelist_subdomain = 'wrapdb.mesonbuild.com' -def build_ssl_context() -> 'ssl.SSLContext': - ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23) - ctx.options |= ssl.OP_NO_SSLv2 - ctx.options |= ssl.OP_NO_SSLv3 - ctx.verify_mode = ssl.CERT_REQUIRED - ctx.load_default_certs() - return ctx def quiet_git(cmd: typing.List[str], workingdir: str) -> typing.Tuple[bool, str]: - try: - pc = subprocess.run(['git', '-C', workingdir] + cmd, universal_newlines=True, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except FileNotFoundError as e: - return False, str(e) + git = shutil.which('git') + if not git: + return False, 'Git program not found.' 
+ pc = subprocess.run([git, '-C', workingdir] + cmd, universal_newlines=True, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) if pc.returncode != 0: return False, pc.stderr return True, pc.stdout +def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult: + """ raises WrapException if not whitelisted subdomain """ + url = urllib.parse.urlparse(urlstr) + if not url.hostname: + raise WrapException('{} is not a valid URL'.format(urlstr)) + if not url.hostname.endswith(whitelist_subdomain): + raise WrapException('{} is not a whitelisted WrapDB URL'.format(urlstr)) + if has_ssl and not url.scheme == 'https': + raise WrapException('WrapDB did not have expected SSL https url, instead got {}'.format(urlstr)) + return url + def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse': global ssl_warning_printed + + url = whitelist_wrapdb(urlstring) if has_ssl: try: - return urllib.request.urlopen(urlstring, timeout=req_timeout)# , context=build_ssl_context()) - except urllib.error.URLError: - if not ssl_warning_printed: - print('SSL connection failed. Falling back to unencrypted connections.', file=sys.stderr) - ssl_warning_printed = True + return urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=req_timeout) + except urllib.error.URLError as excp: + raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp)) + + # following code is only for those without Python SSL + nossl_url = url._replace(scheme='http') if not ssl_warning_printed: - print('Warning: SSL not available, traffic not authenticated.', file=sys.stderr) + mlog.warning('SSL module not available in {}: WrapDB traffic not authenticated.'.format(sys.executable)) ssl_warning_printed = True - # Trying to open SSL connection to wrapdb fails because the - # certificate is not known. - if urlstring.startswith('https'): - urlstring = 'http' + urlstring[5:] try: - return urllib.request.urlopen(urlstring, timeout=req_timeout) - except urllib.error.URLError: - raise WrapException('failed to get {} is the internet available?'.format(urlstring)) + return urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=req_timeout) + except urllib.error.URLError as excp: + raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp)) class WrapException(MesonException): @@ -118,6 +125,23 @@ class PackageDefinition: def has_patch(self) -> bool: return 'patch_url' in self.values +def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition: + fname = os.path.join(subdir_root, packagename + '.wrap') + if os.path.isfile(fname): + return PackageDefinition(fname) + return None + +def get_directory(subdir_root: str, packagename: str): + directory = packagename + # We always have to load the wrap file, if it exists, because it could + # override the default directory name. + wrap = load_wrap(subdir_root, packagename) + if wrap and 'directory' in wrap.values: + directory = wrap.get('directory') + if os.path.dirname(directory): + raise WrapException('Directory key must be a name and not a path') + return wrap, directory + class Resolver: def __init__(self, subdir_root: str, wrap_mode=WrapMode.default): self.wrap_mode = wrap_mode @@ -126,14 +150,7 @@ class Resolver: def resolve(self, packagename: str, method: str) -> str: self.packagename = packagename - self.directory = packagename - # We always have to load the wrap file, if it exists, because it could - # override the default directory name. 
- self.wrap = self.load_wrap() - if self.wrap and 'directory' in self.wrap.values: - self.directory = self.wrap.get('directory') - if os.path.dirname(self.directory): - raise WrapException('Directory key must be a name and not a path') + self.wrap, self.directory = get_directory(self.subdir_root, self.packagename) self.dirname = os.path.join(self.subdir_root, self.directory) meson_file = os.path.join(self.dirname, 'meson.build') cmake_file = os.path.join(self.dirname, 'CMakeLists.txt') @@ -180,12 +197,6 @@ class Resolver: return self.directory - def load_wrap(self) -> PackageDefinition: - fname = os.path.join(self.subdir_root, self.packagename + '.wrap') - if os.path.isfile(fname): - return PackageDefinition(fname) - return None - def check_can_download(self) -> None: # Don't download subproject data based on wrap file if requested. # Git submodules are ok (see above)! @@ -194,6 +205,9 @@ class Resolver: raise WrapException(m) def resolve_git_submodule(self) -> bool: + git = shutil.which('git') + if not git: + raise WrapException('Git program not found.') # Are we in a git repository? ret, out = quiet_git(['rev-parse'], self.subdir_root) if not ret: @@ -210,12 +224,13 @@ class Resolver: raise WrapException('git submodule has merge conflicts') # Submodule exists, but is deinitialized or wasn't initialized elif out.startswith('-'): - if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', self.dirname]) == 0: + if subprocess.run([git, '-C', self.subdir_root, + 'submodule', 'update', '--init', self.dirname]).returncode == 0: return True raise WrapException('git submodule failed to init') # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout. elif out.startswith(' '): - subprocess.call(['git', 'checkout', '.'], cwd=self.dirname) + subprocess.run([git, 'checkout', '.'], cwd=self.dirname) # Even if checkout failed, try building it anyway and let the user # handle any problems manually. 
return True @@ -238,6 +253,9 @@ class Resolver: self.apply_patch() def get_git(self) -> None: + git = shutil.which('git') + if not git: + raise WrapException('Git program not found.') revno = self.wrap.get('revision') is_shallow = False depth_option = [] # type: typing.List[str] @@ -248,42 +266,42 @@ class Resolver: if is_shallow and self.is_git_full_commit_id(revno): # git doesn't support directly cloning shallowly for commits, # so we follow https://stackoverflow.com/a/43136160 - subprocess.check_call(['git', 'init', self.directory], cwd=self.subdir_root) - subprocess.check_call(['git', 'remote', 'add', 'origin', self.wrap.get('url')], + subprocess.check_call([git, 'init', self.directory], cwd=self.subdir_root) + subprocess.check_call([git, 'remote', 'add', 'origin', self.wrap.get('url')], cwd=self.dirname) revno = self.wrap.get('revision') - subprocess.check_call(['git', 'fetch', *depth_option, 'origin', revno], + subprocess.check_call([git, 'fetch', *depth_option, 'origin', revno], cwd=self.dirname) - subprocess.check_call(['git', 'checkout', revno], cwd=self.dirname) + subprocess.check_call([git, 'checkout', revno], cwd=self.dirname) if self.wrap.values.get('clone-recursive', '').lower() == 'true': - subprocess.check_call(['git', 'submodule', 'update', + subprocess.check_call([git, 'submodule', 'update', '--init', '--checkout', '--recursive', *depth_option], cwd=self.dirname) push_url = self.wrap.values.get('push-url') if push_url: - subprocess.check_call(['git', 'remote', 'set-url', + subprocess.check_call([git, 'remote', 'set-url', '--push', 'origin', push_url], cwd=self.dirname) else: if not is_shallow: - subprocess.check_call(['git', 'clone', self.wrap.get('url'), + subprocess.check_call([git, 'clone', self.wrap.get('url'), self.directory], cwd=self.subdir_root) if revno.lower() != 'head': - if subprocess.call(['git', 'checkout', revno], cwd=self.dirname) != 0: - subprocess.check_call(['git', 'fetch', self.wrap.get('url'), revno], cwd=self.dirname) - subprocess.check_call(['git', 'checkout', revno], cwd=self.dirname) + if subprocess.run([git, 'checkout', revno], cwd=self.dirname).returncode != 0: + subprocess.check_call([git, 'fetch', self.wrap.get('url'), revno], cwd=self.dirname) + subprocess.check_call([git, 'checkout', revno], cwd=self.dirname) else: - subprocess.check_call(['git', 'clone', *depth_option, + subprocess.check_call([git, 'clone', *depth_option, '--branch', revno, self.wrap.get('url'), self.directory], cwd=self.subdir_root) if self.wrap.values.get('clone-recursive', '').lower() == 'true': - subprocess.check_call(['git', 'submodule', 'update', + subprocess.check_call([git, 'submodule', 'update', '--init', '--checkout', '--recursive', *depth_option], cwd=self.dirname) push_url = self.wrap.values.get('push-url') if push_url: - subprocess.check_call(['git', 'remote', 'set-url', + subprocess.check_call([git, 'remote', 'set-url', '--push', 'origin', push_url], cwd=self.dirname) @@ -295,28 +313,39 @@ class Resolver: def get_hg(self) -> None: revno = self.wrap.get('revision') - subprocess.check_call(['hg', 'clone', self.wrap.get('url'), + hg = shutil.which('hg') + if not hg: + raise WrapException('Mercurial program not found.') + subprocess.check_call([hg, 'clone', self.wrap.get('url'), self.directory], cwd=self.subdir_root) if revno.lower() != 'tip': - subprocess.check_call(['hg', 'checkout', revno], + subprocess.check_call([hg, 'checkout', revno], cwd=self.dirname) def get_svn(self) -> None: revno = self.wrap.get('revision') - subprocess.check_call(['svn', 'checkout', 
'-r', revno, self.wrap.get('url'), + svn = shutil.which('svn') + if not svn: + raise WrapException('SVN program not found.') + subprocess.check_call([svn, 'checkout', '-r', revno, self.wrap.get('url'), self.directory], cwd=self.subdir_root) - def get_data(self, url: str) -> typing.Tuple[str, str]: + def get_data(self, urlstring: str) -> typing.Tuple[str, str]: blocksize = 10 * 1024 h = hashlib.sha256() tmpfile = tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False) - if url.startswith('https://wrapdb.mesonbuild.com'): - resp = open_wrapdburl(url) + url = urllib.parse.urlparse(urlstring) + if not url.hostname: + raise WrapException('{} is not a valid URL'.format(urlstring)) + if url.hostname.endswith(whitelist_subdomain): + resp = open_wrapdburl(urlstring) + elif whitelist_subdomain in urlstring: + raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring)) else: try: - resp = urllib.request.urlopen(url, timeout=req_timeout) + resp = urllib.request.urlopen(urlstring, timeout=req_timeout) except urllib.error.URLError: - raise WrapException('could not get {} is the internet available?'.format(url)) + raise WrapException('could not get {} is the internet available?'.format(urlstring)) with contextlib.closing(resp) as resp: try: dlsize = int(resp.info()['Content-Length']) @@ -353,7 +382,7 @@ class Resolver: h.update(f.read()) dhash = h.hexdigest() if dhash != expected: - raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' % (what, expected, dhash)) + raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash)) def download(self, what: str, ofname: str) -> None: self.check_can_download() @@ -363,7 +392,7 @@ class Resolver: expected = self.wrap.get(what + '_hash') if dhash != expected: os.remove(tmpfile) - raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' 
% (what, expected, dhash)) + raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash)) os.rename(tmpfile, ofname) def get_file_internal(self, what: str) -> str: diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py index cd15cef..351669b 100644 --- a/mesonbuild/wrap/wraptool.py +++ b/mesonbuild/wrap/wraptool.py @@ -94,7 +94,7 @@ def install(options): if os.path.exists(wrapfile): raise SystemExit('Wrap file already exists.') (branch, revision) = get_latest_version(name) - u = open_wrapdburl(API_ROOT + 'projects/%s/%s/%s/get_wrap' % (name, branch, revision)) + u = open_wrapdburl(API_ROOT + 'projects/{}/{}/{}/get_wrap'.format(name, branch, revision)) data = u.read() with open(wrapfile, 'wb') as f: f.write(data) @@ -113,7 +113,7 @@ def get_current_version(wrapfile): return branch, revision, cp['directory'], cp['source_filename'], cp['patch_filename'] def update_wrap_file(wrapfile, name, new_branch, new_revision): - u = open_wrapdburl(API_ROOT + 'projects/%s/%s/%d/get_wrap' % (name, new_branch, new_revision)) + u = open_wrapdburl(API_ROOT + 'projects/{}/{}/{}/get_wrap'.format(name, new_branch, new_revision)) data = u.read() with open(wrapfile, 'wb') as f: f.write(data) @@ -148,7 +148,7 @@ def info(options): versions = jd['versions'] if not versions: raise SystemExit('No available versions of' + name) - print('Available versions of %s:' % name) + print('Available versions of {}:'.format(name)) for v in versions: print(' ', v['branch'], v['revision']) @@ -160,7 +160,7 @@ def do_promotion(from_path, spdir_name): sproj_name = os.path.basename(from_path) outputdir = os.path.join(spdir_name, sproj_name) if os.path.exists(outputdir): - raise SystemExit('Output dir %s already exists. Will not overwrite.' % outputdir) + raise SystemExit('Output dir {} already exists. Will not overwrite.'.format(outputdir)) shutil.copytree(from_path, outputdir, ignore=shutil.ignore_patterns('subprojects')) def promote(options): @@ -177,10 +177,10 @@ def promote(options): # otherwise the argument is just a subproject basename which must be unambiguous if argument not in sprojs: - raise SystemExit('Subproject %s not found in directory tree.' % argument) + raise SystemExit('Subproject {} not found in directory tree.'.format(argument)) matches = sprojs[argument] if len(matches) > 1: - print('There is more than one version of %s in tree. Please specify which one to promote:\n' % argument, file=sys.stderr) + print('There is more than one version of {} in tree. Please specify which one to promote:\n'.format(argument), file=sys.stderr) for s in matches: print(s, file=sys.stderr) raise SystemExit(1) @@ -201,9 +201,9 @@ def status(options): print('Wrap file not from wrapdb.', file=sys.stderr) continue if current_branch == latest_branch and current_revision == latest_revision: - print('', name, 'up to date. Branch %s, revision %d.' % (current_branch, current_revision)) + print('', name, 'up to date. Branch {}, revision {}.'.format(current_branch, current_revision)) else: - print('', name, 'not up to date. Have %s %d, but %s %d is available.' % (current_branch, current_revision, latest_branch, latest_revision)) + print('', name, 'not up to date. 
Have {} {}, but {} {} is available.'.format(current_branch, current_revision, latest_branch, latest_revision)) def run(options): options.wrap_func(options) diff --git a/run_project_tests.py b/run_project_tests.py index 0784d34..4f04fb2 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2012-2016 The Meson development team +# Copyright 2012-2019 The Meson development team # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -47,6 +47,12 @@ from run_tests import get_backend_commands, get_backend_args_for_dir, Backend from run_tests import ensure_backend_detects_changes from run_tests import guess_backend +ALL_TESTS = ['cmake', 'common', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test', + 'kconfig', 'platform-osx', 'platform-windows', 'platform-linux', + 'java', 'C#', 'vala', 'rust', 'd', 'objective c', 'objective c++', + 'fortran', 'swift', 'cuda', 'python3', 'python', 'fpga', 'frameworks', 'nasm', 'wasm' + ] + class BuildStep(Enum): configure = 1 @@ -282,7 +288,6 @@ def _run_ci_include(args: typing.List[str]) -> str: return 'Included file {}:\n{}\n'.format(args[0], data) except Exception: return 'Failed to open {} ({})'.format(args[0]) - return 'Appended {} to the log'.format(args[0]) ci_commands = { 'ci_include': _run_ci_include @@ -545,7 +550,7 @@ def skippable(suite, test): if test.endswith('10 gtk-doc'): return True - # NetCDF is not in the CI image + # NetCDF is not in the CI Docker image if test.endswith('netcdf'): return True @@ -638,8 +643,10 @@ def detect_tests_to_run(only: typing.List[str]) -> typing.List[typing.Tuple[str, tests to run """ - skip_fortran = not(shutil.which('gfortran') or shutil.which('flang') or - shutil.which('pgfortran') or shutil.which('ifort')) + skip_fortran = not(shutil.which('gfortran') or + shutil.which('flang') or + shutil.which('pgfortran') or + shutil.which('ifort')) # Name, subdirectory, skip condition. 
all_tests = [ @@ -673,8 +680,9 @@ def detect_tests_to_run(only: typing.List[str]) -> typing.List[typing.Tuple[str, ('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), ] + names = [t[0] for t in all_tests] + assert names == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests names' if only: - names = [t[0] for t in all_tests] ind = [names.index(o) for o in only] all_tests = [all_tests[i] for i in ind] gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests] @@ -939,13 +947,12 @@ if __name__ == '__main__': parser = argparse.ArgumentParser(description="Run the test suite of Meson.") parser.add_argument('extra_args', nargs='*', help='arguments that are passed directly to Meson (remember to have -- before these).') - parser.add_argument('--backend', default=None, dest='backend', - choices=backendlist) + parser.add_argument('--backend', dest='backend', choices=backendlist) parser.add_argument('--failfast', action='store_true', help='Stop running if test case fails') parser.add_argument('--no-unittests', action='store_true', help='Not used, only here to simplify run_tests.py') - parser.add_argument('--only', help='name of test(s) to run', nargs='+') + parser.add_argument('--only', help='name of test(s) to run', nargs='+', choices=ALL_TESTS) options = parser.parse_args() setup_commands(options.backend) diff --git a/run_tests.py b/run_tests.py index 504e6ac..33dc1be 100755 --- a/run_tests.py +++ b/run_tests.py @@ -26,7 +26,8 @@ from io import StringIO from enum import Enum from glob import glob from pathlib import Path -import mesonbuild +from mesonbuild import compilers +from mesonbuild import dependencies from mesonbuild import mesonlib from mesonbuild import mesonmain from mesonbuild import mtest @@ -95,7 +96,6 @@ class FakeCompilerOptions: self.value = [] def get_fake_options(prefix=''): - import argparse opts = argparse.Namespace() opts.native_file = [] opts.cross_file = None @@ -135,7 +135,7 @@ def get_meson_script(): running in-process (which is the default). ''' # Is there a meson.py next to the mesonbuild currently in use? 
- mesonbuild_dir = Path(mesonbuild.__file__).resolve().parent.parent + mesonbuild_dir = Path(mesonmain.__file__).resolve().parent.parent meson_script = mesonbuild_dir / 'meson.py' if meson_script.is_file(): return str(meson_script) @@ -263,12 +263,12 @@ def run_mtest_inprocess(commandlist): return returncode, mystdout.getvalue(), mystderr.getvalue() def clear_meson_configure_class_caches(): - mesonbuild.compilers.CCompiler.library_dirs_cache = {} - mesonbuild.compilers.CCompiler.program_dirs_cache = {} - mesonbuild.compilers.CCompiler.find_library_cache = {} - mesonbuild.compilers.CCompiler.find_framework_cache = {} - mesonbuild.dependencies.PkgConfigDependency.pkgbin_cache = {} - mesonbuild.dependencies.PkgConfigDependency.class_pkgbin = mesonlib.PerMachine(None, None) + compilers.CCompiler.library_dirs_cache = {} + compilers.CCompiler.program_dirs_cache = {} + compilers.CCompiler.find_library_cache = {} + compilers.CCompiler.find_framework_cache = {} + dependencies.PkgConfigDependency.pkgbin_cache = {} + dependencies.PkgConfigDependency.class_pkgbin = mesonlib.PerMachine(None, None) def run_configure_inprocess(commandlist, env=None): old_stdout = sys.stdout diff --git a/run_unittests.py b/run_unittests.py index dcc4dec..a7a0d26 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -31,6 +31,7 @@ import operator import threading import urllib.error import urllib.request +import zipfile from itertools import chain from unittest import mock from configparser import ConfigParser @@ -340,18 +341,14 @@ class InternalTests(unittest.TestCase): def test_compiler_args_class(self): cargsfunc = mesonbuild.compilers.CompilerArgs cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock()) - # Test that bad initialization fails - self.assertRaises(TypeError, cargsfunc, []) - self.assertRaises(TypeError, cargsfunc, [], []) - self.assertRaises(TypeError, cargsfunc, cc, [], []) # Test that empty initialization works a = cargsfunc(cc) self.assertEqual(a, []) # Test that list initialization works - a = cargsfunc(['-I.', '-I..'], cc) + a = cargsfunc(cc, ['-I.', '-I..']) self.assertEqual(a, ['-I.', '-I..']) # Test that there is no de-dup on initialization - self.assertEqual(cargsfunc(['-I.', '-I.'], cc), ['-I.', '-I.']) + self.assertEqual(cargsfunc(cc, ['-I.', '-I.']), ['-I.', '-I.']) ## Test that appending works a.append('-I..') @@ -2530,7 +2527,7 @@ class AllPlatformTests(BasePlatformTests): subprocess.check_call(['hg', 'commit', '-m', 'I am a project'], cwd=project_dir) try: - self.dist_impl(hg_init) + self.dist_impl(hg_init, include_subprojects=False) except PermissionError: # When run under Windows CI, something (virus scanner?) # holds on to the hg files so cleaning up the dir @@ -2557,7 +2554,14 @@ class AllPlatformTests(BasePlatformTests): # fails sometimes. pass - def dist_impl(self, vcs_init): + def create_dummy_subproject(self, project_dir, name): + path = os.path.join(project_dir, 'subprojects', name) + os.makedirs(path) + with open(os.path.join(path, 'meson.build'), 'w') as ofile: + ofile.write("project('{}')".format(name)) + return path + + def dist_impl(self, vcs_init, include_subprojects=True): # Create this on the fly because having rogue .git directories inside # the source tree leads to all kinds of trouble. 
with tempfile.TemporaryDirectory() as project_dir: @@ -2565,6 +2569,8 @@ class AllPlatformTests(BasePlatformTests): ofile.write('''project('disttest', 'c', version : '1.4.3') e = executable('distexe', 'distexe.c') test('dist test', e) +subproject('vcssub', required : false) +subproject('tarballsub', required : false) ''') with open(os.path.join(project_dir, 'distexe.c'), 'w') as ofile: ofile.write('''#include<stdio.h> @@ -2579,6 +2585,10 @@ int main(int argc, char **argv) { zip_distfile = os.path.join(self.distdir, 'disttest-1.4.3.zip') zip_checksumfile = zip_distfile + '.sha256sum' vcs_init(project_dir) + if include_subprojects: + vcs_init(self.create_dummy_subproject(project_dir, 'vcssub')) + self.create_dummy_subproject(project_dir, 'tarballsub') + self.create_dummy_subproject(project_dir, 'unusedsub') self.init(project_dir) self.build('dist') self.assertPathExists(xz_distfile) @@ -2590,6 +2600,27 @@ int main(int argc, char **argv) { self.assertPathExists(zip_distfile) self.assertPathExists(zip_checksumfile) + if include_subprojects: + z = zipfile.ZipFile(zip_distfile) + self.assertEqual(sorted(['disttest-1.4.3/', + 'disttest-1.4.3/meson.build', + 'disttest-1.4.3/distexe.c']), + sorted(z.namelist())) + + self._run(self.meson_command + ['dist', '--formats', 'zip', '--include-subprojects'], + workdir=self.builddir) + z = zipfile.ZipFile(zip_distfile) + self.assertEqual(sorted(['disttest-1.4.3/', + 'disttest-1.4.3/subprojects/', + 'disttest-1.4.3/meson.build', + 'disttest-1.4.3/distexe.c', + 'disttest-1.4.3/subprojects/tarballsub/', + 'disttest-1.4.3/subprojects/vcssub/', + 'disttest-1.4.3/subprojects/tarballsub/meson.build', + 'disttest-1.4.3/subprojects/vcssub/meson.build']), + sorted(z.namelist())) + + def test_rpath_uses_ORIGIN(self): ''' Test that built targets use $ORIGIN in rpath, which ensures that they @@ -4012,30 +4043,35 @@ recommended as it is not supported on some platforms''') { 'name': 'threads', 'required': True, + 'version': [], 'has_fallback': False, 'conditional': False }, { 'name': 'zlib', 'required': False, + 'version': [], 'has_fallback': False, 'conditional': False }, { 'name': 'bugDep1', - 'required': False, + 'required': True, + 'version': [], 'has_fallback': False, 'conditional': False }, { 'name': 'somethingthatdoesnotexist', 'required': True, + 'version': ['>=1.2.3'], 'has_fallback': False, 'conditional': True }, { 'name': 'look_i_have_a_fallback', 'required': True, + 'version': ['>=1.0.0', '<=99.9.9'], 'has_fallback': True, 'conditional': True } @@ -4366,6 +4402,12 @@ class FailureTests(BasePlatformTests): match = 'Meson version is.*but project requires >=2000' self.assertMesonRaises("", match, meson_version='>=2000', options=options) + def test_assert_default_message(self): + self.assertMesonRaises("k1 = 'a'\n" + + "assert({\n" + + " k1: 1,\n" + + "}['a'] == 2)\n", + r"Assert failed: {k1 : 1}\['a'\] == 2") @unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)") class WindowsTests(BasePlatformTests): @@ -7030,6 +7072,8 @@ def main(): import pytest # noqa: F401 # Need pytest-xdist for `-n` arg import xdist # noqa: F401 + if sys.version_info.major <= 3 and sys.version_info.minor <= 5: + raise ImportError('pytest with python <= 3.5 is causing issues on the CI') pytest_args = ['-n', 'auto', './run_unittests.py'] pytest_args += convert_args(sys.argv[1:]) return subprocess.run(python_command + ['-m', 'pytest'] + pytest_args).returncode @@ -33,4 +33,7 @@ python_requires = >= 3.5.2 [options.extras_require] progress = - tqdm
\ No newline at end of file + tqdm + +[tool:pytest] +python_classes = diff --git a/test cases/cmake/14 fortran threads/meson.build b/test cases/cmake/14 fortran threads/meson.build new file mode 100644 index 0000000..2d2f892 --- /dev/null +++ b/test cases/cmake/14 fortran threads/meson.build @@ -0,0 +1,12 @@ +project('FortranThreads') + +if not add_languages('fortran', required: false) + error('MESON_SKIP_TEST: Fortran language not available.') +endif + +# want to be sure that CMake can find dependencies where even if the +# project isn't C, the C language is required to find the library. +threads = dependency('threads', method: 'cmake', required: false) +if not threads.found() + error('MESON_SKIP_TEST: CMake backend not working for Fortran / threads') +endif diff --git a/test cases/common/193 dict/meson.build b/test cases/common/193 dict/meson.build index 41eea31..dacf01d 100644 --- a/test cases/common/193 dict/meson.build +++ b/test cases/common/193 dict/meson.build @@ -33,3 +33,39 @@ d3 = d2 d3 += {'e' : 'f'} assert(d3 == {'a' : 'b2', 'c' : 'd', 'e' : 'f'}, 'dict plusassign is not working') assert(d2 == {'a' : 'b2', 'c' : 'd'}, 'dict should be immutable') + +dict1 = {} + +# A variable to be used as a key +testkey1 = 'myKey1' +testkey2 = 'myKey2' + +# Add new entry using the variable +dict1 += {testkey1 : 'myValue'} +dict1 += {testkey2 : 42} + +# Test that the stored values are correct +assert(dict1[testkey1] == 'myValue', + 'Incorrect string value retrieved from dictionary - variable key') +assert(dict1['myKey1'] == 'myValue', + 'Incorrect string value retrieved from dictionary - literal key') +assert(dict1[testkey2] == 42, + 'Incorrect int value retrieved from dictionary - variable key') +assert(dict1['myKey2'] == 42, + 'Incorrect int value retrieved from dictionary - literal key') + +d = {testkey1 : 1} +assert(d[testkey1] == 1, + 'Incorrect int value retrieved from dictionary - variable key') +assert(d['myKey1'] == 1, + 'Incorrect int value retrieved from dictionary - literal key') + +d = {'1' / '2' : 1, join_paths('a', 'b') : 2} +k1 = '1' / '2' +k2 = join_paths('a', 'b') +assert(d[k1] == 1, 'Incorrect expression evaluation in dictionary key') +assert(d[k2] == 2, 'Incorrect expression evaluation in dictionary key') + +d = {'a' + 'b' : 1} +assert(d['a' + 'b'] == 1, 'Incorrect expression evaluation in dictionary key') +assert(d['ab'] == 1, 'Incorrect expression evaluation in dictionary key') diff --git a/test cases/common/228 add dict variable key/meson.build b/test cases/common/228 add dict variable key/meson.build deleted file mode 100644 index b39c17e..0000000 --- a/test cases/common/228 add dict variable key/meson.build +++ /dev/null @@ -1,12 +0,0 @@ -project('add dictionary entry using string variable as key', meson_version: '>=0.52') - -dict = {} - -# A variable to be used as a key -key = 'myKey' - -# Add new entry using the variable -dict += {key : 'myValue'} - -# Test that the stored value is correct -assert(dict[key] == 'myValue', 'Incorrect value retrieved from dictionary') diff --git a/test cases/common/28 find program/meson.build b/test cases/common/28 find program/meson.build index 983b7b4..3b59caa 100644 --- a/test cases/common/28 find program/meson.build +++ b/test cases/common/28 find program/meson.build @@ -27,3 +27,9 @@ assert(prog.found(), 'Program version should match') prog = find_program('print-version-with-prefix.py', version : '>=1.0') assert(prog.found(), 'Program version should match') + +prog = find_program('test_subdir.py', required : false) +assert(not 
prog.found(), 'Program should not be found') + +prog = find_program('test_subdir.py', dirs : ['/donotexist', meson.current_source_dir() / 'scripts']) +assert(prog.found(), 'Program should be found') diff --git a/test cases/common/28 find program/scripts/test_subdir.py b/test cases/common/28 find program/scripts/test_subdir.py new file mode 100644 index 0000000..947ffe4 --- /dev/null +++ b/test cases/common/28 find program/scripts/test_subdir.py @@ -0,0 +1,3 @@ +#!/usr/bin/env python3 + +exit(0) diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build index 8f45dc7..4526c30 100644 --- a/test cases/frameworks/1 boost/meson.build +++ b/test cases/frameworks/1 boost/meson.build @@ -1,3 +1,4 @@ +# this test requires the following on Ubuntu: libboost-{system,python,log,thread,test}-dev project('boosttest', 'cpp', default_options : ['cpp_std=c++11']) diff --git a/test cases/frameworks/2 gtest/meson.build b/test cases/frameworks/2 gtest/meson.build index 3f30215..2d93b52 100644 --- a/test cases/frameworks/2 gtest/meson.build +++ b/test cases/frameworks/2 gtest/meson.build @@ -1,3 +1,4 @@ +# on Ubuntu this test requires libgtest-dev project('gtest', 'cpp') gtest = dependency('gtest', main : true, required : false) diff --git a/test cases/frameworks/30 scalapack/cmake/FindSCALAPACK.cmake b/test cases/frameworks/30 scalapack/cmake/FindSCALAPACK.cmake new file mode 100644 index 0000000..17f4b3b --- /dev/null +++ b/test cases/frameworks/30 scalapack/cmake/FindSCALAPACK.cmake @@ -0,0 +1,220 @@ +# Distributed under the OSI-approved BSD 3-Clause License. See accompanying +# file Copyright.txt or https://cmake.org/licensing for details. + +#[=======================================================================[.rst: + +FindSCALAPACK +------------- + +* Michael Hirsch, Ph.D. www.scivision.dev + +Let Michael know if there are more MKL / Lapack / compiler combination you want. +Refer to https://software.intel.com/en-us/articles/intel-mkl-link-line-advisor + +Finds SCALAPACK libraries for MKL, OpenMPI and MPICH. +Intel MKL relies on having environment variable MKLROOT set, typically by sourcing +mklvars.sh beforehand. + +Parameters +^^^^^^^^^^ + +``MKL`` + Intel MKL for MSVC, ICL, ICC, GCC and PGCC. Working with IntelMPI (default Window, Linux), MPICH (default Mac) or OpenMPI (Linux only). 
+ +``IntelMPI`` + MKL BLACS IntelMPI + +``MKL64`` + MKL only: 64-bit integers (default is 32-bit integers) + +``OpenMPI`` + SCALAPACK + OpenMPI + +``MPICH`` + SCALAPACK + MPICH + + +Result Variables +^^^^^^^^^^^^^^^^ + +``SCALAPACK_FOUND`` + SCALapack libraries were found +``SCALAPACK_<component>_FOUND`` + SCALAPACK <component> specified was found +``SCALAPACK_LIBRARIES`` + SCALapack library files +``SCALAPACK_INCLUDE_DIRS`` + SCALapack include directories + + +References +^^^^^^^^^^ + +* Pkg-Config and MKL: https://software.intel.com/en-us/articles/intel-math-kernel-library-intel-mkl-and-pkg-config-tool +* MKL for Windows: https://software.intel.com/en-us/mkl-windows-developer-guide-static-libraries-in-the-lib-intel64-win-directory +* MKL Windows directories: https://software.intel.com/en-us/mkl-windows-developer-guide-high-level-directory-structure +#]=======================================================================] + +#===== functions +function(mkl_scala) + +set(_mkl_libs ${ARGV}) + +foreach(s ${_mkl_libs}) + find_library(SCALAPACK_${s}_LIBRARY + NAMES ${s} + PATHS ENV MKLROOT ENV I_MPI_ROOT ENV TBBROOT + PATH_SUFFIXES + lib/intel64 lib/intel64_win + intel64/lib/release + lib/intel64/gcc4.7 ../tbb/lib/intel64/gcc4.7 + lib/intel64/vc_mt ../tbb/lib/intel64/vc_mt + ../compiler/lib/intel64 + HINTS ${MKL_LIBRARY_DIRS} ${MKL_LIBDIR} + NO_DEFAULT_PATH) + if(NOT SCALAPACK_${s}_LIBRARY) + message(WARNING "MKL component not found: " ${s}) + return() + endif() + + list(APPEND SCALAPACK_LIB ${SCALAPACK_${s}_LIBRARY}) +endforeach() + + +find_path(SCALAPACK_INCLUDE_DIR + NAMES mkl_scalapack.h + PATHS ENV MKLROOT ENV I_MPI_ROOT ENV TBBROOT + PATH_SUFFIXES + include + include/intel64/${_mkl_bitflag}lp64 + HINTS ${MKL_INCLUDE_DIRS}) + +if(NOT SCALAPACK_INCLUDE_DIR) + message(WARNING "MKL Include Dir not found") + return() +endif() + +list(APPEND SCALAPACK_INCLUDE_DIR + ${MKL_INCLUDE_DIRS}) + +set(SCALAPACK_LIBRARY ${SCALAPACK_LIB} PARENT_SCOPE) +set(SCALAPACK_INCLUDE_DIR ${SCALAPACK_INCLUDE_DIR} PARENT_SCOPE) + +endfunction(mkl_scala) + +#==== main program + +cmake_policy(VERSION 3.11) + +if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12) + cmake_policy(SET CMP0074 NEW) + cmake_policy(SET CMP0075 NEW) +endif() + +if(NOT (OpenMPI IN_LIST SCALAPACK_FIND_COMPONENTS + OR MPICH IN_LIST SCALAPACK_FIND_COMPONENTS + OR MKL IN_LIST SCALAPACK_FIND_COMPONENTS)) + if(DEFINED ENV{MKLROOT}) + if(APPLE) + list(APPEND SCALAPACK_FIND_COMPONENTS MKL MPICH) + else() + list(APPEND SCALAPACK_FIND_COMPONENTS MKL IntelMPI) + endif() + else() + list(APPEND SCALAPACK_FIND_COMPONENTS OpenMPI) + endif() +endif() + +find_package(PkgConfig) + +if(NOT WIN32) + find_package(Threads) # not required--for example Flang +endif() + +if(MKL IN_LIST SCALAPACK_FIND_COMPONENTS) + + if(BUILD_SHARED_LIBS) + set(_mkltype dynamic) + else() + set(_mkltype static) + endif() + + if(MKL64 IN_LIST SCALAPACK_FIND_COMPONENTS) + set(_mkl_bitflag i) + else() + set(_mkl_bitflag) + endif() + + + if(WIN32) + set(_impi impi) + else() + unset(_impi) + endif() + + + pkg_check_modules(MKL mkl-${_mkltype}-${_mkl_bitflag}lp64-iomp) + + if(OpenMPI IN_LIST SCALAPACK_FIND_COMPONENTS) + mkl_scala(mkl_scalapack_${_mkl_bitflag}lp64 mkl_blacs_openmpi_${_mkl_bitflag}lp64) + if(SCALAPACK_LIBRARY) + set(SCALAPACK_OpenMPI_FOUND true) + endif() + elseif(MPICH IN_LIST SCALAPACK_FIND_COMPONENTS) + if(APPLE) + mkl_scala(mkl_scalapack_${_mkl_bitflag}lp64 mkl_blacs_mpich_${_mkl_bitflag}lp64) + elseif(WIN32) + mkl_scala(mkl_scalapack_${_mkl_bitflag}lp64 
mkl_blacs_mpich2_${_mkl_bitflag}lp64.lib mpi.lib fmpich2.lib) + else() # linux, yes it's just like IntelMPI + mkl_scala(mkl_scalapack_${_mkl_bitflag}lp64 mkl_blacs_intelmpi_${_mkl_bitflag}lp64) + endif() + if(SCALAPACK_LIBRARY) + set(SCALAPACK_MPICH_FOUND true) + endif() + else() # IntelMPI + mkl_scala(mkl_scalapack_${_mkl_bitflag}lp64 mkl_blacs_intelmpi_${_mkl_bitflag}lp64 ${_impi}) + if(SCALAPACK_LIBRARY) + set(SCALAPACK_IntelMPI_FOUND true) + endif() + endif() + + if(SCALAPACK_LIBRARY) + set(SCALAPACK_MKL_FOUND true) + endif() + +elseif(OpenMPI IN_LIST SCALAPACK_FIND_COMPONENTS) + + pkg_check_modules(SCALAPACK scalapack-openmpi) + + find_library(SCALAPACK_LIBRARY + NAMES scalapack scalapack-openmpi + HINTS ${SCALAPACK_LIBRARY_DIRS} ${SCALAPACK_LIBDIR}) + + if(SCALAPACK_LIBRARY) + set(SCALAPACK_OpenMPI_FOUND true) + endif() + +elseif(MPICH IN_LIST SCALAPACK_FIND_COMPONENTS) + find_library(SCALAPACK_LIBRARY + NAMES scalapack-mpich scalapack-mpich2) + + if(SCALAPACK_LIBRARY) + set(SCALAPACK_MPICH_FOUND true) + endif() + +endif() + +# Finalize + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args( + SCALAPACK + REQUIRED_VARS SCALAPACK_LIBRARY + HANDLE_COMPONENTS) + +if(SCALAPACK_FOUND) + set(SCALAPACK_LIBRARIES ${SCALAPACK_LIBRARY}) + set(SCALAPACK_INCLUDE_DIRS ${SCALAPACK_INCLUDE_DIR}) +endif() + +mark_as_advanced(SCALAPACK_LIBRARY SCALAPACK_INCLUDE_DIR) diff --git a/test cases/frameworks/30 scalapack/main.c b/test cases/frameworks/30 scalapack/main.c new file mode 100644 index 0000000..ca01977 --- /dev/null +++ b/test cases/frameworks/30 scalapack/main.c @@ -0,0 +1,34 @@ +#include <stdio.h> + +// #include <mkl.h> +// #include <mkl_scalapack.h> +// #include <mkl_blacs.h> + +extern float pslamch_(const int *, const char *); +extern void blacs_pinfo_(int *, int *); +extern void blacs_get_(const int *, const int *, int *); +extern void blacs_gridinit_(int *, const char *, const int *, const int *); +extern void blacs_gridinfo_(const int *, int *, int *, int *, int *); +extern void blacs_gridexit_(const int *); +extern void blacs_exit_(const int *); + +int main(void){ + +int myid, nprocs, ictxt, mycol, myrow, npcol=2, nprow=2; +const int i0=0, i1=1, in1=-1; + +blacs_pinfo_(&myid, &nprocs); +blacs_get_(&in1, &i0, &ictxt); +blacs_gridinit_(&ictxt, "C", &nprocs, &i1); + +blacs_gridinfo_(&ictxt, &nprow, &npcol, &myrow, &mycol); + +float eps = pslamch_(&ictxt, "E"); + +if (myrow == mycol) printf("OK: Scalapack C: eps= %f\n", eps); + +blacs_gridexit_(&ictxt); +blacs_exit_(&i0); + +return 0; +}
\ No newline at end of file diff --git a/test cases/frameworks/30 scalapack/main.f90 b/test cases/frameworks/30 scalapack/main.f90 new file mode 100644 index 0000000..53b5fb9 --- /dev/null +++ b/test cases/frameworks/30 scalapack/main.f90 @@ -0,0 +1,25 @@ +! minimal Scalapack demo +implicit none + +integer :: ictxt, myid, nprocs, mycol, myrow, npcol, nprow +real :: eps +real, external :: pslamch + +! arbitrary test parameters +npcol = 2 +nprow = 2 + +call blacs_pinfo(myid, nprocs) +call blacs_get(-1, 0, ictxt) +call blacs_gridinit(ictxt, "C", nprocs, 1) + +call blacs_gridinfo(ictxt, nprow, npcol, myrow, mycol) + +eps = pslamch(ictxt, 'E') + +if(myrow == mycol) print '(A, F10.6)', "OK: Scalapack Fortran eps=", eps + +call blacs_gridexit(ictxt) +call blacs_exit(0) + +end program diff --git a/test cases/frameworks/30 scalapack/meson.build b/test cases/frameworks/30 scalapack/meson.build new file mode 100644 index 0000000..430f54f --- /dev/null +++ b/test cases/frameworks/30 scalapack/meson.build @@ -0,0 +1,27 @@ +project('test scalapack', 'c') + +mpi_c = dependency('mpi', language: 'c', required: false) +if not mpi_c.found() + error('MESON_SKIP_TEST: MPI library not available') +endif + + +scalapack = dependency('scalapack', required: false) +if not scalapack.found() + error('MESON_SKIP_TEST: scalapack library not available') +endif + +c_exe = executable('scalapack_c', 'main.c', + dependencies: [scalapack, mpi_c]) +test('scalapack_c', c_exe, timeout: 30) + +if add_languages('fortran') + mpi_f = dependency('mpi', language: 'fortran', required: false) + if not mpi_f.found() + error('MESON_SKIP_TEST: MPI Fortran library not available') + endif + + f_exe = executable('scalapack_fortran', 'main.f90', + dependencies: [scalapack, mpi_f]) + test('scalapack_fortran', f_exe, timeout: 30) +endif diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build index 682d6d8..7753550 100644 --- a/test cases/linuxlike/13 cmake dependency/meson.build +++ b/test cases/linuxlike/13 cmake dependency/meson.build @@ -1,3 +1,5 @@ +# this test can ONLY be run successfully from run_project_test.py +# due to use of setup_env.json project('external CMake dependency', 'c') if not find_program('cmake', required: false).found() @@ -37,7 +39,7 @@ depf2 = dependency('ZLIB', required : false, method : 'cmake', modules : 'dfggh: assert(depf2.found() == false, 'Invalid CMake targets should fail') # Try to find cmMesonTestDep in a custom prefix - +# setup_env.json is used by run_project_tests.py:_run_test to point to ./cmake_pref_env/ depPrefEnv = dependency('cmMesonTestDep', required : true, method : 'cmake') # Try to find a dependency with a custom CMake module diff --git a/test cases/objc/1 simple/prog.m b/test cases/objc/1 simple/prog.m index f2e2315..87457bf 100644 --- a/test cases/objc/1 simple/prog.m +++ b/test cases/objc/1 simple/prog.m @@ -1,5 +1,5 @@ #import<stdio.h> -int main(int argc, char **argv) { +int main(void) { return 0; }
\ No newline at end of file
diff --git a/test cases/objc/2 nsstring/stringprog.m b/test cases/objc/2 nsstring/stringprog.m
index f1a2532..faef4dd 100644
--- a/test cases/objc/2 nsstring/stringprog.m
+++ b/test cases/objc/2 nsstring/stringprog.m
@@ -1,6 +1,6 @@
 #import<Foundation/NSString.h>
 
-int main(int argc, char **argv) {
+int main(void) {
   int result;
   NSString *str = [NSString new];
   result = [str length];
diff --git a/test cases/objc/3 objc args/prog.m b/test cases/objc/3 objc args/prog.m
index bfd686a..003df98 100644
--- a/test cases/objc/3 objc args/prog.m
+++ b/test cases/objc/3 objc args/prog.m
@@ -1,6 +1,6 @@
 #import<stdio.h>
 
-int main(int argc, char **argv)
+int main(void)
 {
 #ifdef MESON_TEST
   int x = 3;
diff --git a/test cases/objcpp/1 simple/prog.mm b/test cases/objcpp/1 simple/prog.mm
index 927e810..d3bbf6c 100644
--- a/test cases/objcpp/1 simple/prog.mm
+++ b/test cases/objcpp/1 simple/prog.mm
@@ -3,7 +3,7 @@
 class MyClass {
 };
 
-int main(int argc, char **argv) {
+int main(void) {
   return 0;
 }
diff --git a/test cases/objcpp/2 objc++ args/prog.mm b/test cases/objcpp/2 objc++ args/prog.mm
index 3decaf2..2b437ce 100644
--- a/test cases/objcpp/2 objc++ args/prog.mm
+++ b/test cases/objcpp/2 objc++ args/prog.mm
@@ -4,7 +4,7 @@
 class TestClass {
 };
 
-int main(int argc, char **argv)
+int main(void)
 {
 #ifdef MESON_OBJCPP_TEST
   int x = 1;
diff --git a/test cases/osx/1 basic/main.c b/test cases/osx/1 basic/main.c
index 2417ad8..0a07218 100644
--- a/test cases/osx/1 basic/main.c
+++ b/test cases/osx/1 basic/main.c
@@ -1,5 +1,5 @@
 #include <CoreFoundation/CoreFoundation.h>
 
-int main(int argc, char **argv) {
+int main(void) {
     return 0;
 }
diff --git a/test cases/osx/2 library versions/exe.orig.c b/test cases/osx/2 library versions/exe.orig.c
index 86c4adc..ad38e6d 100644
--- a/test cases/osx/2 library versions/exe.orig.c
+++ b/test cases/osx/2 library versions/exe.orig.c
@@ -1,8 +1,6 @@
 int myFunc (void);
 
-int
-main (int argc, char *argv[])
-{
+int main (void) {
   if (myFunc() == 55)
     return 0;
   return 1;
diff --git a/test cases/osx/2 library versions/lib.c b/test cases/osx/2 library versions/lib.c
index 67b6f4d..bd251d7 100644
--- a/test cases/osx/2 library versions/lib.c
+++ b/test cases/osx/2 library versions/lib.c
@@ -1,3 +1,3 @@
-int myFunc() {
+int myFunc(void) {
   return 55;
 }
diff --git a/test cases/osx/4 framework/prog.c b/test cases/osx/4 framework/prog.c
index 11b7fad..9b6bdc2 100644
--- a/test cases/osx/4 framework/prog.c
+++ b/test cases/osx/4 framework/prog.c
@@ -1,3 +1,3 @@
-int main(int argc, char **argv) {
+int main(void) {
     return 0;
 }
diff --git a/test cases/osx/4 framework/stat.c b/test cases/osx/4 framework/stat.c
index fa76a65..4825cef 100644
--- a/test cases/osx/4 framework/stat.c
+++ b/test cases/osx/4 framework/stat.c
@@ -1 +1 @@
-int func() { return 933; }
+int func(void) { return 933; }
diff --git a/test cases/osx/5 extra frameworks/prog.c b/test cases/osx/5 extra frameworks/prog.c
index 11b7fad..9b6bdc2 100644
--- a/test cases/osx/5 extra frameworks/prog.c
+++ b/test cases/osx/5 extra frameworks/prog.c
@@ -1,3 +1,3 @@
-int main(int argc, char **argv) {
+int main(void) {
    return 0;
 }
diff --git a/test cases/osx/5 extra frameworks/stat.c b/test cases/osx/5 extra frameworks/stat.c
index fa76a65..4825cef 100644
--- a/test cases/osx/5 extra frameworks/stat.c
+++ b/test cases/osx/5 extra frameworks/stat.c
@@ -1 +1 @@
-int func() { return 933; }
+int func(void) { return 933; }
diff --git a/test cases/osx/7 bitcode/libbar.mm b/test cases/osx/7 bitcode/libbar.mm
index 22c4dd4..d9201c1 100644
--- a/test cases/osx/7 bitcode/libbar.mm
+++ b/test cases/osx/7 bitcode/libbar.mm
@@ -1,7 +1,7 @@
 #import <stdio.h>
 #import "vis.h"
 
-int EXPORT_PUBLIC libbar(int arg) {
+int EXPORT_PUBLIC libbar(void) {
     return 0;
 }
diff --git a/test cases/osx/7 bitcode/libfile.c b/test cases/osx/7 bitcode/libfile.c
index cc87aa0..8edc66b 100644
--- a/test cases/osx/7 bitcode/libfile.c
+++ b/test cases/osx/7 bitcode/libfile.c
@@ -1,5 +1,5 @@
 #include "vis.h"
 
-int EXPORT_PUBLIC libfunc() {
+int EXPORT_PUBLIC libfunc(void) {
     return 3;
 }
diff --git a/test cases/osx/7 bitcode/libfoo.m b/test cases/osx/7 bitcode/libfoo.m
index 7981ab4..f1c35a9 100644
--- a/test cases/osx/7 bitcode/libfoo.m
+++ b/test cases/osx/7 bitcode/libfoo.m
@@ -1,7 +1,7 @@
 #import <stdio.h>
 #import "vis.h"
 
-int EXPORT_PUBLIC libfoo(int arg) {
+int EXPORT_PUBLIC libfoo(void) {
     return 0;
 }
diff --git a/test cases/osx/8 pie/main.c b/test cases/osx/8 pie/main.c
index 2417ad8..0a07218 100644
--- a/test cases/osx/8 pie/main.c
+++ b/test cases/osx/8 pie/main.c
@@ -1,5 +1,5 @@
 #include <CoreFoundation/CoreFoundation.h>
 
-int main(int argc, char **argv) {
+int main(void) {
     return 0;
 }
diff --git a/test cases/unit/57 introspection/meson.build b/test cases/unit/57 introspection/meson.build
index 7589f3f..a094a55 100644
--- a/test cases/unit/57 introspection/meson.build
+++ b/test cases/unit/57 introspection/meson.build
@@ -13,8 +13,9 @@ set_variable('list_test_plusassign', [])
 list_test_plusassign += ['bugs everywhere']
 
 if false
-  dependency('somethingthatdoesnotexist', required: true)
-  dependency('look_i_have_a_fallback', fallback: ['oh_no', 'the_subproject_does_not_exist'])
+  vers_str = '<=99.9.9'
+  dependency('somethingthatdoesnotexist', required: true, version: '>=1.2.3')
+  dependency('look_i_have_a_fallback', version: ['>=1.0.0', vers_str], fallback: ['oh_no', 'the_subproject_does_not_exist'])
 endif
 
 subdir('sharedlib')
diff --git a/test cases/windows/10 vs module defs generated custom target/prog.c b/test cases/windows/10 vs module defs generated custom target/prog.c
index 51f7805..066ac22 100644
--- a/test cases/windows/10 vs module defs generated custom target/prog.c
+++ b/test cases/windows/10 vs module defs generated custom target/prog.c
@@ -1,4 +1,4 @@
-int somedllfunc();
+int somedllfunc(void);
 
 int main(void) {
     return somedllfunc() == 42 ? 0 : 1;
diff --git a/test cases/windows/10 vs module defs generated custom target/subdir/somedll.c b/test cases/windows/10 vs module defs generated custom target/subdir/somedll.c
index b23d8fe..f095b18 100644
--- a/test cases/windows/10 vs module defs generated custom target/subdir/somedll.c
+++ b/test cases/windows/10 vs module defs generated custom target/subdir/somedll.c
@@ -1,3 +1,3 @@
-int somedllfunc() {
+int somedllfunc(void) {
     return 42;
 }
diff --git a/test cases/windows/11 exe implib/prog.c b/test cases/windows/11 exe implib/prog.c
index 2192019..aa3bc5c 100644
--- a/test cases/windows/11 exe implib/prog.c
+++ b/test cases/windows/11 exe implib/prog.c
@@ -1,6 +1,6 @@
 #include <windows.h>
 
 int __declspec(dllexport)
-main() {
+main(void) {
     return 0;
 }
diff --git a/test cases/windows/12 resources with custom targets/prog.c b/test cases/windows/12 resources with custom targets/prog.c
index 2bef6a2..cb6892d 100644
--- a/test cases/windows/12 resources with custom targets/prog.c
+++ b/test cases/windows/12 resources with custom targets/prog.c
@@ -10,5 +10,10 @@ WinMain(
     int nCmdShow) {
     HICON hIcon;
     hIcon = LoadIcon(GetModuleHandle(NULL), MAKEINTRESOURCE(MY_ICON));
+    // avoid unused argument error while matching template
+    ((void)hInstance);
+    ((void)hPrevInstance);
+    ((void)lpszCmdLine);
+    ((void)nCmdShow);
     return hIcon ? 0 : 1;
 }
diff --git a/test cases/windows/13 test argument extra paths/exe/main.c b/test cases/windows/13 test argument extra paths/exe/main.c
index 0ac9e38..1032ae2 100644
--- a/test cases/windows/13 test argument extra paths/exe/main.c
+++ b/test cases/windows/13 test argument extra paths/exe/main.c
@@ -1,5 +1,5 @@
 #include <foo.h>
 
-int main(int ac, char **av) {
+int main(void) {
   return foo_process();
 }
diff --git a/test cases/windows/14 resources with custom target depend_files/prog.c b/test cases/windows/14 resources with custom target depend_files/prog.c
index 2bef6a2..cb6892d 100644
--- a/test cases/windows/14 resources with custom target depend_files/prog.c
+++ b/test cases/windows/14 resources with custom target depend_files/prog.c
@@ -10,5 +10,10 @@ WinMain(
     int nCmdShow) {
     HICON hIcon;
     hIcon = LoadIcon(GetModuleHandle(NULL), MAKEINTRESOURCE(MY_ICON));
+    // avoid unused argument error while matching template
+    ((void)hInstance);
+    ((void)hPrevInstance);
+    ((void)lpszCmdLine);
+    ((void)nCmdShow);
     return hIcon ? 0 : 1;
 }
diff --git a/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_dll/main.c b/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_dll/main.c
index 673b5e4..2bd8cd2 100644
--- a/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_dll/main.c
+++ b/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_dll/main.c
@@ -2,5 +2,9 @@
 
 BOOL WINAPI DllMain(HINSTANCE hinstDLL, DWORD fdwReason,
     LPVOID lpvReserved) {
+  // avoid unused argument error while matching template
+  ((void)hinstDLL);
+  ((void)fdwReason);
+  ((void)lpvReserved);
   return TRUE;
 }
diff --git a/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_exe/main.c b/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_exe/main.c
index 11b7fad..9b6bdc2 100644
--- a/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_exe/main.c
+++ b/test cases/windows/15 resource scripts with duplicate filenames/exe3/src_exe/main.c
@@ -1,3 +1,3 @@
-int main(int argc, char **argv) {
+int main(void) {
     return 0;
 }
diff --git a/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_dll/main.c b/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_dll/main.c
index 673b5e4..2bd8cd2 100644
--- a/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_dll/main.c
+++ b/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_dll/main.c
@@ -2,5 +2,9 @@
 
 BOOL WINAPI DllMain(HINSTANCE hinstDLL, DWORD fdwReason,
     LPVOID lpvReserved) {
+  // avoid unused argument error while matching template
+  ((void)hinstDLL);
+  ((void)fdwReason);
+  ((void)lpvReserved);
   return TRUE;
 }
diff --git a/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_exe/main.c b/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_exe/main.c
index 11b7fad..9b6bdc2 100644
--- a/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_exe/main.c
+++ b/test cases/windows/15 resource scripts with duplicate filenames/exe4/src_exe/main.c
@@ -1,3 +1,3 @@
-int main(int argc, char **argv) {
+int main(void) {
     return 0;
 }
diff --git a/test cases/windows/15 resource scripts with duplicate filenames/verify.c b/test cases/windows/15 resource scripts with duplicate filenames/verify.c
index 4d2ccf0..8f5b88e 100644
--- a/test cases/windows/15 resource scripts with duplicate filenames/verify.c
+++ b/test cases/windows/15 resource scripts with duplicate filenames/verify.c
@@ -1,7 +1,7 @@
 #include <assert.h>
 #include <windows.h>
 
-int main(int arc, char *argv[])
+int main(int argc, char *argv[])
 {
   // verify that the expected resource exists and has the expected contents
   HRSRC hRsrc;
@@ -9,6 +9,8 @@ int main(int arc, char *argv[])
   HGLOBAL hGlobal;
   void* data;
 
+  ((void)argc);
+
   hRsrc = FindResource(NULL, argv[1], RT_RCDATA);
   assert(hRsrc);
diff --git a/test cases/windows/16 gui app/gui_prog.c b/test cases/windows/16 gui app/gui_prog.c
index 4bc688a..9cdf170 100644
--- a/test cases/windows/16 gui app/gui_prog.c
+++ b/test cases/windows/16 gui app/gui_prog.c
@@ -2,5 +2,10 @@
 
 int WINAPI WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance,
                    LPSTR lpCmdLine, int nCmdShow) {
+    // avoid unused argument error while matching template
+    ((void)hInstance);
+    ((void)hPrevInstance);
+    ((void)lpCmdLine);
+    ((void)nCmdShow);
     return 0;
 }
diff --git a/test cases/windows/2 winmain/prog.c b/test cases/windows/2 winmain/prog.c
index 77d6982..3bd4c95 100644
--- a/test cases/windows/2 winmain/prog.c
+++ b/test cases/windows/2 winmain/prog.c
@@ -6,5 +6,10 @@ WinMain(
     HINSTANCE hPrevInstance,
     LPSTR lpszCmdLine,
     int nCmdShow) {
+// avoid unused argument error while matching template
+    ((void)hInstance);
+    ((void)hPrevInstance);
+    ((void)lpszCmdLine);
+    ((void)nCmdShow);
     return 0;
 }
diff --git a/test cases/windows/3 cpp/prog.cpp b/test cases/windows/3 cpp/prog.cpp
index cf67335..69092f7 100644
--- a/test cases/windows/3 cpp/prog.cpp
+++ b/test cases/windows/3 cpp/prog.cpp
@@ -2,6 +2,6 @@
 
 class Foo;
 
-int main(int argc, char **argv) {
+int main(void) {
     return 0;
 }
diff --git a/test cases/windows/4 winmaincpp/prog.cpp b/test cases/windows/4 winmaincpp/prog.cpp
index aeecb7b..6182257 100644
--- a/test cases/windows/4 winmaincpp/prog.cpp
+++ b/test cases/windows/4 winmaincpp/prog.cpp
@@ -8,5 +8,10 @@ WinMain(
     HINSTANCE hPrevInstance,
     LPSTR lpszCmdLine,
     int nCmdShow) {
+// avoid unused argument error while matching template
+    ((void)hInstance);
+    ((void)hPrevInstance);
+    ((void)lpszCmdLine);
+    ((void)nCmdShow);
     return 0;
 }
diff --git a/test cases/windows/5 resources/prog.c b/test cases/windows/5 resources/prog.c
index afbb6ae..3409c39 100644
--- a/test cases/windows/5 resources/prog.c
+++ b/test cases/windows/5 resources/prog.c
@@ -12,5 +12,10 @@ WinMain(
     int nCmdShow) {
     HICON hIcon;
     hIcon = LoadIcon(GetModuleHandle(NULL), MAKEINTRESOURCE(MY_ICON));
+// avoid unused argument error while matching template
+    ((void)hInstance);
+    ((void)hPrevInstance);
+    ((void)lpszCmdLine);
+    ((void)nCmdShow);
     return hIcon ? 0 : 1;
 }
diff --git a/test cases/windows/6 vs module defs/prog.c b/test cases/windows/6 vs module defs/prog.c
index 51f7805..066ac22 100644
--- a/test cases/windows/6 vs module defs/prog.c
+++ b/test cases/windows/6 vs module defs/prog.c
@@ -1,4 +1,4 @@
-int somedllfunc();
+int somedllfunc(void);
 
 int main(void) {
     return somedllfunc() == 42 ? 0 : 1;
diff --git a/test cases/windows/6 vs module defs/subdir/somedll.c b/test cases/windows/6 vs module defs/subdir/somedll.c
index b23d8fe..f095b18 100644
--- a/test cases/windows/6 vs module defs/subdir/somedll.c
+++ b/test cases/windows/6 vs module defs/subdir/somedll.c
@@ -1,3 +1,3 @@
-int somedllfunc() {
+int somedllfunc(void) {
     return 42;
 }
diff --git a/test cases/windows/7 dll versioning/lib.c b/test cases/windows/7 dll versioning/lib.c
index cf7dfdd..37e0d1d 100644
--- a/test cases/windows/7 dll versioning/lib.c
+++ b/test cases/windows/7 dll versioning/lib.c
@@ -1,6 +1,6 @@
 #ifdef _WIN32
 __declspec(dllexport)
 #endif
-int myFunc() {
+int myFunc(void) {
     return 55;
 }
diff --git a/test cases/windows/9 vs module defs generated/prog.c b/test cases/windows/9 vs module defs generated/prog.c
index 51f7805..066ac22 100644
--- a/test cases/windows/9 vs module defs generated/prog.c
+++ b/test cases/windows/9 vs module defs generated/prog.c
@@ -1,4 +1,4 @@
-int somedllfunc();
+int somedllfunc(void);
 
 int main(void) {
     return somedllfunc() == 42 ? 0 : 1;
diff --git a/test cases/windows/9 vs module defs generated/subdir/somedll.c b/test cases/windows/9 vs module defs generated/subdir/somedll.c
index b23d8fe..f095b18 100644
--- a/test cases/windows/9 vs module defs generated/subdir/somedll.c
+++ b/test cases/windows/9 vs module defs generated/subdir/somedll.c
@@ -1,3 +1,3 @@
-int somedllfunc() {
+int somedllfunc(void) {
     return 42;
 }
diff --git a/tools/ac_converter.py b/tools/ac_converter.py
index a1969a9..075eae6 100755
--- a/tools/ac_converter.py
+++ b/tools/ac_converter.py
@@ -14,7 +14,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-help_message = """Usage: %s <config.h.meson>
+help_message = """Usage: {} <config.h.meson>
 
 This script reads config.h.meson, looks for header
 checks and writes the corresponding meson declaration.
@@ -368,7 +368,7 @@ functions = []
 sizes = []
 
 if len(sys.argv) != 2:
-    print(help_message % sys.argv[0])
+    print(help_message.format(sys.argv[0]))
     sys.exit(0)
 
 with open(sys.argv[1]) as f:
@@ -414,7 +414,7 @@ cdata = configuration_data()''')
 print('check_headers = [')
 
 for token, hname in headers:
-    print(" ['%s', '%s']," % (token, hname))
+    print(" ['{}', '{}'],".format(token, hname))
 print(']\n')
 
 print('''foreach h : check_headers
@@ -430,7 +430,7 @@ print('check_functions = [')
 for tok in functions:
     if len(tok) == 3:
         tokstr, fdata0, fdata1 = tok
-        print(" ['%s', '%s', '#include<%s>']," % (tokstr, fdata0, fdata1))
+        print(" ['{}', '{}', '#include<{}>'],".format(tokstr, fdata0, fdata1))
     else:
         print('# check token', tok)
 print(']\n')
@@ -445,7 +445,7 @@ endforeach
 
 # Convert sizeof checks.
 for elem, typename in sizes:
-    print("cdata.set('%s', cc.sizeof('%s'))" % (elem, typename))
+    print("cdata.set('{}', cc.sizeof('{}'))".format(elem, typename))
 
 print('''
 configure_file(input : 'config.h.meson',