diff options
219 files changed, 4496 insertions, 2515 deletions
diff --git a/.github/codecov.yml b/.github/codecov.yml deleted file mode 100644 index fa7b82a..0000000 --- a/.github/codecov.yml +++ /dev/null @@ -1,11 +0,0 @@ -coverage: - status: - project: - default: - informational: true - patch: - default: - informational: true -comment: false -github_checks: - annotations: false diff --git a/.github/workflows/cygwin.yml b/.github/workflows/cygwin.yml index d641b18..2ba1ff2 100644 --- a/.github/workflows/cygwin.yml +++ b/.github/workflows/cygwin.yml @@ -87,7 +87,7 @@ jobs: - name: Run pip run: | export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32 - python3 -m pip --disable-pip-version-check install gcovr fastjsonschema pefile pytest pytest-subtests pytest-xdist coverage + python3 -m pip --disable-pip-version-check install gcovr fastjsonschema pefile pytest pytest-subtests pytest-xdist shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}' - uses: actions/cache/save@v4 @@ -99,7 +99,7 @@ jobs: - name: Run tests run: | export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32 - python3 ./tools/run_with_cov.py run_tests.py --backend=ninja + python3 ./run_tests.py --backend=ninja env: # Cygwin's static boost installation is broken (some static library # variants such as boost_thread are not present) @@ -112,17 +112,3 @@ jobs: path: meson-test-run.* # test log should be saved on failure if: ${{ !cancelled() }} - - - name: Aggregate coverage reports - run: | - export PATH=/usr/bin:/usr/local/bin:$(cygpath ${SYSTEMROOT})/system32 - ./ci/combine_cov.sh - shell: C:\cygwin\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}' - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "${{ matrix.NAME }}" - fail_ci_if_error: false - verbose: true diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml index d20f7e5..369c91e 100644 --- a/.github/workflows/images.yml +++ b/.github/workflows/images.yml @@ 
-37,13 +37,14 @@ jobs: fail-fast: false matrix: cfg: - - { name: Arch Linux, id: arch } - - { name: CUDA (on Arch), id: cuda } - - { name: Fedora, id: fedora } - - { name: Gentoo, id: gentoo } - - { name: OpenSUSE, id: opensuse } - - { name: Ubuntu Bionic, id: bionic } - - { name: Ubuntu Rolling, id: ubuntu-rolling } + - { name: Arch Linux, id: arch } + - { name: CUDA (on Arch), id: cuda } + - { name: CUDA Cross (on Ubuntu Jammy), id: cuda-cross } + - { name: Fedora, id: fedora } + - { name: Gentoo, id: gentoo } + - { name: OpenSUSE, id: opensuse } + - { name: Ubuntu Bionic, id: bionic } + - { name: Ubuntu Rolling, id: ubuntu-rolling } steps: # Need v3 because of bionic - uses: actions/checkout@v3 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 5588034..ef58895 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -53,7 +53,7 @@ jobs: with: python-version: '3.x' # Pin mypy to version 1.8, so we retain the ability to lint for Python 3.7 - - run: python -m pip install "mypy==1.8" coverage strictyaml types-PyYAML types-tqdm types-chevron + - run: python -m pip install "mypy==1.8" strictyaml types-PyYAML types-tqdm types-chevron - run: python run_mypy.py --allver env: PYTHONUNBUFFERED: 1 diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 2d2ea39..3afb4ba 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -35,7 +35,7 @@ jobs: - run: | export PATH="$HOME/Library/Python/3.9/bin:$PATH" /usr/bin/python3 -m pip install --upgrade pip - /usr/bin/python3 -m pip install pytest pytest-xdist pytest-subtests fastjsonschema coverage + /usr/bin/python3 -m pip install pytest pytest-xdist pytest-subtests fastjsonschema - run: brew install pkg-config ninja llvm qt@5 - env: CPPFLAGS: "-I/opt/homebrew/include" @@ -48,20 +48,7 @@ jobs: export SDKROOT="$(xcodebuild -version -sdk macosx Path)" export 
PATH="$HOME/Library/Python/3.9/bin:$HOME/tools:/opt/homebrew/opt/qt@5/bin:/opt/homebrew/opt/llvm/bin:$PATH" export PKG_CONFIG_PATH="/Applications/Xcode.app/Contents/Developer/Library/Frameworks/Python3.framework/Versions/Current/lib/pkgconfig:/opt/homebrew/opt/qt@5/lib/pkgconfig:$PKG_CONFIG_PATH" - /usr/bin/python3 ./tools/run_with_cov.py ./run_unittests.py - - - name: Aggregate coverage reports - run: | - export PATH="$HOME/Library/Python/3.9/bin:$PATH" - ./ci/combine_cov.sh - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "appleclang [unit tests]" - fail_ci_if_error: false - verbose: true + /usr/bin/python3 ./run_unittests.py project-tests-appleclang: @@ -98,9 +85,7 @@ jobs: # https://github.com/actions/setup-python/issues/58 - run: brew install pkg-config ninja llvm qt@5 boost ldc hdf5 openmpi lapack scalapack sdl2 boost-python3 gtk-doc zstd ncurses objfw libomp - run: | - python3 -m pip install --upgrade setuptools - python3 -m pip install --upgrade pip - python3 -m pip install cython coverage + python3 -m pip install cython - env: CPPFLAGS: "-I/opt/homebrew/include" LDFLAGS: "-L/opt/homebrew/lib" @@ -114,18 +99,7 @@ jobs: # We need this to avoid objfw test failures. 
export PATH="$HOME/tools:/opt/homebrew/opt/qt@5/bin:/opt/homebrew/opt/ncurses/bin:$PATH:/opt/homebrew/opt/llvm/bin" export PKG_CONFIG_PATH="/opt/homebrew/opt/qt@5/lib/pkgconfig:/opt/homebrew/opt/lapack/lib/pkgconfig:/opt/homebrew/opt/ncurses/lib/pkgconfig:$PKG_CONFIG_PATH" - ./tools/run_with_cov.py ./run_project_tests.py --backend=ninja - - - name: Aggregate coverage reports - run: ./ci/combine_cov.sh - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "appleclang [project tests; unity=${{ matrix.unity }}]" - fail_ci_if_error: false - verbose: true + ./run_project_tests.py --backend=ninja Qt4macos: # This job only works on Intel Macs, because OpenSSL 1.0 doesn't build on diff --git a/.github/workflows/msys2.yml b/.github/workflows/msys2.yml index 9101e6b..b926d18 100644 --- a/.github/workflows/msys2.yml +++ b/.github/workflows/msys2.yml @@ -29,7 +29,7 @@ permissions: jobs: test: - runs-on: windows-2019 + runs-on: windows-2022 name: ${{ matrix.NAME }} strategy: fail-fast: false @@ -85,11 +85,12 @@ jobs: mingw-w64-${{ matrix.MSYS2_ARCH }}-python-pip mingw-w64-${{ matrix.MSYS2_ARCH }}-python-fastjsonschema mingw-w64-${{ matrix.MSYS2_ARCH }}-objfw + mingw-w64-${{ matrix.MSYS2_ARCH }}-llvm mingw-w64-${{ matrix.MSYS2_ARCH }}-${{ matrix.TOOLCHAIN }} - name: Install dependencies run: | - python3 -m pip --disable-pip-version-check install gcovr pefile pytest pytest-subtests pytest-xdist coverage + python3 -m pip --disable-pip-version-check install gcovr pefile pytest pytest-subtests pytest-xdist - name: Install pypy3 on x86_64 run: | @@ -124,20 +125,9 @@ jobs: pacman --noconfirm --needed -S mingw-w64-${{ matrix.MSYS2_ARCH }}-${{ matrix.MSYS2_CURSES }} fi - MSYSTEM= python3 ./tools/run_with_cov.py run_tests.py --backend=ninja + MSYSTEM= python3 ./run_tests.py --backend=ninja - uses: actions/upload-artifact@v4 with: name: ${{ matrix.NAME }} path: meson-test-run.* - - - name: Aggregate coverage reports - run: 
./ci/combine_cov.sh - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "${{ matrix.NAME }}" - fail_ci_if_error: false - verbose: true diff --git a/.github/workflows/nonnative.yml b/.github/workflows/nonnative.yml index 2712d10..c616f51 100644 --- a/.github/workflows/nonnative.yml +++ b/.github/workflows/nonnative.yml @@ -36,18 +36,16 @@ jobs: - run: | apt-get -y purge clang gcc gdc apt-get -y autoremove - python3 -m pip install coverage - uses: actions/checkout@v4 - name: Run tests - run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./tools/run_with_cov.py ./run_tests.py $CI_ARGS --cross ubuntu-armhf.json --cross-only' + run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./run_tests.py $CI_ARGS --cross ubuntu-armhf.json --cross-only' - - name: Aggregate coverage reports - run: ./ci/combine_cov.sh - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "Ubuntu nonnative" - fail_ci_if_error: false - verbose: true + cross-cuda: + runs-on: ubuntu-latest + container: mesonbuild/cuda-cross:latest + env: + MESON_CI_JOBNAME: cuda-cross-${{ github.job }} + steps: + - uses: actions/checkout@v4 + - name: Run tests + run: bash -c 'source /ci/env_vars.sh; cd $GITHUB_WORKSPACE; ./run_tests.py $CI_ARGS --cross cuda-cross.json --cross-only' diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml index 0912a75..91ca62e 100644 --- a/.github/workflows/os_comp.yml +++ b/.github/workflows/os_comp.yml @@ -72,18 +72,7 @@ jobs: source /ci/env_vars.sh cd $GITHUB_WORKSPACE - ./tools/run_with_cov.py ./run_tests.py $CI_ARGS - - - name: Aggregate coverage reports - run: ./ci/combine_cov.sh - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "OS Comp [${{ matrix.cfg.name }}]" - fail_ci_if_error: false - verbose: true + ./run_tests.py $CI_ARGS pypy: name: 'Arch / PyPy' 
@@ -111,10 +100,12 @@ jobs: - CC: 'gcc' CXX: 'g++' - MESON_ARGS: '--unity=on -Ddefault_library=static' + MESON_TEST_DDEFAULT_LIBRARY: yes RUN_TESTS_ARGS: '--no-unittests' CC: 'gcc' CXX: 'g++' - MESON_ARGS: '-Ddefault_library=both' + MESON_TEST_DDEFAULT_LIBRARY: yes RUN_TESTS_ARGS: '--no-unittests' CC: 'gcc' CXX: 'g++' @@ -141,6 +132,7 @@ jobs: env: MESON_RSP_THRESHOLD: ${{ matrix.cfg.MESON_RSP_THRESHOLD }} MESON_ARGS: ${{ matrix.cfg.MESON_ARGS }} + MESON_TEST_DDEFAULT_LIBRARY: ${{ matrix.cfg.MESON_TEST_DDEFAULT_LIBRARY }} RUN_TESTS_ARGS: ${{ matrix.cfg.RUN_TESTS_ARGS }} CC: ${{ matrix.cfg.CC }} CXX: ${{ matrix.cfg.CXX }} @@ -172,15 +164,4 @@ jobs: update-alternatives --set i686-w64-mingw32-gcc /usr/bin/i686-w64-mingw32-gcc-posix update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix - ./tools/run_with_cov.py ./run_tests.py $RUN_TESTS_ARGS -- $MESON_ARGS - - - name: Aggregate coverage reports - run: ./ci/combine_cov.sh - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "Ubuntu [${{ matrix.cfg.CC }} ${{ matrix.cfg.RUN_TESTS_ARGS }} ${{ matrix.cfg.MESON_ARGS }}]" - fail_ci_if_error: false - verbose: true + ./run_tests.py $RUN_TESTS_ARGS -- $MESON_ARGS diff --git a/.github/workflows/unusedargs_missingreturn.yml b/.github/workflows/unusedargs_missingreturn.yml index d6f1246..4367ce5 100644 --- a/.github/workflows/unusedargs_missingreturn.yml +++ b/.github/workflows/unusedargs_missingreturn.yml @@ -52,22 +52,10 @@ jobs: run: | sudo apt update -yq sudo apt install -yq --no-install-recommends g++ gfortran ninja-build gobjc gobjc++ - python -m pip install coverage - - run: ./tools/run_with_cov.py run_project_tests.py --only cmake common fortran platform-linux "objective c" "objective c++" + - run: ./run_project_tests.py --only cmake common fortran platform-linux "objective c" "objective c++" env: MESON_CI_JOBNAME: linux-ubuntu-gcc-werror - - name: Aggregate coverage reports - run: 
./ci/combine_cov.sh - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "UnusedMissingReturn" - fail_ci_if_error: false - verbose: true - windows: runs-on: windows-latest steps: @@ -76,23 +64,11 @@ jobs: with: python-version: '3.x' - - run: pip install ninja pefile coverage + - run: pip install ninja pefile - - run: python ./tools/run_with_cov.py run_project_tests.py --only platform-windows + - run: python ./run_project_tests.py --only platform-windows env: CC: gcc CXX: g++ FC: gfortran MESON_CI_JOBNAME: msys2-gcc-werror - - - name: Aggregate coverage reports - run: ./ci/combine_cov.sh - shell: C:\msys64\usr\bin\bash.exe --noprofile --norc -o igncr -eo pipefail '{0}' - - - name: Upload coverage report - uses: codecov/codecov-action@v3 - with: - files: .coverage/coverage.xml - name: "UnusedMissingReturn Windows" - fail_ci_if_error: false - verbose: true @@ -22,6 +22,7 @@ __pycache__ *~ *.swp packagecache +.wraplock /MANIFEST /build /dist @@ -1,7 +1,8 @@ Alexandre Foley <Alexandre.foley@usherbrooke.ca> AlexandreFoley <alexandre.foley@usherbrooke.ca> Igor Gnatenko <i.gnatenko.brain@gmail.com> Igor Gnatenko <ignatenko@redhat.com> -Jussi Pakkanen <jpakkane@gmail.com> Jussi Pakkanen <jpakkane@brash.local> -Jussi Pakkanen <jpakkane@gmail.com> jpakkane <jpakkane@gmail.com> +Jussi Pakkanen <jussi.pakkanen@mailbox.org> Jussi Pakkanen <jpakkane@brash.local> +Jussi Pakkanen <jussi.pakkanen@mailbox.org> Jussi Pakkanen <jpakkane@gmail.com> +Jussi Pakkanen <jussi.pakkanen@mailbox.org> jpakkane <jpakkane@gmail.com> Liam Beguin <liambeguin@gmail.com> Liam Beguin <lvb@xiphos.com> Nirbheek Chauhan <nirbheek@centricular.com> Nirbheek Chauhan <nirbheek.chauhan@gmail.com> Nicolas Schneider <nioncode+git@gmail.com> Nicolas Schneider <nioncode+github@gmail.com> diff --git a/azure-pipelines.yml b/azure-pipelines.yml index ea511f3..86c6b3a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -114,8 +114,3 @@ jobs: 
filePath: .\ci\run.ps1 env: MESON_CI_JOBNAME: azure-$(System.JobName) - - task: PowerShell@2 - displayName: Gathering coverage report - inputs: - targetType: 'filePath' - filePath: .\ci\coverage.ps1 diff --git a/ci/ciimage/cuda-cross/image.json b/ci/ciimage/cuda-cross/image.json new file mode 100644 index 0000000..062322e --- /dev/null +++ b/ci/ciimage/cuda-cross/image.json @@ -0,0 +1,8 @@ +{ + "base_image": "ubuntu:22.04", + "args": ["--only", "cuda", "--cross", "cuda-cross.json"], + "env": { + "CI": "1", + "MESON_CI_JOBNAME": "linux-cuda-cross" + } +} diff --git a/ci/ciimage/cuda-cross/install.sh b/ci/ciimage/cuda-cross/install.sh new file mode 100755 index 0000000..6b5fe7f --- /dev/null +++ b/ci/ciimage/cuda-cross/install.sh @@ -0,0 +1,38 @@ +#!/bin/bash + +set -e + +source /ci/common.sh + +export DEBIAN_FRONTEND=noninteractive +export LANG='C.UTF-8' + +apt-get -y update +apt-get -y upgrade +apt-get -y install wget + +# Cuda repo + keyring. +wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-keyring_1.1-1_all.deb +apt-get -y install ./cuda-keyring_1.1-1_all.deb + +# Cuda cross repo. +echo "deb [signed-by=/usr/share/keyrings/cuda-archive-keyring.gpg] https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/cross-linux-aarch64/ /" \ + > /etc/apt/sources.list.d/cuda-ubuntu2204-cross-linux-aarch64.list +apt-get -y update + +pkgs=( + clang cmake crossbuild-essential-arm64 cuda-cross-aarch64 + cuda-nvcc-12-9 git libglib2.0-dev ninja-build pkg-config python3-pip +) + +apt-get -y install "${pkgs[@]}" + +install_minimal_python_packages + +# Tests need nvcc in PATH in order to run cuda tests. 
+echo "export PATH=\$PATH:/usr/local/cuda/bin" >> /ci/env_vars.sh + +# cleanup +apt-get -y clean +apt-get -y autoclean +rm cuda-keyring_1.1-1_all.deb diff --git a/ci/ciimage/gentoo/install.sh b/ci/ciimage/gentoo/install.sh index 30b0299..909a595 100755 --- a/ci/ciimage/gentoo/install.sh +++ b/ci/ciimage/gentoo/install.sh @@ -108,7 +108,7 @@ cat <<-EOF > /etc/portage/package.use/ci # Some of these settings are needed just to get the binpkg but # aren't negative to have anyway - sys-devel/gcc ada d + sys-devel/gcc ada d jit >=sys-devel/gcc-13 ada objc objc++ sys-devel/gcc pgo lto diff --git a/ci/ciimage/ubuntu-rolling/install.sh b/ci/ciimage/ubuntu-rolling/install.sh index 1c0891c..8a2561b 100755 --- a/ci/ciimage/ubuntu-rolling/install.sh +++ b/ci/ciimage/ubuntu-rolling/install.sh @@ -69,7 +69,7 @@ rustup target add arm-unknown-linux-gnueabihf # Use the GitHub API to get the latest release information LATEST_RELEASE=$(wget -qO- "https://api.github.com/repos/ziglang/zig/releases/latest") ZIGVER=$(echo "$LATEST_RELEASE" | jq -r '.tag_name') -ZIG_BASE="zig-linux-x86_64-$ZIGVER" +ZIG_BASE="zig-x86_64-linux-$ZIGVER" wget "https://ziglang.org/download/$ZIGVER/$ZIG_BASE.tar.xz" tar xf "$ZIG_BASE.tar.xz" rm -rf "$ZIG_BASE.tar.xz" diff --git a/ci/combine_cov.sh b/ci/combine_cov.sh deleted file mode 100755 index 99a503b..0000000 --- a/ci/combine_cov.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/bash - -echo "Combining coverage reports..." -coverage combine - -echo "Generating XML report..." 
-coverage xml - -echo "Printing report" -coverage report diff --git a/ci/coverage.ps1 b/ci/coverage.ps1 deleted file mode 100644 index ebd7cd4..0000000 --- a/ci/coverage.ps1 +++ /dev/null @@ -1,14 +0,0 @@ -echo "" -echo "" -echo "=== Gathering coverage report ===" -echo "" - -python3 -m coverage combine -python3 -m coverage xml -python3 -m coverage report - -# Currently codecov.py does not handle Azure, use this fork of a fork to get it -# working without requiring a token -git clone https://github.com/mensinda/codecov-python -python3 -m pip install --ignore-installed ./codecov-python -python3 -m codecov -f .coverage/coverage.xml -n "VS$env:compiler $env:arch $env:backend" -c $env:SOURCE_VERSION @@ -92,7 +92,7 @@ python --version # Needed for running unit tests in parallel. echo "" -python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist pytest-subtests fastjsonschema coverage +python -m pip --disable-pip-version-check install --upgrade pefile pytest-xdist pytest-subtests fastjsonschema # Needed for running the Cython tests python -m pip --disable-pip-version-check install cython @@ -102,6 +102,6 @@ echo "=== Start running tests ===" # Starting from VS2019 Powershell(?) will fail the test run # if it prints anything to stderr. Python's test runner # does that by default so we need to forward it. 
-cmd /c "python 2>&1 ./tools/run_with_cov.py run_tests.py --backend $env:backend $env:extraargs" +cmd /c "python 2>&1 run_tests.py --backend $env:backend $env:extraargs" exit $LastExitCode diff --git a/ci/usercustomize.py b/ci/usercustomize.py deleted file mode 100644 index d72c6ad..0000000 --- a/ci/usercustomize.py +++ /dev/null @@ -1,5 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2021 The Meson development team - -import coverage -coverage.process_startup() diff --git a/cross/cuda-cross.json b/cross/cuda-cross.json new file mode 100644 index 0000000..f2d0086 --- /dev/null +++ b/cross/cuda-cross.json @@ -0,0 +1,5 @@ +{ + "file": "cuda-cross.txt", + "tests": ["cuda"], + "env": {} +} diff --git a/cross/cuda-cross.txt b/cross/cuda-cross.txt new file mode 100644 index 0000000..7e81463 --- /dev/null +++ b/cross/cuda-cross.txt @@ -0,0 +1,17 @@ +[binaries] +c = ['/usr/bin/aarch64-linux-gnu-gcc'] +cpp = ['/usr/bin/aarch64-linux-gnu-g++'] +cuda = ['/usr/local/cuda/bin/nvcc'] +ar = '/usr/bin/aarch64-linux-gnu-ar' +strip = '/usr/bin/aarch64-linux-gnu-strip' +ld = '/usr/bin/aarch64-linux-gnu-ld' + +[host_machine] +system = 'linux' +cpu_family = 'aarch64' +cpu = 'aarch64' +endian = 'little' + +[built-in options] +cuda_link_args = ['-lstdc++'] +cuda_ccbindir = '/usr/bin/aarch64-linux-gnu-gcc' diff --git a/cross/ubuntu-armhf.txt b/cross/ubuntu-armhf.txt index 6409e39..97a1c21 100644 --- a/cross/ubuntu-armhf.txt +++ b/cross/ubuntu-armhf.txt @@ -4,6 +4,7 @@ c = ['/usr/bin/arm-linux-gnueabihf-gcc'] cpp = ['/usr/bin/arm-linux-gnueabihf-g++'] rust = ['rustc', '--target', 'arm-unknown-linux-gnueabihf', '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7'] +rustdoc = ['rustdoc', '--target', 'arm-unknown-linux-gnueabihf', '-C', 'linker=/usr/bin/arm-linux-gnueabihf-gcc-7'] ar = '/usr/arm-linux-gnueabihf/bin/ar' strip = '/usr/arm-linux-gnueabihf/bin/strip' pkg-config = '/usr/bin/arm-linux-gnueabihf-pkg-config' diff --git a/docs/extensions/refman_links.py 
b/docs/extensions/refman_links.py index 5c22a0a..865668b 100644 --- a/docs/extensions/refman_links.py +++ b/docs/extensions/refman_links.py @@ -1,11 +1,13 @@ from pathlib import Path from json import loads +import os import re from hotdoc.core.exceptions import HotdocSourceException from hotdoc.core.extension import Extension from hotdoc.core.tree import Page from hotdoc.core.project import Project +from hotdoc.core.symbols import * from hotdoc.run_hotdoc import Application from hotdoc.core.formatter import Formatter from hotdoc.utils.loggable import Logger, warn, info @@ -52,6 +54,35 @@ class RefmanLinksExtension(Extension): with valid links to the correct URL. To reference objects / types use the [[@object]] syntax. ''' + for key, value in self._data.items(): + path = os.path.relpath(value, self.app.config.get_invoke_dir()).split('#')[0] + if path == page.link.ref: + if key.startswith('@'): + res = self.create_symbol( + ClassSymbol, + display_name=key[1:], + filename=path, + unique_name=key) + res.link = Link(value, res.display_name, res.unique_name) + elif '.' 
in key: + res = self.create_symbol( + MethodSymbol, + parameters=[], + display_name=key.split('.')[-1], + parent_name=f'@{key.split(".")[-2]}', + filename=path, + unique_name=key) + res.link = Link(value, key, res.unique_name) + else: + res = self.create_symbol( + FunctionSymbol, + parameters=[], + display_name=key, + filename=path, + unique_name=key) + res.link = Link(value, res.display_name, res.unique_name) + page.symbols.append(res) + link_regex = re.compile(r'(\[\[#?@?([ \n\t]*[a-zA-Z0-9_]+[ \n\t]*\.)*[ \n\t]*[a-zA-Z0-9_]+[ \n\t]*\]\])(.)?', re.MULTILINE) for m in link_regex.finditer(page.formatted_contents): i = m.group(1) @@ -103,6 +134,10 @@ class RefmanLinksExtension(Extension): ext.formatter.formatting_page_signal.connect(self._formatting_page_cb) info('Meson refman extension LOADED') + def create_symbol(self, *args, **kwargs): + kwargs['language'] = 'meson' + return super(RefmanLinksExtension, self).create_symbol(*args, **kwargs) + @staticmethod def get_dependencies() -> T.List[T.Type[Extension]]: return [] # In case this extension has dependencies on other extensions diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index faf7a60..ee07df4 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -150,6 +150,10 @@ the two-way mapping: All other combinations of `debug` and `optimization` set `buildtype` to `'custom'`. +Note that `-Ddebug=false` does not cause the compiler preprocessor macro +`NDEBUG` to be defined. +The macro can be defined using the base option `b_ndebug`, described below. 
+ #### Details for `warning_level` Exact flags per warning level is compiler specific, but there is an approximate diff --git a/docs/markdown/CMake-module.md b/docs/markdown/CMake-module.md index f8275c9..982fa35 100644 --- a/docs/markdown/CMake-module.md +++ b/docs/markdown/CMake-module.md @@ -138,8 +138,8 @@ and supports the following methods: `include_type` kwarg *(new in 0.56.0)* controls the include type of the returned dependency object similar to the same kwarg in the [[dependency]] function. - - `include_directories(target)` returns a Meson [[@inc]] - object for the specified target. Using this method is not necessary + - `include_directories(target)` returns an array of Meson [[@inc]] + objects for the specified target. Using this method is not necessary if the dependency object is used. - `target(target)` returns the raw build target. - `target_type(target)` returns the type of the target as a string diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md index 64196f3..65daa22 100644 --- a/docs/markdown/Cross-compilation.md +++ b/docs/markdown/Cross-compilation.md @@ -139,6 +139,22 @@ of a wrapper, these lines are all you need to write. Meson will automatically use the given wrapper when it needs to run host binaries. This happens e.g. when running the project's test suite. +Note that `exe_wrapper` in the cross file is handled separately +from the `exe_wrapper` argument in +[`add_test_setup`](Reference-manual_functions.md#add_test_setup_exe_wrapper) +and [`meson test --wrapper`](Unit-tests.md#other-test-options) +command line argument. Meson must have `exe_wrapper` specified in the +cross file or else it will skip tests that attempt to run cross +compiled binaries. Only the cross file `exe_wrapper` value will be +stored in the `MESON_EXE_WRAPPER` environment variable. 
If another +wrapper is given in the test setup with `exe_wrapper` or as a +`meson test --wrapper` command line argument, then meson will prepend +the additional wrapper before the cross file wrapper like the +following command: +``` +[prepend_wrapper] <cross_file_wrapper> <exe_binary> <args...> +``` + ### Properties In addition to the properties allowed in [all machine @@ -173,12 +189,12 @@ remember to specify the args as an array and not as a single string *Since 0.52.0* The `sys_root` property may point to the root of the host system path (the system that will run the compiled binaries). -This is used internally by Meson to set the PKG_CONFIG_SYSROOT_DIR +This is used internally by Meson to set the `PKG_CONFIG_SYSROOT_DIR` environment variable for pkg-config. If this is unset the host system is assumed to share a root with the build system. -*Since 0.54.0* The pkg_config_libdir property may point to a list of -path used internally by Meson to set the PKG_CONFIG_LIBDIR environment +*Since 0.54.0* The `pkg_config_libdir` property may point to a list of +path used internally by Meson to set the `PKG_CONFIG_LIBDIR` environment variable for pkg-config. This prevents pkg-config from searching cross dependencies in system directories. @@ -357,7 +373,7 @@ myvar = meson.get_external_property('somekey') As of version 0.44.0 Meson supports loading cross files from system locations (except on Windows). This will be -$XDG_DATA_DIRS/meson/cross, or if XDG_DATA_DIRS is undefined, then +`$XDG_DATA_DIRS/meson/cross`, or if `XDG_DATA_DIRS` is undefined, then /usr/local/share/meson/cross and /usr/share/meson/cross will be tried in that order, for system wide cross files. User local files can be put in $XDG_DATA_HOME/meson/cross, or ~/.local/share/meson/cross if diff --git a/docs/markdown/Design-rationale.md b/docs/markdown/Design-rationale.md index 4133979..c520773 100644 --- a/docs/markdown/Design-rationale.md +++ b/docs/markdown/Design-rationale.md @@ -34,9 +34,9 @@ may not work. 
In some cases the executable file is a binary whereas at other times it is a wrapper shell script that invokes the real binary which resides in a hidden subdirectory. GDB invocation fails if the binary is a script but succeeds if it is not. The user has to remember -the type of each one of his executables (which is an implementation -detail of the build system) just to be able to debug them. Several -other such pain points can be found in [this blog +the type of each executable (which is an implementation detail of the +build system) just to be able to debug them. Several other such pain +points can be found in [this blog post](http://voices.canonical.com/jussi.pakkanen/2011/09/13/autotools/). Given these idiosyncrasies it is no wonder that most people don't want @@ -132,7 +132,7 @@ and so on. Sometimes you just have to compile files with only given compiler flags and no others, or install files in weird places. The system must -allow the user to do this if he really wants to. +allow the user to do this. Overview of the solution -- @@ -151,7 +151,7 @@ passing around compiler flags and linker flags. In the proposed system the user just declares that a given build target uses a given external dependency. The build system then takes care of passing all flags and settings to their proper locations. This means that the user can focus -on his own code rather than marshalling command line arguments from +on their own code rather than marshalling command line arguments from one place to another. A DSL is more work than the approach taken by SCons, which is to diff --git a/docs/markdown/Getting-meson_zh.md b/docs/markdown/Getting-meson_zh.md index 4a4cb34..da0bdd5 100644 --- a/docs/markdown/Getting-meson_zh.md +++ b/docs/markdown/Getting-meson_zh.md @@ -1,6 +1,6 @@ # 获取Meson -Meson基于Python3运行,要求Python版本3.5以上。 如果你的操作系统提供包管理器, 你应该用包管理器安装meson;如果没有包管理器,你应该在[Python主页]下载合适的Python3。相关请参阅[特殊平台的安装特例](#特殊平台的安装特例). 
+Meson基于Python3运行,要求Python版本3.7以上。 如果你的操作系统提供包管理器, 你应该用包管理器安装python;如果没有包管理器,你应该在[Python主页]下载合适的Python3。相关请参阅[特殊平台的安装特例](#特殊平台的安装特例). ## 下载Meson diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index e8953ef..84bcc61 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -280,6 +280,8 @@ one XML file. * `object_manager`: *(Added 0.40.0)* if true generates object manager code * `annotations`: *(Added 0.43.0)* list of lists of 3 strings for the annotation for `'ELEMENT', 'KEY', 'VALUE'` * `docbook`: *(Added 0.43.0)* prefix to generate `'PREFIX'-NAME.xml` docbooks +* `rst`: *(Added 1.9.0)* prefix to generate `'PREFIX'-NAME.rst` reStructuredTexts +* `markdown`: *(Added 1.9.0)* prefix to generate `'PREFIX'-NAME.md` markdowns * `build_by_default`: causes, when set to true, to have this target be built by default, that is, when invoking plain `meson compile`, the default value is true for all built target types @@ -289,8 +291,9 @@ one XML file. Starting *0.46.0*, this function returns a list of at least two custom targets (in order): one for the source code and one for the header. -The list will contain a third custom target for the generated docbook -files if that keyword argument is passed. +The list can then contain other custom targets for the generated documentation +files depending if the keyword argument is passed (in order): the docbook +target, the reStructuredText target and the markdown target. Earlier versions return a single custom target representing all the outputs. Generally, you should just add this list of targets to a top diff --git a/docs/markdown/Overview.md b/docs/markdown/Overview.md index 7bee937..f41a3b7 100644 --- a/docs/markdown/Overview.md +++ b/docs/markdown/Overview.md @@ -6,11 +6,11 @@ short-description: Overview of the Meson build system Meson is a build system that is designed to be as user-friendly as possible without sacrificing performance. 
The main tool for this is a -custom language that the user uses to describe the structure of his -build. The main design goals of this language has been simplicity, -clarity and conciseness. Much inspiration was drawn from the Python -programming language, which is considered very readable, even to -people who have not programmed in Python before. +custom language used to describe the structure of the build. The main +design goals of this language has been simplicity, clarity and +conciseness. Much inspiration was drawn from the Python programming +language, which is considered very readable, even to people who have +not programmed in Python before. Another main idea has been to provide first class support for modern programming tools and best practices. These include features as varied diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md index 80882cb..7ddd310 100644 --- a/docs/markdown/Pkgconfig-module.md +++ b/docs/markdown/Pkgconfig-module.md @@ -47,6 +47,7 @@ keyword arguments. `pkgconfig.generate()` was used on to put in the `Requires` field - `requires_private` the same as `requires` but for the `Requires.private` field - `url` a string with a url for the library +- `license` (*Since 1.9.0*) a string with a SPDX license to add to the generated file. - `variables` a list of strings with custom variables to add to the generated file. The strings must be in the form `name=value` and may reference other pkgconfig variables, @@ -90,6 +91,9 @@ application. That will cause pkg-config to prefer those builddir. This is an experimental feature provided on a best-effort basis, it might not work in all use-cases. +*Since 1.9.0* you can specify a license identifier. To use the current project +licence, simply use `license: meson.project_license()` as argument to `generate()`. 
+ ### Implicit dependencies The exact rules followed to find dependencies that are implicitly diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index a5d2785..981bd18 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -58,6 +58,7 @@ These are return values of the `get_linker_id` method in a compiler object. | Value | Linker family | | ----- | --------------- | | ld.bfd | The GNU linker | +| ld.eld | Qualcomm's embedded linker | | ld.gold | The GNU gold linker | | ld.lld | The LLVM linker, with the GNU interface | | ld.mold | The fast MOLD linker | diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index a515b24..5b9821b 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -2,7 +2,7 @@ title: Users ... -# Notable projects using Meson +# Notable projects and organizations using Meson If you're aware of a notable project that uses Meson, please [file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) @@ -11,181 +11,47 @@ for it. For other projects using Meson, you may be interested in this Some additional projects are listed in the [`meson` GitHub topic](https://github.com/topics/meson). 
- - [2048.cpp](https://github.com/plibither8/2048.cpp), a fully featured terminal version of the game "2048" written in C++ - - [aawordsearch](https://github.com/theimpossibleastronaut/aawordsearch), generate wordsearch puzzles using random words in different languages - - [Adwaita Manager](https://github.com/AdwCustomizerTeam/AdwCustomizer), change the look of Adwaita, with ease - - [Aravis](https://github.com/AravisProject/aravis), a glib/gobject based library for video acquisition using Genicam cameras - - [Akira](https://github.com/akiraux/Akira), a native Linux app for UI and UX design built in Vala and Gtk - - [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3 - - [Arduino sample project](https://github.com/jpakkane/mesonarduino) - - [Asteria](https://github.com/lhmouse/asteria), another scripting language - - [Audacious](https://github.com/audacious-media-player), a lightweight and versatile audio player - - [bolt](https://gitlab.freedesktop.org/bolt/bolt), userspace daemon to enable security levels for Thunderbolt™ 3 on Linux - - [bsdutils](https://github.com/dcantrell/bsdutils), alternative to GNU coreutils using software from FreeBSD - - [Bubblewrap](https://github.com/containers/bubblewrap), unprivileged sandboxing tool - - [Budgie Desktop](https://github.com/budgie-desktop/budgie-desktop), a desktop environment built on GNOME technologies - - [Bzip2](https://gitlab.com/federicomenaquintero/bzip2), the bzip2 compressor/decompressor - - [Cage](https://github.com/Hjdskes/cage), a Wayland kiosk - - [canfigger](https://github.com/andy5995/canfigger), simple configuration file parser library - - [casync](https://github.com/systemd/casync), Content-Addressable Data Synchronization Tool - - [cglm](https://github.com/recp/cglm), a highly optimized graphics math library for C - - [cinnamon-desktop](https://github.com/linuxmint/cinnamon-desktop), the cinnamon desktop library - - 
[Cozy](https://github.com/geigi/cozy), a modern audio book player for Linux and macOS using GTK+ 3 - - [Criterion](https://github.com/Snaipe/Criterion), extensible cross-platform C and C++ unit testing framework - - [dav1d](https://code.videolan.org/videolan/dav1d), an AV1 decoder - - [dbus-broker](https://github.com/bus1/dbus-broker), Linux D-Bus Message Broker - - [DOSBox Staging](https://github.com/dosbox-staging/dosbox-staging), DOS/x86 emulator - - [DPDK](http://dpdk.org/browse/dpdk), Data Plane Development Kit, a set of libraries and drivers for fast packet processing - - [DXVK](https://github.com/doitsujin/dxvk), a Vulkan-based Direct3D 11 implementation for Linux using Wine +## [Xorg](https://www.x.org) + + - [Xserver](https://gitlab.freedesktop.org/xorg/xserver), the X.org display server + +## [Gnome](https://www.gnome.org) + + - [GTK](https://gitlab.gnome.org/GNOME/gtk), the multi-platform toolkit used by GNOME + - [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer + +## [Enlightenment](https://www.enlightenment.org/) + - [EFL](https://www.enlightenment.org/about-efl), multiplatform set of libraries, used by the Enlightenment windows manager and other projects - - [Enlightenment](https://www.enlightenment.org/), windows manager, compositor and minimal desktop for Linux - - [elementary OS](https://github.com/elementary/), Linux desktop oriented distribution - - [Emeus](https://github.com/ebassi/emeus), constraint based layout manager for GTK+ - - [Entangle](https://entangle-photo.org/), tethered camera control and capture desktop application - - [ESP8266 Arduino sample project](https://github.com/trilader/arduino-esp8266-meson), sample project for using the ESP8266 Arduino port with Meson - - [FeedReader](https://github.com/jangernert/FeedReader), a modern desktop application designed to complement existing web-based RSS accounts - - [Flecs](https://github.com/SanderMertens/flecs), a Fast and Lightweight ECS 
(Entity Component System) C library - - [Foliate](https://github.com/johnfactotum/foliate), a simple and modern GTK eBook reader, built with GJS and Epub.js - - [Fractal](https://wiki.gnome.org/Apps/Fractal/), a Matrix messaging client for GNOME - - [Frida](https://github.com/frida/frida-core), a dynamic binary instrumentation toolkit - - [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware - - [GameMode](https://github.com/FeralInteractive/gamemode), a daemon/lib combo for Linux that allows games to request a set of optimisations be temporarily applied to the host OS - - [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop + +## [Elementary OS](https://github.com/elementary/) + +## [cinnamon-desktop](https://github.com/linuxmint/cinnamon-desktop) + + - [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment + +## [Budgie Desktop](https://github.com/budgie-desktop/budgie-desktop) + +## Other Notable projects + + - [FreeType](https://freetype.org/), widely used open source font rendering engine - [GIMP](https://gitlab.gnome.org/GNOME/gimp), an image manipulation program (master branch) - - [Git](https://git-scm.com/), ["the information manager from hell"](https://github.com/git/git/commit/e83c5163316f89bfbde7d9ab23ca2e25604af290) - - [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer - - [Glorytun](https://github.com/angt/glorytun), a multipath UDP tunnel - - [GNOME Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a GNOME hypervisor - - [GNOME Builder](https://gitlab.gnome.org/GNOME/gnome-builder), an IDE for the GNOME platform - - [GNOME MPV](https://github.com/gnome-mpv/gnome-mpv), GNOME frontend to the mpv video player - - [GNOME Recipes](https://gitlab.gnome.org/GNOME/recipes), application for cooking recipes - - [GNOME 
Software](https://gitlab.gnome.org/GNOME/gnome-software), an app store for GNOME - - [GNOME Twitch](https://github.com/vinszent/gnome-twitch), an app for viewing Twitch streams on GNOME desktop - - [GNOME Usage](https://gitlab.gnome.org/GNOME/gnome-usage), a GNOME application for visualizing system resources - - [GNOME Web](https://gitlab.gnome.org/GNOME/epiphany), a browser for a simple, clean, beautiful view of the web - - [GNU FriBidi](https://github.com/fribidi/fribidi), the open source implementation of the Unicode Bidirectional Algorithm - - [Graphene](https://ebassi.github.io/graphene/), a thin type library for graphics - - [Grilo](https://git.gnome.org/browse/grilo) and [Grilo plugins](https://git.gnome.org/browse/grilo-plugins), the Grilo multimedia framework - [GStreamer](https://gitlab.freedesktop.org/gstreamer/gstreamer), multimedia framework - - [GTK+](https://gitlab.gnome.org/GNOME/gtk), the multi-platform toolkit used by GNOME - - [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D - - [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO - - [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux + - [Git](https://git-scm.com/), ["the information manager from hell"](https://github.com/git/git/commit/e83c5163316f89bfbde7d9ab23ca2e25604af290) - [HarfBuzz](https://github.com/harfbuzz/harfbuzz), a text shaping engine - - [HelenOS](http://helenos.org), a portable microkernel-based multiserver operating system - [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C - - [IGT](https://gitlab.freedesktop.org/drm/igt-gpu-tools), Linux kernel graphics driver test suite - - [i3](https://i3wm.org), improved tiling window manager - - [inih](https://github.com/benhoyt/inih) (INI Not Invented Here), a small and simple .INI file parser written in C - - 
[Irssi](https://github.com/irssi/irssi), a terminal chat client in C - - [iSH](https://github.com/tbodt/ish), Linux shell for iOS - - [Janet](https://github.com/janet-lang/janet), a functional and imperative programming language and bytecode interpreter - - [json](https://github.com/nlohmann/json), JSON for Modern C++ - - [JsonCpp](https://github.com/open-source-parsers/jsoncpp), a C++ library for interacting with JSON - - [Json-glib](https://gitlab.gnome.org/GNOME/json-glib), GLib-based JSON manipulation library - - [Kiwix libraries](https://github.com/kiwix/kiwix-lib) - - [Knot Resolver](https://gitlab.labs.nic.cz/knot/knot-resolver), Full caching DNS resolver implementation - - [Ksh](https://github.com/att/ast), a Korn Shell - - [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network - - [Le](https://github.com/kirushyk/le), machine learning framework - - [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android - - [Libdrm](https://gitlab.freedesktop.org/mesa/drm), a library for abstracting DRM kernel interfaces - - [libdwarf](https://www.prevanders.net/dwarf.html), a multiplatform DWARF parser library - - [libeconf](https://github.com/openSUSE/libeconf), Enhanced config file parsing library, which merges config files placed in several locations into one - - [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management - - [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface - - [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2 - - [libglvnd](https://gitlab.freedesktop.org/glvnd/libglvnd), Vendor neutral OpenGL dispatch library for Unix-like OSes - - [Libhttpseverywhere](https://git.gnome.org/browse/libhttpseverywhere), a library to enable httpseverywhere on any 
desktop app - - [libmodulemd](https://github.com/fedora-modularity/libmodulemd), a GObject Introspected library for managing [Fedora Project](https://getfedora.org/) module metadata - - [Libosmscout](https://github.com/Framstag/libosmscout), a C++ library for offline map rendering, routing and location -lookup based on OpenStreetMap data - - [libratbag](https://github.com/libratbag/libratbag), provides a DBus daemon to configure input devices, mainly gaming mice - - [libspng](https://github.com/randy408/libspng), a C library for reading and writing Portable Network Graphics (PNG) -format files - - [libSRTP](https://github.com/cisco/libsrtp) (from Cisco Systems), a library for SRTP (Secure Realtime Transport Protocol) - - [libui](https://github.com/andlabs/libui), a simple and portable (but not inflexible) GUI library in C that uses the native GUI technologies of each platform it supports - - [Libva](https://github.com/intel/libva), an implementation for the VA (VIdeo Acceleration) API - - [libvips](https://github.com/libvips/libvips), a fast image processing library with low memory needs - - [Libvirt](https://libvirt.org), a toolkit to manage virtualization platforms - - [Libzim](https://github.com/openzim/libzim), the reference implementation for the ZIM file format - - [Linux PAM](https://github.com/linux-pam/linux-pam), The Pluggable Authentication Modules project for Linux - [LXC](https://github.com/lxc/lxc), Linux container runtime - - [Marker](https://github.com/fabiocolacio/Marker), a GTK-3 markdown editor - - [mcfgthread](https://github.com/lhmouse/mcfgthread), cornerstone library for C++11 threading on mingw-w64 + - [Linux PAM](https://github.com/linux-pam/linux-pam), The Pluggable Authentication Modules project for Linux - [Mesa](https://mesa3d.org/), an open source graphics driver project - - [Miniz](https://github.com/richgel999/miniz), a zlib replacement library - - [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your 
system via WiFi-Display specification aka Miracast - [mpv](https://github.com/mpv-player/mpv), a free, open source, and cross-platform media player - - [mrsh](https://github.com/emersion/mrsh), a minimal POSIX shell - - [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the GNOME file manager - - [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment - - [netatalk](https://netatalk.io/), a free and open source AFP file server for Mac and Apple II - - [NetPanzer](https://github.com/netpanzer/netpanzer), a 2D online multiplayer tactical warfare game designed for fast action combat - [NumPy](https://numpy.org/), a Python package for scientific computing - - [nvme-cli](https://github.com/linux-nvme/nvme-cli), NVMe management command line interface - - [oomd](https://github.com/facebookincubator/oomd), a userspace Out-Of-Memory (OOM) killer for Linux systems - [OpenH264](https://github.com/cisco/openh264), open source H.264 codec - - [OpenHMD](https://github.com/OpenHMD/OpenHMD), a free and open source API and drivers for immersive technology, such as head mounted displays with built in head tracking - [OpenRC](https://github.com/OpenRC/openrc), an init system for Unix-like operating systems - - [OpenTitan](https://github.com/lowRISC/opentitan), an open source silicon Root of Trust (RoT) project - - [Orc](https://gitlab.freedesktop.org/gstreamer/orc), the Optimized Inner Loop Runtime Compiler - - [OTS](https://github.com/khaledhosny/ots), the OpenType Sanitizer, parses and serializes OpenType files (OTF, TTF) and WOFF and WOFF2 font files, validating and sanitizing them as it goes. 
Used by Chromium and Firefox - - [Outlier](https://github.com/kerolasa/outlier), a small Hello World style Meson example project - - [p11-kit](https://github.com/p11-glue/p11-kit), PKCS#11 module aggregator - [Pacman](https://gitlab.archlinux.org/pacman/pacman.git), a package manager for Arch Linux - - [Pango](https://git.gnome.org/browse/pango/), an Internationalized text layout and rendering library - - [Parzip](https://github.com/jpakkane/parzip), a multithreaded reimplementation of Zip - - [Peek](https://github.com/phw/peek), simple animated GIF screen recorder with an easy to use interface - [PicoLibc](https://github.com/keith-packard/picolibc), a standard C library for small embedded systems with limited RAM - [PipeWire](https://github.com/PipeWire/pipewire), a framework for video and audio for containerized applications - - [Pistache](https://github.com/pistacheio/pistache), a high performance REST toolkit written in C++ - - [Pithos](https://github.com/pithos/pithos), a Pandora Radio client - - [Pitivi](https://github.com/pitivi/pitivi/), a nonlinear video editor - - [Planner](https://github.com/alainm23/planner), task manager with Todoist support designed for GNU/Linux - - [Playerctl](https://github.com/acrisci/playerctl), mpris command-line controller and library for spotify, vlc, audacious, bmp, cmus, and others - - [Polari](https://gitlab.gnome.org/GNOME/polari), an IRC client - [PostgreSQL](https://www.postgresql.org/), an advanced open source relational database - - [qboot](https://github.com/bonzini/qboot), a minimal x86 firmware for booting Linux kernels - [QEMU](https://qemu.org), a processor emulator and virtualizer - - [radare2](https://github.com/radare/radare2), unix-like reverse engineering framework and commandline tools (not the default) - - [refivar](https://github.com/nvinson/refivar), A reimplementation of efivar in Rust - - [Rizin](https://rizin.re), Free and Open Source Reverse Engineering Framework - - 
[rmw](https://theimpossibleastronaut.com/rmw-website/), safe-remove utility for the command line - - [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock) - [SciPy](https://scipy.org/), an open-source software for mathematics, science, and engineering - - [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP - - [Sequeler](https://github.com/Alecaddd/sequeler), a friendly SQL client for Linux, built with Vala and Gtk - - [Siril](https://gitlab.com/free-astro/siril), an image processing software for amateur astronomy - - [slapt-get](https://github.com/jaos/slapt-get), an APT like system for Slackware package management - - [Spot](https://github.com/xou816/spot), Rust based Spotify client for the GNOME desktop - - [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP - - [sway](https://github.com/swaywm/sway), i3-compatible Wayland compositor - - [Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool - [systemd](https://github.com/systemd/systemd), the init system - - [szl](https://github.com/dimkr/szl), a lightweight, embeddable scripting language - - [Taisei Project](https://taisei-project.org/), an open-source Touhou Project clone and fangame - - [Terminology](https://github.com/billiob/terminology), a terminal emulator based on the Enlightenment Foundation Libraries - - [ThorVG](https://www.thorvg.org/), vector-based scenes and animations library - - [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3 - - [Tizonia](https://github.com/tizonia/tizonia-openmax-il), a command-line cloud music player for Linux with support for Spotify, Google Play Music, YouTube, SoundCloud, TuneIn, Plex servers and Chromecast devices - - [Fossil Logic](https://github.com/fossillogic), Fossil Logic is a 
cutting-edge software development company specializing in C/C++, Python, programming, Android development using Kotlin, and SQL solutions - - [UFJF-MLTK](https://github.com/mateus558/UFJF-Machine-Learning-Toolkit), A C++ cross-platform framework for machine learning algorithms development and testing - - [Vala Language Server](https://github.com/benwaffle/vala-language-server), code intelligence engine for the Vala and Genie programming languages - - [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala - - [Venom](https://github.com/naxuroqa/Venom), a modern Tox client for the GNU/Linux desktop - - [vkQuake](https://github.com/Novum/vkQuake), a port of id Software's Quake using Vulkan instead of OpenGL for rendering - - [VMAF](https://github.com/Netflix/vmaf) (by Netflix), a perceptual video quality assessment based on multi-method fusion - - [Wayland](https://gitlab.freedesktop.org/wayland/wayland) and [Weston](https://gitlab.freedesktop.org/wayland/weston), a next generation display server - - [wlroots](https://gitlab.freedesktop.org/wlroots/wlroots), a modular Wayland compositor library - - [xi-gtk](https://github.com/eyelash/xi-gtk), a GTK+ front-end for the Xi editor - - [Xorg](https://gitlab.freedesktop.org/xorg/xserver), the X.org display server (not the default yet) - - [X Test Suite](https://gitlab.freedesktop.org/xorg/test/xts), The X.org test suite - - [zathura](https://github.com/pwmt/zathura), a highly customizable and functional document viewer based on the -girara user interface library and several document libraries - - [Zrythm](https://git.sr.ht/~alextee/zrythm), a cross-platform digital audio workstation written in C using GTK4 - - [ZStandard](https://github.com/facebook/zstd/commit/4dca56ed832c6a88108a2484a8f8ff63d8d76d91), a compression algorithm developed at Facebook (not used by default) - -Note that a more up-to-date list of GNOME projects that use Meson can -be found 
-[here](https://wiki.gnome.org/Initiatives/GnomeGoals/MesonPorting). + - [Wayland](https://gitlab.freedesktop.org/wayland/wayland), common display protocol diff --git a/docs/markdown/snippets/eld-support.md b/docs/markdown/snippets/eld-support.md new file mode 100644 index 0000000..cc854fa --- /dev/null +++ b/docs/markdown/snippets/eld-support.md @@ -0,0 +1,6 @@ +## Added Qualcomm's embedded linker, eld + +Qualcomm recently open-sourced their embedded linker. +https://github.com/qualcomm/eld + +Meson users can now use this linker. diff --git a/docs/markdown/snippets/gnome-rsp-files-support.md b/docs/markdown/snippets/gnome-rsp-files-support.md new file mode 100644 index 0000000..29c9082 --- /dev/null +++ b/docs/markdown/snippets/gnome-rsp-files-support.md @@ -0,0 +1,8 @@ +## Support response files for custom targets + +When using the Ninja backend, Meson can now pass arguments to supported tools +through response files. + +In this release it's enabled only for the Gnome module, fixing calling +`gnome.mkenums()` with a large set of files on Windows (requires +Glib 2.59 or higher). diff --git a/docs/markdown/snippets/pkgconfig-gen-license.md b/docs/markdown/snippets/pkgconfig-gen-license.md new file mode 100644 index 0000000..c2e6818 --- /dev/null +++ b/docs/markdown/snippets/pkgconfig-gen-license.md @@ -0,0 +1,3 @@ +## Added license keyword to pkgconfig.generate + +When specified, it will add a `License:` attribute to the generated .pc file. 
diff --git a/docs/markdown/snippets/swift-pass-c-compiler-options.md b/docs/markdown/snippets/swift-pass-c-compiler-options.md new file mode 100644 index 0000000..3610a8e --- /dev/null +++ b/docs/markdown/snippets/swift-pass-c-compiler-options.md @@ -0,0 +1,8 @@ +## Swift compiler receives select C family compiler options + +Meson now passes select few C family (C/Obj-C) compiler options to the +Swift compiler, notably *-std=*, in order to improve the compatibility +of C code as interpreted by the C compiler and the Swift compiler. + +NB: This does not include any of the options set in the target's +c_flags. diff --git a/docs/markdown/snippets/swift_cxx_interoperability.md b/docs/markdown/snippets/swift_cxx_interoperability.md new file mode 100644 index 0000000..f18e114 --- /dev/null +++ b/docs/markdown/snippets/swift_cxx_interoperability.md @@ -0,0 +1,13 @@ +## Swift/C++ interoperability is now supported + +It is now possible to create Swift executables that can link to C++ or +Objective-C++ libraries. Only specifying a bridging header for the Swift +target is required. + +Swift 5.9 is required to use this feature. Xcode 15 is required if the +Xcode backend is used. + +```meson +lib = static_library('mylib', 'mylib.cpp') +exe = executable('prog', 'main.swift', 'mylib.h', link_with: lib) +``` diff --git a/docs/markdown/snippets/vs2010-masm-support.md b/docs/markdown/snippets/vs2010-masm-support.md new file mode 100644 index 0000000..840cbf3 --- /dev/null +++ b/docs/markdown/snippets/vs2010-masm-support.md @@ -0,0 +1,8 @@ +## Support for MASM in Visual Studio backends + +Previously, assembling `.masm` files with Microsoft's Macro Assembler is only +available on the Ninja backend. This now also works on Visual Studio backends. + +Note that building ARM64EC code using `ml64.exe` is currently unimplemented in +both of the backends. If you need mixing x64 and Arm64 in your project, please +file an issue on GitHub. 
diff --git a/docs/meson.build b/docs/meson.build index c476b59..9c8fe9e 100644 --- a/docs/meson.build +++ b/docs/meson.build @@ -135,6 +135,8 @@ documentation = hotdoc.generate_doc(meson.project_name(), extra_extension: meson.current_source_dir() / 'extensions' / 'refman_links.py', refman_data_file: refman_md[1], fatal_warnings: true, + devhelp_activate: true, + devhelp_online: 'https://mesonbuild.com/', ) run_target('upload', diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py index cd8156a..62c4839 100644 --- a/mesonbuild/ast/interpreter.py +++ b/mesonbuild/ast/interpreter.py @@ -8,8 +8,12 @@ from __future__ import annotations import os import sys import typing as T +from collections import defaultdict +from dataclasses import dataclass +import itertools +from pathlib import Path -from .. import mparser, mesonlib +from .. import mparser, mesonlib, mlog from .. import environment from ..interpreterbase import ( @@ -20,8 +24,14 @@ from ..interpreterbase import ( ContinueRequest, Disabler, default_resolve_key, + is_disabled, + UnknownValue, + UndefinedVariable, + InterpreterObject, ) +from ..interpreterbase.helpers import flatten + from ..interpreter import ( StringHolder, BooleanHolder, @@ -36,19 +46,21 @@ from ..mparser import ( ArrayNode, AssignmentNode, BaseNode, - ElementaryNode, EmptyNode, IdNode, MethodNode, NotNode, PlusAssignmentNode, TernaryNode, + SymbolNode, + Token, + FunctionNode, ) if T.TYPE_CHECKING: from .visitor import AstVisitor from ..interpreter import Interpreter - from ..interpreterbase import SubProject, TYPE_nkwargs, TYPE_var + from ..interpreterbase import SubProject, TYPE_var, TYPE_nvar from ..mparser import ( AndNode, ComparisonNode, @@ -60,38 +72,122 @@ if T.TYPE_CHECKING: UMinusNode, ) -class DontCareObject(MesonInterpreterObject): - pass - -class MockExecutable(MesonInterpreterObject): - pass - -class MockStaticLibrary(MesonInterpreterObject): - pass - -class MockSharedLibrary(MesonInterpreterObject): - pass - 
-class MockCustomTarget(MesonInterpreterObject): - pass - -class MockRunTarget(MesonInterpreterObject): - pass - -ADD_SOURCE = 0 -REMOVE_SOURCE = 1 - _T = T.TypeVar('_T') _V = T.TypeVar('_V') +def _symbol(val: str) -> SymbolNode: + return SymbolNode(Token('', '', 0, 0, 0, (0, 0), val)) + +# `IntrospectionFile` is to the `IntrospectionInterpreter` what `File` is to the normal `Interpreter`. +@dataclass +class IntrospectionFile: + subdir: str + rel: str + + def to_abs_path(self, root_dir: Path) -> Path: + return (root_dir / self.subdir / self.rel).resolve() + + def __hash__(self) -> int: + return hash((self.__class__.__name__, self.subdir, self.rel)) + +# `IntrospectionDependency` is to the `IntrospectionInterpreter` what `Dependency` is to the normal `Interpreter`. +@dataclass +class IntrospectionDependency(MesonInterpreterObject): + name: T.Union[str, UnknownValue] + required: T.Union[bool, UnknownValue] + version: T.Union[T.List[str], UnknownValue] + has_fallback: bool + conditional: bool + node: FunctionNode + +# `IntrospectionBuildTarget` is to the `IntrospectionInterpreter` what `BuildTarget` is to the normal `Interpreter`. 
+@dataclass +class IntrospectionBuildTarget(MesonInterpreterObject): + name: str + machine: str + id: str + typename: str + defined_in: str + subdir: str + build_by_default: bool + installed: bool + outputs: T.List[str] + source_nodes: T.List[BaseNode] + extra_files: BaseNode + kwargs: T.Dict[str, TYPE_var] + node: FunctionNode + +def is_ignored_edge(src: T.Union[BaseNode, UnknownValue]) -> bool: + return (isinstance(src, FunctionNode) and src.func_name.value not in {'files', 'get_variable'}) or isinstance(src, MethodNode) + +class DataflowDAG: + src_to_tgts: T.DefaultDict[T.Union[BaseNode, UnknownValue], T.Set[T.Union[BaseNode, UnknownValue]]] + tgt_to_srcs: T.DefaultDict[T.Union[BaseNode, UnknownValue], T.Set[T.Union[BaseNode, UnknownValue]]] + + def __init__(self) -> None: + self.src_to_tgts = defaultdict(set) + self.tgt_to_srcs = defaultdict(set) + + def add_edge(self, source: T.Union[BaseNode, UnknownValue], target: T.Union[BaseNode, UnknownValue]) -> None: + self.src_to_tgts[source].add(target) + self.tgt_to_srcs[target].add(source) + + # Returns all nodes in the DAG that are reachable from a node in `srcs`. + # In other words, A node `a` is part of the returned set exactly if data + # from `srcs` flows into `a`, directly or indirectly. + # Certain edges are ignored. + def reachable(self, srcs: T.Set[T.Union[BaseNode, UnknownValue]], reverse: bool) -> T.Set[T.Union[BaseNode, UnknownValue]]: + reachable = srcs.copy() + active = srcs.copy() + while active: + new: T.Set[T.Union[BaseNode, UnknownValue]] = set() + if reverse: + for tgt in active: + new.update(src for src in self.tgt_to_srcs[tgt] if not is_ignored_edge(src)) + else: + for src in active: + if is_ignored_edge(src): + continue + new.update(tgt for tgt in self.src_to_tgts[src]) + reachable.update(new) + active = new + return reachable + + # Returns all paths from src to target. + # Certain edges are ignored. 
+ def find_all_paths(self, src: T.Union[BaseNode, UnknownValue], target: T.Union[BaseNode, UnknownValue]) -> T.List[T.List[T.Union[BaseNode, UnknownValue]]]: + queue = [(src, [src])] + paths = [] + while queue: + cur, path = queue.pop() + if cur == target: + paths.append(path) + if is_ignored_edge(cur): + continue + queue.extend((tgt, path + [tgt]) for tgt in self.src_to_tgts[cur]) + return paths class AstInterpreter(InterpreterBase): def __init__(self, source_root: str, subdir: str, subproject: SubProject, subproject_dir: str, env: environment.Environment, visitors: T.Optional[T.List[AstVisitor]] = None): super().__init__(source_root, subdir, subproject, subproject_dir, env) self.visitors = visitors if visitors is not None else [] - self.assignments: T.Dict[str, BaseNode] = {} - self.assign_vals: T.Dict[str, T.Any] = {} - self.reverse_assignment: T.Dict[str, BaseNode] = {} + self.nesting: T.List[int] = [] + self.cur_assignments: T.DefaultDict[str, T.List[T.Tuple[T.List[int], T.Union[BaseNode, UnknownValue]]]] = defaultdict(list) + self.all_assignment_nodes: T.DefaultDict[str, T.List[AssignmentNode]] = defaultdict(list) + # dataflow_dag is an acyclic directed graph that contains an edge + # from one instance of `BaseNode` to another instance of `BaseNode` if + # data flows directly from one to the other. Example: If meson.build + # contains this: + # var = 'foo' + '123' + # executable(var, 'src.c') + # var = 'bar' + # dataflow_dag will contain an edge from the IdNode corresponding to + # 'var' in line 2 to the ArithmeticNode corresponding to 'foo' + '123'. + # This graph is crucial for e.g. node_to_runtime_value because we have + # to know that 'var' in line2 is 'foo123' and not 'bar'. 
+ self.dataflow_dag = DataflowDAG() + self.funcvals: T.Dict[BaseNode, T.Any] = {} + self.tainted = False self.funcs.update({'project': self.func_do_nothing, 'test': self.func_do_nothing, 'benchmark': self.func_do_nothing, @@ -124,7 +220,7 @@ class AstInterpreter(InterpreterBase): 'vcs_tag': self.func_do_nothing, 'add_languages': self.func_do_nothing, 'declare_dependency': self.func_do_nothing, - 'files': self.func_do_nothing, + 'files': self.func_files, 'executable': self.func_do_nothing, 'static_library': self.func_do_nothing, 'shared_library': self.func_do_nothing, @@ -133,9 +229,9 @@ class AstInterpreter(InterpreterBase): 'custom_target': self.func_do_nothing, 'run_target': self.func_do_nothing, 'subdir': self.func_subdir, - 'set_variable': self.func_do_nothing, - 'get_variable': self.func_do_nothing, - 'unset_variable': self.func_do_nothing, + 'set_variable': self.func_set_variable, + 'get_variable': self.func_get_variable, + 'unset_variable': self.func_unset_variable, 'is_disabler': self.func_do_nothing, 'is_variable': self.func_do_nothing, 'disabler': self.func_do_nothing, @@ -153,14 +249,14 @@ class AstInterpreter(InterpreterBase): 'debug': self.func_do_nothing, }) - def _unholder_args(self, args: _T, kwargs: _V) -> T.Tuple[_T, _V]: + def _unholder_args(self, args: T.Any, kwargs: T.Any) -> T.Tuple[T.Any, T.Any]: return args, kwargs - def _holderify(self, res: _T) -> _T: + def _holderify(self, res: T.Any) -> T.Any: return res - def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> bool: - return True + def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> UnknownValue: + return UnknownValue() def load_root_meson_file(self) -> None: super().load_root_meson_file() @@ -182,24 +278,50 @@ class AstInterpreter(InterpreterBase): buildfilename = os.path.join(subdir, environment.build_filename) sys.stderr.write(f'Unable to find build file {buildfilename} --> Skipping\n') - def 
method_call(self, node: BaseNode) -> bool: - return True + def inner_method_call(self, obj: BaseNode, method_name: str, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Any: + for arg in itertools.chain(args, kwargs.values()): + if isinstance(arg, UnknownValue): + return UnknownValue() + + if isinstance(obj, str): + result = StringHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs) + elif isinstance(obj, bool): + result = BooleanHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs) + elif isinstance(obj, int): + result = IntegerHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs) + elif isinstance(obj, list): + result = ArrayHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs) + elif isinstance(obj, dict): + result = DictHolder(obj, T.cast('Interpreter', self)).method_call(method_name, args, kwargs) + else: + return UnknownValue() + return result - def evaluate_fstring(self, node: mparser.StringNode) -> str: - assert isinstance(node, mparser.StringNode) - return node.value + def method_call(self, node: mparser.MethodNode) -> None: + invocable = node.source_object + self.evaluate_statement(invocable) + obj = self.node_to_runtime_value(invocable) + method_name = node.name.value + (args, kwargs) = self.reduce_arguments(node.args) + if is_disabled(args, kwargs): + res = Disabler() + else: + res = self.inner_method_call(obj, method_name, args, kwargs) + self.funcvals[node] = res + + def evaluate_fstring(self, node: mparser.StringNode) -> None: + pass - def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> TYPE_var: - return self.reduce_arguments(cur.args)[0] + def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> None: + for arg in cur.args.arguments: + self.evaluate_statement(arg) - def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> int: + def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> None: 
self.evaluate_statement(cur.left) self.evaluate_statement(cur.right) - return 0 - def evaluate_uminusstatement(self, cur: UMinusNode) -> int: + def evaluate_uminusstatement(self, cur: UMinusNode) -> None: self.evaluate_statement(cur.value) - return 0 def evaluate_ternary(self, node: TernaryNode) -> None: assert isinstance(node, TernaryNode) @@ -207,42 +329,27 @@ class AstInterpreter(InterpreterBase): self.evaluate_statement(node.trueblock) self.evaluate_statement(node.falseblock) - def evaluate_dictstatement(self, node: mparser.DictNode) -> TYPE_nkwargs: - def resolve_key(node: mparser.BaseNode) -> str: - if isinstance(node, mparser.StringNode): - return node.value - return '__AST_UNKNOWN__' - arguments, kwargs = self.reduce_arguments(node.args, key_resolver=resolve_key) - assert not arguments - self.argument_depth += 1 - for key, value in kwargs.items(): - if isinstance(key, BaseNode): - self.evaluate_statement(key) - self.argument_depth -= 1 - return {} - - def evaluate_plusassign(self, node: PlusAssignmentNode) -> None: - assert isinstance(node, PlusAssignmentNode) - # Cheat by doing a reassignment - self.assignments[node.var_name.value] = node.value # Save a reference to the value node - if node.value.ast_id: - self.reverse_assignment[node.value.ast_id] = node - self.assign_vals[node.var_name.value] = self.evaluate_statement(node.value) + def evaluate_dictstatement(self, node: mparser.DictNode) -> None: + for k, v in node.args.kwargs.items(): + self.evaluate_statement(k) + self.evaluate_statement(v) - def evaluate_indexing(self, node: IndexNode) -> int: - return 0 - - def unknown_function_called(self, func_name: str) -> None: - pass + def evaluate_indexing(self, node: IndexNode) -> None: + self.evaluate_statement(node.iobject) + self.evaluate_statement(node.index) def reduce_arguments( self, args: mparser.ArgumentNode, key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key, duplicate_key_error: T.Optional[str] = None, - ) -> 
T.Tuple[T.List[TYPE_var], TYPE_nkwargs]: + ) -> T.Tuple[T.List[T.Any], T.Any]: + for arg in args.arguments: + self.evaluate_statement(arg) + for value in args.kwargs.values(): + self.evaluate_statement(value) if isinstance(args, ArgumentNode): - kwargs: T.Dict[str, TYPE_var] = {} + kwargs = {} for key, val in args.kwargs.items(): kwargs[key_resolver(key)] = val if args.incorrect_order(): @@ -251,139 +358,370 @@ class AstInterpreter(InterpreterBase): else: return self.flatten_args(args), {} - def evaluate_comparison(self, node: ComparisonNode) -> bool: + def evaluate_comparison(self, node: ComparisonNode) -> None: self.evaluate_statement(node.left) self.evaluate_statement(node.right) - return False - def evaluate_andstatement(self, cur: AndNode) -> bool: + def evaluate_andstatement(self, cur: AndNode) -> None: self.evaluate_statement(cur.left) self.evaluate_statement(cur.right) - return False - def evaluate_orstatement(self, cur: OrNode) -> bool: + def evaluate_orstatement(self, cur: OrNode) -> None: self.evaluate_statement(cur.left) self.evaluate_statement(cur.right) - return False - def evaluate_notstatement(self, cur: NotNode) -> bool: + def evaluate_notstatement(self, cur: NotNode) -> None: self.evaluate_statement(cur.value) - return False + + def find_potential_writes(self, node: BaseNode) -> T.Set[str]: + if isinstance(node, mparser.ForeachClauseNode): + return {el.value for el in node.varnames} | self.find_potential_writes(node.block) + elif isinstance(node, mparser.CodeBlockNode): + ret = set() + for line in node.lines: + ret.update(self.find_potential_writes(line)) + return ret + elif isinstance(node, (AssignmentNode, PlusAssignmentNode)): + return set([node.var_name.value]) | self.find_potential_writes(node.value) + elif isinstance(node, IdNode): + return set() + elif isinstance(node, ArrayNode): + ret = set() + for arg in node.args.arguments: + ret.update(self.find_potential_writes(arg)) + return ret + elif isinstance(node, mparser.DictNode): + ret = 
set() + for k, v in node.args.kwargs.items(): + ret.update(self.find_potential_writes(k)) + ret.update(self.find_potential_writes(v)) + return ret + elif isinstance(node, FunctionNode): + ret = set() + for arg in node.args.arguments: + ret.update(self.find_potential_writes(arg)) + for arg in node.args.kwargs.values(): + ret.update(self.find_potential_writes(arg)) + return ret + elif isinstance(node, MethodNode): + ret = self.find_potential_writes(node.source_object) + for arg in node.args.arguments: + ret.update(self.find_potential_writes(arg)) + for arg in node.args.kwargs.values(): + ret.update(self.find_potential_writes(arg)) + return ret + elif isinstance(node, ArithmeticNode): + return self.find_potential_writes(node.left) | self.find_potential_writes(node.right) + elif isinstance(node, (mparser.NumberNode, mparser.StringNode, mparser.BreakNode, mparser.BooleanNode, mparser.ContinueNode)): + return set() + elif isinstance(node, mparser.IfClauseNode): + if isinstance(node.elseblock, EmptyNode): + ret = set() + else: + ret = self.find_potential_writes(node.elseblock.block) + for i in node.ifs: + ret.update(self.find_potential_writes(i)) + return ret + elif isinstance(node, mparser.IndexNode): + return self.find_potential_writes(node.iobject) | self.find_potential_writes(node.index) + elif isinstance(node, mparser.IfNode): + return self.find_potential_writes(node.condition) | self.find_potential_writes(node.block) + elif isinstance(node, (mparser.ComparisonNode, mparser.OrNode, mparser.AndNode)): + return self.find_potential_writes(node.left) | self.find_potential_writes(node.right) + elif isinstance(node, mparser.NotNode): + return self.find_potential_writes(node.value) + elif isinstance(node, mparser.TernaryNode): + return self.find_potential_writes(node.condition) | self.find_potential_writes(node.trueblock) | self.find_potential_writes(node.falseblock) + elif isinstance(node, mparser.UMinusNode): + return self.find_potential_writes(node.value) + elif 
isinstance(node, mparser.ParenthesizedNode): + return self.find_potential_writes(node.inner) + raise mesonlib.MesonBugException('Unhandled node type') def evaluate_foreach(self, node: ForeachClauseNode) -> None: + asses = self.find_potential_writes(node) + for ass in asses: + self.cur_assignments[ass].append((self.nesting.copy(), UnknownValue())) try: self.evaluate_codeblock(node.block) except ContinueRequest: pass except BreakRequest: pass + for ass in asses: + self.cur_assignments[ass].append((self.nesting.copy(), UnknownValue())) # In case the foreach loops 0 times. def evaluate_if(self, node: IfClauseNode) -> None: + self.nesting.append(0) for i in node.ifs: self.evaluate_codeblock(i.block) + self.nesting[-1] += 1 if not isinstance(node.elseblock, EmptyNode): self.evaluate_codeblock(node.elseblock.block) - - def get_variable(self, varname: str) -> int: - return 0 - - def assignment(self, node: AssignmentNode) -> None: - assert isinstance(node, AssignmentNode) - self.assignments[node.var_name.value] = node.value # Save a reference to the value node - if node.value.ast_id: - self.reverse_assignment[node.value.ast_id] = node - self.assign_vals[node.var_name.value] = self.evaluate_statement(node.value) # Evaluate the value just in case - - def resolve_node(self, node: BaseNode, include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.Optional[T.Any]: - def quick_resolve(n: BaseNode, loop_detect: T.Optional[T.List[str]] = None) -> T.Any: - if loop_detect is None: - loop_detect = [] - if isinstance(n, IdNode): - assert isinstance(n.value, str) - if n.value in loop_detect or n.value not in self.assignments: - return [] - return quick_resolve(self.assignments[n.value], loop_detect = loop_detect + [n.value]) - elif isinstance(n, ElementaryNode): - return n.value + self.nesting.pop() + for var_name in self.cur_assignments: + potential_values = [] + oldval = self.get_cur_value_if_defined(var_name) + if not isinstance(oldval, 
UndefinedVariable): + potential_values.append(oldval) + for nesting, value in self.cur_assignments[var_name]: + if len(nesting) > len(self.nesting): + potential_values.append(value) + self.cur_assignments[var_name] = [(nesting, v) for (nesting, v) in self.cur_assignments[var_name] if len(nesting) <= len(self.nesting)] + if len(potential_values) > 1 or (len(potential_values) > 0 and isinstance(oldval, UndefinedVariable)): + uv = UnknownValue() + for pv in potential_values: + self.dataflow_dag.add_edge(pv, uv) + self.cur_assignments[var_name].append((self.nesting.copy(), uv)) + + def func_files(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Any: + ret: T.List[T.Union[IntrospectionFile, UnknownValue]] = [] + for arg in args: + if isinstance(arg, str): + ret.append(IntrospectionFile(self.subdir, arg)) + elif isinstance(arg, UnknownValue): + ret.append(UnknownValue()) else: - return n - - if id_loop_detect is None: - id_loop_detect = [] - result = None - - if not isinstance(node, BaseNode): - return None - - assert node.ast_id - if node.ast_id in id_loop_detect: - return None # Loop detected - id_loop_detect += [node.ast_id] - - # Try to evaluate the value of the node - if isinstance(node, IdNode): - result = quick_resolve(node) - - elif isinstance(node, ElementaryNode): - result = node.value - - elif isinstance(node, NotNode): - result = self.resolve_node(node.value, include_unknown_args, id_loop_detect) - if isinstance(result, bool): - result = not result - + raise TypeError + return ret + + def get_cur_value_if_defined(self, var_name: str) -> T.Union[BaseNode, UnknownValue, UndefinedVariable]: + if var_name in {'meson', 'host_machine', 'build_machine', 'target_machine'}: + return UnknownValue() + ret: T.Union[BaseNode, UnknownValue, UndefinedVariable] = UndefinedVariable() + for nesting, value in reversed(self.cur_assignments[var_name]): + if len(self.nesting) >= len(nesting) and self.nesting[:len(nesting)] == nesting: + ret = value 
+ break + if isinstance(ret, UndefinedVariable) and self.tainted: + return UnknownValue() + return ret + + def get_cur_value(self, var_name: str) -> T.Union[BaseNode, UnknownValue]: + ret = self.get_cur_value_if_defined(var_name) + if isinstance(ret, UndefinedVariable): + path = mlog.get_relative_path(Path(self.current_node.filename), Path(os.getcwd())) + mlog.warning(f"{path}:{self.current_node.lineno}:{self.current_node.colno} will always crash if executed, since a variable named `{var_name}` is not defined") + # We could add more advanced analysis of code referencing undefined + # variables, but it is probably not worth the effort and the + # complexity. So we do the simplest thing, returning an + # UnknownValue. + return UnknownValue() + return ret + + # The function `node_to_runtime_value` takes a node of the ast as an + # argument and tries to return the same thing that would be passed to e.g. + # `func_message` if you put `message(node)` in your `meson.build` file and + # run `meson setup`. If this is not possible, `UnknownValue()` is returned. + # There are 3 Reasons why this is sometimes impossible: + # 1. Because the meson rewriter is imperfect and has not implemented everything yet + # 2. Because the value is different on different machines, example: + # ```meson + # node = somedep.found() + # message(node) + # ``` + # will print `true` on some machines and `false` on others, so + # `node_to_runtime_value` does not know whether to return `true` or + # `false` and will return `UnknownValue()`. + # 3. Here: + # ```meson + # foreach x : [1, 2] + # node = x + # message(node) + # endforeach + # ``` + # `node_to_runtime_value` does not know whether to return `1` or `2` and + # will return `UnknownValue()`. + # + # If you have something like + # ``` + # node = [123, somedep.found()] + # ``` + # `node_to_runtime_value` will return `[123, UnknownValue()]`. 
+ def node_to_runtime_value(self, node: T.Union[UnknownValue, BaseNode, TYPE_var]) -> T.Any: + if isinstance(node, (mparser.StringNode, mparser.BooleanNode, mparser.NumberNode)): + return node.value + elif isinstance(node, mparser.StringNode): + if node.is_fstring: + return UnknownValue() + else: + return node.value + elif isinstance(node, list): + return [self.node_to_runtime_value(x) for x in node] elif isinstance(node, ArrayNode): - result = node.args.arguments.copy() + return [self.node_to_runtime_value(x) for x in node.args.arguments] + elif isinstance(node, mparser.DictNode): + return {self.node_to_runtime_value(k): self.node_to_runtime_value(v) for k, v in node.args.kwargs.items()} + elif isinstance(node, IdNode): + assert len(self.dataflow_dag.tgt_to_srcs[node]) == 1 + val = next(iter(self.dataflow_dag.tgt_to_srcs[node])) + return self.node_to_runtime_value(val) + elif isinstance(node, (MethodNode, FunctionNode)): + funcval = self.funcvals[node] + if isinstance(funcval, (dict, str)): + return funcval + else: + return self.node_to_runtime_value(funcval) + elif isinstance(node, ArithmeticNode): + left = self.node_to_runtime_value(node.left) + right = self.node_to_runtime_value(node.right) + if isinstance(left, list) and isinstance(right, UnknownValue): + return left + [right] + if isinstance(right, list) and isinstance(left, UnknownValue): + return [left] + right + if isinstance(left, UnknownValue) or isinstance(right, UnknownValue): + return UnknownValue() + if node.operation == 'add': + if isinstance(left, dict) and isinstance(right, dict): + ret = left.copy() + for k, v in right.items(): + ret[k] = v + return ret + if isinstance(left, list): + if not isinstance(right, list): + right = [right] + return left + right + return left + right + elif node.operation == 'sub': + return left - right + elif node.operation == 'mul': + return left * right + elif node.operation == 'div': + if isinstance(left, int) and isinstance(right, int): + return left // right + elif 
isinstance(left, str) and isinstance(right, str): + return os.path.join(left, right).replace('\\', '/') + elif node.operation == 'mod': + if isinstance(left, int) and isinstance(right, int): + return left % right + elif isinstance(node, (UnknownValue, IntrospectionBuildTarget, IntrospectionFile, IntrospectionDependency, str, bool, int)): + return node + elif isinstance(node, mparser.IndexNode): + iobject = self.node_to_runtime_value(node.iobject) + index = self.node_to_runtime_value(node.index) + if isinstance(iobject, UnknownValue) or isinstance(index, UnknownValue): + return UnknownValue() + return iobject[index] + elif isinstance(node, mparser.ComparisonNode): + left = self.node_to_runtime_value(node.left) + right = self.node_to_runtime_value(node.right) + if isinstance(left, UnknownValue) or isinstance(right, UnknownValue): + return UnknownValue() + if node.ctype == '==': + return left == right + elif node.ctype == '!=': + return left != right + elif node.ctype == 'in': + return left in right + elif node.ctype == 'notin': + return left not in right + elif isinstance(node, mparser.TernaryNode): + cond = self.node_to_runtime_value(node.condition) + if isinstance(cond, UnknownValue): + return UnknownValue() + if cond is True: + return self.node_to_runtime_value(node.trueblock) + if cond is False: + return self.node_to_runtime_value(node.falseblock) + elif isinstance(node, mparser.OrNode): + left = self.node_to_runtime_value(node.left) + right = self.node_to_runtime_value(node.right) + if isinstance(left, UnknownValue) or isinstance(right, UnknownValue): + return UnknownValue() + return left or right + elif isinstance(node, mparser.AndNode): + left = self.node_to_runtime_value(node.left) + right = self.node_to_runtime_value(node.right) + if isinstance(left, UnknownValue) or isinstance(right, UnknownValue): + return UnknownValue() + return left and right + elif isinstance(node, mparser.UMinusNode): + val = self.node_to_runtime_value(node.value) + if isinstance(val, 
UnknownValue): + return val + if isinstance(val, (int, float)): + return -val + elif isinstance(node, mparser.NotNode): + val = self.node_to_runtime_value(node.value) + if isinstance(val, UnknownValue): + return val + if isinstance(val, bool): + return not val + elif isinstance(node, mparser.ParenthesizedNode): + return self.node_to_runtime_value(node.inner) + raise mesonlib.MesonBugException('Unhandled node type') - elif isinstance(node, ArgumentNode): - result = node.arguments.copy() + def assignment(self, node: AssignmentNode) -> None: + assert isinstance(node, AssignmentNode) + self.evaluate_statement(node.value) + self.cur_assignments[node.var_name.value].append((self.nesting.copy(), node.value)) + self.all_assignment_nodes[node.var_name.value].append(node) - elif isinstance(node, ArithmeticNode): - if node.operation != 'add': - return None # Only handle string and array concats - l = self.resolve_node(node.left, include_unknown_args, id_loop_detect) - r = self.resolve_node(node.right, include_unknown_args, id_loop_detect) - if isinstance(l, str) and isinstance(r, str): - result = l + r # String concatenation detected + def evaluate_plusassign(self, node: PlusAssignmentNode) -> None: + assert isinstance(node, PlusAssignmentNode) + self.evaluate_statement(node.value) + lhs = self.get_cur_value(node.var_name.value) + newval: T.Union[UnknownValue, ArithmeticNode] + if isinstance(lhs, UnknownValue): + newval = UnknownValue() + else: + newval = mparser.ArithmeticNode(operation='add', left=lhs, operator=_symbol('+'), right=node.value) + self.cur_assignments[node.var_name.value].append((self.nesting.copy(), newval)) + self.all_assignment_nodes[node.var_name.value].append(node) + + self.dataflow_dag.add_edge(lhs, newval) + self.dataflow_dag.add_edge(node.value, newval) + + def func_set_variable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None: + assert isinstance(node, FunctionNode) + if bool(node.args.kwargs): + raise 
InvalidArguments('set_variable accepts no keyword arguments') + if len(node.args.arguments) != 2: + raise InvalidArguments('set_variable requires exactly two positional arguments') + var_name = args[0] + value = node.args.arguments[1] + if isinstance(var_name, UnknownValue): + self.evaluate_statement(value) + self.tainted = True + return + assert isinstance(var_name, str) + equiv = AssignmentNode(var_name=IdNode(Token('', '', 0, 0, 0, (0, 0), var_name)), value=value, operator=_symbol('=')) + equiv.ast_id = str(id(equiv)) + self.evaluate_statement(equiv) + + def func_get_variable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Any: + assert isinstance(node, FunctionNode) + var_name = args[0] + assert isinstance(var_name, str) + val = self.get_cur_value(var_name) + self.dataflow_dag.add_edge(val, node) + return val + + def func_unset_variable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None: + assert isinstance(node, FunctionNode) + if bool(node.args.kwargs): + raise InvalidArguments('unset_variable accepts no keyword arguments') + if len(node.args.arguments) != 1: + raise InvalidArguments('unset_variable requires exactly one positional arguments') + var_name = args[0] + assert isinstance(var_name, str) + self.cur_assignments[var_name].append((self.nesting.copy(), node)) + + def nodes_to_pretty_filelist(self, root_path: Path, subdir: str, nodes: T.List[BaseNode]) -> T.List[T.Union[str, UnknownValue]]: + def src_to_abs(src: T.Union[str, IntrospectionFile, UnknownValue]) -> T.Union[str, UnknownValue]: + if isinstance(src, str): + return os.path.normpath(os.path.join(root_path, subdir, src)) + elif isinstance(src, IntrospectionFile): + return str(src.to_abs_path(root_path)) + elif isinstance(src, UnknownValue): + return src else: - result = self.flatten_args(l, include_unknown_args, id_loop_detect) + self.flatten_args(r, include_unknown_args, id_loop_detect) - - elif isinstance(node, MethodNode): - 
src = quick_resolve(node.source_object) - margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect) - mkwargs: T.Dict[str, TYPE_var] = {} - method_name = node.name.value - try: - if isinstance(src, str): - result = StringHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs) - elif isinstance(src, bool): - result = BooleanHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs) - elif isinstance(src, int): - result = IntegerHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs) - elif isinstance(src, list): - result = ArrayHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs) - elif isinstance(src, dict): - result = DictHolder(src, T.cast('Interpreter', self)).method_call(method_name, margs, mkwargs) - except mesonlib.MesonException: - return None - - # Ensure that the result is fully resolved (no more nodes) - if isinstance(result, BaseNode): - result = self.resolve_node(result, include_unknown_args, id_loop_detect) - elif isinstance(result, list): - new_res: T.List[TYPE_var] = [] - for i in result: - if isinstance(i, BaseNode): - resolved = self.resolve_node(i, include_unknown_args, id_loop_detect) - if resolved is not None: - new_res += self.flatten_args(resolved, include_unknown_args, id_loop_detect) - else: - new_res += [i] - result = new_res + raise TypeError - return result + rtvals: T.List[T.Any] = flatten([self.node_to_runtime_value(sn) for sn in nodes]) + return [src_to_abs(x) for x in rtvals] - def flatten_args(self, args_raw: T.Union[TYPE_var, T.Sequence[TYPE_var]], include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.List[TYPE_var]: + def flatten_args(self, args_raw: T.Union[TYPE_nvar, T.Sequence[TYPE_nvar]], include_unknown_args: bool = False) -> T.List[TYPE_var]: # Make sure we are always dealing with lists if isinstance(args_raw, list): args = args_raw @@ -395,14 +733,38 @@ class 
AstInterpreter(InterpreterBase): # Resolve the contents of args for i in args: if isinstance(i, BaseNode): - resolved = self.resolve_node(i, include_unknown_args, id_loop_detect) + resolved = self.node_to_runtime_value(i) if resolved is not None: if not isinstance(resolved, list): resolved = [resolved] flattened_args += resolved - elif isinstance(i, (str, bool, int, float)) or include_unknown_args: + elif isinstance(i, (str, bool, int, float, UnknownValue, IntrospectionFile)) or include_unknown_args: flattened_args += [i] + else: + raise NotImplementedError return flattened_args def evaluate_testcase(self, node: TestCaseClauseNode) -> Disabler | None: return Disabler(subproject=self.subproject) + + def evaluate_statement(self, cur: mparser.BaseNode) -> T.Optional[InterpreterObject]: + if hasattr(cur, 'args'): + for arg in cur.args.arguments: + self.dataflow_dag.add_edge(arg, cur) + for k, v in cur.args.kwargs.items(): + self.dataflow_dag.add_edge(v, cur) + for attr in ['source_object', 'left', 'right', 'items', 'iobject', 'index', 'condition']: + if hasattr(cur, attr): + assert isinstance(getattr(cur, attr), mparser.BaseNode) + self.dataflow_dag.add_edge(getattr(cur, attr), cur) + if isinstance(cur, mparser.IdNode): + self.dataflow_dag.add_edge(self.get_cur_value(cur.value), cur) + return None + else: + return super().evaluate_statement(cur) + + def function_call(self, node: mparser.FunctionNode) -> T.Any: + ret = super().function_call(node) + if ret is not None: + self.funcvals[node] = ret + return ret diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py index 4eb3fec..147436d 100644 --- a/mesonbuild/ast/introspection.py +++ b/mesonbuild/ast/introspection.py @@ -6,19 +6,17 @@ # or an interpreter-based tool from __future__ import annotations -import copy import os import typing as T from .. import compilers, environment, mesonlib, options -from .. 
import coredata as cdata from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary from ..compilers import detect_compiler_for -from ..interpreterbase import InvalidArguments, SubProject +from ..interpreterbase import InvalidArguments, SubProject, UnknownValue from ..mesonlib import MachineChoice from ..options import OptionKey -from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode -from .interpreter import AstInterpreter +from ..mparser import BaseNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode +from .interpreter import AstInterpreter, IntrospectionBuildTarget, IntrospectionDependency if T.TYPE_CHECKING: from ..build import BuildTarget @@ -44,8 +42,11 @@ class IntrospectionHelper: return NotImplemented class IntrospectionInterpreter(AstInterpreter): - # Interpreter to detect the options without a build directory - # Most of the code is stolen from interpreter.Interpreter + # If you run `meson setup ...` the `Interpreter`-class walks over the AST. + # If you run `meson rewrite ...` and `meson introspect meson.build ...`, + # the `AstInterpreter`-class walks over the AST. + # Works without a build directory. + # Most of the code is stolen from interpreter.Interpreter . 
def __init__(self, source_root: str, subdir: str, @@ -61,11 +62,10 @@ class IntrospectionInterpreter(AstInterpreter): self.cross_file = cross_file self.backend = backend - self.default_options = {OptionKey('backend'): self.backend} self.project_data: T.Dict[str, T.Any] = {} - self.targets: T.List[T.Dict[str, T.Any]] = [] - self.dependencies: T.List[T.Dict[str, T.Any]] = [] - self.project_node: BaseNode = None + self.targets: T.List[IntrospectionBuildTarget] = [] + self.dependencies: T.List[IntrospectionDependency] = [] + self.project_node: FunctionNode = None self.funcs.update({ 'add_languages': self.func_add_languages, @@ -83,6 +83,7 @@ class IntrospectionInterpreter(AstInterpreter): def func_project(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None: if self.project_node: raise InvalidArguments('Second call to project()') + assert isinstance(node, FunctionNode) self.project_node = node if len(args) < 1: raise InvalidArguments('Not enough arguments to project(). 
Needs at least the project name.') @@ -99,6 +100,16 @@ class IntrospectionInterpreter(AstInterpreter): return [node.value] return None + def create_options_dict(options: T.List[str], subproject: str = '') -> T.Mapping[OptionKey, str]: + result: T.MutableMapping[OptionKey, str] = {} + for o in options: + try: + (key, value) = o.split('=', 1) + except ValueError: + raise mesonlib.MesonException(f'Option {o!r} must have a value separated by equals sign.') + result[OptionKey(key)] = value + return result + proj_name = args[0] proj_vers = kwargs.get('version', 'undefined') if isinstance(proj_vers, ElementaryNode): @@ -114,25 +125,6 @@ class IntrospectionInterpreter(AstInterpreter): self._load_option_file() - def_opts = self.flatten_args(kwargs.get('default_options', [])) - _project_default_options = mesonlib.stringlistify(def_opts) - string_dict = cdata.create_options_dict(_project_default_options, self.subproject) - self.project_default_options = {OptionKey(s): v for s, v in string_dict.items()} - self.default_options.update(self.project_default_options) - if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects): - if self.subproject == '': - self.coredata.optstore.initialize_from_top_level_project_call( - T.cast('T.Dict[T.Union[OptionKey, str], str]', string_dict), - {}, # TODO: not handled by this Interpreter. - self.environment.options) - else: - self.coredata.optstore.initialize_from_subproject_call( - self.subproject, - {}, # TODO: this isn't handled by the introspection interpreter... - T.cast('T.Dict[T.Union[OptionKey, str], str]', string_dict), - {}) # TODO: this isn't handled by the introspection interpreter... 
- self.coredata.initialized_subprojects.add(self.subproject) - if not self.is_subproject() and 'subproject_dir' in kwargs: spdirname = kwargs['subproject_dir'] if isinstance(spdirname, StringNode): @@ -164,10 +156,10 @@ class IntrospectionInterpreter(AstInterpreter): except (mesonlib.MesonException, RuntimeError): pass - def func_add_languages(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None: + def func_add_languages(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> UnknownValue: kwargs = self.flatten_kwargs(kwargs) required = kwargs.get('required', True) - assert isinstance(required, (bool, options.UserFeatureOption)), 'for mypy' + assert isinstance(required, (bool, options.UserFeatureOption, UnknownValue)), 'for mypy' if isinstance(required, options.UserFeatureOption): required = required.is_enabled() if 'native' in kwargs: @@ -176,8 +168,9 @@ class IntrospectionInterpreter(AstInterpreter): else: for for_machine in [MachineChoice.BUILD, MachineChoice.HOST]: self._add_languages(args, required, for_machine) + return UnknownValue() - def _add_languages(self, raw_langs: T.List[TYPE_var], required: bool, for_machine: MachineChoice) -> None: + def _add_languages(self, raw_langs: T.List[TYPE_var], required: T.Union[bool, UnknownValue], for_machine: MachineChoice) -> None: langs: T.List[str] = [] for l in self.flatten_args(raw_langs): if isinstance(l, str): @@ -192,48 +185,47 @@ class IntrospectionInterpreter(AstInterpreter): comp = detect_compiler_for(self.environment, lang, for_machine, True, self.subproject) except mesonlib.MesonException: # do we even care about introspecting this language? 
- if required: + if isinstance(required, UnknownValue) or required: raise else: continue - if self.subproject: - options = {} - for k in comp.get_options(): - v = copy.copy(self.coredata.optstore.get_value_object(k)) - k = k.evolve(subproject=self.subproject) - options[k] = v - self.coredata.add_compiler_options(options, lang, for_machine, self.environment, self.subproject) - - def func_dependency(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> None: + if comp: + self.coredata.process_compiler_options(lang, comp, self.subproject) + + def func_dependency(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[IntrospectionDependency]: + assert isinstance(node, FunctionNode) args = self.flatten_args(args) kwargs = self.flatten_kwargs(kwargs) if not args: - return + return None name = args[0] + assert isinstance(name, (str, UnknownValue)) has_fallback = 'fallback' in kwargs required = kwargs.get('required', True) version = kwargs.get('version', []) - if not isinstance(version, list): - version = [version] - if isinstance(required, ElementaryNode): - required = required.value - if not isinstance(required, bool): - required = False - self.dependencies += [{ - 'name': name, - 'required': required, - 'version': version, - 'has_fallback': has_fallback, - 'conditional': node.condition_level > 0, - 'node': node - }] - - def build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs_raw: T.Dict[str, TYPE_var], targetclass: T.Type[BuildTarget]) -> T.Optional[T.Dict[str, T.Any]]: + if not isinstance(version, UnknownValue): + if not isinstance(version, list): + version = [version] + assert all(isinstance(el, str) for el in version) + version = T.cast(T.List[str], version) + assert isinstance(required, (bool, UnknownValue)) + newdep = IntrospectionDependency( + name=name, + required=required, + version=version, + has_fallback=has_fallback, + conditional=node.condition_level > 0, + node=node) + 
self.dependencies += [newdep] + return newdep + + def build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs_raw: T.Dict[str, TYPE_var], targetclass: T.Type[BuildTarget]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: + assert isinstance(node, FunctionNode) args = self.flatten_args(args) if not args or not isinstance(args[0], str): - return None + return UnknownValue() name = args[0] - srcqueue = [node] + srcqueue: T.List[BaseNode] = [node] extra_queue = [] # Process the sources BEFORE flattening the kwargs, to preserve the original nodes @@ -245,43 +237,23 @@ class IntrospectionInterpreter(AstInterpreter): kwargs = self.flatten_kwargs(kwargs_raw, True) - def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]: - res: T.List[BaseNode] = [] - while inqueue: - curr = inqueue.pop(0) - arg_node = None - assert isinstance(curr, BaseNode) - if isinstance(curr, FunctionNode): - arg_node = curr.args - elif isinstance(curr, ArrayNode): - arg_node = curr.args - elif isinstance(curr, IdNode): - # Try to resolve the ID and append the node to the queue - assert isinstance(curr.value, str) - var_name = curr.value - if var_name in self.assignments: - tmp_node = self.assignments[var_name] - if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)): - inqueue += [tmp_node] - elif isinstance(curr, ArithmeticNode): - inqueue += [curr.left, curr.right] - if arg_node is None: - continue - arg_nodes = arg_node.arguments.copy() - # Pop the first element if the function is a build target function - if isinstance(curr, FunctionNode) and curr.func_name.value in BUILD_TARGET_FUNCTIONS: - arg_nodes.pop(0) - elementary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))] - inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))] - if elementary_nodes: - res += [curr] - return res - - source_nodes = traverse_nodes(srcqueue) - extraf_nodes = traverse_nodes(extra_queue) + oldlen = len(node.args.arguments) + 
source_nodes = node.args.arguments[1:] + for k, v in node.args.kwargs.items(): + assert isinstance(k, IdNode) + if k.value == 'sources': + source_nodes.append(v) + assert oldlen == len(node.args.arguments) + + extraf_nodes = None + for k, v in node.args.kwargs.items(): + assert isinstance(k, IdNode) + if k.value == 'extra_files': + assert extraf_nodes is None + extraf_nodes = v # Make sure nothing can crash when creating the build class - kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always'}} + kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always', 'name_prefix'}} kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()} kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)} for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST @@ -293,26 +265,25 @@ class IntrospectionInterpreter(AstInterpreter): self.environment, self.coredata.compilers[for_machine], kwargs_reduced) target.process_compilers_late() - new_target = { - 'name': target.get_basename(), - 'machine': target.for_machine.get_lower_case_name(), - 'id': target.get_id(), - 'type': target.get_typename(), - 'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)), - 'subdir': self.subdir, - 'build_by_default': target.build_by_default, - 'installed': target.should_install(), - 'outputs': target.get_outputs(), - 'sources': source_nodes, - 'extra_files': extraf_nodes, - 'kwargs': kwargs, - 'node': node, - } + new_target = IntrospectionBuildTarget( + name=target.get_basename(), + machine=target.for_machine.get_lower_case_name(), + id=target.get_id(), + typename=target.get_typename(), + defined_in=os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)), + 
subdir=self.subdir, + build_by_default=target.build_by_default, + installed=target.should_install(), + outputs=target.get_outputs(), + source_nodes=source_nodes, + extra_files=extraf_nodes, + kwargs=kwargs, + node=node) self.targets += [new_target] return new_target - def build_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def build_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: default_library = self.coredata.optstore.get_value_for(OptionKey('default_library')) if default_library == 'shared': return self.build_target(node, args, kwargs, SharedLibrary) @@ -322,28 +293,28 @@ class IntrospectionInterpreter(AstInterpreter): return self.build_target(node, args, kwargs, SharedLibrary) return None - def func_executable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def func_executable(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: return self.build_target(node, args, kwargs, Executable) - def func_static_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def func_static_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: return self.build_target(node, args, kwargs, StaticLibrary) - def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: return self.build_target(node, args, kwargs, SharedLibrary) - def func_both_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> 
T.Optional[T.Dict[str, T.Any]]: + def func_both_lib(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: return self.build_target(node, args, kwargs, SharedLibrary) - def func_shared_module(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def func_shared_module(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: return self.build_target(node, args, kwargs, SharedModule) - def func_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def func_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: return self.build_library(node, args, kwargs) - def func_jar(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def func_jar(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: return self.build_target(node, args, kwargs, Jar) - def func_build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Optional[T.Dict[str, T.Any]]: + def func_build_target(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]: if 'target_type' not in kwargs: return None target_type = kwargs.pop('target_type') @@ -395,7 +366,7 @@ class IntrospectionInterpreter(AstInterpreter): flattened_kwargs = {} for key, val in kwargs.items(): if isinstance(val, BaseNode): - resolved = self.resolve_node(val, include_unknown_args) + resolved = self.node_to_runtime_value(val) if resolved is not None: flattened_kwargs[key] = resolved elif isinstance(val, (str, bool, int, float)) or include_unknown_args: diff 
--git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py index 4ce3b3f..0d0c821 100644 --- a/mesonbuild/ast/printer.py +++ b/mesonbuild/ast/printer.py @@ -7,12 +7,46 @@ from __future__ import annotations from .. import mparser from .visitor import AstVisitor, FullAstVisitor +from ..mesonlib import MesonBugException import re import typing as T +# Also known as "order of operations" or "binding power". +# This is the counterpart to Parser.e1, Parser.e2, Parser.e3, Parser.e4, Parser.e5, Parser.e6, Parser.e7, Parser.e8, Parser.e9, Parser.e10 +def precedence_level(node: mparser.BaseNode) -> int: + if isinstance(node, (mparser.PlusAssignmentNode, mparser.AssignmentNode, mparser.TernaryNode)): + return 1 + elif isinstance(node, mparser.OrNode): + return 2 + elif isinstance(node, mparser.AndNode): + return 3 + elif isinstance(node, mparser.ComparisonNode): + return 4 + elif isinstance(node, mparser.ArithmeticNode): + if node.operation in {'add', 'sub'}: + return 5 + elif node.operation in {'mod', 'mul', 'div'}: + return 6 + elif isinstance(node, (mparser.NotNode, mparser.UMinusNode)): + return 7 + elif isinstance(node, mparser.FunctionNode): + return 8 + elif isinstance(node, (mparser.ArrayNode, mparser.DictNode)): + return 9 + elif isinstance(node, (mparser.BooleanNode, mparser.IdNode, mparser.NumberNode, mparser.StringNode, mparser.EmptyNode)): + return 10 + elif isinstance(node, mparser.ParenthesizedNode): + # Parenthesize have the highest binding power, but since the AstPrinter + # ignores ParanthesizedNode, the binding power of the inner node is + # relevant. 
+ return precedence_level(node.inner) + raise MesonBugException('Unhandled node type') + class AstPrinter(AstVisitor): + escape_trans: T.Dict[int, str] = str.maketrans({'\\': '\\\\', "'": "\'"}) + def __init__(self, indent: int = 2, arg_newline_cutoff: int = 5, update_ast_line_nos: bool = False): self.result = '' self.indent = indent @@ -57,7 +91,7 @@ class AstPrinter(AstVisitor): node.lineno = self.curr_line or node.lineno def escape(self, val: str) -> str: - return val.replace('\\', '\\\\').replace("'", "\'") + return val.translate(self.escape_trans) def visit_StringNode(self, node: mparser.StringNode) -> None: assert isinstance(node.value, str) @@ -108,11 +142,21 @@ class AstPrinter(AstVisitor): node.lineno = self.curr_line or node.lineno node.right.accept(self) + def maybe_parentheses(self, outer: mparser.BaseNode, inner: mparser.BaseNode, parens: bool) -> None: + if parens: + self.append('(', inner) + inner.accept(self) + if parens: + self.append(')', inner) + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: - node.left.accept(self) + prec = precedence_level(node) + prec_left = precedence_level(node.left) + prec_right = precedence_level(node.right) + self.maybe_parentheses(node, node.left, prec > prec_left) self.append_padded(node.operator.value, node) node.lineno = self.curr_line or node.lineno - node.right.accept(self) + self.maybe_parentheses(node, node.right, prec > prec_right or (prec == prec_right and node.operation in {'sub', 'div', 'mod'})) def visit_NotNode(self, node: mparser.NotNode) -> None: node.lineno = self.curr_line or node.lineno diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 3dfa2fb..ed57a4c 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -29,7 +29,7 @@ from ..mesonlib import ( File, MachineChoice, MesonException, MesonBugException, OrderedSet, ExecutableSerialisation, EnvironmentException, classify_unity_sources, get_compiler_for_source, - 
is_parent_path, + is_parent_path, get_rsp_threshold, ) from ..options import OptionKey @@ -533,6 +533,7 @@ class Backend: capture: T.Optional[str] = None, feed: T.Optional[str] = None, env: T.Optional[mesonlib.EnvironmentVariables] = None, + can_use_rsp_file: bool = False, tag: T.Optional[str] = None, verbose: bool = False, installdir_map: T.Optional[T.Dict[str, str]] = None) -> 'ExecutableSerialisation': @@ -563,9 +564,7 @@ class Backend: cmd_args: T.List[str] = [] for c in raw_cmd_args: if isinstance(c, programs.ExternalProgram): - p = c.get_path() - assert isinstance(p, str) - cmd_args.append(p) + cmd_args += c.get_command() elif isinstance(c, (build.BuildTarget, build.CustomTarget)): cmd_args.append(self.get_target_filename_abs(c)) elif isinstance(c, mesonlib.File): @@ -594,6 +593,21 @@ class Backend: exe_wrapper = None workdir = workdir or self.environment.get_build_dir() + + # Must include separators as well + needs_rsp_file = can_use_rsp_file and sum(len(i) + 1 for i in cmd_args) >= get_rsp_threshold() + + if needs_rsp_file: + hasher = hashlib.sha1() + args = ' '.join(mesonlib.quote_arg(arg) for arg in cmd_args) + hasher.update(args.encode(encoding='utf-8', errors='ignore')) + digest = hasher.hexdigest() + scratch_file = f'meson_rsp_{digest}.rsp' + rsp_file = os.path.join(self.environment.get_scratch_dir(), scratch_file) + with open(rsp_file, 'w', encoding='utf-8', newline='\n') as f: + f.write(args) + cmd_args = [f'@{rsp_file}'] + return ExecutableSerialisation(exe_cmd + cmd_args, env, exe_wrapper, workdir, extra_paths, capture, feed, tag, verbose, installdir_map) @@ -606,6 +620,7 @@ class Backend: feed: T.Optional[str] = None, force_serialize: bool = False, env: T.Optional[mesonlib.EnvironmentVariables] = None, + can_use_rsp_file: bool = False, verbose: bool = False) -> T.Tuple[T.List[str], str]: ''' Serialize an executable for running with a generator or a custom target @@ -613,7 +628,7 @@ class Backend: cmd: T.List[T.Union[str, mesonlib.File, 
build.BuildTarget, build.CustomTarget, programs.ExternalProgram]] = [] cmd.append(exe) cmd.extend(cmd_args) - es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, verbose=verbose) + es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, can_use_rsp_file, verbose=verbose) reasons: T.List[str] = [] if es.extra_paths: reasons.append('to set PATH') @@ -653,6 +668,9 @@ class Backend: envlist.append(f'{k}={v}') return ['env'] + envlist + es.cmd_args, ', '.join(reasons) + if any(a.startswith('@') for a in es.cmd_args): + reasons.append('because command is too long') + if not force_serialize: if not capture and not feed: return es.cmd_args, '' @@ -1067,11 +1085,6 @@ class Backend: if compiler.language == 'vala': if dep.type_name == 'pkgconfig': assert isinstance(dep, dependencies.ExternalDependency) - if dep.name == 'glib-2.0' and dep.version_reqs is not None: - for req in dep.version_reqs: - if req.startswith(('>=', '==')): - commands += ['--target-glib', req[2:]] - break commands += ['--pkg', dep.name] elif isinstance(dep, dependencies.ExternalLibrary): commands += dep.get_link_args('vala') @@ -1083,6 +1096,32 @@ class Backend: commands += dep.get_exe_args(compiler) # For 'automagic' deps: Boost and GTest. Also dependency('threads'). 
# pkg-config puts the thread flags itself via `Cflags:` + if compiler.language == 'vala': + # Vala wants to know the minimum glib version + for dep in target.added_deps: + if dep.name == 'glib-2.0': + if dep.type_name == 'pkgconfig': + assert isinstance(dep, dependencies.ExternalDependency) + if dep.version_reqs is not None: + for req in dep.version_reqs: + if req.startswith(('>=', '==')): + commands += ['--target-glib', req[2:]] + break + elif isinstance(dep, dependencies.InternalDependency) and dep.version is not None: + glib_version = dep.version.split('.') + if len(glib_version) != 3: + mlog.warning(f'GLib version has unexpected format: {dep.version}') + break + try: + # If GLib version is a development version, downgrade + # --target-glib to the previous version, as valac will + # complain about non-even minor versions + glib_version[1] = str((int(glib_version[1]) // 2) * 2) + except ValueError: + mlog.warning(f'GLib version has unexpected format: {dep.version}') + break + commands += ['--target-glib', f'{glib_version[0]}.{glib_version[1]}'] + # Fortran requires extra include directives. 
if compiler.language == 'fortran': for lt in chain(target.link_targets, target.link_whole_targets): @@ -1258,12 +1297,9 @@ class Backend: extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]] = [] if isinstance(exe, build.CustomTarget): extra_bdeps = list(exe.get_transitive_build_target_deps()) + extra_bdeps.extend(t.depends) + extra_bdeps.extend(a for a in t.cmd_args if isinstance(a, build.BuildTarget)) extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps) - for a in t.cmd_args: - if isinstance(a, build.BuildTarget): - for p in self.determine_windows_extra_paths(a, []): - if p not in extra_paths: - extra_paths.append(p) else: extra_paths = [] @@ -1289,8 +1325,12 @@ class Backend: else: raise MesonException('Bad object in test command.') + # set LD_LIBRARY_PATH for + # a) dependencies, as relying on rpath is not very safe: + # https://github.com/mesonbuild/meson/pull/11119 + # b) depends and targets passed via args. t_env = copy.deepcopy(t.env) - if not machine.is_windows() and not machine.is_cygwin() and not machine.is_darwin(): + if not machine.is_windows() and not machine.is_cygwin(): ld_lib_path_libs: T.Set[build.SharedLibrary] = set() for d in depends: if isinstance(d, build.BuildTarget): @@ -1303,6 +1343,8 @@ class Backend: if ld_lib_path: t_env.prepend('LD_LIBRARY_PATH', list(ld_lib_path), ':') + if machine.is_darwin(): + t_env.prepend('DYLD_LIBRARY_PATH', list(ld_lib_path), ':') ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross, exe_wrapper, self.environment.need_exe_wrapper(), @@ -1562,7 +1604,7 @@ class Backend: def eval_custom_target_command( self, target: build.CustomTarget, absolute_outputs: bool = False) -> \ - T.Tuple[T.List[str], T.List[str], T.List[str]]: + T.Tuple[T.List[str], T.List[str], T.List[str | programs.ExternalProgram]]: # We want the outputs to be absolute only when using the VS backend # XXX: Maybe allow the vs backend to use relative paths too? 
source_root = self.build_to_src @@ -1575,7 +1617,7 @@ class Backend: outputs = [os.path.join(outdir, i) for i in target.get_outputs()] inputs = self.get_custom_target_sources(target) # Evaluate the command list - cmd: T.List[str] = [] + cmd: T.List[str | programs.ExternalProgram] = [] for i in target.command: if isinstance(i, build.BuildTarget): cmd += self.build_target_to_cmd_array(i) @@ -1611,6 +1653,9 @@ class Backend: if not target.absolute_paths: pdir = self.get_target_private_dir(target) i = i.replace('@PRIVATE_DIR@', pdir) + elif isinstance(i, programs.ExternalProgram): + # Let it pass and be extended elsewhere + pass else: raise RuntimeError(f'Argument {i} is of unknown type {type(i)}') cmd.append(i) @@ -1635,7 +1680,7 @@ class Backend: # fixed. # # https://github.com/mesonbuild/meson/pull/737 - cmd = [i.replace('\\', '/') for i in cmd] + cmd = [i.replace('\\', '/') if isinstance(i, str) else i for i in cmd] return inputs, outputs, cmd def get_introspect_command(self) -> str: @@ -1996,6 +2041,8 @@ class Backend: compiler += [j] elif isinstance(j, (build.BuildTarget, build.CustomTarget)): compiler += j.get_outputs() + elif isinstance(j, programs.ExternalProgram): + compiler += j.get_command() else: raise RuntimeError(f'Type "{type(j).__name__}" is not supported in get_introspection_data. 
This is a bug') diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index d7de987..73f2db7 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -500,11 +500,6 @@ class NinjaBackend(backends.Backend): # - https://github.com/mesonbuild/meson/pull/9453 # - https://github.com/mesonbuild/meson/issues/9479#issuecomment-953485040 self.allow_thin_archives = PerMachine[bool](True, True) - if self.environment: - for for_machine in MachineChoice: - if 'cuda' in self.environment.coredata.compilers[for_machine]: - mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine)) - self.allow_thin_archives[for_machine] = False def create_phony_target(self, dummy_outfile: str, rulename: str, phony_infilename: str) -> NinjaBuildElement: ''' @@ -595,6 +590,12 @@ class NinjaBackend(backends.Backend): # We don't yet have a use case where we'd expect to make use of this, # so no harm in catching and reporting something unexpected. 
raise MesonBugException('We do not expect the ninja backend to be given a valid \'vslite_ctx\'') + if self.environment: + for for_machine in MachineChoice: + if 'cuda' in self.environment.coredata.compilers[for_machine]: + mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine)) + self.allow_thin_archives[for_machine] = False + ninja = environment.detect_ninja_command_and_version(log=True) if self.environment.coredata.optstore.get_value_for(OptionKey('vsenv')): builddir = Path(self.environment.get_build_dir()) @@ -1223,6 +1224,7 @@ class NinjaBackend(backends.Backend): capture=ofilenames[0] if target.capture else None, feed=srcs[0] if target.feed else None, env=target.env, + can_use_rsp_file=target.rspable, verbose=target.console) if reason: cmd_type = f' (wrapped by meson {reason})' @@ -1765,6 +1767,9 @@ class NinjaBackend(backends.Backend): girname = os.path.join(self.get_target_dir(target), target.vala_gir) args += ['--gir', os.path.join('..', target.vala_gir)] valac_outputs.append(girname) + shared_target = target.get('shared') + if isinstance(shared_target, build.SharedLibrary): + args += ['--shared-library', self.get_target_filename_for_linking(shared_target)] # Install GIR to default location if requested by user if len(target.install_dir) > 3 and target.install_dir[3] is True: target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0') @@ -1775,7 +1780,7 @@ class NinjaBackend(backends.Backend): gres_xml, = self.get_custom_target_sources(gensrc) args += ['--gresources=' + gres_xml] for source_dir in gensrc.source_dirs: - gres_dirs += [os.path.join(self.get_target_dir(gensrc), source_dir)] + gres_dirs += [source_dir] # Ensure that resources are built before vala sources # This is required since vala code using [GtkTemplate] effectively depends on .ui files # GResourceHeaderTarget is not suitable due to lacking depfile @@ -2261,6 +2266,10 @@ class 
NinjaBackend(backends.Backend): os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True) compile_args = self.generate_basic_compiler_args(target, swiftc) compile_args += swiftc.get_module_args(module_name) + if mesonlib.version_compare(swiftc.version, '>=5.9'): + compile_args += swiftc.get_cxx_interoperability_args(target.compilers) + compile_args += self.build.get_project_args(swiftc, target.subproject, target.for_machine) + compile_args += self.build.get_global_args(swiftc, target.for_machine) for i in reversed(target.get_include_dirs()): basedir = i.get_curdir() for d in i.get_incdirs(): @@ -3127,9 +3136,9 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) # If TASKING compiler family is used and MIL linking is enabled for the target, # then compilation rule name is a special one to output MIL files # instead of object files for .c files - key = OptionKey('b_lto') if compiler.get_id() == 'tasking': - if ((isinstance(target, build.StaticLibrary) and target.prelink) or target.get_option(key)) and src.rsplit('.', 1)[1] in compilers.lang_suffixes['c']: + target_lto = self.get_target_option(target, OptionKey('b_lto', machine=target.for_machine, subproject=target.subproject)) + if ((isinstance(target, build.StaticLibrary) and target.prelink) or target_lto) and src.rsplit('.', 1)[1] in compilers.lang_suffixes['c']: compiler_name = self.get_compiler_rule_name('tasking_mil_compile', compiler.for_machine) else: compiler_name = self.compiler_to_rule_name(compiler) @@ -3688,7 +3697,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs) elem.add_dep(dep_targets + custom_target_libraries) if linker.get_id() == 'tasking': - if len([x for x in dep_targets + custom_target_libraries if x.endswith('.ma')]) > 0 and not target.get_option(OptionKey('b_lto')): + if len([x for x in dep_targets + custom_target_libraries if x.endswith('.ma')]) > 0 and not 
self.get_target_option(target, OptionKey('b_lto', target.subproject, target.for_machine)): raise MesonException(f'Tried to link the target named \'{target.name}\' with a MIL archive without LTO enabled! This causes the compiler to ignore the archive.') # Compiler args must be included in TI C28x linker commands. diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 283f9f0..deb3dfb 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -147,6 +147,9 @@ class Vs2010Backend(backends.Backend): self.handled_target_deps = {} self.gen_lite = gen_lite # Synonymous with generating the simpler makefile-style multi-config projects that invoke 'meson compile' builds, avoiding native MSBuild complications + def detect_toolset(self) -> None: + pass + def get_target_private_dir(self, target): return os.path.join(self.get_target_dir(target), target.get_id()) @@ -227,6 +230,7 @@ class Vs2010Backend(backends.Backend): # Check for (currently) unexpected capture arg use cases - if capture: raise MesonBugException('We do not expect any vs backend to generate with \'capture = True\'') + self.detect_toolset() host_machine = self.environment.machines.host.cpu_family if host_machine in {'64', 'x86_64'}: # amd64 or x86_64 @@ -619,7 +623,8 @@ class Vs2010Backend(backends.Backend): conftype='Utility', target_ext=None, target_platform=None, - gen_manifest=True) -> T.Tuple[ET.Element, ET.Element]: + gen_manifest=True, + masm_type: T.Optional[T.Literal['masm', 'marmasm']] = None) -> T.Tuple[ET.Element, ET.Element]: root = ET.Element('Project', {'DefaultTargets': "Build", 'ToolsVersion': '4.0', 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) @@ -657,6 +662,13 @@ class Vs2010Backend(backends.Backend): # "The build tools for v142 (Platform Toolset = 'v142') cannot be found. ... please install v142 build tools." # This is extremely unhelpful and misleading since the v14x build tools ARE installed. 
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') + ext_settings_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionSettings') + if masm_type: + ET.SubElement( + ext_settings_grp, + 'Import', + Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm_type}.props', + ) # This attribute makes sure project names are displayed as expected in solution files even when their project file names differ pname = ET.SubElement(globalgroup, 'ProjectName') @@ -692,9 +704,11 @@ class Vs2010Backend(backends.Backend): if target_ext: ET.SubElement(direlem, 'TargetExt').text = target_ext - ET.SubElement(direlem, 'EmbedManifest').text = 'false' - if not gen_manifest: - ET.SubElement(direlem, 'GenerateManifest').text = 'false' + # Fix weird mt.exe error: + # mt.exe is trying to compile a non-existent .generated.manifest file and link it + # with the target. This does not happen without masm props. + ET.SubElement(direlem, 'EmbedManifest').text = 'true' if masm_type or gen_manifest == 'embed' else 'false' + ET.SubElement(direlem, 'GenerateManifest').text = 'true' if gen_manifest else 'false' return (root, type_config) @@ -775,12 +789,19 @@ class Vs2010Backend(backends.Backend): platform = self.build_platform else: platform = self.platform + + masm = self.get_masm_type(target) + (root, type_config) = self.create_basic_project(target.name, temp_dir=target.get_id(), guid=guid, target_platform=platform, - gen_manifest=self.get_gen_manifest(target)) + gen_manifest=self.get_gen_manifest(target), + masm_type=masm) ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + ext_tgt_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets') + if masm: + ET.SubElement(ext_tgt_grp, 'Import', Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm}.targets') target.generated = [self.compile_target_to_generator(target)] target.sources = [] self.generate_custom_generator_commands(target, root) @@ -795,6 +816,8 @@ class 
Vs2010Backend(backends.Backend): return 'c' if ext in compilers.cpp_suffixes: return 'cpp' + if ext in compilers.lang_suffixes['masm']: + return 'masm' raise MesonException(f'Could not guess language from source file {src}.') def add_pch(self, pch_sources, lang, inc_cl): @@ -956,13 +979,13 @@ class Vs2010Backend(backends.Backend): other.append(arg) return lpaths, libs, other - def _get_cl_compiler(self, target): + def _get_cl_compiler(self, target: build.BuildTarget): for lang, c in target.compilers.items(): if lang in {'c', 'cpp'}: return c - # No source files, only objects, but we still need a compiler, so + # No C/C++ source files, only objects/assembly source, but we still need a compiler, so # return a found compiler - if len(target.objects) > 0: + if len(target.objects) > 0 or len(target.sources) > 0: for lang, c in self.environment.coredata.compilers[target.for_machine].items(): if lang in {'c', 'cpp'}: return c @@ -1493,8 +1516,9 @@ class Vs2010Backend(backends.Backend): additional_links.append(self.relpath(lib, self.get_target_dir(target))) if len(extra_link_args) > 0: - extra_link_args.append('%(AdditionalOptions)') - ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args) + args = [self.escape_additional_option(arg) for arg in extra_link_args] + args.append('%(AdditionalOptions)') + ET.SubElement(link, "AdditionalOptions").text = ' '.join(args) if len(additional_libpaths) > 0: additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)') ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths) @@ -1607,6 +1631,8 @@ class Vs2010Backend(backends.Backend): else: platform = self.platform + masm = self.get_masm_type(target) + tfilename = os.path.splitext(target.get_filename()) (root, type_config) = self.create_basic_project(tfilename[0], @@ -1615,7 +1641,8 @@ class Vs2010Backend(backends.Backend): conftype=conftype, target_ext=tfilename[1], target_platform=platform, - 
gen_manifest=self.get_gen_manifest(target)) + gen_manifest=self.get_gen_manifest(target), + masm_type=masm) generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands( target, root) @@ -1719,12 +1746,17 @@ class Vs2010Backend(backends.Backend): for s in sources: relpath = os.path.join(proj_to_build_root, s.rel_to_builddir(self.build_to_src)) if path_normalize_add(relpath, previous_sources): - inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath) + lang = Vs2010Backend.lang_from_source_file(s) + if lang == 'masm' and masm: + inc_cl = ET.SubElement(inc_src, masm.upper(), Include=relpath) + else: + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath) + if self.gen_lite: self.add_project_nmake_defs_incs_and_opts(inc_cl, relpath, defs_paths_opts_per_lang_and_buildtype, platform) else: - lang = Vs2010Backend.lang_from_source_file(s) - self.add_pch(pch_sources, lang, inc_cl) + if lang != 'masm': + self.add_pch(pch_sources, lang, inc_cl) self.add_additional_options(lang, inc_cl, file_args) self.add_preprocessor_defines(lang, inc_cl, file_defines) self.add_include_dirs(lang, inc_cl, file_inc_dirs) @@ -1732,12 +1764,17 @@ class Vs2010Backend(backends.Backend): self.object_filename_from_source(target, compiler, s) for s in gen_src: if path_normalize_add(s, previous_sources): - inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s) + lang = Vs2010Backend.lang_from_source_file(s) + if lang == 'masm' and masm: + inc_cl = ET.SubElement(inc_src, masm.upper(), Include=s) + else: + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s) + if self.gen_lite: self.add_project_nmake_defs_incs_and_opts(inc_cl, s, defs_paths_opts_per_lang_and_buildtype, platform) else: - lang = Vs2010Backend.lang_from_source_file(s) - self.add_pch(pch_sources, lang, inc_cl) + if lang != 'masm': + self.add_pch(pch_sources, lang, inc_cl) self.add_additional_options(lang, inc_cl, file_args) self.add_preprocessor_defines(lang, inc_cl, 
file_defines) self.add_include_dirs(lang, inc_cl, file_inc_dirs) @@ -1786,6 +1823,9 @@ class Vs2010Backend(backends.Backend): ET.SubElement(inc_objs, 'Object', Include=s) ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + ext_tgt_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets') + if masm: + ET.SubElement(ext_tgt_grp, 'Import', Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm}.targets') self.add_regen_dependency(root) if not self.gen_lite: # Injecting further target dependencies into this vcxproj implies and forces a Visual Studio BUILD dependency, @@ -2096,7 +2136,8 @@ class Vs2010Backend(backends.Backend): pass # Returns if a target generates a manifest or not. - def get_gen_manifest(self, target): + # Returns 'embed' if the generated manifest is embedded. + def get_gen_manifest(self, target: T.Optional[build.BuildTarget]): if not isinstance(target, build.BuildTarget): return True @@ -2113,6 +2154,31 @@ class Vs2010Backend(backends.Backend): arg = arg.upper() if arg == '/MANIFEST:NO': return False + if arg.startswith('/MANIFEST:EMBED'): + return 'embed' if arg == '/MANIFEST' or arg.startswith('/MANIFEST:'): break return True + + # FIXME: add a way to distinguish between arm64ec+marmasm (written in ARM assembly) + # and arm64ec+masm (written in x64 assembly). + # + # For now, assume it's the native ones. 
(same behavior as ninja backend) + def get_masm_type(self, target: build.BuildTarget): + if not isinstance(target, build.BuildTarget): + return None + + if 'masm' not in target.compilers: + return None + + if target.for_machine == MachineChoice.BUILD: + platform = self.build_platform + elif target.for_machine == MachineChoice.HOST: + platform = self.platform + else: + return None + + if platform in {'ARM', 'arm64', 'arm64ec'}: + return 'marmasm' + else: + return 'masm' diff --git a/mesonbuild/backend/vs2012backend.py b/mesonbuild/backend/vs2012backend.py index 307964b..922cd60 100644 --- a/mesonbuild/backend/vs2012backend.py +++ b/mesonbuild/backend/vs2012backend.py @@ -21,6 +21,8 @@ class Vs2012Backend(Vs2010Backend): self.vs_version = '2012' self.sln_file_version = '12.00' self.sln_version_comment = '2012' + + def detect_toolset(self) -> None: if self.environment is not None: # TODO: we assume host == build comps = self.environment.coredata.compilers.host diff --git a/mesonbuild/backend/vs2013backend.py b/mesonbuild/backend/vs2013backend.py index ae0b68b..cf5d598 100644 --- a/mesonbuild/backend/vs2013backend.py +++ b/mesonbuild/backend/vs2013backend.py @@ -20,6 +20,8 @@ class Vs2013Backend(Vs2010Backend): self.vs_version = '2013' self.sln_file_version = '12.00' self.sln_version_comment = '2013' + + def detect_toolset(self) -> None: if self.environment is not None: # TODO: we assume host == build comps = self.environment.coredata.compilers.host diff --git a/mesonbuild/backend/vs2015backend.py b/mesonbuild/backend/vs2015backend.py index 4c515cc..1862def 100644 --- a/mesonbuild/backend/vs2015backend.py +++ b/mesonbuild/backend/vs2015backend.py @@ -21,6 +21,8 @@ class Vs2015Backend(Vs2010Backend): self.vs_version = '2015' self.sln_file_version = '12.00' self.sln_version_comment = '14' + + def detect_toolset(self) -> None: if self.environment is not None: # TODO: we assume host == build comps = self.environment.coredata.compilers.host diff --git 
a/mesonbuild/backend/vs2017backend.py b/mesonbuild/backend/vs2017backend.py index 393544f..372e1ce 100644 --- a/mesonbuild/backend/vs2017backend.py +++ b/mesonbuild/backend/vs2017backend.py @@ -24,6 +24,8 @@ class Vs2017Backend(Vs2010Backend): self.vs_version = '2017' self.sln_file_version = '12.00' self.sln_version_comment = '15' + + def detect_toolset(self) -> None: # We assume that host == build if self.environment is not None: comps = self.environment.coredata.compilers.host diff --git a/mesonbuild/backend/vs2019backend.py b/mesonbuild/backend/vs2019backend.py index 4d6e226..61ad75d 100644 --- a/mesonbuild/backend/vs2019backend.py +++ b/mesonbuild/backend/vs2019backend.py @@ -22,6 +22,8 @@ class Vs2019Backend(Vs2010Backend): super().__init__(build, interpreter) self.sln_file_version = '12.00' self.sln_version_comment = 'Version 16' + + def detect_toolset(self) -> None: if self.environment is not None: comps = self.environment.coredata.compilers.host if comps and all(c.id == 'clang-cl' for c in comps.values()): diff --git a/mesonbuild/backend/vs2022backend.py b/mesonbuild/backend/vs2022backend.py index 27e0438..ca449a4 100644 --- a/mesonbuild/backend/vs2022backend.py +++ b/mesonbuild/backend/vs2022backend.py @@ -22,6 +22,8 @@ class Vs2022Backend(Vs2010Backend): super().__init__(build, interpreter, gen_lite=gen_lite) self.sln_file_version = '12.00' self.sln_version_comment = 'Version 17' + + def detect_toolset(self) -> None: if self.environment is not None: comps = self.environment.coredata.compilers.host if comps and all(c.id == 'clang-cl' for c in comps.values()): diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index 587404a..6ad982d 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -1596,6 +1596,7 @@ class XCodeBackend(backends.Backend): headerdirs = [] bridging_header = "" is_swift = self.is_swift_target(target) + langs = set() for d in target.include_dirs: for sd in d.incdirs: cd 
= os.path.join(d.curdir, sd) @@ -1715,6 +1716,7 @@ class XCodeBackend(backends.Backend): lang = 'c' elif lang == 'objcpp': lang = 'cpp' + langs.add(lang) langname = LANGNAMEMAP[lang] langargs.setdefault(langname, []) langargs[langname] = cargs + cti_args + args @@ -1776,6 +1778,8 @@ class XCodeBackend(backends.Backend): settings_dict.add_item('SECTORDER_FLAGS', '') if is_swift and bridging_header: settings_dict.add_item('SWIFT_OBJC_BRIDGING_HEADER', bridging_header) + if self.objversion >= 60 and 'cpp' in langs: + settings_dict.add_item('SWIFT_OBJC_INTEROP_MODE', 'objcxx') settings_dict.add_item('BUILD_DIR', symroot) settings_dict.add_item('OBJROOT', f'{symroot}/build') sysheader_arr = PbxArray() diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 7320b88..72d376d 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -33,7 +33,7 @@ from .compilers import ( is_header, is_object, is_source, clink_langs, sort_clink, all_languages, is_known_suffix, detect_static_linker ) -from .interpreterbase import FeatureNew, FeatureDeprecated +from .interpreterbase import FeatureNew, FeatureDeprecated, UnknownValue if T.TYPE_CHECKING: from typing_extensions import Literal, TypedDict @@ -275,7 +275,7 @@ class Build: self.stdlibs = PerMachine({}, {}) self.test_setups: T.Dict[str, TestSetup] = {} self.test_setup_default_name = None - self.find_overrides: T.Dict[str, T.Union['Executable', programs.ExternalProgram, programs.OverrideProgram]] = {} + self.find_overrides: T.Dict[str, T.Union['OverrideExecutable', programs.ExternalProgram, programs.OverrideProgram]] = {} self.searched_programs: T.Set[str] = set() # The list of all programs that have been searched for. 
# If we are doing a cross build we need two caches, if we're doing a @@ -648,7 +648,7 @@ class Target(HoldableObject, metaclass=abc.ABCMeta): def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None: if 'build_by_default' in kwargs: self.build_by_default = kwargs['build_by_default'] - if not isinstance(self.build_by_default, bool): + if not isinstance(self.build_by_default, (bool, UnknownValue)): raise InvalidArguments('build_by_default must be a boolean value.') if not self.build_by_default and kwargs.get('install', False): @@ -656,29 +656,11 @@ class Target(HoldableObject, metaclass=abc.ABCMeta): # set, use the value of 'install' if it's enabled. self.build_by_default = True - self.raw_overrides = self.parse_overrides(kwargs) + self.raw_overrides = kwargs.get('override_options', {}) def get_override(self, name: str) -> T.Optional[str]: return self.raw_overrides.get(name, None) - @staticmethod - def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[str, str]: - opts = kwargs.get('override_options', []) - - # In this case we have an already parsed and ready to go dictionary - # provided by typed_kwargs - if isinstance(opts, dict): - return T.cast('T.Dict[OptionKey, str]', opts) - - result: T.Dict[str, str] = {} - overrides = stringlistify(opts) - for o in overrides: - if '=' not in o: - raise InvalidArguments('Overrides must be of form "key=value"') - k, v = o.split('=', 1) - result[k] = v - return result - def is_linkable_target(self) -> bool: return False @@ -1375,6 +1357,10 @@ class BuildTarget(Target): deps = listify(deps) for dep in deps: if dep in self.added_deps: + # Prefer to add dependencies to added_deps which have a name + if dep.is_named(): + self.added_deps.remove(dep) + self.added_deps.add(dep) continue if isinstance(dep, dependencies.InternalDependency): @@ -2199,10 +2185,16 @@ class StaticLibrary(BuildTarget): elif self.rust_crate_type == 'staticlib': self.suffix = 'a' else: - if 'c' in self.compilers and self.compilers['c'].get_id() == 
'tasking': - self.suffix = 'ma' if self.options.get_value('b_lto') and not self.prelink else 'a' - else: - self.suffix = 'a' + self.suffix = 'a' + if 'c' in self.compilers and self.compilers['c'].get_id() == 'tasking' and not self.prelink: + key = OptionKey('b_lto', self.subproject, self.for_machine) + try: + v = self.environment.coredata.get_option_for_target(self, key) + except KeyError: + v = self.environment.coredata.optstore.get_value_for(key) + assert isinstance(v, bool), 'for mypy' + if v: + self.suffix = 'ma' self.filename = self.prefix + self.name + '.' + self.suffix self.outputs[0] = self.filename @@ -2594,7 +2586,7 @@ class CommandBase: subproject: str def flatten_command(self, cmd: T.Sequence[T.Union[str, File, programs.ExternalProgram, BuildTargetTypes]]) -> \ - T.List[T.Union[str, File, BuildTarget, 'CustomTarget']]: + T.List[T.Union[str, File, BuildTarget, CustomTarget, programs.ExternalProgram]]: cmd = listify(cmd) final_cmd: T.List[T.Union[str, File, BuildTarget, 'CustomTarget']] = [] for c in cmd: @@ -2611,7 +2603,8 @@ class CommandBase: # Can only add a dependency on an external program which we # know the absolute path of self.depend_files.append(File.from_absolute_file(path)) - final_cmd += c.get_command() + # Do NOT flatten -- it is needed for later parsing + final_cmd.append(c) elif isinstance(c, (BuildTarget, CustomTarget)): self.dependencies.append(c) final_cmd.append(c) @@ -2681,6 +2674,7 @@ class CustomTarget(Target, CustomTargetBase, CommandBase): install_dir: T.Optional[T.List[T.Union[str, Literal[False]]]] = None, install_mode: T.Optional[FileMode] = None, install_tag: T.Optional[T.List[T.Optional[str]]] = None, + rspable: bool = False, absolute_paths: bool = False, backend: T.Optional['Backend'] = None, description: str = 'Generating {} with a custom command', @@ -2713,6 +2707,9 @@ class CustomTarget(Target, CustomTargetBase, CommandBase): # Whether to use absolute paths for all files on the commandline self.absolute_paths = 
absolute_paths + # Whether to enable using response files for the underlying tool + self.rspable = rspable + def get_default_install_dir(self) -> T.Union[T.Tuple[str, str], T.Tuple[None, None]]: return None, None @@ -3129,6 +3126,18 @@ class ConfigurationData(HoldableObject): def keys(self) -> T.Iterator[str]: return self.values.keys() +class OverrideExecutable(Executable): + def __init__(self, executable: Executable, version: str): + self._executable = executable + self._version = version + + def __getattr__(self, name: str) -> T.Any: + _executable = object.__getattribute__(self, '_executable') + return getattr(_executable, name) + + def get_version(self, interpreter: T.Optional[Interpreter] = None) -> str: + return self._version + # A bit poorly named, but this represents plain data files to copy # during install. @dataclass(eq=False) diff --git a/mesonbuild/cargo/cfg.py b/mesonbuild/cargo/cfg.py index 0d49527..a0ee6e2 100644 --- a/mesonbuild/cargo/cfg.py +++ b/mesonbuild/cargo/cfg.py @@ -4,6 +4,7 @@ """Rust CFG parser. Rust uses its `cfg()` format in cargo. +https://doc.rust-lang.org/reference/conditional-compilation.html This may have the following functions: - all() @@ -22,18 +23,15 @@ so you could have examples like: from __future__ import annotations import dataclasses import enum -import functools import typing as T -from . import builder -from .. 
import mparser from ..mesonlib import MesonBugException if T.TYPE_CHECKING: _T = T.TypeVar('_T') _LEX_TOKEN = T.Tuple['TokenType', T.Optional[str]] - _LEX_STREAM = T.Iterable[_LEX_TOKEN] + _LEX_STREAM = T.Iterator[_LEX_TOKEN] _LEX_STREAM_AH = T.Iterator[T.Tuple[_LEX_TOKEN, T.Optional[_LEX_TOKEN]]] @@ -48,6 +46,7 @@ class TokenType(enum.Enum): NOT = enum.auto() COMMA = enum.auto() EQUAL = enum.auto() + CFG = enum.auto() def lexer(raw: str) -> _LEX_STREAM: @@ -56,45 +55,41 @@ def lexer(raw: str) -> _LEX_STREAM: :param raw: The raw cfg() expression :return: An iterable of tokens """ - buffer: T.List[str] = [] + start: int = 0 is_string: bool = False - for s in raw: - if s.isspace() or s in {')', '(', ',', '='} or (s == '"' and buffer): - val = ''.join(buffer) - buffer.clear() - if is_string: + for i, s in enumerate(raw): + if s.isspace() or s in {')', '(', ',', '=', '"'}: + val = raw[start:i] + start = i + 1 + if s == '"' and is_string: yield (TokenType.STRING, val) + is_string = False + continue elif val == 'any': yield (TokenType.ANY, None) elif val == 'all': yield (TokenType.ALL, None) elif val == 'not': yield (TokenType.NOT, None) + elif val == 'cfg': + yield (TokenType.CFG, None) elif val: yield (TokenType.IDENTIFIER, val) if s == '(': yield (TokenType.LPAREN, None) - continue elif s == ')': yield (TokenType.RPAREN, None) - continue elif s == ',': yield (TokenType.COMMA, None) - continue elif s == '=': yield (TokenType.EQUAL, None) - continue - elif s.isspace(): - continue - - if s == '"': - is_string = not is_string - else: - buffer.append(s) - if buffer: + elif s == '"': + is_string = True + val = raw[start:] + if val: # This should always be an identifier - yield (TokenType.IDENTIFIER, ''.join(buffer)) + yield (TokenType.IDENTIFIER, val) def lookahead(iter: T.Iterator[_T]) -> T.Iterator[T.Tuple[_T, T.Optional[_T]]]: @@ -146,8 +141,8 @@ class Identifier(IR): @dataclasses.dataclass class Equal(IR): - lhs: IR - rhs: IR + lhs: Identifier + rhs: String 
@dataclasses.dataclass @@ -175,41 +170,40 @@ def _parse(ast: _LEX_STREAM_AH) -> IR: else: ntoken, _ = (None, None) - stream: T.List[_LEX_TOKEN] if token is TokenType.IDENTIFIER: + assert value + id_ = Identifier(value) if ntoken is TokenType.EQUAL: - return Equal(Identifier(value), _parse(ast)) - if token is TokenType.STRING: - return String(value) - if token is TokenType.EQUAL: - # In this case the previous caller already has handled the equal - return _parse(ast) - if token in {TokenType.ANY, TokenType.ALL}: + next(ast) + (token, value), _ = next(ast) + assert token is TokenType.STRING + assert value is not None + return Equal(id_, String(value)) + return id_ + elif token in {TokenType.ANY, TokenType.ALL}: type_ = All if token is TokenType.ALL else Any - assert ntoken is TokenType.LPAREN - next(ast) # advance the iterator to get rid of the LPAREN - stream = [] args: T.List[IR] = [] - while token is not TokenType.RPAREN: + (token, value), n_stream = next(ast) + assert token is TokenType.LPAREN + if n_stream and n_stream[0] == TokenType.RPAREN: + return type_(args) + while True: + args.append(_parse(ast)) (token, value), _ = next(ast) - if token is TokenType.COMMA: - args.append(_parse(lookahead(iter(stream)))) - stream.clear() - else: - stream.append((token, value)) - if stream: - args.append(_parse(lookahead(iter(stream)))) + if token is TokenType.RPAREN: + break + assert token is TokenType.COMMA return type_(args) - if token is TokenType.NOT: - next(ast) # advance the iterator to get rid of the LPAREN - stream = [] - # Mypy can't figure out that token is overridden inside the while loop - while token is not TokenType.RPAREN: # type: ignore - (token, value), _ = next(ast) - stream.append((token, value)) - return Not(_parse(lookahead(iter(stream)))) - - raise MesonBugException(f'Unhandled Cargo token: {token}') + elif token in {TokenType.NOT, TokenType.CFG}: + is_not = token is TokenType.NOT + (token, value), _ = next(ast) + assert token is TokenType.LPAREN + arg 
= _parse(ast) + (token, value), _ = next(ast) + assert token is TokenType.RPAREN + return Not(arg) if is_not else arg + else: + raise MesonBugException(f'Unhandled Cargo token:{token} {value}') def parse(ast: _LEX_STREAM) -> IR: @@ -218,57 +212,24 @@ def parse(ast: _LEX_STREAM) -> IR: :param ast: An iterable of Tokens :return: An mparser Node to be used as a conditional """ - ast_i: _LEX_STREAM_AH = lookahead(iter(ast)) + ast_i: _LEX_STREAM_AH = lookahead(ast) return _parse(ast_i) -@functools.singledispatch -def ir_to_meson(ir: T.Any, build: builder.Builder) -> mparser.BaseNode: - raise NotImplementedError - - -@ir_to_meson.register -def _(ir: String, build: builder.Builder) -> mparser.BaseNode: - return build.string(ir.value) - - -@ir_to_meson.register -def _(ir: Identifier, build: builder.Builder) -> mparser.BaseNode: - host_machine = build.identifier('host_machine') - if ir.value == "target_arch": - return build.method('cpu_family', host_machine) - elif ir.value in {"target_os", "target_family"}: - return build.method('system', host_machine) - elif ir.value == "target_endian": - return build.method('endian', host_machine) - raise MesonBugException(f"Unhandled Cargo identifier: {ir.value}") - - -@ir_to_meson.register -def _(ir: Equal, build: builder.Builder) -> mparser.BaseNode: - return build.equal(ir_to_meson(ir.lhs, build), ir_to_meson(ir.rhs, build)) - - -@ir_to_meson.register -def _(ir: Not, build: builder.Builder) -> mparser.BaseNode: - return build.not_(ir_to_meson(ir.value, build)) - - -@ir_to_meson.register -def _(ir: Any, build: builder.Builder) -> mparser.BaseNode: - args = iter(reversed(ir.args)) - last = next(args) - cur = build.or_(ir_to_meson(next(args), build), ir_to_meson(last, build)) - for a in args: - cur = build.or_(ir_to_meson(a, build), cur) - return cur +def _eval_cfg(ir: IR, cfgs: T.Dict[str, str]) -> bool: + if isinstance(ir, Identifier): + return ir.value in cfgs + elif isinstance(ir, Equal): + return cfgs.get(ir.lhs.value) == 
ir.rhs.value + elif isinstance(ir, Not): + return not _eval_cfg(ir.value, cfgs) + elif isinstance(ir, Any): + return any(_eval_cfg(i, cfgs) for i in ir.args) + elif isinstance(ir, All): + return all(_eval_cfg(i, cfgs) for i in ir.args) + else: + raise MesonBugException(f'Unhandled Cargo cfg IR: {ir}') -@ir_to_meson.register -def _(ir: All, build: builder.Builder) -> mparser.BaseNode: - args = iter(reversed(ir.args)) - last = next(args) - cur = build.and_(ir_to_meson(next(args), build), ir_to_meson(last, build)) - for a in args: - cur = build.and_(ir_to_meson(a, build), cur) - return cur +def eval_cfg(raw: str, cfgs: T.Dict[str, str]) -> bool: + return _eval_cfg(parse(lexer(raw)), cfgs) diff --git a/mesonbuild/cargo/interpreter.py b/mesonbuild/cargo/interpreter.py index af272a8..a5d703e 100644 --- a/mesonbuild/cargo/interpreter.py +++ b/mesonbuild/cargo/interpreter.py @@ -20,9 +20,8 @@ import urllib.parse import itertools import typing as T -from . import builder -from . import version -from ..mesonlib import MesonException, Popen_safe +from . import builder, version, cfg +from ..mesonlib import MesonException, Popen_safe, MachineChoice from .. import coredata, mlog from ..wrap.wrap import PackageDefinition @@ -35,6 +34,7 @@ if T.TYPE_CHECKING: from .. import mparser from ..environment import Environment from ..interpreterbase import SubProject + from ..compilers.rust import RustCompiler # Copied from typeshed. Blarg that they don't expose this class DataclassInstance(Protocol): @@ -476,10 +476,13 @@ class PackageKey: class Interpreter: def __init__(self, env: Environment) -> None: self.environment = env + self.host_rustc = T.cast('RustCompiler', self.environment.coredata.compilers[MachineChoice.HOST]['rust']) # Map Cargo.toml's subdir to loaded manifest. 
self.manifests: T.Dict[str, Manifest] = {} # Map of cargo package (name + api) to its state self.packages: T.Dict[PackageKey, PackageState] = {} + # Rustc's config + self.cfgs = self._get_cfgs() def interpret(self, subdir: str) -> mparser.CodeBlockNode: manifest = self._load_manifest(subdir) @@ -526,6 +529,10 @@ class Interpreter: self.environment.wrap_resolver.wraps[meson_depname].type is not None pkg = PackageState(manifest, downloaded) self.packages[key] = pkg + # Merge target specific dependencies that are enabled + for condition, dependencies in manifest.target.items(): + if cfg.eval_cfg(condition, self.cfgs): + manifest.dependencies.update(dependencies) # Fetch required dependencies recursively. for depname, dep in manifest.dependencies.items(): if not dep.optional: @@ -599,6 +606,23 @@ class Interpreter: else: self._enable_feature(pkg, f) + def _get_cfgs(self) -> T.Dict[str, str]: + cfgs = self.host_rustc.get_cfgs().copy() + rustflags = self.environment.coredata.get_external_args(MachineChoice.HOST, 'rust') + rustflags_i = iter(rustflags) + for i in rustflags_i: + if i == '--cfg': + cfgs.append(next(rustflags_i)) + return dict(self._split_cfg(i) for i in cfgs) + + @staticmethod + def _split_cfg(cfg: str) -> T.Tuple[str, str]: + pair = cfg.split('=', maxsplit=1) + value = pair[1] if len(pair) > 1 else '' + if value and value[0] == '"': + value = value[1:-1] + return pair[0], value + def _create_project(self, pkg: PackageState, build: builder.Builder) -> T.List[mparser.BaseNode]: """Create the project() function call @@ -608,6 +632,7 @@ class Interpreter: """ default_options: T.List[mparser.BaseNode] = [] default_options.append(build.string(f'rust_std={pkg.manifest.package.edition}')) + default_options.append(build.string(f'build.rust_std={pkg.manifest.package.edition}')) if pkg.downloaded: default_options.append(build.string('warning_level=0')) diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py index 7644c0b..b7ab1ba 100644 --- 
a/mesonbuild/cmake/common.py +++ b/mesonbuild/cmake/common.py @@ -19,6 +19,7 @@ language_map = { 'cuda': 'CUDA', 'objc': 'OBJC', 'objcpp': 'OBJCXX', + 'nasm': 'ASM_NASM', 'cs': 'CSharp', 'java': 'Java', 'fortran': 'Fortran', diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 9296276..609038d 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -223,6 +223,7 @@ class ConverterTarget: self.install = target.install self.install_dir: T.Optional[Path] = None self.link_libraries = target.link_libraries + self.link_targets: T.List[str] = [] self.link_flags = target.link_flags + target.link_lang_flags self.public_link_flags: T.List[str] = [] self.depends_raw: T.List[str] = [] @@ -363,6 +364,8 @@ class ConverterTarget: self.public_link_flags += rtgt.public_link_flags self.public_compile_opts += rtgt.public_compile_opts self.link_libraries += rtgt.libraries + self.depends_raw += rtgt.target_dependencies + self.link_targets += rtgt.target_dependencies elif self.type.upper() not in ['EXECUTABLE', 'OBJECT_LIBRARY']: mlog.warning('CMake: Target', mlog.bold(self.cmake_name), 'not found in CMake trace. This can lead to build errors') @@ -957,17 +960,27 @@ class CMakeInterpreter: object_libs += [tgt] self.languages += [x for x in tgt.languages if x not in self.languages] - # Second pass: Detect object library dependencies + # Second pass: Populate link_with project internal targets + for tgt in self.targets: + for i in tgt.link_targets: + # Handle target-based link libraries + link_with = self.output_target_map.target(i) + if not link_with or isinstance(link_with, ConverterCustomTarget): + # Generated file etc. 
+ continue + tgt.link_with.append(link_with) + + # Third pass: Detect object library dependencies for tgt in self.targets: tgt.process_object_libs(object_libs, self._object_lib_workaround) - # Third pass: Reassign dependencies to avoid some loops + # Fourth pass: Reassign dependencies to avoid some loops for tgt in self.targets: tgt.process_inter_target_dependencies() for ctgt in self.custom_targets: ctgt.process_inter_target_dependencies() - # Fourth pass: Remove rassigned dependencies + # Fifth pass: Remove reassigned dependencies for tgt in self.targets: tgt.cleanup_dependencies() diff --git a/mesonbuild/cmake/toolchain.py b/mesonbuild/cmake/toolchain.py index d410886..11a00be 100644 --- a/mesonbuild/cmake/toolchain.py +++ b/mesonbuild/cmake/toolchain.py @@ -175,7 +175,12 @@ class CMakeToolchain: # Set the compiler variables for lang, comp_obj in self.compilers.items(): - prefix = 'CMAKE_{}_'.format(language_map.get(lang, lang.upper())) + language = language_map.get(lang, None) + + if not language: + continue # unsupported language + + prefix = 'CMAKE_{}_'.format(language) exe_list = comp_obj.get_exelist() if not exe_list: @@ -211,7 +216,7 @@ class CMakeToolchain: # Generate the CMakeLists.txt mlog.debug('CMake Toolchain: Calling CMake once to generate the compiler state') languages = list(self.compilers.keys()) - lang_ids = [language_map.get(x, x.upper()) for x in languages] + lang_ids = [language_map.get(x) for x in languages if x in language_map] cmake_content = dedent(f''' cmake_minimum_required(VERSION 3.10) project(CompInfo {' '.join(lang_ids)}) diff --git a/mesonbuild/cmake/tracetargets.py b/mesonbuild/cmake/tracetargets.py index 2cc0c17..9873845 100644 --- a/mesonbuild/cmake/tracetargets.py +++ b/mesonbuild/cmake/tracetargets.py @@ -45,6 +45,7 @@ class ResolvedTarget: self.public_link_flags: T.List[str] = [] self.public_compile_opts: T.List[str] = [] self.libraries: T.List[str] = [] + self.target_dependencies: T.List[str] = [] def 
resolve_cmake_trace_targets(target_name: str, trace: 'CMakeTraceParser', @@ -144,9 +145,13 @@ def resolve_cmake_trace_targets(target_name: str, targets += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] if 'LINK_LIBRARIES' in tgt.properties: - targets += [x for x in tgt.properties['LINK_LIBRARIES'] if x] + link_libraries = [x for x in tgt.properties['LINK_LIBRARIES'] if x] + targets += link_libraries + res.target_dependencies += link_libraries if 'INTERFACE_LINK_LIBRARIES' in tgt.properties: - targets += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x] + link_libraries = [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x] + targets += link_libraries + res.target_dependencies += link_libraries if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties: targets += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x] diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index ad252a1..0376922 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -749,7 +749,7 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta): return args.copy() def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], - libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]: raise EnvironmentException(f'Language {self.get_display_language()} does not support library finding.') def get_library_naming(self, env: 'Environment', libtype: LibType, @@ -1119,6 +1119,9 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta): def get_compile_only_args(self) -> T.List[str]: return [] + def get_cxx_interoperability_args(self, lang: T.Dict[str, Compiler]) -> T.List[str]: + raise EnvironmentException('This compiler does not support CXX interoperability') + def 
get_preprocess_only_args(self) -> T.List[str]: raise EnvironmentException('This compiler does not have a preprocessor') @@ -1424,12 +1427,19 @@ def get_global_options(lang: str, description = f'Extra arguments passed to the {lang}' argkey = OptionKey(f'{lang}_args', machine=for_machine) largkey = OptionKey(f'{lang}_link_args', machine=for_machine) - envkey = OptionKey(f'{lang}_env_args', machine=for_machine) - comp_key = argkey if argkey in env.options else envkey + comp_args_from_envvar = False + comp_options = env.coredata.optstore.get_pending_value(argkey) + if comp_options is None: + comp_args_from_envvar = True + comp_options = env.env_opts.get(argkey, []) + + link_args_from_envvar = False + link_options = env.coredata.optstore.get_pending_value(largkey) + if link_options is None: + link_args_from_envvar = True + link_options = env.env_opts.get(largkey, []) - comp_options = env.options.get(comp_key, []) - link_options = env.options.get(largkey, []) assert isinstance(comp_options, (str, list)), 'for mypy' assert isinstance(link_options, (str, list)), 'for mypy' @@ -1443,7 +1453,7 @@ def get_global_options(lang: str, description + ' linker', link_options, split_args=True, allow_dups=True) - if comp.INVOKES_LINKER and comp_key == envkey: + if comp.INVOKES_LINKER and comp_args_from_envvar and link_args_from_envvar: # If the compiler acts as a linker driver, and we're using the # environment variable flags for both the compiler and linker # arguments, then put the compiler flags in the linker flags as well. 
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index 01b9bb9..ed8d1cf 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -311,6 +311,9 @@ class ClangCPPCompiler(_StdCPPLibMixin, ClangCPPStds, ClangCompiler, CPPCompiler return libs return [] + def is_libcpp_enable_assertions_deprecated(self) -> bool: + return version_compare(self.version, ">=18") + def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]: if disable: return ['-DNDEBUG'] @@ -323,7 +326,7 @@ class ClangCPPCompiler(_StdCPPLibMixin, ClangCPPStds, ClangCompiler, CPPCompiler if self.language_stdlib_provider(env) == 'stdc++': return ['-D_GLIBCXX_ASSERTIONS=1'] else: - if version_compare(self.version, '>=18'): + if self.is_libcpp_enable_assertions_deprecated(): return ['-D_LIBCPP_HARDENING_MODE=_LIBCPP_HARDENING_MODE_FAST'] elif version_compare(self.version, '>=15'): return ['-D_LIBCPP_ENABLE_ASSERTIONS=1'] @@ -343,7 +346,12 @@ class ArmLtdClangCPPCompiler(ClangCPPCompiler): class AppleClangCPPCompiler(AppleCompilerMixin, AppleCPPStdsMixin, ClangCPPCompiler): - pass + def is_libcpp_enable_assertions_deprecated(self) -> bool: + # Upstream libc++ deprecated _LIBCPP_ENABLE_ASSERTIONS + # in favor of _LIBCPP_HARDENING_MODE from version 18 onwards, + # but Apple Clang 17's libc++ has back-ported that change. 
+ # See: https://github.com/mesonbuild/meson/issues/14440 + return version_compare(self.version, ">=17") class EmscriptenCPPCompiler(EmscriptenMixin, ClangCPPCompiler): @@ -872,8 +880,7 @@ class CPP11AsCPP14Mixin(CompilerMixinBase): 'attempting best effort; setting the standard to C++14', once=True, fatal=False) original_args = super().get_option_std_args(target, env, subproject) - std_mapping = {'/std:c++11': '/std:c++14', - '/std:c++14': '/std:vc++14'} + std_mapping = {'/std:c++11': '/std:c++14'} processed_args = [std_mapping.get(x, x) for x in original_args] return processed_args diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py index ab00cf1..fd747d1 100644 --- a/mesonbuild/compilers/cuda.py +++ b/mesonbuild/compilers/cuda.py @@ -763,8 +763,8 @@ class CudaCompiler(Compiler): return self._to_host_flags(self.host_compiler.get_std_exe_link_args(), Phase.LINKER) def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], - libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: - return self.host_compiler.find_library(libname, env, extra_dirs, libtype, lib_prefix_warning) + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]: + return self.host_compiler.find_library(libname, env, extra_dirs, libtype, lib_prefix_warning, ignore_system_dirs) def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: return self._to_host_flags(self.host_compiler.get_crt_compile_args(crt_val, buildtype)) diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py index 53bdd85..040c42f 100644 --- a/mesonbuild/compilers/detect.py +++ b/mesonbuild/compilers/detect.py @@ -107,7 +107,7 @@ def detect_compiler_for(env: 'Environment', lang: str, for_machine: MachineChoic if comp is None: return comp assert comp.for_machine == for_machine - env.coredata.process_compiler_options(lang, 
comp, env, subproject) + env.coredata.process_compiler_options(lang, comp, subproject) if not skip_sanity_check: comp.sanity_check(env.get_scratch_dir(), env) env.coredata.compilers[comp.for_machine][lang] = comp diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index 5794db0..6f4f3d2 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -104,9 +104,9 @@ class FortranCompiler(CLikeCompiler, Compiler): return filename def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], - libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]: code = 'stop; end program' - return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning) + return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning, ignore_system_dirs) def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: return self._has_multi_arguments(args, env, 'stop; end program') @@ -446,6 +446,11 @@ class IntelLLVMFortranCompiler(IntelFortranCompiler): id = 'intel-llvm' + def get_preprocess_only_args(self) -> T.List[str]: + return ['-preprocess-only'] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return [] class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): @@ -643,7 +648,11 @@ class LlvmFlangFortranCompiler(ClangCompiler, FortranCompiler): # https://github.com/llvm/llvm-project/commit/8d5386669ed63548daf1bee415596582d6d78d7d; # it seems flang 18 doesn't work if something accidentally includes a program unit, see # https://github.com/llvm/llvm-project/issues/92496 - return search_dirs + ['-lFortranRuntime', '-lFortranDecimal'] + # Only link FortranRuntime and FortranDecimal for flang < 19, see + # 
https://github.com/scipy/scipy/issues/21562#issuecomment-2942938509 + if version_compare(self.version, '<19'): + search_dirs += ['-lFortranRuntime', '-lFortranDecimal'] + return search_dirs class Open64FortranCompiler(FortranCompiler): diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py index ae5ab63..72b987a 100644 --- a/mesonbuild/compilers/mixins/clang.py +++ b/mesonbuild/compilers/mixins/clang.py @@ -155,7 +155,10 @@ class ClangCompiler(GnuLikeCompiler): # llvm based) is retargetable, while GCC is not. # - # qcld: Qualcomm Snapdragon linker, based on LLVM + # eld: Qualcomm's opensource embedded linker + if linker == 'eld': + return ['-fuse-ld=eld'] + # qcld: Qualcomm's deprecated linker if linker == 'qcld': return ['-fuse-ld=qcld'] if linker == 'mold': diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index e45c485..1c875a3 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -128,7 +128,7 @@ class CLikeCompiler(Compiler): warn_args: T.Dict[str, T.List[str]] = {} # TODO: Replace this manual cache with functools.lru_cache - find_library_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType], T.Optional[T.List[str]]] = {} + find_library_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType, bool], T.Optional[T.List[str]]] = {} find_framework_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], bool], T.Optional[T.List[str]]] = {} internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS @@ -1113,7 +1113,7 @@ class CLikeCompiler(Compiler): ''' return self.sizeof('void *', '', env)[0] == 8 - def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]: + def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType, 
lib_prefix_warning: bool, ignore_system_dirs: bool) -> T.Optional[T.List[str]]: # First try if we can just add the library as -l. # Gcc + co seem to prefer builtin lib dirs to -L dirs. # Only try to find std libs if no extra dirs specified. @@ -1144,7 +1144,7 @@ class CLikeCompiler(Compiler): except (mesonlib.MesonException, KeyError): # TODO evaluate if catching KeyError is wanted here elf_class = 0 # Search in the specified dirs, and then in the system libraries - for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)): + for d in itertools.chain(extra_dirs, [] if ignore_system_dirs else self.get_library_dirs(env, elf_class)): for p in patterns: trials = self._get_trials_from_pattern(p, d, libname) if not trials: @@ -1158,15 +1158,15 @@ class CLikeCompiler(Compiler): return None def _find_library_impl(self, libname: str, env: 'Environment', extra_dirs: T.List[str], - code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]: + code: str, libtype: LibType, lib_prefix_warning: bool, ignore_system_dirs: bool) -> T.Optional[T.List[str]]: # These libraries are either built-in or invalid if libname in self.ignore_libs: return [] if isinstance(extra_dirs, str): extra_dirs = [extra_dirs] - key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype) + key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype, ignore_system_dirs) if key not in self.find_library_cache: - value = self._find_library_real(libname, env, extra_dirs, code, libtype, lib_prefix_warning) + value = self._find_library_real(libname, env, extra_dirs, code, libtype, lib_prefix_warning, ignore_system_dirs) self.find_library_cache[key] = value else: value = self.find_library_cache[key] @@ -1175,9 +1175,9 @@ class CLikeCompiler(Compiler): return value.copy() def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], - libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: 
+ libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]: code = 'int main(void) { return 0; }\n' - return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning) + return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning, ignore_system_dirs) def find_framework_paths(self, env: 'Environment') -> T.List[str]: ''' diff --git a/mesonbuild/compilers/mixins/emscripten.py b/mesonbuild/compilers/mixins/emscripten.py index 91b25e8..83534e1 100644 --- a/mesonbuild/compilers/mixins/emscripten.py +++ b/mesonbuild/compilers/mixins/emscripten.py @@ -76,7 +76,7 @@ class EmscriptenMixin(Compiler): return wrap_js_includes(super().get_dependency_link_args(dep)) def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], - libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]: if not libname.endswith('.js'): return super().find_library(libname, env, extra_dirs, libtype, lib_prefix_warning) if os.path.isabs(libname): diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py index 9ea591e..ddcd14a 100644 --- a/mesonbuild/compilers/mixins/gnu.py +++ b/mesonbuild/compilers/mixins/gnu.py @@ -534,6 +534,8 @@ class GnuLikeCompiler(Compiler, metaclass=abc.ABCMeta): # We want to allow preprocessing files with any extension, such as # foo.c.in. In that case we need to tell GCC/CLANG to treat them as # assembly file. 
+ if self.language == 'fortran': + return self.get_preprocess_only_args() lang = gnu_lang_map.get(self.language, 'assembler-with-cpp') return self.get_preprocess_only_args() + [f'-x{lang}'] diff --git a/mesonbuild/compilers/mixins/pgi.py b/mesonbuild/compilers/mixins/pgi.py index 50335c8..fddc837 100644 --- a/mesonbuild/compilers/mixins/pgi.py +++ b/mesonbuild/compilers/mixins/pgi.py @@ -54,6 +54,12 @@ class PGICompiler(Compiler): def openmp_flags(self, env: Environment) -> T.List[str]: return ['-mp'] + def get_preprocess_only_args(self) -> T.List[str]: + return ['-E', '-P', '-o', '-'] + + def get_preprocess_to_file_args(self) -> T.List[str]: + return ['-E', '-P'] + def get_optimization_args(self, optimization_level: str) -> T.List[str]: return clike_optimization_args[optimization_level] diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py index cc9dc21..5ebb093 100644 --- a/mesonbuild/compilers/rust.py +++ b/mesonbuild/compilers/rust.py @@ -182,10 +182,14 @@ class RustCompiler(Compiler): return stdo.split('\n', maxsplit=1)[0] @functools.lru_cache(maxsize=None) - def get_crt_static(self) -> bool: + def get_cfgs(self) -> T.List[str]: cmd = self.get_exelist(ccache=False) + ['--print', 'cfg'] p, stdo, stde = Popen_safe_logged(cmd) - return bool(re.search('^target_feature="crt-static"$', stdo, re.MULTILINE)) + return stdo.splitlines() + + @functools.lru_cache(maxsize=None) + def get_crt_static(self) -> bool: + return 'target_feature="crt-static"' in self.get_cfgs() def get_debug_args(self, is_debug: bool) -> T.List[str]: return clike_debug_args[is_debug] @@ -317,11 +321,11 @@ class RustCompiler(Compiler): return exelist + args def has_multi_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]: - return self.compiles('fn main { std::process::exit(0) };\n', env, extra_args=args, mode=CompileCheckMode.COMPILE) + return self.compiles('fn main() { std::process::exit(0) }\n', env, extra_args=args, mode=CompileCheckMode.COMPILE) def 
has_multi_link_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]: args = self.linker.fatal_warnings() + args - return self.compiles('fn main { std::process::exit(0) };\n', env, extra_args=args, mode=CompileCheckMode.LINK) + return self.compiles('fn main() { std::process::exit(0) }\n', env, extra_args=args, mode=CompileCheckMode.LINK) @functools.lru_cache(maxsize=None) def get_rustdoc(self, env: 'Environment') -> T.Optional[RustdocTestCompiler]: diff --git a/mesonbuild/compilers/swift.py b/mesonbuild/compilers/swift.py index 528d76f..47d254b 100644 --- a/mesonbuild/compilers/swift.py +++ b/mesonbuild/compilers/swift.py @@ -8,7 +8,7 @@ import subprocess, os.path import typing as T from .. import mlog, options -from ..mesonlib import MesonException, version_compare +from ..mesonlib import first, MesonException, version_compare from .compilers import Compiler, clike_debug_args @@ -139,6 +139,12 @@ class SwiftCompiler(Compiler): if std != 'none': args += ['-swift-version', std] + # Pass C compiler -std=... 
arg to swiftc + c_lang = first(['objc', 'c'], lambda x: x in target.compilers) + if c_lang is not None: + cc = target.compilers[c_lang] + args.extend(arg for c_arg in cc.get_option_std_args(target, env, subproject) for arg in ['-Xcc', c_arg]) + return args def get_working_directory_args(self, path: str) -> T.Optional[T.List[str]]: @@ -147,6 +153,12 @@ class SwiftCompiler(Compiler): return ['-working-directory', path] + def get_cxx_interoperability_args(self, lang: T.Dict[str, Compiler]) -> T.List[str]: + if 'cpp' in lang or 'objcpp' in lang: + return ['-cxx-interoperability-mode=default'] + else: + return ['-cxx-interoperability-mode=off'] + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: for idx, i in enumerate(parameter_list): diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py index 28861a6..bbaefed 100644 --- a/mesonbuild/compilers/vala.py +++ b/mesonbuild/compilers/vala.py @@ -113,7 +113,7 @@ class ValaCompiler(Compiler): raise EnvironmentException(msg) def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], - libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True, ignore_system_dirs: bool = False) -> T.Optional[T.List[str]]: if extra_dirs and isinstance(extra_dirs, str): extra_dirs = [extra_dirs] # Valac always looks in the default vapi dir, so only search there if diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 90157df..26ef1b8 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -248,6 +248,7 @@ class CoreData: 'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942', 'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942', 'cpp': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942', + 'masm': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942', 'test': '3AC096D0-A1C2-E12C-1390-A8335801FDAB', 'directory': 
'2150E333-8FDC-42A3-9474-1A3956D46DE8', } @@ -412,6 +413,13 @@ class CoreData: return option_object.validate_value(override) return value + def set_from_configure_command(self, options: SharedCMDOptions) -> bool: + unset_opts = getattr(options, 'unset_opts', []) + all_D = options.projectoptions[:] + for key, valstr in options.cmd_line_options.items(): + all_D.append(f'{key!s}={valstr}') + return self.optstore.set_from_configure_command(all_D, unset_opts) + def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool: dirty = False try: @@ -565,17 +573,16 @@ class CoreData: return dirty - def add_compiler_options(self, c_options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice, - env: Environment, subproject: str) -> None: + def add_compiler_options(self, c_options: MutableKeyedOptionDictType, lang: str, for_machine: MachineChoice) -> None: for k, o in c_options.items(): - comp_key = OptionKey(f'{k.name}', None, for_machine) + assert k.subproject is None and k.machine is for_machine if lang == 'objc' and k.name == 'c_std': # For objective C, always fall back to c_std. 
- self.optstore.add_compiler_option('c', comp_key, o) + self.optstore.add_compiler_option('c', k, o) elif lang == 'objcpp' and k.name == 'cpp_std': - self.optstore.add_compiler_option('cpp', comp_key, o) + self.optstore.add_compiler_option('cpp', k, o) else: - self.optstore.add_compiler_option(lang, comp_key, o) + self.optstore.add_compiler_option(lang, k, o) def add_lang_args(self, lang: str, comp: T.Type['Compiler'], for_machine: MachineChoice, env: 'Environment') -> None: @@ -587,8 +594,8 @@ class CoreData: for gopt_key, gopt_valobj in compilers.get_global_options(lang, comp, for_machine, env).items(): self.optstore.add_compiler_option(lang, gopt_key, gopt_valobj) - def process_compiler_options(self, lang: str, comp: Compiler, env: Environment, subproject: str) -> None: - self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env, subproject) + def process_compiler_options(self, lang: str, comp: Compiler, subproject: str) -> None: + self.add_compiler_options(comp.get_options(), lang, comp.for_machine) for key in comp.base_options: if subproject: @@ -701,18 +708,15 @@ def register_builtin_arguments(parser: argparse.ArgumentParser) -> None: parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option", help='Set the value of an option, can be used several times to set multiple options.') -def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[str, str]: - result: T.OrderedDict[OptionKey, str] = OrderedDict() - for o in options: +def parse_cmd_line_options(args: SharedCMDOptions) -> None: + args.cmd_line_options = {} + for o in args.projectoptions: try: - (key, value) = o.split('=', 1) + keystr, value = o.split('=', 1) except ValueError: raise MesonException(f'Option {o!r} must have a value separated by equals sign.') - result[key] = value - return result - -def parse_cmd_line_options(args: SharedCMDOptions) -> None: - args.cmd_line_options = create_options_dict(args.projectoptions) + key = 
OptionKey.from_string(keystr) + args.cmd_line_options[key] = value # Merge builtin options set with --option into the dict. for key in chain( @@ -727,7 +731,7 @@ def parse_cmd_line_options(args: SharedCMDOptions) -> None: cmdline_name = options.argparse_name_to_arg(name) raise MesonException( f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.') - args.cmd_line_options[key.name] = value + args.cmd_line_options[key] = value delattr(args, name) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 38bfc08..732bae5 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -143,6 +143,11 @@ class Dependency(HoldableObject): def is_built(self) -> bool: return False + def is_named(self) -> bool: + if self.name is None: + return False + return self.name != f'dep{self._id}' + def summary_value(self) -> T.Union[str, mlog.AnsiDecorator, mlog.AnsiText]: if not self.found(): return mlog.red('NO') diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py index 82bf5ad..cd97127 100644 --- a/mesonbuild/dependencies/cuda.py +++ b/mesonbuild/dependencies/cuda.py @@ -11,9 +11,9 @@ from pathlib import Path from .. import mesonlib from .. 
import mlog -from ..environment import detect_cpu_family from .base import DependencyException, SystemDependency from .detect import packages +from ..mesonlib import LibType if T.TYPE_CHECKING: @@ -27,8 +27,11 @@ class CudaDependency(SystemDependency): supported_languages = ['cpp', 'c', 'cuda'] # see also _default_language def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None: - compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)] + for_machine = self.get_for_machine_from_kwargs(kwargs) + compilers = environment.coredata.compilers[for_machine] + machine = environment.machines[for_machine] language = self._detect_language(compilers) + if language not in self.supported_languages: raise DependencyException(f'Language \'{language}\' is not supported by the CUDA Toolkit. Supported languages are {self.supported_languages}.') @@ -50,16 +53,26 @@ class CudaDependency(SystemDependency): if not os.path.isabs(self.cuda_path): raise DependencyException(f'CUDA Toolkit path must be absolute, got \'{self.cuda_path}\'.') + # Cuda target directory relative to cuda path. + if machine.is_linux(): + # E.g. targets/x86_64-linux + self.target_path = os.path.join('targets', f'{machine.cpu_family}-{machine.system}') + else: + self.target_path = '.' 
+ # nvcc already knows where to find the CUDA Toolkit, but if we're compiling # a mixed C/C++/CUDA project, we still need to make the include dir searchable if self.language != 'cuda' or len(compilers) > 1: - self.incdir = os.path.join(self.cuda_path, 'include') + self.incdir = os.path.join(self.cuda_path, self.target_path, 'include') self.compile_args += [f'-I{self.incdir}'] arch_libdir = self._detect_arch_libdir() - self.libdir = os.path.join(self.cuda_path, arch_libdir) + self.libdir = os.path.join(self.cuda_path, self.target_path, arch_libdir) mlog.debug('CUDA library directory is', mlog.bold(self.libdir)) + if 'static' not in kwargs: + self.libtype = LibType.PREFER_STATIC + self.is_found = self._find_requested_libraries() @classmethod @@ -211,8 +224,8 @@ class CudaDependency(SystemDependency): return '.'.join(version.split('.')[:2]) def _detect_arch_libdir(self) -> str: - arch = detect_cpu_family(self.env.coredata.compilers.host) machine = self.env.machines[self.for_machine] + arch = machine.cpu_family msg = '{} architecture is not supported in {} version of the CUDA Toolkit.' 
if machine.is_windows(): libdirs = {'x86': 'Win32', 'x86_64': 'x64'} @@ -220,10 +233,7 @@ class CudaDependency(SystemDependency): raise DependencyException(msg.format(arch, 'Windows')) return os.path.join('lib', libdirs[arch]) elif machine.is_linux(): - libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64', 'loongarch64': 'lib64'} - if arch not in libdirs: - raise DependencyException(msg.format(arch, 'Linux')) - return libdirs[arch] + return 'lib' elif machine.is_darwin(): libdirs = {'x86_64': 'lib64'} if arch not in libdirs: @@ -236,13 +246,14 @@ class CudaDependency(SystemDependency): all_found = True for module in self.requested_modules: - args = self.clib_compiler.find_library(module, self.env, [self.libdir]) - if module == 'cudart_static' and self.language != 'cuda': - machine = self.env.machines[self.for_machine] - if machine.is_linux(): - # extracted by running - # nvcc -v foo.o - args += ['-lrt', '-lpthread', '-ldl'] + # You should only ever link to libraries inside the cuda tree, nothing outside of it. + # For instance, there is a + # + # - libnvidia-ml.so in stubs/ of the CUDA tree + # - libnvidia-ml.so in /usr/lib/ that is provided by the nvidia drivers + # + # Users should never link to the latter, since its ABI may change. 
+ args = self.clib_compiler.find_library(module, self.env, [self.libdir, os.path.join(self.libdir, 'stubs')], self.libtype, ignore_system_dirs=True) if args is None: self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'') @@ -284,23 +295,26 @@ class CudaDependency(SystemDependency): return candidates def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]: + # when using nvcc to link, we should instead use the native driver options + REWRITE_MODULES = { + 'cudart': ['-cudart', 'shared'], + 'cudart_static': ['-cudart', 'static'], + 'cudadevrt': ['-cudadevrt'], + } + args: T.List[str] = [] for lib in self.requested_modules: link_args = self.lib_modules[lib] - # Turn canonical arguments like - # /opt/cuda/lib64/libcublas.so - # back into - # -lcublas - # since this is how CUDA modules were passed to nvcc since time immemorial - if language == 'cuda': - if lib in frozenset(['cudart', 'cudart_static']): - # nvcc always links these unconditionally - mlog.debug(f'Not adding \'{lib}\' to dependency, since nvcc will link it implicitly') - link_args = [] - elif link_args and link_args[0].startswith(self.libdir): - # module included with CUDA, nvcc knows how to find these itself - mlog.debug(f'CUDA module \'{lib}\' found in CUDA libdir') - link_args = ['-l' + lib] + if language == 'cuda' and lib in REWRITE_MODULES: + link_args = REWRITE_MODULES[lib] + mlog.debug(f'Rewriting module \'{lib}\' to \'{link_args}\'') + elif lib == 'cudart_static': + machine = self.env.machines[self.for_machine] + if machine.is_linux(): + # extracted by running + # nvcc -v foo.o + link_args += ['-lrt', '-lpthread', '-ldl'] + args += link_args return args diff --git a/mesonbuild/dependencies/python.py b/mesonbuild/dependencies/python.py index 3dab31c..b028d9f 100644 --- a/mesonbuild/dependencies/python.py +++ b/mesonbuild/dependencies/python.py @@ -330,10 +330,12 @@ class PythonPkgConfigDependency(PkgConfigDependency, 
_PythonDependencyBase): # But not Apple, because it's a framework if self.env.machines.host.is_darwin() and 'PYTHONFRAMEWORKPREFIX' in self.variables: framework_prefix = self.variables['PYTHONFRAMEWORKPREFIX'] - # Add rpath, will be de-duplicated if necessary + # Add rpath, will be de-duplicated if necessary if framework_prefix.startswith('/Applications/Xcode.app/'): self.link_args += ['-Wl,-rpath,' + framework_prefix] - self.raw_link_args += ['-Wl,-rpath,' + framework_prefix] + if self.raw_link_args is not None: + # When None, self.link_args is used + self.raw_link_args += ['-Wl,-rpath,' + framework_prefix] class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase): @@ -350,8 +352,14 @@ class PythonSystemDependency(SystemDependency, _PythonDependencyBase): SystemDependency.__init__(self, name, environment, kwargs) _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False)) - # match pkg-config behavior - if self.link_libpython: + # For most platforms, match pkg-config behavior. iOS is a special case; + # check for that first, so that check takes priority over + # `link_libpython` (which *shouldn't* be set, but just in case) + if self.platform.startswith('ios-'): + # iOS doesn't use link_libpython - it links with the *framework*. 
+ self.link_args = ['-framework', 'Python', '-F', self.variables.get('prefix')] + self.is_found = True + elif self.link_libpython: # link args if mesonlib.is_windows(): self.find_libpy_windows(environment, limited_api=False) diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index fc44037..1e80a77 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -187,12 +187,9 @@ class VulkanDependencySystem(SystemDependency): def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None: super().__init__(name, environment, kwargs, language=language) - try: - self.vulkan_sdk = os.environ.get('VULKAN_SDK', os.environ['VK_SDK_PATH']) - if not os.path.isabs(self.vulkan_sdk): - raise DependencyException('VULKAN_SDK must be an absolute path.') - except KeyError: - self.vulkan_sdk = None + self.vulkan_sdk = os.environ.get('VULKAN_SDK', os.environ.get('VK_SDK_PATH')) + if self.vulkan_sdk and not os.path.isabs(self.vulkan_sdk): + raise DependencyException('VULKAN_SDK must be an absolute path.') if self.vulkan_sdk: # TODO: this config might not work on some platforms, fix bugs as reported @@ -242,7 +239,7 @@ class VulkanDependencySystem(SystemDependency): low=0, high=None, guess=e, prefix='#include <vulkan/vulkan.h>', env=environment, - extra_args=None, + extra_args=self.compile_args, dependencies=None)) # list containing vulkan version components and their expected value for c, e in [('MAJOR', 1), ('MINOR', 3), ('PATCH', None)]] diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index f322cda..2c3bdec 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -43,13 +43,20 @@ from mesonbuild import envconfig if T.TYPE_CHECKING: from .compilers import Compiler from .compilers.mixins.visualstudio import VisualStudioLikeCompiler - from .options import ElementaryOptionValues + from .options import OptionDict, ElementaryOptionValues from 
.wrap.wrap import Resolver from . import cargo CompilersDict = T.Dict[str, Compiler] +NON_LANG_ENV_OPTIONS = [ + ('PKG_CONFIG_PATH', 'pkg_config_path'), + ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'), + ('LDFLAGS', 'ldflags'), + ('CPPFLAGS', 'cppflags'), +] + build_filename = 'meson.build' @@ -639,7 +646,12 @@ class Environment: # # Note that order matters because of 'buildtype', if it is after # 'optimization' and 'debug' keys, it override them. - self.options: T.MutableMapping[OptionKey, ElementaryOptionValues] = collections.OrderedDict() + self.options: OptionDict = collections.OrderedDict() + + # Environment variables with the name converted into an OptionKey type. + # These have subtly different behavior compared to machine files, so do + # not store them in self.options. See _set_default_options_from_env. + self.env_opts: OptionDict = {} self.machinestore = machinefile.MachineFileStore(self.coredata.config_files, self.coredata.cross_files, self.source_dir) @@ -777,12 +789,7 @@ class Environment: def _set_default_options_from_env(self) -> None: opts: T.List[T.Tuple[str, str]] = ( [(v, f'{k}_args') for k, v in compilers.compilers.CFLAGS_MAPPING.items()] + - [ - ('PKG_CONFIG_PATH', 'pkg_config_path'), - ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'), - ('LDFLAGS', 'ldflags'), - ('CPPFLAGS', 'cppflags'), - ] + NON_LANG_ENV_OPTIONS ) env_opts: T.DefaultDict[OptionKey, T.List[str]] = collections.defaultdict(list) @@ -817,35 +824,35 @@ class Environment: env_opts[key].extend(p_list) elif keyname == 'cppflags': for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS: - key = OptionKey(f'{lang}_env_args', machine=for_machine) + key = OptionKey(f'{lang}_args', machine=for_machine) env_opts[key].extend(p_list) else: key = OptionKey.from_string(keyname).evolve(machine=for_machine) if evar in compilers.compilers.CFLAGS_MAPPING.values(): - # If this is an environment variable, we have to - # store it separately until the compiler is - # instantiated, as we don't know whether 
the - # compiler will want to use these arguments at link - # time and compile time (instead of just at compile - # time) until we're instantiating that `Compiler` - # object. This is required so that passing - # `-Dc_args=` on the command line and `$CFLAGS` - # have subtly different behavior. `$CFLAGS` will be - # added to the linker command line if the compiler - # acts as a linker driver, `-Dc_args` will not. - # - # We still use the original key as the base here, as - # we want to inherit the machine and the compiler - # language lang = key.name.split('_', 1)[0] - key = key.evolve(f'{lang}_env_args') + key = key.evolve(f'{lang}_args') env_opts[key].extend(p_list) - # Only store options that are not already in self.options, - # otherwise we'd override the machine files - for k, v in env_opts.items(): - if k not in self.options: - self.options[k] = v + # If this is an environment variable, we have to + # store it separately until the compiler is + # instantiated, as we don't know whether the + # compiler will want to use these arguments at link + # time and compile time (instead of just at compile + # time) until we're instantiating that `Compiler` + # object. This is required so that passing + # `-Dc_args=` on the command line and `$CFLAGS` + # have subtly different behavior. `$CFLAGS` will be + # added to the linker command line if the compiler + # acts as a linker driver, `-Dc_args` will not. + for (_, keyname), for_machine in itertools.product(NON_LANG_ENV_OPTIONS, MachineChoice): + key = OptionKey.from_string(keyname).evolve(machine=for_machine) + # Only store options that are not already in self.options, + # otherwise we'd override the machine files + if key in env_opts and key not in self.options: + self.options[key] = env_opts[key] + del env_opts[key] + + self.env_opts.update(env_opts) def _set_default_binaries_from_env(self) -> None: """Set default binaries from the environment. 
diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py index 8aeac8a..57e9499 100644 --- a/mesonbuild/interpreter/compiler.py +++ b/mesonbuild/interpreter/compiler.py @@ -19,7 +19,7 @@ from ..compilers import SUFFIX_TO_LANG, RunResult from ..compilers.compilers import CompileCheckMode from ..interpreterbase import (ObjectHolder, noPosargs, noKwargs, FeatureNew, FeatureNewKwargs, disablerIfNotFound, - InterpreterException) + InterpreterException, InterpreterObject) from ..interpreterbase.decorators import ContainerTypeInfo, typed_kwargs, KwargInfo, typed_pos_args from ..options import OptionKey from .interpreterobjects import (extract_required_kwarg, extract_search_dirs) @@ -110,29 +110,28 @@ class _TestMode(enum.Enum): class TryRunResultHolder(ObjectHolder['RunResult']): def __init__(self, res: 'RunResult', interpreter: 'Interpreter'): super().__init__(res, interpreter) - self.methods.update({'returncode': self.returncode_method, - 'compiled': self.compiled_method, - 'stdout': self.stdout_method, - 'stderr': self.stderr_method, - }) @noPosargs @noKwargs + @InterpreterObject.method('returncode') def returncode_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> int: return self.held_object.returncode @noPosargs @noKwargs + @InterpreterObject.method('compiled') def compiled_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: return self.held_object.compiled @noPosargs @noKwargs + @InterpreterObject.method('stdout') def stdout_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.held_object.stdout @noPosargs @noKwargs + @InterpreterObject.method('stderr') def stderr_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.held_object.stderr @@ -190,40 +189,6 @@ class CompilerHolder(ObjectHolder['Compiler']): def __init__(self, compiler: 'Compiler', interpreter: 'Interpreter'): super().__init__(compiler, interpreter) self.environment = self.env - 
self.methods.update({'compiles': self.compiles_method, - 'links': self.links_method, - 'get_id': self.get_id_method, - 'get_linker_id': self.get_linker_id_method, - 'compute_int': self.compute_int_method, - 'sizeof': self.sizeof_method, - 'get_define': self.get_define_method, - 'has_define': self.has_define_method, - 'check_header': self.check_header_method, - 'has_header': self.has_header_method, - 'has_header_symbol': self.has_header_symbol_method, - 'run': self.run_method, - 'has_function': self.has_function_method, - 'has_member': self.has_member_method, - 'has_members': self.has_members_method, - 'has_type': self.has_type_method, - 'alignment': self.alignment_method, - 'version': self.version_method, - 'cmd_array': self.cmd_array_method, - 'find_library': self.find_library_method, - 'has_argument': self.has_argument_method, - 'has_function_attribute': self.has_func_attribute_method, - 'get_supported_function_attributes': self.get_supported_function_attributes_method, - 'has_multi_arguments': self.has_multi_arguments_method, - 'get_supported_arguments': self.get_supported_arguments_method, - 'first_supported_argument': self.first_supported_argument_method, - 'has_link_argument': self.has_link_argument_method, - 'has_multi_link_arguments': self.has_multi_link_arguments_method, - 'get_supported_link_arguments': self.get_supported_link_arguments_method, - 'first_supported_link_argument': self.first_supported_link_argument_method, - 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method, - 'get_argument_syntax': self.get_argument_syntax_method, - 'preprocess': self.preprocess_method, - }) @property def compiler(self) -> 'Compiler': @@ -254,11 +219,13 @@ class CompilerHolder(ObjectHolder['Compiler']): @noPosargs @noKwargs + @InterpreterObject.method('version') def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.compiler.version @noPosargs @noKwargs + @InterpreterObject.method('cmd_array') def 
cmd_array_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]: return self.compiler.exelist @@ -289,6 +256,7 @@ class CompilerHolder(ObjectHolder['Compiler']): _ARGS_KW, _DEPENDENCIES_KW, ) + @InterpreterObject.method('alignment') def alignment_method(self, args: T.Tuple[str], kwargs: 'AlignmentKw') -> int: typename = args[0] deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross) @@ -302,6 +270,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.run', (str, mesonlib.File)) @typed_kwargs('compiler.run', *_COMPILES_KWS) + @InterpreterObject.method('run') def run_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> 'RunResult': if self.compiler.language not in {'d', 'c', 'cpp', 'objc', 'objcpp', 'fortran'}: FeatureNew.single_use(f'compiler.run for {self.compiler.get_display_language()} language', @@ -338,17 +307,20 @@ class CompilerHolder(ObjectHolder['Compiler']): @noPosargs @noKwargs + @InterpreterObject.method('get_id') def get_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.compiler.get_id() @noPosargs @noKwargs @FeatureNew('compiler.get_linker_id', '0.53.0') + @InterpreterObject.method('get_linker_id') def get_linker_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.compiler.get_linker_id() @noPosargs @noKwargs + @InterpreterObject.method('symbols_have_underscore_prefix') def symbols_have_underscore_prefix_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: ''' Check if the compiler prefixes _ (underscore) to global C symbols @@ -358,6 +330,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.has_member', str, str) @typed_kwargs('compiler.has_member', _HAS_REQUIRED_KW, *_COMMON_KWS) + @InterpreterObject.method('has_member') def has_member_method(self, args: T.Tuple[str, str], kwargs: 'HasKW') -> bool: typename, membername = 
args disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) @@ -383,6 +356,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.has_members', str, varargs=str, min_varargs=1) @typed_kwargs('compiler.has_members', _HAS_REQUIRED_KW, *_COMMON_KWS) + @InterpreterObject.method('has_members') def has_members_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'HasKW') -> bool: typename, membernames = args members = mlog.bold(', '.join([f'"{m}"' for m in membernames])) @@ -410,6 +384,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.has_function', str) @typed_kwargs('compiler.has_function', _HAS_REQUIRED_KW, *_COMMON_KWS) + @InterpreterObject.method('has_function') def has_function_method(self, args: T.Tuple[str], kwargs: 'HasKW') -> bool: funcname = args[0] disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) @@ -433,6 +408,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.has_type', str) @typed_kwargs('compiler.has_type', _HAS_REQUIRED_KW, *_COMMON_KWS) + @InterpreterObject.method('has_type') def has_type_method(self, args: T.Tuple[str], kwargs: 'HasKW') -> bool: typename = args[0] disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) @@ -462,6 +438,7 @@ class CompilerHolder(ObjectHolder['Compiler']): KwargInfo('guess', (int, NoneType)), *_COMMON_KWS, ) + @InterpreterObject.method('compute_int') def compute_int_method(self, args: T.Tuple[str], kwargs: 'ComputeIntKW') -> int: expression = args[0] extra_args = functools.partial(self._determine_args, kwargs) @@ -475,6 +452,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.sizeof', str) @typed_kwargs('compiler.sizeof', *_COMMON_KWS) + @InterpreterObject.method('sizeof') def sizeof_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> int: element = args[0] extra_args = 
functools.partial(self._determine_args, kwargs) @@ -489,6 +467,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @FeatureNew('compiler.get_define', '0.40.0') @typed_pos_args('compiler.get_define', str) @typed_kwargs('compiler.get_define', *_COMMON_KWS) + @InterpreterObject.method('get_define') def get_define_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> str: element = args[0] extra_args = functools.partial(self._determine_args, kwargs) @@ -504,6 +483,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @FeatureNew('compiler.has_define', '1.3.0') @typed_pos_args('compiler.has_define', str) @typed_kwargs('compiler.has_define', *_COMMON_KWS) + @InterpreterObject.method('has_define') def has_define_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> bool: define_name = args[0] extra_args = functools.partial(self._determine_args, kwargs) @@ -519,6 +499,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.compiles', (str, mesonlib.File)) @typed_kwargs('compiler.compiles', *_COMPILES_KWS) + @InterpreterObject.method('compiles') def compiles_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool: code = args[0] testname = kwargs['name'] @@ -555,6 +536,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.links', (str, mesonlib.File)) @typed_kwargs('compiler.links', *_COMPILES_KWS) + @InterpreterObject.method('links') def links_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool: code = args[0] testname = kwargs['name'] @@ -606,6 +588,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @FeatureNew('compiler.check_header', '0.47.0') @typed_pos_args('compiler.check_header', str) @typed_kwargs('compiler.check_header', *_HEADER_KWS) + @InterpreterObject.method('check_header') def check_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool: hname = args[0] disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, 
default=False) @@ -648,11 +631,13 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.has_header', str) @typed_kwargs('compiler.has_header', *_HEADER_KWS) + @InterpreterObject.method('has_header') def has_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool: return self._has_header_impl(args[0], kwargs) @typed_pos_args('compiler.has_header_symbol', str, str) @typed_kwargs('compiler.has_header_symbol', *_HEADER_KWS) + @InterpreterObject.method('has_header_symbol') def has_header_symbol_method(self, args: T.Tuple[str, str], kwargs: 'HeaderKW') -> bool: hname, symbol = args disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) @@ -692,6 +677,7 @@ class CompilerHolder(ObjectHolder['Compiler']): KwargInfo('dirs', ContainerTypeInfo(list, str), listify=True, default=[]), *(k.evolve(name=f'header_{k.name}') for k in _HEADER_KWS) ) + @InterpreterObject.method('find_library') def find_library_method(self, args: T.Tuple[str], kwargs: 'FindLibraryKW') -> 'dependencies.ExternalLibrary': # TODO add dependencies support? 
libname = args[0] @@ -772,12 +758,14 @@ class CompilerHolder(ObjectHolder['Compiler']): @typed_pos_args('compiler.has_argument', str) @typed_kwargs('compiler.has_argument', _HAS_REQUIRED_KW) + @InterpreterObject.method('has_argument') def has_argument_method(self, args: T.Tuple[str], kwargs: 'HasArgumentKW') -> bool: return self._has_argument_impl([args[0]], kwargs=kwargs) @typed_pos_args('compiler.has_multi_arguments', varargs=str) @typed_kwargs('compiler.has_multi_arguments', _HAS_REQUIRED_KW) @FeatureNew('compiler.has_multi_arguments', '0.37.0') + @InterpreterObject.method('has_multi_arguments') def has_multi_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'HasArgumentKW') -> bool: return self._has_argument_impl(args[0], kwargs=kwargs) @@ -788,6 +776,7 @@ class CompilerHolder(ObjectHolder['Compiler']): KwargInfo('checked', str, default='off', since='0.59.0', validator=in_set_validator({'warn', 'require', 'off'})), ) + @InterpreterObject.method('get_supported_arguments') def get_supported_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'GetSupportedArgumentKw') -> T.List[str]: supported_args: T.List[str] = [] checked = kwargs['checked'] @@ -805,6 +794,7 @@ class CompilerHolder(ObjectHolder['Compiler']): @noKwargs @typed_pos_args('compiler.first_supported_argument', varargs=str) + @InterpreterObject.method('first_supported_argument') def first_supported_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]: for arg in args[0]: if self._has_argument_impl([arg]): @@ -816,18 +806,21 @@ class CompilerHolder(ObjectHolder['Compiler']): @FeatureNew('compiler.has_link_argument', '0.46.0') @typed_pos_args('compiler.has_link_argument', str) @typed_kwargs('compiler.has_link_argument', _HAS_REQUIRED_KW) + @InterpreterObject.method('has_link_argument') def has_link_argument_method(self, args: T.Tuple[str], kwargs: 'HasArgumentKW') -> bool: return self._has_argument_impl([args[0]], mode=_TestMode.LINKER, kwargs=kwargs) 
@FeatureNew('compiler.has_multi_link_argument', '0.46.0') @typed_pos_args('compiler.has_multi_link_argument', varargs=str) @typed_kwargs('compiler.has_multi_link_argument', _HAS_REQUIRED_KW) + @InterpreterObject.method('has_multi_link_arguments') def has_multi_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'HasArgumentKW') -> bool: return self._has_argument_impl(args[0], mode=_TestMode.LINKER, kwargs=kwargs) @FeatureNew('compiler.get_supported_link_arguments', '0.46.0') @noKwargs @typed_pos_args('compiler.get_supported_link_arguments', varargs=str) + @InterpreterObject.method('get_supported_link_arguments') def get_supported_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]: supported_args: T.List[str] = [] for arg in args[0]: @@ -835,9 +828,10 @@ class CompilerHolder(ObjectHolder['Compiler']): supported_args.append(arg) return supported_args - @FeatureNew('compiler.first_supported_link_argument_method', '0.46.0') + @FeatureNew('compiler.first_supported_link_argument', '0.46.0') @noKwargs @typed_pos_args('compiler.first_supported_link_argument', varargs=str) + @InterpreterObject.method('first_supported_link_argument') def first_supported_link_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]: for arg in args[0]: if self._has_argument_impl([arg], mode=_TestMode.LINKER): @@ -871,18 +865,21 @@ class CompilerHolder(ObjectHolder['Compiler']): @FeatureNew('compiler.has_function_attribute', '0.48.0') @typed_pos_args('compiler.has_function_attribute', str) @typed_kwargs('compiler.has_function_attribute', _HAS_REQUIRED_KW) + @InterpreterObject.method('has_function_attribute') def has_func_attribute_method(self, args: T.Tuple[str], kwargs: 'HasArgumentKW') -> bool: return self._has_function_attribute_impl(args[0], kwargs) @FeatureNew('compiler.get_supported_function_attributes', '0.48.0') @noKwargs @typed_pos_args('compiler.get_supported_function_attributes', varargs=str) + 
@InterpreterObject.method('get_supported_function_attributes') def get_supported_function_attributes_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]: return [a for a in args[0] if self._has_function_attribute_impl(a)] - @FeatureNew('compiler.get_argument_syntax_method', '0.49.0') + @FeatureNew('compiler.get_argument_syntax', '0.49.0') @noPosargs @noKwargs + @InterpreterObject.method('get_argument_syntax') def get_argument_syntax_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.compiler.get_argument_syntax() @@ -897,6 +894,7 @@ class CompilerHolder(ObjectHolder['Compiler']): _DEPENDENCIES_KW.evolve(since='1.1.0'), _DEPENDS_KW.evolve(since='1.4.0'), ) + @InterpreterObject.method('preprocess') def preprocess_method(self, args: T.Tuple[T.List['mesonlib.FileOrString']], kwargs: 'PreprocessKW') -> T.List[build.CustomTargetIndex]: compiler = self.compiler.get_preprocessor() _sources: T.List[mesonlib.File] = self.interpreter.source_strings_to_files(args[0]) diff --git a/mesonbuild/interpreter/dependencyfallbacks.py b/mesonbuild/interpreter/dependencyfallbacks.py index 0ebfe3b..f415026 100644 --- a/mesonbuild/interpreter/dependencyfallbacks.py +++ b/mesonbuild/interpreter/dependencyfallbacks.py @@ -4,14 +4,12 @@ from __future__ import annotations -import copy - from .interpreterobjects import extract_required_kwarg from .. import mlog from .. import dependencies from .. 
import build from ..wrap import WrapMode -from ..mesonlib import extract_as_list, stringlistify, version_compare_many, listify +from ..mesonlib import extract_as_list, stringlistify, version_compare_many from ..options import OptionKey from ..dependencies import Dependency, DependencyException, NotFoundDependency from ..interpreterbase import (MesonInterpreterObject, FeatureNew, @@ -124,21 +122,17 @@ class DependencyFallbacksHolder(MesonInterpreterObject): # dependency('foo', static: true) should implicitly add # default_options: ['default_library=static'] static = kwargs.get('static') - default_options = func_kwargs.get('default_options', {}) - if static is not None and 'default_library' not in default_options: + forced_options = {} + if static is not None: default_library = 'static' if static else 'shared' mlog.log(f'Building fallback subproject with default_library={default_library}') - default_options = copy.copy(default_options) - default_options['default_library'] = default_library - func_kwargs['default_options'] = default_options + forced_options[OptionKey('default_library')] = default_library # Configure the subproject subp_name = self.subproject_name varname = self.subproject_varname func_kwargs.setdefault('version', []) - if 'default_options' in kwargs and isinstance(kwargs['default_options'], str): - func_kwargs['default_options'] = listify(kwargs['default_options']) - self.interpreter.do_subproject(subp_name, func_kwargs) + self.interpreter.do_subproject(subp_name, func_kwargs, forced_options=forced_options) return self._get_subproject_dep(subp_name, varname, kwargs) def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]: diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py index abdc889..29bb705 100644 --- a/mesonbuild/interpreter/interpreter.py +++ b/mesonbuild/interpreter/interpreter.py @@ -115,6 +115,7 @@ if T.TYPE_CHECKING: from . 
import kwargs as kwtypes from ..backend.backends import Backend from ..interpreterbase.baseobjects import InterpreterObject, TYPE_var, TYPE_kwargs + from ..options import OptionDict from ..programs import OverrideProgram from .type_checking import SourcesVarargsType @@ -270,7 +271,7 @@ class Interpreter(InterpreterBase, HoldableObject): subproject: str = '', subdir: str = '', subproject_dir: str = 'subprojects', - default_project_options: T.Optional[T.Dict[OptionKey, str]] = None, + invoker_method_default_options: T.Optional[OptionDict] = None, ast: T.Optional[mparser.CodeBlockNode] = None, relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None, user_defined_options: T.Optional[coredata.SharedCMDOptions] = None, @@ -295,13 +296,12 @@ class Interpreter(InterpreterBase, HoldableObject): self.subproject_stack: T.List[str] = [] self.configure_file_outputs: T.Dict[str, int] = {} # Passed from the outside, only used in subprojects. - if default_project_options: - self.default_project_options = default_project_options if isinstance(default_project_options, str) else default_project_options.copy() - if isinstance(default_project_options, dict): - pass + if invoker_method_default_options: + assert isinstance(invoker_method_default_options, dict) + self.invoker_method_default_options = invoker_method_default_options else: - self.default_project_options = {} - self.project_default_options: T.List[str] = [] + self.invoker_method_default_options = {} + self.project_default_options: OptionDict = {} self.build_func_dict() self.build_holder_map() self.user_defined_options = user_defined_options @@ -426,6 +426,7 @@ class Interpreter(InterpreterBase, HoldableObject): build.Generator: OBJ.GeneratorHolder, build.GeneratedList: OBJ.GeneratedListHolder, build.ExtractedObjects: OBJ.GeneratedObjectsHolder, + build.OverrideExecutable: OBJ.OverrideExecutableHolder, build.RunTarget: OBJ.RunTargetHolder, build.AliasTarget: OBJ.AliasTargetHolder, build.Headers: OBJ.HeadersHolder, @@ 
-868,7 +869,8 @@ class Interpreter(InterpreterBase, HoldableObject): self.subprojects[subp_name] = sub return sub - def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None) -> SubprojectHolder: + def do_subproject(self, subp_name: str, kwargs: kwtypes.DoSubproject, force_method: T.Optional[wrap.Method] = None, + forced_options: T.Optional[OptionDict] = None) -> SubprojectHolder: if subp_name == 'sub_static': pass disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) @@ -879,6 +881,16 @@ class Interpreter(InterpreterBase, HoldableObject): default_options = kwargs['default_options'] + # This in practice is only used for default_library. forced_options is the + # only case in which a meson.build file overrides the machine file or the + # command line. + if forced_options: + for k, v in forced_options.items(): + # FIXME: this should have no business poking at augments[], + # but set_option() does not do what we want + self.coredata.optstore.augments[k.evolve(subproject=subp_name)] = v + default_options = {**forced_options, **default_options} + if subp_name == '': raise InterpreterException('Subproject name must not be empty.') if subp_name[0] == '.': @@ -930,7 +942,8 @@ class Interpreter(InterpreterBase, HoldableObject): m += ['method', mlog.bold(method)] mlog.log(*m, '\n', nested=False) - methods_map: T.Dict[wrap.Method, T.Callable[[str, str, T.Dict[OptionKey, str, kwtypes.DoSubproject]], SubprojectHolder]] = { + methods_map: T.Dict[wrap.Method, T.Callable[[str, str, OptionDict, kwtypes.DoSubproject], + SubprojectHolder]] = { 'meson': self._do_subproject_meson, 'cmake': self._do_subproject_cmake, 'cargo': self._do_subproject_cargo, @@ -952,7 +965,7 @@ class Interpreter(InterpreterBase, HoldableObject): raise e def _do_subproject_meson(self, subp_name: str, subdir: str, - default_options: T.List[str], + default_options: OptionDict, kwargs: kwtypes.DoSubproject, ast: 
T.Optional[mparser.CodeBlockNode] = None, build_def_files: T.Optional[T.List[str]] = None, @@ -1012,7 +1025,7 @@ class Interpreter(InterpreterBase, HoldableObject): return self.subprojects[subp_name] def _do_subproject_cmake(self, subp_name: str, subdir: str, - default_options: T.List[str], + default_options: OptionDict, kwargs: kwtypes.DoSubproject) -> SubprojectHolder: from ..cmake import CMakeInterpreter with mlog.nested(subp_name): @@ -1039,13 +1052,14 @@ class Interpreter(InterpreterBase, HoldableObject): return result def _do_subproject_cargo(self, subp_name: str, subdir: str, - default_options: T.List[str], + default_options: OptionDict, kwargs: kwtypes.DoSubproject) -> SubprojectHolder: from .. import cargo FeatureNew.single_use('Cargo subproject', '1.3.0', self.subproject, location=self.current_node) mlog.warning('Cargo subproject is an experimental feature and has no backwards compatibility guarantees.', once=True, location=self.current_node) if self.environment.cargo is None: + self.add_languages(['rust'], True, MachineChoice.HOST) self.environment.cargo = cargo.Interpreter(self.environment) with mlog.nested(subp_name): ast = self.environment.cargo.interpret(subdir) @@ -1184,20 +1198,17 @@ class Interpreter(InterpreterBase, HoldableObject): self._load_option_file() self.project_default_options = kwargs['default_options'] - if isinstance(self.project_default_options, str): - self.project_default_options = [self.project_default_options] - assert isinstance(self.project_default_options, (list, dict)) if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects): if self.subproject == '': self.coredata.optstore.initialize_from_top_level_project_call(self.project_default_options, self.user_defined_options.cmd_line_options, self.environment.options) else: - invoker_method_default_options = self.default_project_options self.coredata.optstore.initialize_from_subproject_call(self.subproject, - 
invoker_method_default_options, + self.invoker_method_default_options, self.project_default_options, - self.user_defined_options.cmd_line_options) + self.user_defined_options.cmd_line_options, + self.environment.options) self.coredata.initialized_subprojects.add(self.subproject) if not self.is_subproject(): @@ -1529,7 +1540,7 @@ class Interpreter(InterpreterBase, HoldableObject): self.backend.allow_thin_archives[for_machine] = False else: # update new values from commandline, if it applies - self.coredata.process_compiler_options(lang, comp, self.environment, self.subproject) + self.coredata.process_compiler_options(lang, comp, self.subproject) if for_machine == MachineChoice.HOST or self.environment.is_cross_build(): logger_fun = mlog.log @@ -1590,7 +1601,7 @@ class Interpreter(InterpreterBase, HoldableObject): def program_from_overrides(self, command_names: T.List[mesonlib.FileOrString], extra_info: T.List['mlog.TV_Loggable'] - ) -> T.Optional[T.Union[ExternalProgram, OverrideProgram, build.Executable]]: + ) -> T.Optional[T.Union[ExternalProgram, OverrideProgram, build.OverrideExecutable]]: for name in command_names: if not isinstance(name, str): continue @@ -1605,7 +1616,7 @@ class Interpreter(InterpreterBase, HoldableObject): if isinstance(name, str): self.build.searched_programs.add(name) - def add_find_program_override(self, name: str, exe: T.Union[build.Executable, ExternalProgram, 'OverrideProgram']) -> None: + def add_find_program_override(self, name: str, exe: T.Union[build.OverrideExecutable, ExternalProgram, 'OverrideProgram']) -> None: if name in self.build.searched_programs: raise InterpreterException(f'Tried to override finding of executable "{name}" which has already been found.') if name in self.build.find_overrides: @@ -1624,13 +1635,13 @@ class Interpreter(InterpreterBase, HoldableObject): # the host machine. 
def find_program_impl(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice = MachineChoice.HOST, - default_options: T.Optional[T.Dict[OptionKey, options.ElementaryOptionValues]] = None, + default_options: T.Optional[OptionDict] = None, required: bool = True, silent: bool = True, wanted: T.Union[str, T.List[str]] = '', search_dirs: T.Optional[T.List[str]] = None, version_arg: T.Optional[str] = '', version_func: T.Optional[ProgramVersionFunc] = None - ) -> T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']: + ) -> T.Union['ExternalProgram', 'build.OverrideExecutable', 'OverrideProgram']: args = mesonlib.listify(args) extra_info: T.List[mlog.TV_Loggable] = [] @@ -1655,7 +1666,7 @@ class Interpreter(InterpreterBase, HoldableObject): return progobj def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice, - default_options: T.Optional[T.Dict[OptionKey, options.ElementaryOptionValues]], + default_options: T.Optional[OptionDict], required: bool, search_dirs: T.Optional[T.List[str]], wanted: T.Union[str, T.List[str]], @@ -1723,7 +1734,7 @@ class Interpreter(InterpreterBase, HoldableObject): return True def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrString], - default_options: T.Dict[OptionKey, options.ElementaryOptionValues], + default_options: OptionDict, required: bool, extra_info: T.List[mlog.TV_Loggable] ) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]: mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program', diff --git a/mesonbuild/interpreter/interpreterobjects.py b/mesonbuild/interpreter/interpreterobjects.py index a2fadbe..17ba989 100644 --- a/mesonbuild/interpreter/interpreterobjects.py +++ b/mesonbuild/interpreter/interpreterobjects.py @@ -15,7 +15,7 @@ from .. 
import mlog from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule, NewExtensionModule from ..backend.backends import TestProtocol from ..interpreterbase import ( - ContainerTypeInfo, KwargInfo, MesonOperator, + ContainerTypeInfo, KwargInfo, InterpreterObject, MesonOperator, MesonInterpreterObject, ObjectHolder, MutableInterpreterObject, FeatureNew, FeatureDeprecated, typed_pos_args, typed_kwargs, typed_operator, @@ -32,7 +32,7 @@ if T.TYPE_CHECKING: from . import kwargs from ..cmake.interpreter import CMakeInterpreter from ..envconfig import MachineInfo - from ..interpreterbase import FeatureCheckBase, InterpreterObject, SubProject, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs + from ..interpreterbase import FeatureCheckBase, SubProject, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs from .interpreter import Interpreter from typing_extensions import TypedDict @@ -97,16 +97,6 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]): auto = T.cast('options.UserFeatureOption', self.env.coredata.optstore.get_value_object_for('auto_features')) self.held_object = copy.copy(auto) self.held_object.name = option.name - self.methods.update({'enabled': self.enabled_method, - 'disabled': self.disabled_method, - 'allowed': self.allowed_method, - 'auto': self.auto_method, - 'require': self.require_method, - 'disable_auto_if': self.disable_auto_if_method, - 'enable_auto_if': self.enable_auto_if_method, - 'disable_if': self.disable_if_method, - 'enable_if': self.enable_if_method, - }) @property def value(self) -> str: @@ -124,22 +114,26 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]): @noPosargs @noKwargs + @InterpreterObject.method('enabled') def enabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.value == 'enabled' @noPosargs @noKwargs + @InterpreterObject.method('disabled') def disabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.value == 
'disabled' @noPosargs @noKwargs @FeatureNew('feature_option.allowed()', '0.59.0') + @InterpreterObject.method('allowed') def allowed_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.value != 'disabled' @noPosargs @noKwargs + @InterpreterObject.method('auto') def auto_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.value == 'auto' @@ -160,6 +154,7 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]): 'feature_option.require', _ERROR_MSG_KW, ) + @InterpreterObject.method('require') def require_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption: return self._disable_if(not args[0], kwargs['error_message']) @@ -169,6 +164,7 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]): 'feature_option.disable_if', _ERROR_MSG_KW, ) + @InterpreterObject.method('disable_if') def disable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption: return self._disable_if(args[0], kwargs['error_message']) @@ -178,6 +174,7 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]): 'feature_option.enable_if', _ERROR_MSG_KW, ) + @InterpreterObject.method('enable_if') def enable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> options.UserFeatureOption: if not args[0]: return copy.deepcopy(self.held_object) @@ -192,12 +189,14 @@ class FeatureOptionHolder(ObjectHolder[options.UserFeatureOption]): @FeatureNew('feature_option.disable_auto_if()', '0.59.0') @noKwargs @typed_pos_args('feature_option.disable_auto_if', bool) + @InterpreterObject.method('disable_auto_if') def disable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> options.UserFeatureOption: return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled() @FeatureNew('feature_option.enable_auto_if()', '1.1.0') @noKwargs 
@typed_pos_args('feature_option.enable_auto_if', bool) + @InterpreterObject.method('enable_auto_if') def enable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> options.UserFeatureOption: return self.as_enabled() if self.value == 'auto' and args[0] else copy.deepcopy(self.held_object) @@ -220,10 +219,6 @@ class RunProcess(MesonInterpreterObject): raise AssertionError('BUG: RunProcess must be passed an ExternalProgram') self.capture = capture self.returncode, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check) - self.methods.update({'returncode': self.returncode_method, - 'stdout': self.stdout_method, - 'stderr': self.stderr_method, - }) def run_command(self, cmd: ExternalProgram, @@ -271,16 +266,19 @@ class RunProcess(MesonInterpreterObject): @noPosargs @noKwargs + @InterpreterObject.method('returncode') def returncode_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int: return self.returncode @noPosargs @noKwargs + @InterpreterObject.method('stdout') def stdout_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.stdout @noPosargs @noKwargs + @InterpreterObject.method('stderr') def stderr_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.stderr @@ -288,11 +286,6 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu def __init__(self, obj: mesonlib.EnvironmentVariables, interpreter: 'Interpreter'): super().__init__(obj, interpreter) - self.methods.update({'set': self.set_method, - 'unset': self.unset_method, - 'append': self.append_method, - 'prepend': self.prepend_method, - }) def __repr__(self) -> str: repr_str = "<{0}: {1}>" @@ -310,6 +303,7 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu @typed_pos_args('environment.set', str, varargs=str, min_varargs=1) @typed_kwargs('environment.set', ENV_SEPARATOR_KW) + @InterpreterObject.method('set') 
def set_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None: name, values = args self.held_object.set(name, values, kwargs['separator']) @@ -317,11 +311,13 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu @FeatureNew('environment.unset', '1.4.0') @typed_pos_args('environment.unset', str) @noKwargs + @InterpreterObject.method('unset') def unset_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> None: self.held_object.unset(args[0]) @typed_pos_args('environment.append', str, varargs=str, min_varargs=1) @typed_kwargs('environment.append', ENV_SEPARATOR_KW) + @InterpreterObject.method('append') def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None: name, values = args self.warn_if_has_name(name) @@ -329,6 +325,7 @@ class EnvironmentVariablesHolder(ObjectHolder[mesonlib.EnvironmentVariables], Mu @typed_pos_args('environment.prepend', str, varargs=str, min_varargs=1) @typed_kwargs('environment.prepend', ENV_SEPARATOR_KW) + @InterpreterObject.method('prepend') def prepend_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None: name, values = args self.warn_if_has_name(name) @@ -342,15 +339,6 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte def __init__(self, obj: build.ConfigurationData, interpreter: 'Interpreter'): super().__init__(obj, interpreter) - self.methods.update({'set': self.set_method, - 'set10': self.set10_method, - 'set_quoted': self.set_quoted_method, - 'has': self.has_method, - 'get': self.get_method, - 'keys': self.keys_method, - 'get_unquoted': self.get_unquoted_method, - 'merge_from': self.merge_from_method, - }) def __deepcopy__(self, memo: T.Dict) -> 'ConfigurationDataHolder': return ConfigurationDataHolder(copy.deepcopy(self.held_object), self.interpreter) @@ -364,12 +352,14 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte 
@typed_pos_args('configuration_data.set', str, (str, int, bool)) @typed_kwargs('configuration_data.set', _CONF_DATA_SET_KWS) + @InterpreterObject.method('set') def set_method(self, args: T.Tuple[str, T.Union[str, int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None: self.__check_used() self.held_object.values[args[0]] = (args[1], kwargs['description']) @typed_pos_args('configuration_data.set_quoted', str, str) @typed_kwargs('configuration_data.set_quoted', _CONF_DATA_SET_KWS) + @InterpreterObject.method('set_quoted') def set_quoted_method(self, args: T.Tuple[str, str], kwargs: 'kwargs.ConfigurationDataSet') -> None: self.__check_used() escaped_val = '\\"'.join(args[1].split('"')) @@ -377,6 +367,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte @typed_pos_args('configuration_data.set10', str, (int, bool)) @typed_kwargs('configuration_data.set10', _CONF_DATA_SET_KWS) + @InterpreterObject.method('set10') def set10_method(self, args: T.Tuple[str, T.Union[int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None: self.__check_used() # bool is a subclass of int, so we need to check for bool explicitly. 
@@ -394,12 +385,14 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte @typed_pos_args('configuration_data.has', (str, int, bool)) @noKwargs + @InterpreterObject.method('has') def has_method(self, args: T.Tuple[T.Union[str, int, bool]], kwargs: TYPE_kwargs) -> bool: return args[0] in self.held_object.values @FeatureNew('configuration_data.get()', '0.38.0') @typed_pos_args('configuration_data.get', str, optargs=[(str, int, bool)]) @noKwargs + @InterpreterObject.method('get') def get_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]], kwargs: TYPE_kwargs) -> T.Union[str, int, bool]: name = args[0] @@ -412,6 +405,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte @FeatureNew('configuration_data.get_unquoted()', '0.44.0') @typed_pos_args('configuration_data.get_unquoted', str, optargs=[(str, int, bool)]) @noKwargs + @InterpreterObject.method('get_unquoted') def get_unquoted_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]], kwargs: TYPE_kwargs) -> T.Union[str, int, bool]: name = args[0] @@ -431,6 +425,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte @FeatureNew('configuration_data.keys()', '0.57.0') @noPosargs @noKwargs + @InterpreterObject.method('keys') def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]: return sorted(self.keys()) @@ -439,6 +434,7 @@ class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInte @typed_pos_args('configuration_data.merge_from', build.ConfigurationData) @noKwargs + @InterpreterObject.method('merge_from') def merge_from_method(self, args: T.Tuple[build.ConfigurationData], kwargs: TYPE_kwargs) -> None: from_object = args[0] self.held_object.values.update(from_object.values) @@ -455,31 +451,19 @@ _PARTIAL_DEP_KWARGS = [ class DependencyHolder(ObjectHolder[Dependency]): def __init__(self, dep: Dependency, interpreter: 'Interpreter'): 
super().__init__(dep, interpreter) - self.methods.update({'found': self.found_method, - 'type_name': self.type_name_method, - 'version': self.version_method, - 'name': self.name_method, - 'get_pkgconfig_variable': self.pkgconfig_method, - 'get_configtool_variable': self.configtool_method, - 'get_variable': self.variable_method, - 'partial_dependency': self.partial_dependency_method, - 'include_type': self.include_type_method, - 'as_system': self.as_system_method, - 'as_link_whole': self.as_link_whole_method, - 'as_static': self.as_static_method, - 'as_shared': self.as_shared_method, - }) def found(self) -> bool: return self.found_method([], {}) @noPosargs @noKwargs + @InterpreterObject.method('type_name') def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.type_name @noPosargs @noKwargs + @InterpreterObject.method('found') def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: if self.held_object.type_name == 'internal': return True @@ -487,11 +471,13 @@ class DependencyHolder(ObjectHolder[Dependency]): @noPosargs @noKwargs + @InterpreterObject.method('version') def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.get_version() @noPosargs @noKwargs + @InterpreterObject.method('name') def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.get_name() @@ -503,6 +489,7 @@ class DependencyHolder(ObjectHolder[Dependency]): KwargInfo('default', str, default=''), PKGCONFIG_DEFINE_KW.evolve(name='define_variable') ) + @InterpreterObject.method('get_pkgconfig_variable') def pkgconfig_method(self, args: T.Tuple[str], kwargs: 'kwargs.DependencyPkgConfigVar') -> str: from ..dependencies.pkgconfig import PkgConfigDependency if not isinstance(self.held_object, PkgConfigDependency): @@ -521,6 +508,7 @@ class DependencyHolder(ObjectHolder[Dependency]): 'use dependency.get_variable(configtool : ...) 
instead') @noKwargs @typed_pos_args('dependency.get_config_tool_variable', str) + @InterpreterObject.method('get_configtool_variable') def configtool_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> str: from ..dependencies.configtool import ConfigToolDependency if not isinstance(self.held_object, ConfigToolDependency): @@ -533,6 +521,7 @@ class DependencyHolder(ObjectHolder[Dependency]): @FeatureNew('dependency.partial_dependency', '0.46.0') @noPosargs @typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS) + @InterpreterObject.method('partial_dependency') def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency: pdep = self.held_object.get_partial_dependency(**kwargs) return pdep @@ -549,6 +538,7 @@ class DependencyHolder(ObjectHolder[Dependency]): KwargInfo('default_value', (str, NoneType)), PKGCONFIG_DEFINE_KW, ) + @InterpreterObject.method('get_variable') def variable_method(self, args: T.Tuple[T.Optional[str]], kwargs: 'kwargs.DependencyGetVariable') -> str: default_varname = args[0] if default_varname is not None: @@ -570,18 +560,21 @@ class DependencyHolder(ObjectHolder[Dependency]): @FeatureNew('dependency.include_type', '0.52.0') @noPosargs @noKwargs + @InterpreterObject.method('include_type') def include_type_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.get_include_type() @FeatureNew('dependency.as_system', '0.52.0') @noKwargs @typed_pos_args('dependency.as_system', optargs=[str]) + @InterpreterObject.method('as_system') def as_system_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> Dependency: return self.held_object.generate_system_dependency(args[0] or 'system') @FeatureNew('dependency.as_link_whole', '0.56.0') @noKwargs @noPosargs + @InterpreterObject.method('as_link_whole') def as_link_whole_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency: if not 
isinstance(self.held_object, InternalDependency): raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects') @@ -594,6 +587,7 @@ class DependencyHolder(ObjectHolder[Dependency]): 'dependency.as_static', KwargInfo('recursive', bool, default=False), ) + @InterpreterObject.method('as_static') def as_static_method(self, args: T.List[TYPE_var], kwargs: InternalDependencyAsKW) -> Dependency: if not isinstance(self.held_object, InternalDependency): raise InterpreterException('as_static method is only supported on declare_dependency() objects') @@ -605,6 +599,7 @@ class DependencyHolder(ObjectHolder[Dependency]): 'dependency.as_shared', KwargInfo('recursive', bool, default=False), ) + @InterpreterObject.method('as_shared') def as_shared_method(self, args: T.List[TYPE_var], kwargs: InternalDependencyAsKW) -> Dependency: if not isinstance(self.held_object, InternalDependency): raise InterpreterException('as_shared method is only supported on declare_dependency() objects') @@ -615,13 +610,10 @@ _EXTPROG = T.TypeVar('_EXTPROG', bound=ExternalProgram) class _ExternalProgramHolder(ObjectHolder[_EXTPROG]): def __init__(self, ep: _EXTPROG, interpreter: 'Interpreter') -> None: super().__init__(ep, interpreter) - self.methods.update({'found': self.found_method, - 'path': self.path_method, - 'version': self.version_method, - 'full_path': self.full_path_method}) @noPosargs @noKwargs + @InterpreterObject.method('found') def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.found() @@ -629,12 +621,14 @@ class _ExternalProgramHolder(ObjectHolder[_EXTPROG]): @noKwargs @FeatureDeprecated('ExternalProgram.path', '0.55.0', 'use ExternalProgram.full_path() instead') + @InterpreterObject.method('path') def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self._full_path() @noPosargs @noKwargs @FeatureNew('ExternalProgram.full_path', '0.55.0') + @InterpreterObject.method('full_path') def 
full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self._full_path() @@ -648,6 +642,7 @@ class _ExternalProgramHolder(ObjectHolder[_EXTPROG]): @noPosargs @noKwargs @FeatureNew('ExternalProgram.version', '0.62.0') + @InterpreterObject.method('version') def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: if not self.found(): raise InterpreterException('Unable to get the version of a not-found external program') @@ -665,25 +660,23 @@ class ExternalProgramHolder(_ExternalProgramHolder[ExternalProgram]): class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]): def __init__(self, el: ExternalLibrary, interpreter: 'Interpreter'): super().__init__(el, interpreter) - self.methods.update({'found': self.found_method, - 'type_name': self.type_name_method, - 'partial_dependency': self.partial_dependency_method, - 'name': self.name_method, - }) @noPosargs @noKwargs + @InterpreterObject.method('type_name') def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.type_name @noPosargs @noKwargs + @InterpreterObject.method('found') def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.held_object.found() @FeatureNew('dependency.partial_dependency', '0.46.0') @noPosargs @typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS) + @InterpreterObject.method('partial_dependency') def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency: pdep = self.held_object.get_partial_dependency(**kwargs) return pdep @@ -691,6 +684,7 @@ class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]): @FeatureNew('dependency.name', '1.5.0') @noPosargs @noKwargs + @InterpreterObject.method('name') def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.name @@ -699,36 +693,34 @@ class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]): class 
MachineHolder(ObjectHolder['MachineInfo']): def __init__(self, machine_info: 'MachineInfo', interpreter: 'Interpreter'): super().__init__(machine_info, interpreter) - self.methods.update({'system': self.system_method, - 'cpu': self.cpu_method, - 'cpu_family': self.cpu_family_method, - 'endian': self.endian_method, - 'kernel': self.kernel_method, - 'subsystem': self.subsystem_method, - }) @noPosargs @noKwargs + @InterpreterObject.method('cpu_family') def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.cpu_family @noPosargs @noKwargs + @InterpreterObject.method('cpu') def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.cpu @noPosargs @noKwargs + @InterpreterObject.method('system') def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.system @noPosargs @noKwargs + @InterpreterObject.method('endian') def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.endian @noPosargs @noKwargs + @InterpreterObject.method('kernel') def kernel_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: if self.held_object.kernel is not None: return self.held_object.kernel @@ -736,6 +728,7 @@ class MachineHolder(ObjectHolder['MachineInfo']): @noPosargs @noKwargs + @InterpreterObject.method('subsystem') def subsystem_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: if self.held_object.subsystem is not None: return self.held_object.subsystem @@ -748,12 +741,11 @@ class IncludeDirsHolder(ObjectHolder[build.IncludeDirs]): class FileHolder(ObjectHolder[mesonlib.File]): def __init__(self, file: mesonlib.File, interpreter: 'Interpreter'): super().__init__(file, interpreter) - self.methods.update({'full_path': self.full_path_method, - }) @noPosargs @noKwargs @FeatureNew('file.full_path', '1.4.0') + @InterpreterObject.method('full_path') def full_path_method(self, args: 
T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.absolute_path(self.env.source_dir, self.env.build_dir) @@ -836,12 +828,10 @@ class SubprojectHolder(MesonInterpreterObject): self.subdir = PurePath(subdir).as_posix() self.cm_interpreter: T.Optional[CMakeInterpreter] = None self.callstack = callstack - self.methods.update({'get_variable': self.get_variable_method, - 'found': self.found_method, - }) @noPosargs @noKwargs + @InterpreterObject.method('found') def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.found() @@ -863,6 +853,7 @@ class SubprojectHolder(MesonInterpreterObject): @noKwargs @typed_pos_args('subproject.get_variable', str, optargs=[object]) @noArgsFlattening + @InterpreterObject.method('get_variable') def get_variable_method(self, args: T.Tuple[str, T.Optional[str]], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]: return self.get_variable(args, kwargs) @@ -905,16 +896,6 @@ _BuildTarget = T.TypeVar('_BuildTarget', bound=T.Union[build.BuildTarget, build. 
class BuildTargetHolder(ObjectHolder[_BuildTarget]): def __init__(self, target: _BuildTarget, interp: 'Interpreter'): super().__init__(target, interp) - self.methods.update({'extract_objects': self.extract_objects_method, - 'extract_all_objects': self.extract_all_objects_method, - 'name': self.name_method, - 'get_id': self.get_id_method, - 'outdir': self.outdir_method, - 'full_path': self.full_path_method, - 'path': self.path_method, - 'found': self.found_method, - 'private_dir_include': self.private_dir_include_method, - }) def __repr__(self) -> str: r = '<{} {}: {}>' @@ -934,6 +915,7 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]): @noPosargs @noKwargs + @InterpreterObject.method('found') def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: if not (isinstance(self.held_object, build.Executable) and self.held_object.was_returned_by_find_program): FeatureNew.single_use('BuildTarget.found', '0.59.0', subproject=self.held_object.subproject) @@ -941,27 +923,32 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]): @noPosargs @noKwargs + @InterpreterObject.method('private_dir_include') def private_dir_include_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.IncludeDirs: return build.IncludeDirs('', [], False, [self.interpreter.backend.get_target_private_dir(self._target_object)]) @noPosargs @noKwargs + @InterpreterObject.method('full_path') def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.interpreter.backend.get_target_filename_abs(self._target_object) @noPosargs @noKwargs @FeatureDeprecated('BuildTarget.path', '0.55.0', 'Use BuildTarget.full_path instead') + @InterpreterObject.method('path') def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.interpreter.backend.get_target_filename_abs(self._target_object) @noPosargs @noKwargs + @InterpreterObject.method('outdir') def outdir_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: 
return self.interpreter.backend.get_target_dir(self._target_object) @noKwargs @typed_pos_args('extract_objects', varargs=(mesonlib.File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) + @InterpreterObject.method('extract_objects') def extract_objects_method(self, args: T.Tuple[T.List[T.Union[mesonlib.FileOrString, 'build.GeneratedTypes']]], kwargs: TYPE_nkwargs) -> build.ExtractedObjects: tobj = self._target_object unity_value = self.interpreter.coredata.get_option_for_target(tobj, "unity") @@ -981,6 +968,7 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]): ''') ) ) + @InterpreterObject.method('extract_all_objects') def extract_all_objects_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.BuildTargeMethodExtractAllObjects') -> build.ExtractedObjects: return self._target_object.extract_all_objects(kwargs['recursive']) @@ -989,12 +977,14 @@ class BuildTargetHolder(ObjectHolder[_BuildTarget]): @FeatureDeprecated('BuildTarget.get_id', '1.2.0', 'This was never formally documented and does not seem to have a real world use. 
' + 'See https://github.com/mesonbuild/meson/pull/6061') + @InterpreterObject.method('get_id') def get_id_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self._target_object.get_id() @FeatureNew('name', '0.54.0') @noPosargs @noKwargs + @InterpreterObject.method('name') def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self._target_object.name @@ -1010,9 +1000,6 @@ class SharedLibraryHolder(BuildTargetHolder[build.SharedLibrary]): class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]): def __init__(self, libs: build.BothLibraries, interp: 'Interpreter'): super().__init__(libs, interp) - self.methods.update({'get_shared_lib': self.get_shared_lib_method, - 'get_static_lib': self.get_static_lib_method, - }) def __repr__(self) -> str: r = '<{} {}: {}, {}: {}>' @@ -1022,6 +1009,7 @@ class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]): @noPosargs @noKwargs + @InterpreterObject.method('get_shared_lib') def get_shared_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.SharedLibrary: lib = copy.copy(self.held_object.shared) lib.both_lib = None @@ -1029,6 +1017,7 @@ class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]): @noPosargs @noKwargs + @InterpreterObject.method('get_static_lib') def get_static_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.StaticLibrary: lib = copy.copy(self.held_object.static) lib.both_lib = None @@ -1043,12 +1032,11 @@ class JarHolder(BuildTargetHolder[build.Jar]): class CustomTargetIndexHolder(ObjectHolder[build.CustomTargetIndex]): def __init__(self, target: build.CustomTargetIndex, interp: 'Interpreter'): super().__init__(target, interp) - self.methods.update({'full_path': self.full_path_method, - }) @FeatureNew('custom_target[i].full_path', '0.54.0') @noPosargs @noKwargs + @InterpreterObject.method('full_path') def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: assert 
self.interpreter.backend is not None return self.interpreter.backend.get_target_filename_abs(self.held_object) @@ -1058,13 +1046,6 @@ _CT = T.TypeVar('_CT', bound=build.CustomTarget) class _CustomTargetHolder(ObjectHolder[_CT]): def __init__(self, target: _CT, interp: 'Interpreter'): super().__init__(target, interp) - self.methods.update({'full_path': self.full_path_method, - 'to_list': self.to_list_method, - }) - - self.operators.update({ - MesonOperator.INDEX: self.op_index, - }) def __repr__(self) -> str: r = '<{} {}: {}>' @@ -1073,12 +1054,14 @@ class _CustomTargetHolder(ObjectHolder[_CT]): @noPosargs @noKwargs + @InterpreterObject.method('full_path') def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.interpreter.backend.get_target_filename_abs(self.held_object) @FeatureNew('custom_target.to_list', '0.54.0') @noPosargs @noKwargs + @InterpreterObject.method('to_list') def to_list_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[build.CustomTargetIndex]: result = [] for i in self.held_object: @@ -1087,6 +1070,7 @@ class _CustomTargetHolder(ObjectHolder[_CT]): @noKwargs @typed_operator(MesonOperator.INDEX, int) + @InterpreterObject.operator(MesonOperator.INDEX) def op_index(self, other: int) -> build.CustomTargetIndex: try: return self.held_object[other] @@ -1108,7 +1092,6 @@ class GeneratedListHolder(ObjectHolder[build.GeneratedList]): class GeneratorHolder(ObjectHolder[build.Generator]): def __init__(self, gen: build.Generator, interpreter: 'Interpreter'): super().__init__(gen, interpreter) - self.methods.update({'process': self.process_method}) @typed_pos_args('generator.process', min_varargs=1, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) @typed_kwargs( @@ -1117,6 +1100,7 @@ class GeneratorHolder(ObjectHolder[build.Generator]): KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]), ENV_KW.evolve(since='1.3.0') ) + 
@InterpreterObject.method('process') def process_method(self, args: T.Tuple[T.List[T.Union[str, mesonlib.File, 'build.GeneratedTypes']]], kwargs: 'kwargs.GeneratorProcess') -> build.GeneratedList: @@ -1142,3 +1126,11 @@ class StructuredSourcesHolder(ObjectHolder[build.StructuredSources]): def __init__(self, sources: build.StructuredSources, interp: 'Interpreter'): super().__init__(sources, interp) + +class OverrideExecutableHolder(BuildTargetHolder[build.OverrideExecutable]): + @noPosargs + @noKwargs + @FeatureNew('OverrideExecutable.version', '1.9.0') + @InterpreterObject.method('version') + def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: + return self.held_object.get_version(self.interpreter) diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py index fb34bbb..d741aab 100644 --- a/mesonbuild/interpreter/kwargs.py +++ b/mesonbuild/interpreter/kwargs.py @@ -321,7 +321,7 @@ class Subproject(ExtractRequired): class DoSubproject(ExtractRequired): - default_options: T.List[str] + default_options: T.Union[T.List[str], T.Dict[str, options.ElementaryOptionValues], str] version: T.List[str] cmake_options: T.List[str] options: T.Optional[CMakeSubprojectOptions] diff --git a/mesonbuild/interpreter/mesonmain.py b/mesonbuild/interpreter/mesonmain.py index 8ede691..602575c 100644 --- a/mesonbuild/interpreter/mesonmain.py +++ b/mesonbuild/interpreter/mesonmain.py @@ -18,7 +18,7 @@ from ..programs import OverrideProgram, ExternalProgram from ..interpreter.type_checking import ENV_KW, ENV_METHOD_KW, ENV_SEPARATOR_KW, env_convertor_with_method from ..interpreterbase import (MesonInterpreterObject, FeatureNew, FeatureDeprecated, typed_pos_args, noArgsFlattening, noPosargs, noKwargs, - typed_kwargs, KwargInfo, InterpreterException) + typed_kwargs, KwargInfo, InterpreterException, InterpreterObject) from .primitives import MesonVersionString from .type_checking import NATIVE_KW, NoneType @@ -55,38 +55,6 @@ class 
MesonMain(MesonInterpreterObject): super().__init__(subproject=interpreter.subproject) self.build = build self.interpreter = interpreter - self.methods.update({'add_devenv': self.add_devenv_method, - 'add_dist_script': self.add_dist_script_method, - 'add_install_script': self.add_install_script_method, - 'add_postconf_script': self.add_postconf_script_method, - 'backend': self.backend_method, - 'build_options': self.build_options_method, - 'build_root': self.build_root_method, - 'can_run_host_binaries': self.can_run_host_binaries_method, - 'current_source_dir': self.current_source_dir_method, - 'current_build_dir': self.current_build_dir_method, - 'get_compiler': self.get_compiler_method, - 'get_cross_property': self.get_cross_property_method, - 'get_external_property': self.get_external_property_method, - 'global_build_root': self.global_build_root_method, - 'global_source_root': self.global_source_root_method, - 'has_exe_wrapper': self.has_exe_wrapper_method, - 'has_external_property': self.has_external_property_method, - 'install_dependency_manifest': self.install_dependency_manifest_method, - 'is_cross_build': self.is_cross_build_method, - 'is_subproject': self.is_subproject_method, - 'is_unity': self.is_unity_method, - 'override_dependency': self.override_dependency_method, - 'override_find_program': self.override_find_program_method, - 'project_build_root': self.project_build_root_method, - 'project_license': self.project_license_method, - 'project_license_files': self.project_license_files_method, - 'project_name': self.project_name_method, - 'project_source_root': self.project_source_root_method, - 'project_version': self.project_version_method, - 'source_root': self.source_root_method, - 'version': self.version_method, - }) def _find_source_script( self, name: str, prog: T.Union[str, mesonlib.File, build.Executable, ExternalProgram], @@ -157,6 +125,7 @@ class MesonMain(MesonInterpreterObject): KwargInfo('install_tag', (str, NoneType), since='0.60.0'), 
KwargInfo('dry_run', bool, default=False, since='1.1.0'), ) + @InterpreterObject.method('add_install_script') def add_install_script_method( self, args: T.Tuple[T.Union[str, mesonlib.File, build.Executable, ExternalProgram], @@ -175,6 +144,7 @@ class MesonMain(MesonInterpreterObject): varargs=(str, mesonlib.File, ExternalProgram) ) @noKwargs + @InterpreterObject.method('add_postconf_script') def add_postconf_script_method( self, args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram], @@ -191,6 +161,7 @@ class MesonMain(MesonInterpreterObject): ) @noKwargs @FeatureNew('meson.add_dist_script', '0.48.0') + @InterpreterObject.method('add_dist_script') def add_dist_script_method( self, args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram], @@ -208,6 +179,7 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs + @InterpreterObject.method('current_source_dir') def current_source_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: src = self.interpreter.environment.source_dir sub = self.interpreter.subdir @@ -217,6 +189,7 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs + @InterpreterObject.method('current_build_dir') def current_build_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: src = self.interpreter.environment.build_dir sub = self.interpreter.subdir @@ -226,24 +199,28 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs + @InterpreterObject.method('backend') def backend_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.interpreter.backend.name @noPosargs @noKwargs @FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.project_source_root() or meson.global_source_root() instead.') + @InterpreterObject.method('source_root') def source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.interpreter.environment.source_dir @noPosargs @noKwargs @FeatureDeprecated('meson.build_root', '0.56.0', 'use 
meson.project_build_root() or meson.global_build_root() instead.') + @InterpreterObject.method('build_root') def build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.interpreter.environment.build_dir @noPosargs @noKwargs @FeatureNew('meson.project_source_root', '0.56.0') + @InterpreterObject.method('project_source_root') def project_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: src = self.interpreter.environment.source_dir sub = self.interpreter.root_subdir @@ -254,6 +231,7 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs @FeatureNew('meson.project_build_root', '0.56.0') + @InterpreterObject.method('project_build_root') def project_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: src = self.interpreter.environment.build_dir sub = self.interpreter.root_subdir @@ -264,24 +242,28 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs @FeatureNew('meson.global_source_root', '0.58.0') + @InterpreterObject.method('global_source_root') def global_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.interpreter.environment.source_dir @noPosargs @noKwargs @FeatureNew('meson.global_build_root', '0.58.0') + @InterpreterObject.method('global_build_root') def global_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.interpreter.environment.build_dir @noPosargs @noKwargs @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.') + @InterpreterObject.method('has_exe_wrapper') def has_exe_wrapper_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: return self._can_run_host_binaries_impl() @noPosargs @noKwargs @FeatureNew('meson.can_run_host_binaries', '0.55.0') + @InterpreterObject.method('can_run_host_binaries') def can_run_host_binaries_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: 
return self._can_run_host_binaries_impl() @@ -294,11 +276,13 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs + @InterpreterObject.method('is_cross_build') def is_cross_build_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: return self.build.environment.is_cross_build() @typed_pos_args('meson.get_compiler', str) @typed_kwargs('meson.get_compiler', NATIVE_KW) + @InterpreterObject.method('get_compiler') def get_compiler_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> 'Compiler': cname = args[0] for_machine = kwargs['native'] @@ -310,23 +294,27 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs + @InterpreterObject.method('is_unity') def is_unity_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: optval = self.interpreter.environment.coredata.optstore.get_value_for(OptionKey('unity')) return optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject()) @noPosargs @noKwargs + @InterpreterObject.method('is_subproject') def is_subproject_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: return self.interpreter.is_subproject() @typed_pos_args('meson.install_dependency_manifest', str) @noKwargs + @InterpreterObject.method('install_dependency_manifest') def install_dependency_manifest_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> None: self.build.dep_manifest_name = args[0] @FeatureNew('meson.override_find_program', '0.46.0') @typed_pos_args('meson.override_find_program', str, (mesonlib.File, ExternalProgram, build.Executable)) @noKwargs + @InterpreterObject.method('override_find_program') def override_find_program_method(self, args: T.Tuple[str, T.Union[mesonlib.File, ExternalProgram, build.Executable]], kwargs: 'TYPE_kwargs') -> None: name, exe = args if isinstance(exe, mesonlib.File): @@ -335,6 +323,8 @@ class MesonMain(MesonInterpreterObject): if not os.path.exists(abspath): raise InterpreterException(f'Tried to override {name} with a file 
that does not exist.') exe = OverrideProgram(name, self.interpreter.project_version, command=[abspath]) + elif isinstance(exe, build.Executable): + exe = build.OverrideExecutable(exe, self.interpreter.project_version) self.interpreter.add_find_program_override(name, exe) @typed_kwargs( @@ -344,6 +334,7 @@ class MesonMain(MesonInterpreterObject): ) @typed_pos_args('meson.override_dependency', str, dependencies.Dependency) @FeatureNew('meson.override_dependency', '0.54.0') + @InterpreterObject.method('override_dependency') def override_dependency_method(self, args: T.Tuple[str, dependencies.Dependency], kwargs: 'FuncOverrideDependency') -> None: name, dep = args if not name: @@ -409,28 +400,33 @@ class MesonMain(MesonInterpreterObject): @noPosargs @noKwargs + @InterpreterObject.method('project_version') def project_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.build.dep_manifest[self.interpreter.active_projectname].version @FeatureNew('meson.project_license()', '0.45.0') @noPosargs @noKwargs + @InterpreterObject.method('project_license') def project_license_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]: return self.build.dep_manifest[self.interpreter.active_projectname].license @FeatureNew('meson.project_license_files()', '1.1.0') @noPosargs @noKwargs + @InterpreterObject.method('project_license_files') def project_license_files_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[mesonlib.File]: return [l[1] for l in self.build.dep_manifest[self.interpreter.active_projectname].license_files] @noPosargs @noKwargs + @InterpreterObject.method('version') def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> MesonVersionString: return MesonVersionString(self.interpreter.coredata.version) @noPosargs @noKwargs + @InterpreterObject.method('project_name') def project_name_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return 
self.interpreter.active_projectname @@ -447,6 +443,7 @@ class MesonMain(MesonInterpreterObject): @FeatureDeprecated('meson.get_cross_property', '0.58.0', 'Use meson.get_external_property() instead') @typed_pos_args('meson.get_cross_property', str, optargs=[object]) @noKwargs + @InterpreterObject.method('get_cross_property') def get_cross_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'TYPE_kwargs') -> object: propname, fallback = args return self.__get_external_property_impl(propname, fallback, MachineChoice.HOST) @@ -455,6 +452,7 @@ class MesonMain(MesonInterpreterObject): @FeatureNew('meson.get_external_property', '0.54.0') @typed_pos_args('meson.get_external_property', str, optargs=[object]) @typed_kwargs('meson.get_external_property', NATIVE_KW) + @InterpreterObject.method('get_external_property') def get_external_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'NativeKW') -> object: propname, fallback = args return self.__get_external_property_impl(propname, fallback, kwargs['native']) @@ -462,6 +460,7 @@ class MesonMain(MesonInterpreterObject): @FeatureNew('meson.has_external_property', '0.58.0') @typed_pos_args('meson.has_external_property', str) @typed_kwargs('meson.has_external_property', NATIVE_KW) + @InterpreterObject.method('has_external_property') def has_external_property_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> bool: prop_name = args[0] return prop_name in self.interpreter.environment.properties[kwargs['native']] @@ -469,6 +468,7 @@ class MesonMain(MesonInterpreterObject): @FeatureNew('add_devenv', '0.58.0') @typed_kwargs('environment', ENV_METHOD_KW, ENV_SEPARATOR_KW.evolve(since='0.62.0')) @typed_pos_args('add_devenv', (str, list, dict, mesonlib.EnvironmentVariables)) + @InterpreterObject.method('add_devenv') def add_devenv_method(self, args: T.Tuple[T.Union[str, list, dict, mesonlib.EnvironmentVariables]], kwargs: 'AddDevenvKW') -> None: env = args[0] @@ -482,6 +482,7 @@ class 
MesonMain(MesonInterpreterObject): @noPosargs @noKwargs @FeatureNew('meson.build_options', '1.1.0') + @InterpreterObject.method('build_options') def build_options_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: options = self.interpreter.user_defined_options if options is None: diff --git a/mesonbuild/interpreter/primitives/array.py b/mesonbuild/interpreter/primitives/array.py index b42ddea..ff520a2 100644 --- a/mesonbuild/interpreter/primitives/array.py +++ b/mesonbuild/interpreter/primitives/array.py @@ -5,9 +5,10 @@ from __future__ import annotations import typing as T from ...interpreterbase import ( - ObjectHolder, + InterpreterObject, IterableObject, MesonOperator, + ObjectHolder, typed_operator, noKwargs, noPosargs, @@ -22,31 +23,16 @@ from ...interpreterbase import ( from ...mparser import PlusAssignmentNode if T.TYPE_CHECKING: - # Object holders need the actual interpreter - from ...interpreter import Interpreter from ...interpreterbase import TYPE_kwargs class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject): - def __init__(self, obj: T.List[TYPE_var], interpreter: 'Interpreter') -> None: - super().__init__(obj, interpreter) - self.methods.update({ - 'contains': self.contains_method, - 'length': self.length_method, - 'get': self.get_method, - }) - - self.trivial_operators.update({ - MesonOperator.EQUALS: (list, lambda x: self.held_object == x), - MesonOperator.NOT_EQUALS: (list, lambda x: self.held_object != x), - MesonOperator.IN: (object, lambda x: x in self.held_object), - MesonOperator.NOT_IN: (object, lambda x: x not in self.held_object), - }) - - # Use actual methods for functions that require additional checks - self.operators.update({ - MesonOperator.PLUS: self.op_plus, - MesonOperator.INDEX: self.op_index, - }) + # Operators that only require type checks + TRIVIAL_OPERATORS = { + MesonOperator.EQUALS: (list, lambda obj, x: obj.held_object == x), + MesonOperator.NOT_EQUALS: (list, lambda obj, x: obj.held_object != 
x), + MesonOperator.IN: (object, lambda obj, x: x in obj.held_object), + MesonOperator.NOT_IN: (object, lambda obj, x: x not in obj.held_object), + } def display_name(self) -> str: return 'array' @@ -63,6 +49,7 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject): @noArgsFlattening @noKwargs @typed_pos_args('array.contains', object) + @InterpreterObject.method('contains') def contains_method(self, args: T.Tuple[object], kwargs: TYPE_kwargs) -> bool: def check_contains(el: T.List[TYPE_var]) -> bool: for element in el: @@ -77,12 +64,14 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject): @noKwargs @noPosargs + @InterpreterObject.method('length') def length_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int: return len(self.held_object) @noArgsFlattening @noKwargs @typed_pos_args('array.get', int, optargs=[object]) + @InterpreterObject.method('get') def get_method(self, args: T.Tuple[int, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var: index = args[0] if index < -len(self.held_object) or index >= len(self.held_object): @@ -92,6 +81,7 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject): return self.held_object[index] @typed_operator(MesonOperator.PLUS, object) + @InterpreterObject.operator(MesonOperator.PLUS) def op_plus(self, other: TYPE_var) -> T.List[TYPE_var]: if not isinstance(other, list): if not isinstance(self.current_node, PlusAssignmentNode): @@ -101,6 +91,7 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject): return self.held_object + other @typed_operator(MesonOperator.INDEX, int) + @InterpreterObject.operator(MesonOperator.INDEX) def op_index(self, other: int) -> TYPE_var: try: return self.held_object[other] diff --git a/mesonbuild/interpreter/primitives/boolean.py b/mesonbuild/interpreter/primitives/boolean.py index 4b49caf..eb01b9f 100644 --- a/mesonbuild/interpreter/primitives/boolean.py +++ b/mesonbuild/interpreter/primitives/boolean.py @@ -3,8 +3,9 @@ from 
__future__ import annotations from ...interpreterbase import ( - ObjectHolder, + InterpreterObject, MesonOperator, + ObjectHolder, typed_pos_args, noKwargs, noPosargs, @@ -15,35 +16,28 @@ from ...interpreterbase import ( import typing as T if T.TYPE_CHECKING: - # Object holders need the actual interpreter - from ...interpreter import Interpreter from ...interpreterbase import TYPE_var, TYPE_kwargs class BooleanHolder(ObjectHolder[bool]): - def __init__(self, obj: bool, interpreter: 'Interpreter') -> None: - super().__init__(obj, interpreter) - self.methods.update({ - 'to_int': self.to_int_method, - 'to_string': self.to_string_method, - }) - - self.trivial_operators.update({ - MesonOperator.BOOL: (None, lambda x: self.held_object), - MesonOperator.NOT: (None, lambda x: not self.held_object), - MesonOperator.EQUALS: (bool, lambda x: self.held_object == x), - MesonOperator.NOT_EQUALS: (bool, lambda x: self.held_object != x), - }) + TRIVIAL_OPERATORS = { + MesonOperator.BOOL: (None, lambda obj, x: obj.held_object), + MesonOperator.NOT: (None, lambda obj, x: not obj.held_object), + MesonOperator.EQUALS: (bool, lambda obj, x: obj.held_object == x), + MesonOperator.NOT_EQUALS: (bool, lambda obj, x: obj.held_object != x), + } def display_name(self) -> str: return 'bool' @noKwargs @noPosargs + @InterpreterObject.method('to_int') def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int: return 1 if self.held_object else 0 @noKwargs @typed_pos_args('bool.to_string', optargs=[str, str]) + @InterpreterObject.method('to_string') def to_string_method(self, args: T.Tuple[T.Optional[str], T.Optional[str]], kwargs: TYPE_kwargs) -> str: true_str = args[0] or 'true' false_str = args[1] or 'false' diff --git a/mesonbuild/interpreter/primitives/dict.py b/mesonbuild/interpreter/primitives/dict.py index ab4c15f..d641fa8 100644 --- a/mesonbuild/interpreter/primitives/dict.py +++ b/mesonbuild/interpreter/primitives/dict.py @@ -5,9 +5,10 @@ from __future__ import 
annotations import typing as T from ...interpreterbase import ( - ObjectHolder, + InterpreterObject, IterableObject, MesonOperator, + ObjectHolder, typed_operator, noKwargs, noPosargs, @@ -20,34 +21,20 @@ from ...interpreterbase import ( ) if T.TYPE_CHECKING: - # Object holders need the actual interpreter - from ...interpreter import Interpreter from ...interpreterbase import TYPE_kwargs class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject): - def __init__(self, obj: T.Dict[str, TYPE_var], interpreter: 'Interpreter') -> None: - super().__init__(obj, interpreter) - self.methods.update({ - 'has_key': self.has_key_method, - 'keys': self.keys_method, - 'get': self.get_method, - }) - - self.trivial_operators.update({ - # Arithmetic - MesonOperator.PLUS: (dict, lambda x: {**self.held_object, **x}), - - # Comparison - MesonOperator.EQUALS: (dict, lambda x: self.held_object == x), - MesonOperator.NOT_EQUALS: (dict, lambda x: self.held_object != x), - MesonOperator.IN: (str, lambda x: x in self.held_object), - MesonOperator.NOT_IN: (str, lambda x: x not in self.held_object), - }) - - # Use actual methods for functions that require additional checks - self.operators.update({ - MesonOperator.INDEX: self.op_index, - }) + # Operators that only require type checks + TRIVIAL_OPERATORS = { + # Arithmetic + MesonOperator.PLUS: (dict, lambda obj, x: {**obj.held_object, **x}), + + # Comparison + MesonOperator.EQUALS: (dict, lambda obj, x: obj.held_object == x), + MesonOperator.NOT_EQUALS: (dict, lambda obj, x: obj.held_object != x), + MesonOperator.IN: (str, lambda obj, x: x in obj.held_object), + MesonOperator.NOT_IN: (str, lambda obj, x: x not in obj.held_object), + } def display_name(self) -> str: return 'dict' @@ -63,17 +50,20 @@ class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject): @noKwargs @typed_pos_args('dict.has_key', str) + @InterpreterObject.method('has_key') def has_key_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool: return 
args[0] in self.held_object @noKwargs @noPosargs + @InterpreterObject.method('keys') def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]: return sorted(self.held_object) @noArgsFlattening @noKwargs @typed_pos_args('dict.get', str, optargs=[object]) + @InterpreterObject.method('get') def get_method(self, args: T.Tuple[str, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var: if args[0] in self.held_object: return self.held_object[args[0]] @@ -82,6 +72,7 @@ class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject): raise InvalidArguments(f'Key {args[0]!r} is not in the dictionary.') @typed_operator(MesonOperator.INDEX, str) + @InterpreterObject.operator(MesonOperator.INDEX) def op_index(self, other: str) -> TYPE_var: if other not in self.held_object: raise InvalidArguments(f'Key {other} is not in the dictionary.') diff --git a/mesonbuild/interpreter/primitives/integer.py b/mesonbuild/interpreter/primitives/integer.py index cdf2355..c59ea6e 100644 --- a/mesonbuild/interpreter/primitives/integer.py +++ b/mesonbuild/interpreter/primitives/integer.py @@ -3,47 +3,33 @@ from __future__ import annotations from ...interpreterbase import ( - FeatureBroken, InvalidArguments, MesonOperator, ObjectHolder, KwargInfo, + InterpreterObject, MesonOperator, ObjectHolder, + FeatureBroken, InvalidArguments, KwargInfo, noKwargs, noPosargs, typed_operator, typed_kwargs ) import typing as T if T.TYPE_CHECKING: - # Object holders need the actual interpreter - from ...interpreter import Interpreter from ...interpreterbase import TYPE_var, TYPE_kwargs class IntegerHolder(ObjectHolder[int]): - def __init__(self, obj: int, interpreter: 'Interpreter') -> None: - super().__init__(obj, interpreter) - self.methods.update({ - 'is_even': self.is_even_method, - 'is_odd': self.is_odd_method, - 'to_string': self.to_string_method, - }) + # Operators that only require type checks + TRIVIAL_OPERATORS = { + # Arithmetic + MesonOperator.UMINUS: (None, lambda obj, 
x: -obj.held_object), + MesonOperator.PLUS: (int, lambda obj, x: obj.held_object + x), + MesonOperator.MINUS: (int, lambda obj, x: obj.held_object - x), + MesonOperator.TIMES: (int, lambda obj, x: obj.held_object * x), - self.trivial_operators.update({ - # Arithmetic - MesonOperator.UMINUS: (None, lambda x: -self.held_object), - MesonOperator.PLUS: (int, lambda x: self.held_object + x), - MesonOperator.MINUS: (int, lambda x: self.held_object - x), - MesonOperator.TIMES: (int, lambda x: self.held_object * x), - - # Comparison - MesonOperator.EQUALS: (int, lambda x: self.held_object == x), - MesonOperator.NOT_EQUALS: (int, lambda x: self.held_object != x), - MesonOperator.GREATER: (int, lambda x: self.held_object > x), - MesonOperator.LESS: (int, lambda x: self.held_object < x), - MesonOperator.GREATER_EQUALS: (int, lambda x: self.held_object >= x), - MesonOperator.LESS_EQUALS: (int, lambda x: self.held_object <= x), - }) - - # Use actual methods for functions that require additional checks - self.operators.update({ - MesonOperator.DIV: self.op_div, - MesonOperator.MOD: self.op_mod, - }) + # Comparison + MesonOperator.EQUALS: (int, lambda obj, x: obj.held_object == x), + MesonOperator.NOT_EQUALS: (int, lambda obj, x: obj.held_object != x), + MesonOperator.GREATER: (int, lambda obj, x: obj.held_object > x), + MesonOperator.LESS: (int, lambda obj, x: obj.held_object < x), + MesonOperator.GREATER_EQUALS: (int, lambda obj, x: obj.held_object >= x), + MesonOperator.LESS_EQUALS: (int, lambda obj, x: obj.held_object <= x), + } def display_name(self) -> str: return 'int' @@ -57,11 +43,13 @@ class IntegerHolder(ObjectHolder[int]): @noKwargs @noPosargs + @InterpreterObject.method('is_even') def is_even_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.held_object % 2 == 0 @noKwargs @noPosargs + @InterpreterObject.method('is_odd') def is_odd_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool: return self.held_object % 2 != 0 @@ -70,16 
+58,19 @@ class IntegerHolder(ObjectHolder[int]): KwargInfo('fill', int, default=0, since='1.3.0') ) @noPosargs + @InterpreterObject.method('to_string') def to_string_method(self, args: T.List[TYPE_var], kwargs: T.Dict[str, T.Any]) -> str: return str(self.held_object).zfill(kwargs['fill']) @typed_operator(MesonOperator.DIV, int) + @InterpreterObject.operator(MesonOperator.DIV) def op_div(self, other: int) -> int: if other == 0: raise InvalidArguments('Tried to divide by 0') return self.held_object // other @typed_operator(MesonOperator.MOD, int) + @InterpreterObject.operator(MesonOperator.MOD) def op_mod(self, other: int) -> int: if other == 0: raise InvalidArguments('Tried to divide by 0') diff --git a/mesonbuild/interpreter/primitives/range.py b/mesonbuild/interpreter/primitives/range.py index 23d5617..1aceb68 100644 --- a/mesonbuild/interpreter/primitives/range.py +++ b/mesonbuild/interpreter/primitives/range.py @@ -5,8 +5,9 @@ from __future__ import annotations import typing as T from ...interpreterbase import ( - MesonInterpreterObject, + InterpreterObject, IterableObject, + MesonInterpreterObject, MesonOperator, InvalidArguments, ) @@ -18,10 +19,8 @@ class RangeHolder(MesonInterpreterObject, IterableObject): def __init__(self, start: int, stop: int, step: int, *, subproject: 'SubProject') -> None: super().__init__(subproject=subproject) self.range = range(start, stop, step) - self.operators.update({ - MesonOperator.INDEX: self.op_index, - }) + @InterpreterObject.operator(MesonOperator.INDEX) def op_index(self, other: int) -> int: try: return self.range[other] diff --git a/mesonbuild/interpreter/primitives/string.py b/mesonbuild/interpreter/primitives/string.py index a224dfa..49dd716 100644 --- a/mesonbuild/interpreter/primitives/string.py +++ b/mesonbuild/interpreter/primitives/string.py @@ -9,8 +9,9 @@ import typing as T from ...mesonlib import version_compare, version_compare_many from ...interpreterbase import ( - ObjectHolder, + InterpreterObject, 
MesonOperator, + ObjectHolder, FeatureNew, typed_operator, noArgsFlattening, @@ -24,73 +25,47 @@ from ...interpreterbase import ( if T.TYPE_CHECKING: - # Object holders need the actual interpreter - from ...interpreter import Interpreter from ...interpreterbase import TYPE_var, TYPE_kwargs class StringHolder(ObjectHolder[str]): - def __init__(self, obj: str, interpreter: 'Interpreter') -> None: - super().__init__(obj, interpreter) - self.methods.update({ - 'contains': self.contains_method, - 'startswith': self.startswith_method, - 'endswith': self.endswith_method, - 'format': self.format_method, - 'join': self.join_method, - 'replace': self.replace_method, - 'split': self.split_method, - 'splitlines': self.splitlines_method, - 'strip': self.strip_method, - 'substring': self.substring_method, - 'to_int': self.to_int_method, - 'to_lower': self.to_lower_method, - 'to_upper': self.to_upper_method, - 'underscorify': self.underscorify_method, - 'version_compare': self.version_compare_method, - }) - - self.trivial_operators.update({ - # Arithmetic - MesonOperator.PLUS: (str, lambda x: self.held_object + x), - - # Comparison - MesonOperator.EQUALS: (str, lambda x: self.held_object == x), - MesonOperator.NOT_EQUALS: (str, lambda x: self.held_object != x), - MesonOperator.GREATER: (str, lambda x: self.held_object > x), - MesonOperator.LESS: (str, lambda x: self.held_object < x), - MesonOperator.GREATER_EQUALS: (str, lambda x: self.held_object >= x), - MesonOperator.LESS_EQUALS: (str, lambda x: self.held_object <= x), - }) - - # Use actual methods for functions that require additional checks - self.operators.update({ - MesonOperator.DIV: self.op_div, - MesonOperator.INDEX: self.op_index, - MesonOperator.IN: self.op_in, - MesonOperator.NOT_IN: self.op_notin, - }) + TRIVIAL_OPERATORS = { + # Arithmetic + MesonOperator.PLUS: (str, lambda obj, x: obj.held_object + x), + + # Comparison + MesonOperator.EQUALS: (str, lambda obj, x: obj.held_object == x), + MesonOperator.NOT_EQUALS: 
(str, lambda obj, x: obj.held_object != x), + MesonOperator.GREATER: (str, lambda obj, x: obj.held_object > x), + MesonOperator.LESS: (str, lambda obj, x: obj.held_object < x), + MesonOperator.GREATER_EQUALS: (str, lambda obj, x: obj.held_object >= x), + MesonOperator.LESS_EQUALS: (str, lambda obj, x: obj.held_object <= x), + } def display_name(self) -> str: return 'str' @noKwargs @typed_pos_args('str.contains', str) + @InterpreterObject.method('contains') def contains_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool: return self.held_object.find(args[0]) >= 0 @noKwargs @typed_pos_args('str.startswith', str) + @InterpreterObject.method('startswith') def startswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool: return self.held_object.startswith(args[0]) @noKwargs @typed_pos_args('str.endswith', str) + @InterpreterObject.method('endswith') def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool: return self.held_object.endswith(args[0]) @noArgsFlattening @noKwargs @typed_pos_args('str.format', varargs=object) + @InterpreterObject.method('format') def format_method(self, args: T.Tuple[T.List[TYPE_var]], kwargs: TYPE_kwargs) -> str: arg_strings: T.List[str] = [] for arg in args[0]: @@ -111,27 +86,32 @@ class StringHolder(ObjectHolder[str]): @noKwargs @noPosargs @FeatureNew('str.splitlines', '1.2.0') + @InterpreterObject.method('splitlines') def splitlines_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]: return self.held_object.splitlines() @noKwargs @typed_pos_args('str.join', varargs=str) + @InterpreterObject.method('join') def join_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> str: return self.held_object.join(args[0]) @noKwargs @FeatureNew('str.replace', '0.58.0') @typed_pos_args('str.replace', str, str) + @InterpreterObject.method('replace') def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str: return self.held_object.replace(args[0], args[1]) 
@noKwargs @typed_pos_args('str.split', optargs=[str]) + @InterpreterObject.method('split') def split_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> T.List[str]: return self.held_object.split(args[0]) @noKwargs @typed_pos_args('str.strip', optargs=[str]) + @InterpreterObject.method('strip') def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str: if args[0]: FeatureNew.single_use('str.strip with a positional argument', '0.43.0', self.subproject, location=self.current_node) @@ -140,6 +120,7 @@ class StringHolder(ObjectHolder[str]): @noKwargs @FeatureNew('str.substring', '0.56.0') @typed_pos_args('str.substring', optargs=[int, int]) + @InterpreterObject.method('substring') def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str: start = args[0] if args[0] is not None else 0 end = args[1] if args[1] is not None else len(self.held_object) @@ -147,6 +128,7 @@ class StringHolder(ObjectHolder[str]): @noKwargs @noPosargs + @InterpreterObject.method('to_int') def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int: try: return int(self.held_object) @@ -155,20 +137,24 @@ class StringHolder(ObjectHolder[str]): @noKwargs @noPosargs + @InterpreterObject.method('to_lower') def to_lower_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.lower() @noKwargs @noPosargs + @InterpreterObject.method('to_upper') def to_upper_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return self.held_object.upper() @noKwargs @noPosargs + @InterpreterObject.method('underscorify') def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str: return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object) @noKwargs + @InterpreterObject.method('version_compare') @typed_pos_args('str.version_compare', varargs=str, min_varargs=1) def version_compare_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> bool: if 
len(args[0]) > 1: @@ -181,10 +167,12 @@ class StringHolder(ObjectHolder[str]): @FeatureNew('/ with string arguments', '0.49.0') @typed_operator(MesonOperator.DIV, str) + @InterpreterObject.operator(MesonOperator.DIV) def op_div(self, other: str) -> str: return self._op_div(self.held_object, other) @typed_operator(MesonOperator.INDEX, int) + @InterpreterObject.operator(MesonOperator.INDEX) def op_index(self, other: int) -> str: try: return self.held_object[other] @@ -193,11 +181,13 @@ class StringHolder(ObjectHolder[str]): @FeatureNew('"in" string operator', '1.0.0') @typed_operator(MesonOperator.IN, str) + @InterpreterObject.operator(MesonOperator.IN) def op_in(self, other: str) -> bool: return other in self.held_object @FeatureNew('"not in" string operator', '1.0.0') @typed_operator(MesonOperator.NOT_IN, str) + @InterpreterObject.operator(MesonOperator.NOT_IN) def op_notin(self, other: str) -> bool: return other not in self.held_object @@ -208,6 +198,7 @@ class MesonVersionString(str): class MesonVersionStringHolder(StringHolder): @noKwargs @typed_pos_args('str.version_compare', str) + @InterpreterObject.method('version_compare') def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool: self.interpreter.tmp_meson_version = args[0] return version_compare(self.held_object, args[0]) @@ -221,6 +212,7 @@ class DependencyVariableString(str): pass class DependencyVariableStringHolder(StringHolder): + @InterpreterObject.operator(MesonOperator.DIV) def op_div(self, other: str) -> T.Union[str, DependencyVariableString]: ret = super().op_div(other) if '..' 
in other: @@ -243,6 +235,7 @@ class OptionString(str): class OptionStringHolder(StringHolder): held_object: OptionString + @InterpreterObject.operator(MesonOperator.DIV) def op_div(self, other: str) -> T.Union[str, OptionString]: ret = super().op_div(other) name = self._op_div(self.held_object.optname, other) diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py index 78938ba..fbe3e3e 100644 --- a/mesonbuild/interpreter/type_checking.py +++ b/mesonbuild/interpreter/type_checking.py @@ -11,10 +11,10 @@ from .. import compilers from ..build import (CustomTarget, BuildTarget, CustomTargetIndex, ExtractedObjects, GeneratedList, IncludeDirs, BothLibraries, SharedLibrary, StaticLibrary, Jar, Executable, StructuredSources) -from ..options import UserFeatureOption +from ..options import OptionKey, UserFeatureOption from ..dependencies import Dependency, InternalDependency from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo -from ..mesonlib import (File, FileMode, MachineChoice, listify, has_path_sep, +from ..mesonlib import (File, FileMode, MachineChoice, has_path_sep, listify, stringlistify, EnvironmentVariables) from ..programs import ExternalProgram @@ -293,11 +293,22 @@ COMMAND_KW: KwargInfo[T.List[T.Union[str, BuildTarget, CustomTarget, CustomTarge ) -OVERRIDE_OPTIONS_KW: KwargInfo[T.Union[str, T.Dict[str, ElementaryOptionValues], T.List[str]]] = KwargInfo( +def _override_options_convertor(raw: T.Union[str, T.List[str], T.Dict[str, ElementaryOptionValues]]) -> T.Dict[str, ElementaryOptionValues]: + if isinstance(raw, dict): + return raw + raw = stringlistify(raw) + output: T.Dict[str, ElementaryOptionValues] = {} + for each in raw: + k, v = split_equal_string(each) + output[k] = v + return output + +OVERRIDE_OPTIONS_KW: KwargInfo[T.Union[str, T.List[str], T.Dict[str, ElementaryOptionValues]]] = KwargInfo( 'override_options', (str, ContainerTypeInfo(list, str), ContainerTypeInfo(dict, (str, int, bool, list))), 
default={}, validator=_options_validator, + convertor=_override_options_convertor, since_values={dict: '1.2.0'}, ) @@ -394,7 +405,13 @@ INCLUDE_DIRECTORIES: KwargInfo[T.List[T.Union[str, IncludeDirs]]] = KwargInfo( default=[], ) -DEFAULT_OPTIONS = OVERRIDE_OPTIONS_KW.evolve(name='default_options') +def _default_options_convertor(raw: T.Union[str, T.List[str], T.Dict[str, ElementaryOptionValues]]) -> T.Dict[OptionKey, ElementaryOptionValues]: + d = _override_options_convertor(raw) + return {OptionKey.from_string(k): v for k, v in d.items()} + +DEFAULT_OPTIONS = OVERRIDE_OPTIONS_KW.evolve( + name='default_options', + convertor=_default_options_convertor) ENV_METHOD_KW = KwargInfo('method', str, default='set', since='0.62.0', validator=in_set_validator({'set', 'prepend', 'append'})) diff --git a/mesonbuild/interpreterbase/__init__.py b/mesonbuild/interpreterbase/__init__.py index aa38e94..88fa706 100644 --- a/mesonbuild/interpreterbase/__init__.py +++ b/mesonbuild/interpreterbase/__init__.py @@ -59,6 +59,9 @@ __all__ = [ 'TYPE_HoldableTypes', 'HoldableTypes', + + 'UnknownValue', + 'UndefinedVariable', ] from .baseobjects import ( @@ -81,6 +84,9 @@ from .baseobjects import ( SubProject, HoldableTypes, + + UnknownValue, + UndefinedVariable, ) from .decorators import ( diff --git a/mesonbuild/interpreterbase/baseobjects.py b/mesonbuild/interpreterbase/baseobjects.py index a5cccce..c756761 100644 --- a/mesonbuild/interpreterbase/baseobjects.py +++ b/mesonbuild/interpreterbase/baseobjects.py @@ -15,16 +15,11 @@ from abc import ABCMeta from contextlib import AbstractContextManager if T.TYPE_CHECKING: - from typing_extensions import Protocol, TypeAlias + from typing_extensions import TypeAlias # Object holders need the actual interpreter from ..interpreter import Interpreter - __T = T.TypeVar('__T', bound='TYPE_var', contravariant=True) - - class OperatorCall(Protocol[__T]): - def __call__(self, other: __T) -> 'TYPE_var': ... 
- TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any]) @@ -34,34 +29,85 @@ TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode] TYPE_kwargs = T.Dict[str, TYPE_var] TYPE_nkwargs = T.Dict[str, TYPE_nvar] TYPE_key_resolver = T.Callable[[mparser.BaseNode], str] +TYPE_op_arg = T.TypeVar('TYPE_op_arg', bound='TYPE_var', contravariant=True) +TYPE_op_func = T.Callable[[TYPE_op_arg, TYPE_op_arg], TYPE_var] +TYPE_method_func = T.Callable[['InterpreterObject', T.List[TYPE_var], TYPE_kwargs], TYPE_var] + SubProject = T.NewType('SubProject', str) class InterpreterObject: + TRIVIAL_OPERATORS: T.Dict[ + MesonOperator, + T.Tuple[ + T.Union[T.Type, T.Tuple[T.Type, ...]], + TYPE_op_func + ] + ] = {} + + OPERATORS: T.Dict[MesonOperator, TYPE_op_func] = {} + + METHODS: T.Dict[ + str, + TYPE_method_func, + ] = {} + + def __init_subclass__(cls: T.Type[InterpreterObject], **kwargs: T.Any) -> None: + super().__init_subclass__(**kwargs) + saved_trivial_operators = cls.TRIVIAL_OPERATORS + + cls.METHODS = {} + cls.OPERATORS = {} + cls.TRIVIAL_OPERATORS = {} + + # Compute inherited operators and methods according to the Python resolution + # order. Reverse the result of mro() because update() will overwrite entries + # that are set by the superclass with those that are set by the subclass. 
+ for superclass in reversed(cls.mro()[1:]): + if superclass is InterpreterObject: + # InterpreterObject cannot use @InterpreterObject.operator because + # __init_subclass__ does not operate on InterpreterObject itself + cls.OPERATORS.update({ + MesonOperator.EQUALS: InterpreterObject.op_equals, + MesonOperator.NOT_EQUALS: InterpreterObject.op_not_equals + }) + + elif issubclass(superclass, InterpreterObject): + cls.METHODS.update(superclass.METHODS) + cls.OPERATORS.update(superclass.OPERATORS) + cls.TRIVIAL_OPERATORS.update(superclass.TRIVIAL_OPERATORS) + + for name, method in cls.__dict__.items(): + if hasattr(method, 'meson_method'): + cls.METHODS[method.meson_method] = method + if hasattr(method, 'meson_operator'): + cls.OPERATORS[method.meson_operator] = method + cls.TRIVIAL_OPERATORS.update(saved_trivial_operators) + + @staticmethod + def method(name: str) -> T.Callable[[TV_func], TV_func]: + '''Decorator that tags a Python method as the implementation of a method + for the Meson interpreter''' + def decorator(f: TV_func) -> TV_func: + f.meson_method = name # type: ignore[attr-defined] + return f + return decorator + + @staticmethod + def operator(op: MesonOperator) -> T.Callable[[TV_func], TV_func]: + '''Decorator that tags a method as the implementation of an operator + for the Meson interpreter''' + def decorator(f: TV_func) -> TV_func: + f.meson_operator = op # type: ignore[attr-defined] + return f + return decorator + def __init__(self, *, subproject: T.Optional['SubProject'] = None) -> None: - self.methods: T.Dict[ - str, - T.Callable[[T.List[TYPE_var], TYPE_kwargs], TYPE_var] - ] = {} - self.operators: T.Dict[MesonOperator, 'OperatorCall'] = {} - self.trivial_operators: T.Dict[ - MesonOperator, - T.Tuple[ - T.Union[T.Type, T.Tuple[T.Type, ...]], - 'OperatorCall' - ] - ] = {} # Current node set during a method call. This can be used as location # when printing a warning message during a method call. 
self.current_node: mparser.BaseNode = None self.subproject = subproject or SubProject('') - # Some default operators supported by all objects - self.operators.update({ - MesonOperator.EQUALS: self.op_equals, - MesonOperator.NOT_EQUALS: self.op_not_equals, - }) - # The type of the object that can be printed to the user def display_name(self) -> str: return type(self).__name__ @@ -72,25 +118,26 @@ class InterpreterObject: args: T.List[TYPE_var], kwargs: TYPE_kwargs ) -> TYPE_var: - if method_name in self.methods: - method = self.methods[method_name] + if method_name in self.METHODS: + method = self.METHODS[method_name] if not getattr(method, 'no-args-flattening', False): args = flatten(args) if not getattr(method, 'no-second-level-holder-flattening', False): args, kwargs = resolve_second_level_holders(args, kwargs) - return method(args, kwargs) + return method(self, args, kwargs) raise InvalidCode(f'Unknown method "{method_name}" in object {self} of type {type(self).__name__}.') def operator_call(self, operator: MesonOperator, other: TYPE_var) -> TYPE_var: - if operator in self.trivial_operators: - op = self.trivial_operators[operator] + if operator in self.TRIVIAL_OPERATORS: + op = self.TRIVIAL_OPERATORS[operator] if op[0] is None and other is not None: raise MesonBugException(f'The unary operator `{operator.value}` of {self.display_name()} was passed the object {other} of type {type(other).__name__}') if op[0] is not None and not isinstance(other, op[0]): raise InvalidArguments(f'The `{operator.value}` operator of {self.display_name()} does not accept objects of type {type(other).__name__} ({other})') - return op[1](other) - if operator in self.operators: - return self.operators[operator](other) + return op[1](self, other) + if operator in self.OPERATORS: + return self.OPERATORS[operator](self, other) + raise InvalidCode(f'Object {self} of type {self.display_name()} does not support the `{operator.value}` operator.') # Default comparison operator support @@ -121,6 
+168,16 @@ class MesonInterpreterObject(InterpreterObject): class MutableInterpreterObject: ''' Dummy class to mark the object type as mutable ''' +class UnknownValue(MesonInterpreterObject): + '''This class is only used for the rewriter/static introspection tool and + indicates that a value cannot be determined statically, either because of + limitations in our code or because the value differs from machine to + machine.''' + +class UndefinedVariable(MesonInterpreterObject): + '''This class is only used for the rewriter/static introspection tool and + represents the `value` a meson-variable has if it was never written to.''' + HoldableTypes = (HoldableObject, int, bool, str, list, dict) TYPE_HoldableTypes = T.Union[TYPE_var, HoldableObject] InterpreterObjectTypeVar = T.TypeVar('InterpreterObjectTypeVar', bound=TYPE_HoldableTypes) @@ -142,12 +199,14 @@ class ObjectHolder(InterpreterObject, T.Generic[InterpreterObjectTypeVar]): return type(self.held_object).__name__ # Override default comparison operators for the held object + @InterpreterObject.operator(MesonOperator.EQUALS) def op_equals(self, other: TYPE_var) -> bool: # See the comment from InterpreterObject why we are using `type()` here. 
if type(self.held_object) is not type(other): self._throw_comp_exception(other, '==') return self.held_object == other + @InterpreterObject.operator(MesonOperator.NOT_EQUALS) def op_not_equals(self, other: TYPE_var) -> bool: if type(self.held_object) is not type(other): self._throw_comp_exception(other, '!=') diff --git a/mesonbuild/interpreterbase/decorators.py b/mesonbuild/interpreterbase/decorators.py index 06cac52..a847689 100644 --- a/mesonbuild/interpreterbase/decorators.py +++ b/mesonbuild/interpreterbase/decorators.py @@ -393,7 +393,7 @@ class KwargInfo(T.Generic[_T]): deprecated_message: T.Union[str, None, _NULL_T] = _NULL, deprecated_values: T.Union[T.Dict[T.Union[_T, ContainerTypeInfo, type], T.Union[str, T.Tuple[str, str]]], None, _NULL_T] = _NULL, validator: T.Union[T.Callable[[_T], T.Optional[str]], None, _NULL_T] = _NULL, - convertor: T.Union[T.Callable[[_T], TYPE_var], None, _NULL_T] = _NULL) -> 'KwargInfo': + convertor: T.Union[T.Callable[[_T], object], None, _NULL_T] = _NULL) -> 'KwargInfo': """Create a shallow copy of this KwargInfo, with modifications. This allows us to create a new copy of a KwargInfo with modifications. 
diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py index ee9bb08..f4a2f73 100644 --- a/mesonbuild/linkers/detect.py +++ b/mesonbuild/linkers/detect.py @@ -166,6 +166,9 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty linker = lld_cls( compiler, for_machine, comp_class.LINKER_PREFIX, override, system=system, version=v) + elif o.startswith("eld"): + linker = linkers.ELDDynamicLinker( + compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) elif 'Snapdragon' in e and 'LLVM' in e: linker = linkers.QualcommLLVMDynamicLinker( compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) diff --git a/mesonbuild/linkers/linkers.py b/mesonbuild/linkers/linkers.py index 59f60e0..d81892b 100644 --- a/mesonbuild/linkers/linkers.py +++ b/mesonbuild/linkers/linkers.py @@ -1235,6 +1235,12 @@ class QualcommLLVMDynamicLinker(LLVMDynamicLinker): id = 'ld.qcld' +class ELDDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker): + + """Qualcomm's opensource embedded linker""" + + id = 'ld.eld' + class NAGDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 416caf1..217379f 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -73,6 +73,7 @@ class Conf: self.build_dir = os.path.dirname(self.build_dir) self.build = None self.max_choices_line_length = 60 + self.pending_section: T.Optional[str] = None self.name_col: T.List[LOGLINE] = [] self.value_col: T.List[LOGLINE] = [] self.choices_col: T.List[LOGLINE] = [] @@ -125,9 +126,6 @@ class Conf: def clear_cache(self) -> None: self.coredata.clear_cache() - def set_options(self, options: T.Dict[OptionKey, str]) -> bool: - return self.coredata.set_options(options) - def save(self) -> None: # Do nothing when using introspection if self.default_values_only: @@ -194,7 +192,7 @@ class Conf: ) -> T.Dict[str, options.MutableKeyedOptionDictType]: result: T.Dict[str, 
options.MutableKeyedOptionDictType] = {} for k, o in opts.items(): - if k.subproject: + if k.subproject is not None: self.all_subprojects.add(k.subproject) result.setdefault(k.subproject, {})[k] = o return result @@ -210,11 +208,13 @@ class Conf: self.descr_col.append(descr) def add_option(self, name: str, descr: str, value: T.Any, choices: T.Any) -> None: + self._add_section() value = stringify(value) choices = stringify(choices) self._add_line(mlog.green(name), mlog.yellow(value), mlog.blue(choices), descr) def add_title(self, title: str) -> None: + self._add_section() newtitle = mlog.cyan(title) descr = mlog.cyan('Description') value = mlog.cyan('Default Value' if self.default_values_only else 'Current Value') @@ -223,11 +223,17 @@ class Conf: self._add_line(newtitle, value, choices, descr) self._add_line('-' * len(newtitle), '-' * len(value), '-' * len(choices), '-' * len(descr)) - def add_section(self, section: str) -> None: + def _add_section(self) -> None: + if not self.pending_section: + return self.print_margin = 0 self._add_line('', '', '', '') - self._add_line(mlog.normal_yellow(section + ':'), '', '', '') + self._add_line(mlog.normal_yellow(self.pending_section + ':'), '', '', '') self.print_margin = 2 + self.pending_section = None + + def add_section(self, section: str) -> None: + self.pending_section = section def print_options(self, title: str, opts: T.Union[options.MutableKeyedOptionDictType, options.OptionStore]) -> None: if not opts: @@ -291,15 +297,15 @@ class Conf: project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_project_option(k)}) show_build_options = self.default_values_only or self.build.environment.is_cross_build() - self.add_section('Main project options') + self.add_section('Global build options') self.print_options('Core options', host_core_options[None]) if show_build_options and build_core_options: self.print_options('', build_core_options[None]) 
self.print_options('Backend options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_backend_option(k)}) self.print_options('Base options', {k: v for k, v in self.coredata.optstore.items() if self.coredata.optstore.is_base_option(k)}) - self.print_options('Compiler options', host_compiler_options.get('', {})) + self.print_options('Compiler options', host_compiler_options.get(None, {})) if show_build_options: - self.print_options('', build_compiler_options.get('', {})) + self.print_options('', build_compiler_options.get(None, {})) for mod, mod_options in module_options.items(): self.print_options(f'{mod} module options', mod_options) self.print_options('Directories', dir_options) @@ -307,8 +313,9 @@ class Conf: self.print_options('Project options', project_options.get('', {})) for subproject in sorted(self.all_subprojects): if subproject == '': - continue - self.add_section('Subproject ' + subproject) + self.add_section('Main project') + else: + self.add_section('Subproject ' + subproject) if subproject in host_core_options: self.print_options('Core options', host_core_options[subproject]) if subproject in build_core_options and show_build_options: @@ -317,7 +324,7 @@ class Conf: self.print_options('Compiler options', host_compiler_options[subproject]) if subproject in build_compiler_options and show_build_options: self.print_options('', build_compiler_options[subproject]) - if subproject in project_options: + if subproject != '' and subproject in project_options: self.print_options('Project options', project_options[subproject]) self.print_aligned() @@ -342,7 +349,7 @@ class Conf: if self.coredata.optstore.augments: mlog.log('\nCurrently set option augments:') for k, v in self.coredata.optstore.augments.items(): - mlog.log(f'{k:21}{v:10}') + mlog.log(f'{k!s:21}{v:10}') else: mlog.log('\nThere are no option augments.') @@ -373,11 +380,7 @@ def run_impl(options: CMDOptions, builddir: str) -> int: save = False if 
has_option_flags(options): - unset_opts = getattr(options, 'unset_opts', []) - all_D = options.projectoptions[:] - for keystr, valstr in options.cmd_line_options.items(): - all_D.append(f'{keystr}={valstr}') - save |= c.coredata.optstore.set_from_configure_command(all_D, unset_opts) + save |= c.coredata.set_from_configure_command(options) coredata.update_cmd_line_file(builddir, options) if options.clearcache: c.clear_cache() diff --git a/mesonbuild/mdevenv.py b/mesonbuild/mdevenv.py index 4962d96..e9974fe 100644 --- a/mesonbuild/mdevenv.py +++ b/mesonbuild/mdevenv.py @@ -4,6 +4,7 @@ import os, subprocess import argparse import tempfile import shutil +import sys import itertools import typing as T @@ -226,8 +227,14 @@ def run(options: argparse.Namespace) -> int: args[0] = abs_path or args[0] try: - os.chdir(workdir) - os.execvpe(args[0], args, env=devenv) + if is_windows(): + # execvpe doesn't return exit code on Windows + # see https://github.com/python/cpython/issues/63323 + result = subprocess.run(args, env=devenv, cwd=workdir) + sys.exit(result.returncode) + else: + os.chdir(workdir) + os.execvpe(args[0], args, env=devenv) except FileNotFoundError: raise MesonException(f'Command not found: {args[0]}') except OSError as e: diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py index 0361606..6e1bfd0 100644 --- a/mesonbuild/mdist.py +++ b/mesonbuild/mdist.py @@ -7,7 +7,6 @@ from __future__ import annotations import abc import argparse -import gzip import os import sys import shlex @@ -294,6 +293,7 @@ class HgDist(Dist): shutil.copyfileobj(tf, bf) output_names.append(bz2name) if 'gztar' in archives: + import gzip with gzip.open(gzname, 'wb') as zf, open(tarname, 'rb') as tf: shutil.copyfileobj(tf, zf) output_names.append(gzname) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 462ee2f..6986186 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -19,20 +19,23 @@ from pathlib import Path, PurePath import sys import typing as T -from . 
import build, mesonlib, options, coredata as cdata -from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter +from . import build, environment, mesonlib, options, coredata as cdata +from .ast import IntrospectionInterpreter, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter from .backend import backends from .dependencies import Dependency -from . import environment -from .interpreterbase import ObjectHolder +from .interpreterbase import ObjectHolder, UnknownValue from .options import OptionKey -from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode if T.TYPE_CHECKING: import argparse from .interpreter import Interpreter - from .mparser import BaseNode + +class IntrospectionEncoder(json.JSONEncoder): + def default(self, obj: T.Any) -> T.Any: + if isinstance(obj, UnknownValue): + return 'unknown' + return json.JSONEncoder.default(self, obj) def get_meson_info_file(info_dir: str) -> str: return os.path.join(info_dir, 'meson-info.json') @@ -54,7 +57,7 @@ class IntroCommand: def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None, builddata: T.Optional[build.Build] = None, - backend: T.Optional[backends.Backend] = None) -> 'T.Mapping[str, IntroCommand]': + backend: T.Optional[backends.Backend] = None) -> T.Mapping[str, IntroCommand]: if backend and builddata: benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) testdata = backend.create_test_serialisation(builddata.get_tests()) @@ -169,56 +172,35 @@ def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str: else: return subdir -def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]: - tlist: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]] = [] - root_dir = 
Path(intr.source_root) - - def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]: - res: T.List[Path] = [] - for n in node_list: - args: T.List[BaseNode] = [] - if isinstance(n, FunctionNode): - args = list(n.args.arguments) - if n.func_name.value in BUILD_TARGET_FUNCTIONS: - args.pop(0) - elif isinstance(n, ArrayNode): - args = n.args.arguments - elif isinstance(n, ArgumentNode): - args = n.arguments - for j in args: - if isinstance(j, StringNode): - assert isinstance(j.value, str) - res += [Path(j.value)] - elif isinstance(j, str): - res += [Path(j)] - res = [root_dir / i['subdir'] / x for x in res] - res = [x.resolve() for x in res] - return res +def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, object]]: + tlist = [] + root_dir = Path(intr.source_root).resolve() for i in intr.targets: - sources = nodes_to_paths(i['sources']) - extra_f = nodes_to_paths(i['extra_files']) - outdir = get_target_dir(intr.coredata, i['subdir']) + sources = intr.nodes_to_pretty_filelist(root_dir, i.subdir, i.source_nodes) + extra_files = intr.nodes_to_pretty_filelist(root_dir, i.subdir, [i.extra_files] if i.extra_files else []) + + outdir = get_target_dir(intr.coredata, i.subdir) tlist += [{ - 'name': i['name'], - 'id': i['id'], - 'type': i['type'], - 'defined_in': i['defined_in'], - 'filename': [os.path.join(outdir, x) for x in i['outputs']], - 'build_by_default': i['build_by_default'], + 'name': i.name, + 'id': i.id, + 'type': i.typename, + 'defined_in': i.defined_in, + 'filename': [os.path.join(outdir, x) for x in i.outputs], + 'build_by_default': i.build_by_default, 'target_sources': [{ 'language': 'unknown', - 'machine': i['machine'], + 'machine': i.machine, 'compiler': [], 'parameters': [], - 'sources': [str(x) for x in sources], + 'sources': sources, 'generated_sources': [] }], 'depends': [], - 'extra_files': [str(x) for x in extra_f], + 'extra_files': extra_files, 'subproject': None, # Subprojects are not supported - 'installed': 
i['installed'] + 'installed': i.installed }] return tlist @@ -380,17 +362,16 @@ def list_compilers(coredata: cdata.CoreData) -> T.Dict[str, T.Dict[str, T.Dict[s } return compilers -def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]: - result: T.List[T.Dict[str, T.Union[str, bool]]] = [] +def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, T.List[str], UnknownValue]]]: + result: T.List[T.Dict[str, T.Union[str, bool, T.List[str], UnknownValue]]] = [] for i in intr.dependencies: - keys = [ - 'name', - 'required', - 'version', - 'has_fallback', - 'conditional', - ] - result += [{k: v for k, v in i.items() if k in keys}] + result += [{ + 'name': i.name, + 'required': i.required, + 'version': i.version, + 'has_fallback': i.has_fallback, + 'conditional': i.conditional, + }] return result def list_deps(coredata: cdata.CoreData, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[str, T.List[str]]]]: @@ -517,12 +498,12 @@ def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, return 1 elif len(results) == 1 and not options.force_dict: # Make to keep the existing output format for a single option - print(json.dumps(results[0][1], indent=indent)) + print(json.dumps(results[0][1], indent=indent, cls=IntrospectionEncoder)) else: out = {} for i in results: out[i[0]] = i[1] - print(json.dumps(out, indent=indent)) + print(json.dumps(out, indent=indent, cls=IntrospectionEncoder)) return 0 def get_infodir(builddir: T.Optional[str] = None) -> str: @@ -546,10 +527,11 @@ def run(options: argparse.Namespace) -> int: datadir = os.path.join(options.builddir, datadir) indent = 4 if options.indent else None results: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]] = [] - sourcedir = '.' 
if options.builddir == 'meson.build' else options.builddir[:-11] intro_types = get_meson_introspection_types() - if 'meson.build' in [os.path.basename(options.builddir), options.builddir]: + # TODO: This if clause is undocumented. + if os.path.basename(options.builddir) == environment.build_filename: + sourcedir = '.' if options.builddir == environment.build_filename else options.builddir[:-len(environment.build_filename)] # Make sure that log entries in other parts of meson don't interfere with the JSON output with redirect_stdout(sys.stderr): backend = backends.get_backend_from_name(options.backend) diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py index 67d1666..87892e6 100644 --- a/mesonbuild/modules/__init__.py +++ b/mesonbuild/modules/__init__.py @@ -75,14 +75,14 @@ class ModuleState: required: bool = True, version_func: T.Optional[ProgramVersionFunc] = None, wanted: T.Union[str, T.List[str]] = '', silent: bool = False, - for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[ExternalProgram, build.Executable, OverrideProgram]: + for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[ExternalProgram, build.OverrideExecutable, OverrideProgram]: if not isinstance(prog, list): prog = [prog] return self._interpreter.find_program_impl(prog, required=required, version_func=version_func, wanted=wanted, silent=silent, for_machine=for_machine) def find_tool(self, name: str, depname: str, varname: str, required: bool = True, - wanted: T.Optional[str] = None) -> T.Union['build.Executable', ExternalProgram, 'OverrideProgram']: + wanted: T.Optional[str] = None) -> T.Union[build.OverrideExecutable, ExternalProgram, 'OverrideProgram']: # Look in overrides in case it's built as subproject progobj = self._interpreter.program_from_overrides([name], []) if progobj is not None: diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py index e3154b0..f12cc51 100644 --- a/mesonbuild/modules/cmake.py +++ 
b/mesonbuild/modules/cmake.py @@ -154,10 +154,11 @@ class CMakeSubproject(ModuleObject): @noKwargs @typed_pos_args('cmake.subproject.include_directories', str) - def include_directories(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> build.IncludeDirs: + def include_directories(self, state: ModuleState, args: T.Tuple[str], kwargs: TYPE_kwargs) -> T.List[build.IncludeDirs]: info = self._args_to_info(args[0]) inc = self.get_variable(state, [info['inc']], kwargs) - assert isinstance(inc, build.IncludeDirs), 'for mypy' + assert isinstance(inc, list), 'for mypy' + assert isinstance(inc[0], build.IncludeDirs), 'for mypy' return inc @noKwargs diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 6764133..9f955ae 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -137,6 +137,8 @@ if T.TYPE_CHECKING: install_header: bool install_dir: T.Optional[str] docbook: T.Optional[str] + rst: T.Optional[str] + markdown: T.Optional[str] autocleanup: Literal['all', 'none', 'objects', 'default'] class GenMarshal(TypedDict): @@ -957,8 +959,8 @@ class GnomeModule(ExtensionModule): return gir_filelist_filename - @staticmethod def _make_gir_target( + self, state: 'ModuleState', girfile: str, scan_command: T.Sequence[T.Union['FileOrString', Executable, ExternalProgram, OverrideProgram]], @@ -988,6 +990,11 @@ class GnomeModule(ExtensionModule): run_env.set('CFLAGS', [quote_arg(x) for x in env_flags], ' ') run_env.merge(kwargs['env']) + gir_dep, _, _ = self._get_gir_dep(state) + + # response file supported? 
+ rspable = mesonlib.version_compare(gir_dep.get_version(), '>= 1.85.0') + return GirTarget( girfile, state.subdir, @@ -1002,6 +1009,7 @@ class GnomeModule(ExtensionModule): install_dir=[install_dir], install_tag=['devel'], env=run_env, + rspable=rspable, ) @staticmethod @@ -1168,13 +1176,13 @@ class GnomeModule(ExtensionModule): scan_cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers]))) scan_internal_ldflags = [] scan_external_ldflags = [] - scan_env_ldflags = [] + scan_env_ldflags = state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c') for cli_flags, env_flags in (self._get_scanner_ldflags(internal_ldflags), self._get_scanner_ldflags(dep_internal_ldflags)): scan_internal_ldflags += cli_flags - scan_env_ldflags = env_flags + scan_env_ldflags += env_flags for cli_flags, env_flags in (self._get_scanner_ldflags(external_ldflags), self._get_scanner_ldflags(dep_external_ldflags)): scan_external_ldflags += cli_flags - scan_env_ldflags = env_flags + scan_env_ldflags += env_flags girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets) inc_dirs = kwargs['include_directories'] @@ -1619,6 +1627,8 @@ class GnomeModule(ExtensionModule): ), KwargInfo('install_header', bool, default=False, since='0.46.0'), KwargInfo('docbook', (str, NoneType)), + KwargInfo('rst', (str, NoneType), since='1.9.0'), + KwargInfo('markdown', (str, NoneType), since='1.9.0'), KwargInfo( 'autocleanup', str, default='default', since='0.47.0', validator=in_set_validator({'all', 'none', 'objects'})), @@ -1675,6 +1685,26 @@ class GnomeModule(ExtensionModule): cmd += ['--generate-docbook', docbook] + if kwargs['rst'] is not None: + if not mesonlib.version_compare(glib_version, '>= 2.71.1'): + mlog.error(f'Glib version ({glib_version}) is too old to ' + 'support the \'rst\' kwarg, need 2.71.1 or ' + 'newer') + + rst = kwargs['rst'] + + cmd += ['--generate-rst', rst] + + if kwargs['markdown'] is not None: + if not 
mesonlib.version_compare(glib_version, '>= 2.75.2'): + mlog.error(f'Glib version ({glib_version}) is too old to ' + 'support the \'markdown\' kwarg, need 2.75.2 ' + 'or newer') + + markdown = kwargs['markdown'] + + cmd += ['--generate-md', markdown] + # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a if mesonlib.version_compare(glib_version, '>= 2.51.3'): cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@'] @@ -1750,6 +1780,48 @@ class GnomeModule(ExtensionModule): ) targets.append(docbook_custom_target) + if kwargs['rst'] is not None: + rst = kwargs['rst'] + # The rst output is always ${rst}-${name_of_xml_file} + output = namebase + '-rst' + outputs = [] + for f in xml_files: + outputs.append('{}-{}'.format(rst, os.path.basename(str(f)))) + + rst_custom_target = CustomTarget( + output, + state.subdir, + state.subproject, + state.environment, + cmd + ['--output-directory', '@OUTDIR@', '--generate-rst', rst, '@INPUT@'], + xml_files, + outputs, + build_by_default=build_by_default, + description='Generating gdbus reStructuredText {}', + ) + targets.append(rst_custom_target) + + if kwargs['markdown'] is not None: + markdown = kwargs['markdown'] + # The markdown output is always ${markdown}-${name_of_xml_file} + output = namebase + '-markdown' + outputs = [] + for f in xml_files: + outputs.append('{}-{}'.format(markdown, os.path.basename(str(f)))) + + markdown_custom_target = CustomTarget( + output, + state.subdir, + state.subproject, + state.environment, + cmd + ['--output-directory', '@OUTDIR@', '--generate-md', markdown, '@INPUT@'], + xml_files, + outputs, + build_by_default=build_by_default, + description='Generating gdbus markdown {}', + ) + targets.append(markdown_custom_target) + return ModuleReturnValue(targets, targets) @typed_pos_args('gnome.mkenums', str) @@ -1991,6 +2063,7 @@ class GnomeModule(ExtensionModule): extra_depends=depends, # https://github.com/mesonbuild/meson/issues/973 
absolute_paths=True, + rspable=mesonlib.is_windows() or mesonlib.is_cygwin(), description='Generating GObject enum file {}', ) diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py index 5099b41..b66a0bf 100644 --- a/mesonbuild/modules/hotdoc.py +++ b/mesonbuild/modules/hotdoc.py @@ -14,7 +14,7 @@ from ..build import CustomTarget, CustomTargetIndex from ..dependencies import Dependency, InternalDependency from ..interpreterbase import ( InvalidArguments, noPosargs, noKwargs, typed_kwargs, FeatureDeprecated, - ContainerTypeInfo, KwargInfo, typed_pos_args + ContainerTypeInfo, KwargInfo, typed_pos_args, InterpreterObject ) from ..interpreter.interpreterobjects import _CustomTargetHolder from ..interpreter.type_checking import NoneType @@ -383,12 +383,9 @@ class HotdocTargetBuilder: class HotdocTargetHolder(_CustomTargetHolder['HotdocTarget']): - def __init__(self, target: HotdocTarget, interp: Interpreter): - super().__init__(target, interp) - self.methods.update({'config_path': self.config_path_method}) - @noPosargs @noKwargs + @InterpreterObject.method('config_path') def config_path_method(self, *args: T.Any, **kwargs: T.Any) -> str: conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir, self.interpreter.environment.build_dir) diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index cc0450a..e3f7a97 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -38,6 +38,7 @@ if T.TYPE_CHECKING: filebase: T.Optional[str] description: T.Optional[str] url: str + license: str subdirs: T.List[str] conflicts: T.List[str] dataonly: bool @@ -441,6 +442,7 @@ class PkgConfigModule(NewExtensionModule): def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper, subdirs: T.List[str], name: str, description: str, url: str, version: str, + license: str, pcfile: str, conflicts: T.List[str], variables: T.List[T.Tuple[str, str]], unescaped_variables: 
T.List[T.Tuple[str, str]], @@ -519,18 +521,20 @@ class PkgConfigModule(NewExtensionModule): ofile.write(f'{k}={v}\n') ofile.write('\n') ofile.write(f'Name: {name}\n') - if len(description) > 0: + if description: ofile.write(f'Description: {description}\n') - if len(url) > 0: + if url: ofile.write(f'URL: {url}\n') + if license: + ofile.write(f'License: {license}\n') ofile.write(f'Version: {version}\n') reqs_str = deps.format_reqs(deps.pub_reqs) - if len(reqs_str) > 0: + if reqs_str: ofile.write(f'Requires: {reqs_str}\n') reqs_str = deps.format_reqs(deps.priv_reqs) - if len(reqs_str) > 0: + if reqs_str: ofile.write(f'Requires.private: {reqs_str}\n') - if len(conflicts) > 0: + if conflicts: ofile.write('Conflicts: {}\n'.format(' '.join(conflicts))) def generate_libs_flags(libs: T.List[LIBS]) -> T.Iterable[str]: @@ -571,9 +575,9 @@ class PkgConfigModule(NewExtensionModule): if isinstance(l, (build.CustomTarget, build.CustomTargetIndex)) or 'cs' not in l.compilers: yield f'-l{lname}' - if len(deps.pub_libs) > 0: + if deps.pub_libs: ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs)))) - if len(deps.priv_libs) > 0: + if deps.priv_libs: ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs)))) cflags: T.List[str] = [] @@ -605,6 +609,7 @@ class PkgConfigModule(NewExtensionModule): KwargInfo('name', (str, NoneType), validator=lambda x: 'must not be an empty string' if x == '' else None), KwargInfo('subdirs', ContainerTypeInfo(list, str), default=[], listify=True), KwargInfo('url', str, default=''), + KwargInfo('license', str, default='', since='1.9.0'), KwargInfo('version', (str, NoneType)), VARIABLES_KW.evolve(name="unescaped_uninstalled_variables", since='0.59.0'), VARIABLES_KW.evolve(name="unescaped_variables", since='0.59.0'), @@ -659,6 +664,7 @@ class PkgConfigModule(NewExtensionModule): filebase = kwargs['filebase'] if kwargs['filebase'] is not None else name description = kwargs['description'] if 
kwargs['description'] is not None else default_description url = kwargs['url'] + license = kwargs['license'] conflicts = kwargs['conflicts'] # Prepend the main library to public libraries list. This is required @@ -713,7 +719,7 @@ class PkgConfigModule(NewExtensionModule): pkgroot_name = os.path.join('{libdir}', 'pkgconfig') relocatable = state.get_option('pkgconfig.relocatable') self._generate_pkgconfig_file(state, deps, subdirs, name, description, url, - version, pcfile, conflicts, variables, + version, license, pcfile, conflicts, variables, unescaped_variables, False, dataonly, pkgroot=pkgroot if relocatable else None) res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, pkgroot_name, None, state.subproject, install_tag='devel') @@ -722,7 +728,7 @@ class PkgConfigModule(NewExtensionModule): pcfile = filebase + '-uninstalled.pc' self._generate_pkgconfig_file(state, deps, subdirs, name, description, url, - version, pcfile, conflicts, variables, + version, license, pcfile, conflicts, variables, unescaped_variables, uninstalled=True, dataonly=dataonly) # Associate the main library with this generated pc file. 
If the library # is used in any subsequent call to the generated, it will generate a diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index 2a7e685..8d82a33 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -20,7 +20,7 @@ from ..interpreter.type_checking import NoneType, PRESERVE_PATH_KW, SHARED_MOD_K from ..interpreterbase import ( noPosargs, noKwargs, permittedKwargs, ContainerTypeInfo, InvalidArguments, typed_pos_args, typed_kwargs, KwargInfo, - FeatureNew, FeatureNewKwargs, disablerIfNotFound + FeatureNew, FeatureNewKwargs, disablerIfNotFound, InterpreterObject ) from ..mesonlib import MachineChoice from ..options import OptionKey @@ -126,23 +126,11 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']): self.platform = info['platform'] self.is_pypy = info['is_pypy'] self.link_libpython = info['link_libpython'] - self.methods.update({ - 'extension_module': self.extension_module_method, - 'dependency': self.dependency_method, - 'install_sources': self.install_sources_method, - 'get_install_dir': self.get_install_dir_method, - 'language_version': self.language_version_method, - 'found': self.found_method, - 'has_path': self.has_path_method, - 'get_path': self.get_path_method, - 'has_variable': self.has_variable_method, - 'get_variable': self.get_variable_method, - 'path': self.path_method, - }) @permittedKwargs(mod_kwargs) @typed_pos_args('python.extension_module', str, varargs=(str, mesonlib.File, CustomTarget, CustomTargetIndex, GeneratedList, StructuredSources, ExtractedObjects, BuildTarget)) @typed_kwargs('python.extension_module', *_MOD_KWARGS, _DEFAULTABLE_SUBDIR_KW, _LIMITED_API_KW, allow_unknown=True) + @InterpreterObject.method('extension_module') def extension_module_method(self, args: T.Tuple[str, T.List[BuildTargetSource]], kwargs: ExtensionModuleKw) -> 'SharedModule': if 'install_dir' in kwargs: if kwargs['subdir'] is not None: @@ -268,6 +256,7 @@ class 
PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']): @permittedKwargs(permitted_dependency_kwargs | {'embed'}) @FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed']) @noPosargs + @InterpreterObject.method('dependency') def dependency_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'Dependency': disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: @@ -287,6 +276,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']): PRESERVE_PATH_KW, KwargInfo('install_tag', (str, NoneType), since='0.60.0') ) + @InterpreterObject.method('install_sources') def install_sources_method(self, args: T.Tuple[T.List[T.Union[str, mesonlib.File]]], kwargs: 'PyInstallKw') -> 'Data': self.held_object.run_bytecompile[self.version] = True @@ -301,6 +291,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']): @noPosargs @typed_kwargs('python_installation.install_dir', _PURE_KW, _SUBDIR_KW) + @InterpreterObject.method('get_install_dir') def get_install_dir_method(self, args: T.List['TYPE_var'], kwargs: 'PyInstallKw') -> str: self.held_object.run_bytecompile[self.version] = True pure = kwargs['pure'] if kwargs['pure'] is not None else self.pure @@ -318,16 +309,19 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']): @noPosargs @noKwargs + @InterpreterObject.method('language_version') def language_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return self.version @typed_pos_args('python_installation.has_path', str) @noKwargs + @InterpreterObject.method('has_path') def has_path_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool: return args[0] in self.paths @typed_pos_args('python_installation.get_path', str, optargs=[object]) @noKwargs + @InterpreterObject.method('get_path') def get_path_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var': path_name, 
fallback = args try: @@ -339,11 +333,13 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']): @typed_pos_args('python_installation.has_variable', str) @noKwargs + @InterpreterObject.method('has_variable') def has_variable_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool: return args[0] in self.variables @typed_pos_args('python_installation.get_variable', str, optargs=[object]) @noKwargs + @InterpreterObject.method('get_variable') def get_variable_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var': var_name, fallback = args try: @@ -356,6 +352,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']): @noPosargs @noKwargs @FeatureNew('Python module path method', '0.50.0') + @InterpreterObject.method('path') def path_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str: return super().path_method(args, kwargs) diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py index f43a0ed..c5f18e8 100644 --- a/mesonbuild/modules/rust.py +++ b/mesonbuild/modules/rust.py @@ -242,6 +242,10 @@ class RustModule(ExtensionModule): def doctest(self, state: ModuleState, args: T.Tuple[str, T.Union[SharedLibrary, StaticLibrary]], kwargs: FuncDoctest) -> ModuleReturnValue: name, base_target = args + if state.environment.is_cross_build() and state.environment.need_exe_wrapper(base_target.for_machine): + mlog.notice('skipping Rust doctests due to cross compilation', once=True) + return ModuleReturnValue(None, []) + # Link the base target's crate into the tests kwargs['link_with'].append(base_target) kwargs['depends'].append(base_target) diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index f1c6071..3dd8f0a 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -201,7 +201,7 @@ class Lexer: lines = value.split('\n') if len(lines) > 1: lineno += len(lines) - 1 - line_start = mo.end() - len(lines[-1]) + line_start = mo.end() - len(lines[-1]) 
- 3 elif tid == 'eol_cont': lineno += 1 line_start = loc @@ -221,7 +221,7 @@ class Lexer: yield Token(tid, filename, curline_start, curline, col, bytespan, value) break if not matched: - raise ParseException('lexer', self.getline(line_start), lineno, col) + raise ParseException(f'lexer: unrecognized token {self.code[loc]!r}', self.getline(line_start), lineno, loc - line_start) @dataclass class BaseNode: @@ -369,6 +369,13 @@ class ArgumentNode(BaseNode): mlog.warning('This will be an error in Meson 2.0.') self.kwargs[name] = value + def get_kwarg_or_default(self, name: str, default: BaseNode) -> BaseNode: + for k, v in self.kwargs.items(): + assert isinstance(k, IdNode) + if k.value == name: + return v + return default + def set_kwarg_no_check(self, name: BaseNode, value: BaseNode) -> None: self.kwargs[name] = value @@ -676,15 +683,16 @@ comparison_map: T.Mapping[str, COMPARISONS] = { # levels so there are not enough words to describe them all. # Enter numbering: # -# 1 assignment -# 2 or -# 3 and -# 4 comparison -# 5 arithmetic -# 6 negation -# 7 funcall, method call -# 8 parentheses -# 9 plain token +# 1 assignment +# 2 or +# 3 and +# 4 comparison +# 5 addition and subtraction +# 6 multiplication, division and modulus +# 7 negation +# 8 funcall, method call +# 9 parentheses +# 10 plain token class Parser: def __init__(self, code: str, filename: str): @@ -831,50 +839,47 @@ class Parser: return left def e5(self) -> BaseNode: - return self.e5addsub() - - def e5addsub(self) -> BaseNode: op_map = { 'plus': 'add', 'dash': 'sub', } - left = self.e5muldiv() + left = self.e6() while True: op = self.accept_any(tuple(op_map.keys())) if op: operator = self.create_node(SymbolNode, self.previous) - left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e5muldiv()) + left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e6()) else: break return left - def e5muldiv(self) -> BaseNode: + def e6(self) -> BaseNode: op_map = { 'percent': 'mod', 
'star': 'mul', 'fslash': 'div', } - left = self.e6() + left = self.e7() while True: op = self.accept_any(tuple(op_map.keys())) if op: operator = self.create_node(SymbolNode, self.previous) - left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e6()) + left = self.create_node(ArithmeticNode, op_map[op], left, operator, self.e7()) else: break return left - def e6(self) -> BaseNode: + def e7(self) -> BaseNode: if self.accept('not'): operator = self.create_node(SymbolNode, self.previous) - return self.create_node(NotNode, self.current, operator, self.e7()) + return self.create_node(NotNode, self.current, operator, self.e8()) if self.accept('dash'): operator = self.create_node(SymbolNode, self.previous) - return self.create_node(UMinusNode, self.current, operator, self.e7()) - return self.e7() + return self.create_node(UMinusNode, self.current, operator, self.e8()) + return self.e8() - def e7(self) -> BaseNode: - left = self.e8() + def e8(self) -> BaseNode: + left = self.e9() block_start = self.current if self.accept('lparen'): lpar = self.create_node(SymbolNode, block_start) @@ -897,7 +902,7 @@ class Parser: left = self.index_call(left) return left - def e8(self) -> BaseNode: + def e9(self) -> BaseNode: block_start = self.current if self.accept('lparen'): lpar = self.create_node(SymbolNode, block_start) @@ -918,9 +923,9 @@ class Parser: rcurl = self.create_node(SymbolNode, self.previous) return self.create_node(DictNode, lcurl, key_values, rcurl) else: - return self.e9() + return self.e10() - def e9(self) -> BaseNode: + def e10(self) -> BaseNode: t = self.current if self.accept('true'): t.value = True @@ -978,7 +983,7 @@ class Parser: def method_call(self, source_object: BaseNode) -> MethodNode: dot = self.create_node(SymbolNode, self.previous) - methodname = self.e9() + methodname = self.e10() if not isinstance(methodname, IdNode): if isinstance(source_object, NumberNode) and isinstance(methodname, NumberNode): raise ParseException('meson does not 
support float numbers', diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py index 81dd183..b08d5e8 100644 --- a/mesonbuild/msetup.py +++ b/mesonbuild/msetup.py @@ -11,7 +11,7 @@ import typing as T from . import build, coredata, environment, interpreter, mesonlib, mintro, mlog from .mesonlib import MesonException -from .options import COMPILER_BASE_OPTIONS, OptionKey +from .options import OptionKey if T.TYPE_CHECKING: from typing_extensions import Protocol @@ -180,30 +180,30 @@ class MesonApp: # See class Backend's 'generate' for comments on capture args and returned dictionary. def generate(self, capture: bool = False, vslite_ctx: T.Optional[dict] = None) -> T.Optional[dict]: env = environment.Environment(self.source_dir, self.build_dir, self.options) + if not env.first_invocation: + assert self.options.reconfigure + env.coredata.set_from_configure_command(self.options) mlog.initialize(env.get_log_dir(), self.options.fatal_warnings) if self.options.profile: mlog.set_timestamp_start(time.monotonic()) if self.options.clearcache: env.coredata.clear_cache() - with mesonlib.BuildDirLock(self.build_dir): + with mesonlib.DirectoryLock(self.build_dir, 'meson-private/meson.lock', + mesonlib.DirectoryLockAction.FAIL, + 'Some other Meson process is already using this build directory. Exiting.'): return self._generate(env, capture, vslite_ctx) - def check_unused_options(self, coredata: 'coredata.CoreData', cmd_line_options: T.Any, all_subprojects: T.Any) -> None: + def check_unused_options(self, coredata: 'coredata.CoreData', cmd_line_options: T.Dict[OptionKey, str], all_subprojects: T.Mapping[str, object]) -> None: pending = coredata.optstore.pending_options errlist: T.List[str] = [] + known_subprojects = all_subprojects.keys() for opt in pending: - # It is not an error to set wrong option for unknown subprojects or - # language because we don't have control on which one will be selected. 
- if opt.subproject and opt.subproject not in all_subprojects: + # It is not an error to set wrong option for unknown subprojects + # because they might be used in future reconfigurations + if coredata.optstore.accept_as_pending_option(opt, known_subprojects): continue - if coredata.optstore.is_compiler_option(opt): - continue - if (coredata.optstore.is_base_option(opt) and - opt.evolve(subproject=None, machine=mesonlib.MachineChoice.HOST) in COMPILER_BASE_OPTIONS): - continue - keystr = str(opt) - if keystr in cmd_line_options: - errlist.append(f'"{keystr}"') + if opt in cmd_line_options: + errlist.append(f'"{opt}"') if errlist: errstr = ', '.join(errlist) raise MesonException(f'Unknown options: {errstr}') @@ -347,17 +347,18 @@ def run_genvslite_setup(options: CMDOptions) -> None: # invoke the appropriate 'meson compile ...' build commands upon the normal visual studio build/rebuild/clean actions, instead of using # the native VS/msbuild system. builddir_prefix = options.builddir - genvsliteval = options.cmd_line_options.pop('genvslite') # type: ignore [call-overload] + k_genvslite = OptionKey('genvslite') + genvsliteval = options.cmd_line_options.pop(k_genvslite) # The command line may specify a '--backend' option, which doesn't make sense in conjunction with # '--genvslite', where we always want to use a ninja back end - - k_backend = 'backend' + k_backend = OptionKey('backend') if k_backend in options.cmd_line_options.keys(): - if options.cmd_line_options[k_backend] != 'ninja': # type: ignore [index] + if options.cmd_line_options[k_backend] != 'ninja': raise MesonException('Explicitly specifying a backend option with \'genvslite\' is not necessary ' '(the ninja backend is always used) but specifying a non-ninja backend ' 'conflicts with a \'genvslite\' setup') else: - options.cmd_line_options[k_backend] = 'ninja' # type: ignore [index] + options.cmd_line_options[k_backend] = 'ninja' buildtypes_list = coredata.get_genvs_default_buildtype_list() vslite_ctx = {} 
@@ -368,7 +369,7 @@ def run_genvslite_setup(options: CMDOptions) -> None: vslite_ctx[buildtypestr] = app.generate(capture=True) #Now for generating the 'lite' solution and project files, which will use these builds we've just set up, above. options.builddir = f'{builddir_prefix}_vs' - options.cmd_line_options[OptionKey('genvslite')] = genvsliteval + options.cmd_line_options[k_genvslite] = genvsliteval app = MesonApp(options) app.generate(capture=False, vslite_ctx=vslite_ctx) @@ -384,7 +385,7 @@ def run(options: T.Union[CMDOptions, T.List[str]]) -> int: # lie options.pager = False - if 'genvslite' in options.cmd_line_options.keys(): + if OptionKey('genvslite') in options.cmd_line_options.keys(): run_genvslite_setup(options) else: app = MesonApp(options) diff --git a/mesonbuild/options.py b/mesonbuild/options.py index 3b7d8b2..bc4d79f 100644 --- a/mesonbuild/options.py +++ b/mesonbuild/options.py @@ -310,7 +310,7 @@ class OptionKey: return self.machine is MachineChoice.BUILD if T.TYPE_CHECKING: - OptionStringLikeDict: TypeAlias = T.Dict[T.Union[OptionKey, str], str] + OptionDict: TypeAlias = T.Dict[OptionKey, ElementaryOptionValues] @dataclasses.dataclass class UserOption(T.Generic[_T], HoldableObject): @@ -327,7 +327,13 @@ class UserOption(T.Generic[_T], HoldableObject): # Final isn't technically allowed in a __post_init__ method self.default: Final[_T] = self.value # type: ignore[misc] - def listify(self, value: T.Any) -> T.List[T.Any]: + def listify(self, value: ElementaryOptionValues) -> T.List[str]: + if isinstance(value, list): + return value + if isinstance(value, bool): + return ['true'] if value else ['false'] + if isinstance(value, int): + return [str(value)] return [value] def printable_value(self) -> ElementaryOptionValues: @@ -340,10 +346,10 @@ class UserOption(T.Generic[_T], HoldableObject): # Check that the input is a valid value and return the # "cleaned" or "native" version. 
For example the Boolean # option could take the string "true" and return True. - def validate_value(self, value: T.Any) -> _T: + def validate_value(self, value: object) -> _T: raise RuntimeError('Derived option class did not override validate_value.') - def set_value(self, newvalue: T.Any) -> bool: + def set_value(self, newvalue: object) -> bool: oldvalue = self.value self.value = self.validate_value(newvalue) return self.value != oldvalue @@ -361,7 +367,7 @@ class EnumeratedUserOption(UserOption[_T]): class UserStringOption(UserOption[str]): - def validate_value(self, value: T.Any) -> str: + def validate_value(self, value: object) -> str: if not isinstance(value, str): raise MesonException(f'The value of option "{self.name}" is "{value}", which is not a string.') return value @@ -374,7 +380,7 @@ class UserBooleanOption(EnumeratedUserOption[bool]): def __bool__(self) -> bool: return self.value - def validate_value(self, value: T.Any) -> bool: + def validate_value(self, value: object) -> bool: if isinstance(value, bool): return value if not isinstance(value, str): @@ -406,7 +412,7 @@ class _UserIntegerBase(UserOption[_T]): def printable_choices(self) -> T.Optional[T.List[str]]: return [self.__choices] - def validate_value(self, value: T.Any) -> _T: + def validate_value(self, value: object) -> _T: if isinstance(value, str): value = T.cast('_T', self.toint(value)) if not isinstance(value, int): @@ -450,7 +456,7 @@ class UserUmaskOption(_UserIntegerBase[T.Union["Literal['preserve']", OctalInt]] return format(self.value, '04o') return self.value - def validate_value(self, value: T.Any) -> T.Union[Literal['preserve'], OctalInt]: + def validate_value(self, value: object) -> T.Union[Literal['preserve'], OctalInt]: if value == 'preserve': return 'preserve' return OctalInt(super().validate_value(value)) @@ -465,7 +471,7 @@ class UserUmaskOption(_UserIntegerBase[T.Union["Literal['preserve']", OctalInt]] @dataclasses.dataclass class UserComboOption(EnumeratedUserOption[str]): 
- def validate_value(self, value: T.Any) -> str: + def validate_value(self, value: object) -> str: if value not in self.choices: if isinstance(value, bool): _type = 'boolean' @@ -503,13 +509,13 @@ class UserArrayOption(UserOption[T.List[_T]]): @dataclasses.dataclass class UserStringArrayOption(UserArrayOption[str]): - def listify(self, value: T.Any) -> T.List[T.Any]: + def listify(self, value: object) -> T.List[str]: try: return listify_array_value(value, self.split_args) except MesonException as e: raise MesonException(f'error in option "{self.name}": {e!s}') - def validate_value(self, value: T.Union[str, T.List[str]]) -> T.List[str]: + def validate_value(self, value: object) -> T.List[str]: newvalue = self.listify(value) if not self.allow_dups and len(set(newvalue)) != len(newvalue): @@ -606,11 +612,14 @@ class UserStdOption(UserComboOption): else: self.choices += gnu_stds_map.keys() - def validate_value(self, value: T.Union[str, T.List[str]]) -> str: + def validate_value(self, value: object) -> str: try: candidates = listify_array_value(value) except MesonException as e: raise MesonException(f'error in option "{self.name}": {e!s}') + for std in candidates: + if not isinstance(std, str): + raise MesonException(f'String array element "{candidates!s}" for option "{self.name}" is not a string.') unknown = ','.join(std for std in candidates if std not in self.all_stds) if unknown: raise MesonException(f'Unknown option "{self.name}" value {unknown}. 
Possible values are {self.all_stds}.') @@ -800,14 +809,13 @@ class OptionStore: self.module_options: T.Set[OptionKey] = set() from .compilers import all_languages self.all_languages = set(all_languages) - self.project_options = set() - self.augments: T.Dict[str, str] = {} + self.augments: OptionDict = {} self.is_cross = is_cross # Pending options are options that need to be initialized later, either # configuration dependent options like compiler options, or options for # a different subproject - self.pending_options: T.Dict[OptionKey, ElementaryOptionValues] = {} + self.pending_options: OptionDict = {} def clear_pending(self) -> None: self.pending_options = {} @@ -829,6 +837,12 @@ class OptionStore: key = key.as_host() return key + def get_pending_value(self, key: T.Union[OptionKey, str], default: T.Optional[ElementaryOptionValues] = None) -> ElementaryOptionValues: + key = self.ensure_and_validate_key(key) + if key in self.options: + return self.options[key].value + return self.pending_options.get(key, default) + def get_value(self, key: T.Union[OptionKey, str]) -> ElementaryOptionValues: return self.get_value_object(key).value @@ -870,9 +884,8 @@ class OptionStore: vobject = self.get_value_object_for(key) computed_value = vobject.value if key.subproject is not None: - keystr = str(key) - if keystr in self.augments: - computed_value = vobject.validate_value(self.augments[keystr]) + if key in self.augments: + computed_value = vobject.validate_value(self.augments[key]) return (vobject, computed_value) def get_value_for(self, name: 'T.Union[OptionKey, str]', subproject: T.Optional[str] = None) -> ElementaryOptionValues: @@ -897,16 +910,16 @@ class OptionStore: if key in self.options: return - self.options[key] = valobj pval = self.pending_options.pop(key, None) if key.subproject: proj_key = key.evolve(subproject=None) self.add_system_option_internal(proj_key, valobj) - if pval is None: - pval = self.options[proj_key].value - - if pval is not None: - 
self.set_option(key, pval) + if pval is not None: + self.augments[key] = pval + else: + self.options[key] = valobj + if pval is not None: + self.set_option(key, pval) def add_compiler_option(self, language: str, key: T.Union[OptionKey, str], valobj: AnyOptionType) -> None: key = self.ensure_and_validate_key(key) @@ -985,6 +998,10 @@ class OptionStore: return value.as_posix() def set_option(self, key: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool: + error_key = key + if error_key.subproject == '': + error_key = error_key.evolve(subproject=None) + if key.name == 'prefix': assert isinstance(new_value, str), 'for mypy' new_value = self.sanitize_prefix(new_value) @@ -996,26 +1013,26 @@ class OptionStore: try: opt = self.get_value_object_for(key) except KeyError: - raise MesonException(f'Unknown options: "{key!s}" not found.') + raise MesonException(f'Unknown option: "{error_key}".') if opt.deprecated is True: - mlog.deprecation(f'Option {key.name!r} is deprecated') + mlog.deprecation(f'Option "{error_key}" is deprecated') elif isinstance(opt.deprecated, list): for v in opt.listify(new_value): if v in opt.deprecated: - mlog.deprecation(f'Option {key.name!r} value {v!r} is deprecated') + mlog.deprecation(f'Option "{error_key}" value {v!r} is deprecated') elif isinstance(opt.deprecated, dict): - def replace(v: T.Any) -> T.Any: + def replace(v: str) -> str: assert isinstance(opt.deprecated, dict) # No, Mypy can not tell this from two lines above newvalue = opt.deprecated.get(v) if newvalue is not None: - mlog.deprecation(f'Option {key.name!r} value {v!r} is replaced by {newvalue!r}') + mlog.deprecation(f'Option "{error_key}" value {v!r} is replaced by {newvalue!r}') return newvalue return v valarr = [replace(v) for v in opt.listify(new_value)] new_value = ','.join(valarr) elif isinstance(opt.deprecated, str): - mlog.deprecation(f'Option {key.name!r} is replaced by {opt.deprecated!r}') + mlog.deprecation(f'Option "{error_key}" is 
replaced by {opt.deprecated!r}') # Change both this aption and the new one pointed to. dirty = self.set_option(key.evolve(name=opt.deprecated), new_value) dirty |= opt.set_value(new_value) @@ -1025,14 +1042,14 @@ class OptionStore: changed = opt.set_value(new_value) if opt.readonly and changed and not first_invocation: - raise MesonException(f'Tried to modify read only option {str(key)!r}') + raise MesonException(f'Tried to modify read only option "{error_key}"') if key.name == 'prefix' and first_invocation and changed: assert isinstance(old_value, str), 'for mypy' assert isinstance(new_value, str), 'for mypy' self.reset_prefixed_options(old_value, new_value) - if changed and key.name == 'buildtype': + if changed and key.name == 'buildtype' and new_value != 'custom': assert isinstance(new_value, str), 'for mypy' optimization, debug = self.DEFAULT_DEPENDENTS[new_value] dkey = key.evolve(name='debug') @@ -1042,15 +1059,30 @@ class OptionStore: return changed - def set_option_from_string(self, keystr: T.Union[OptionKey, str], new_value: str) -> bool: - if isinstance(keystr, OptionKey): - o = keystr - else: - o = OptionKey.from_string(keystr) + def set_option_maybe_root(self, o: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool: + if not self.is_cross and o.is_for_build(): + return False + + # This is complicated by the fact that a string can have two meanings: + # + # default_options: 'foo=bar' + # + # can be either + # + # A) a system option in which case the subproject is None + # B) a project option, in which case the subproject is '' (this method is only called from top level) + # + # The key parsing function can not handle the difference between the two + # and defaults to A. 
if o in self.options: - return self.set_option(o, new_value) - o = o.as_root() - return self.set_option(o, new_value) + return self.set_option(o, new_value, first_invocation) + if self.accept_as_pending_option(o, first_invocation=first_invocation): + old_value = self.pending_options.get(o, None) + self.pending_options[o] = new_value + return old_value is None or str(old_value) == new_value + else: + o = o.as_root() + return self.set_option(o, new_value, first_invocation) def set_from_configure_command(self, D_args: T.List[str], U_args: T.List[str]) -> bool: dirty = False @@ -1058,20 +1090,21 @@ class OptionStore: (global_options, perproject_global_options, project_options) = self.classify_D_arguments(D_args) U_args = [] if U_args is None else U_args for key, valstr in global_options: - dirty |= self.set_option_from_string(key, valstr) + dirty |= self.set_option_maybe_root(key, valstr) for key, valstr in project_options: - dirty |= self.set_option_from_string(key, valstr) - for keystr, valstr in perproject_global_options: - if keystr in self.augments: - if self.augments[keystr] != valstr: - self.augments[keystr] = valstr + dirty |= self.set_option_maybe_root(key, valstr) + for key, valstr in perproject_global_options: + if key in self.augments: + if self.augments[key] != valstr: + self.augments[key] = valstr dirty = True else: - self.augments[keystr] = valstr + self.augments[key] = valstr dirty = True - for delete in U_args: - if delete in self.augments: - del self.augments[delete] + for keystr in U_args: + key = OptionKey.from_string(keystr) + if key in self.augments: + del self.augments[key] dirty = True return dirty @@ -1194,7 +1227,7 @@ class OptionStore: return key in self.module_options def classify_D_arguments(self, D: T.List[str]) -> T.Tuple[T.List[T.Tuple[OptionKey, str]], - T.List[T.Tuple[str, str]], + T.List[T.Tuple[OptionKey, str]], T.List[T.Tuple[OptionKey, str]]]: global_options = [] project_options = [] @@ -1208,49 +1241,28 @@ class OptionStore: elif 
key.subproject is None: global_options.append(valuetuple) else: - # FIXME, augments are currently stored as strings, not OptionKeys - strvaluetuple = (keystr, valstr) - perproject_global_options.append(strvaluetuple) + perproject_global_options.append(valuetuple) return (global_options, perproject_global_options, project_options) - def optlist2optdict(self, optlist: T.List[str]) -> T.Dict[str, str]: - optdict = {} - for p in optlist: - k, v = p.split('=', 1) - optdict[k] = v - return optdict - - def prefix_split_options(self, coll: T.Union[T.List[str], OptionStringLikeDict]) -> T.Tuple[str, T.Union[T.List[str], OptionStringLikeDict]]: + def prefix_split_options(self, coll: OptionDict) -> T.Tuple[T.Optional[str], OptionDict]: prefix = None - if isinstance(coll, list): - others: T.List[str] = [] - for e in coll: - if e.startswith('prefix='): - prefix = e.split('=', 1)[1] - else: - others.append(e) - return (prefix, others) - else: - others_d: OptionStringLikeDict = {} - for k, v in coll.items(): - if isinstance(k, OptionKey) and k.name == 'prefix': - prefix = v - elif k == 'prefix': - prefix = v - else: - others_d[k] = v - return (prefix, others_d) + others_d: OptionDict = {} + for k, v in coll.items(): + if k.name == 'prefix': + if not isinstance(v, str): + raise MesonException('Incorrect type for prefix option (expected string)') + prefix = v + else: + others_d[k] = v + return (prefix, others_d) def first_handle_prefix(self, - project_default_options: T.Union[T.List[str], OptionStringLikeDict], - cmd_line_options: OptionStringLikeDict, - machine_file_options: T.Mapping[OptionKey, ElementaryOptionValues]) \ - -> T.Tuple[T.Union[T.List[str], OptionStringLikeDict], - T.Union[T.List[str], OptionStringLikeDict], - T.MutableMapping[OptionKey, ElementaryOptionValues]]: + project_default_options: OptionDict, + cmd_line_options: OptionDict, + machine_file_options: OptionDict) \ + -> T.Tuple[OptionDict, OptionDict, OptionDict]: # Copy to avoid later mutation - 
nopref_machine_file_options = T.cast( - 'T.MutableMapping[OptionKey, ElementaryOptionValues]', copy.copy(machine_file_options)) + nopref_machine_file_options = copy.copy(machine_file_options) prefix = None (possible_prefix, nopref_project_default_options) = self.prefix_split_options(project_default_options) @@ -1281,157 +1293,110 @@ class OptionStore: self.options[OptionKey('prefix')].set_value(prefix) def initialize_from_top_level_project_call(self, - project_default_options_in: T.Union[T.List[str], OptionStringLikeDict], - cmd_line_options_in: OptionStringLikeDict, - machine_file_options_in: T.Mapping[OptionKey, ElementaryOptionValues]) -> None: - first_invocation = True + project_default_options_in: OptionDict, + cmd_line_options_in: OptionDict, + machine_file_options_in: OptionDict) -> None: (project_default_options, cmd_line_options, machine_file_options) = self.first_handle_prefix(project_default_options_in, cmd_line_options_in, machine_file_options_in) - if isinstance(project_default_options, str): - project_default_options = [project_default_options] - if isinstance(project_default_options, list): - project_default_options = self.optlist2optdict(project_default_options) # type: ignore [assignment] - if project_default_options is None: - project_default_options = {} - assert isinstance(machine_file_options, dict) - for keystr, valstr in machine_file_options.items(): - if isinstance(keystr, str): - # FIXME, standardise on Key or string. - key = OptionKey.from_string(keystr) - else: - key = keystr - # Due to backwards compatibility we ignore all build-machine options - # when building natively. 
- if not self.is_cross and key.is_for_build(): - continue - if key.subproject: - augstr = str(key) - self.augments[augstr] = valstr - elif key in self.options: - self.set_option(key, valstr, first_invocation) - else: - proj_key = key.as_root() - if proj_key in self.options: - self.set_option(proj_key, valstr, first_invocation) - else: - self.pending_options[key] = valstr - assert isinstance(project_default_options, dict) - for keystr, valstr in project_default_options.items(): - # Ths is complicated by the fact that a string can have two meanings: - # - # default_options: 'foo=bar' - # - # can be either - # - # A) a system option in which case the subproject is None - # B) a project option, in which case the subproject is '' (this method is only called from top level) - # - # The key parsing function can not handle the difference between the two - # and defaults to A. - if isinstance(keystr, str): - key = OptionKey.from_string(keystr) - else: - key = keystr + for key, valstr in project_default_options.items(): # Due to backwards compatibility we ignore build-machine options # when building natively. if not self.is_cross and key.is_for_build(): continue if key.subproject: + # do apply project() default_options for subprojects here, because + # they have low priority self.pending_options[key] = valstr - elif key in self.options: - self.set_option(key, valstr, first_invocation) else: - # Setting a project option with default_options. - # Argubly this should be a hard error, the default + # Setting a project option with default_options + # should arguably be a hard error; the default # value of project option should be set in the option # file, not in the project call. 
- proj_key = key.as_root() - if self.is_project_option(proj_key): - self.set_option(proj_key, valstr) - else: - self.pending_options[key] = valstr - assert isinstance(cmd_line_options, dict) - for keystr, valstr in cmd_line_options.items(): - if isinstance(keystr, str): - key = OptionKey.from_string(keystr) - else: - key = keystr + self.set_option_maybe_root(key, valstr, True) + + # ignore subprojects for now for machine file and command line + # options; they are applied later + for key, valstr in machine_file_options.items(): # Due to backwards compatibility we ignore all build-machine options # when building natively. if not self.is_cross and key.is_for_build(): continue - if key.subproject: - self.pending_options[key] = valstr - elif key in self.options: - self.set_option(key, valstr, True) - else: - proj_key = key.as_root() - if proj_key in self.options: - self.set_option(proj_key, valstr, True) - else: - self.pending_options[key] = valstr + if not key.subproject: + self.set_option_maybe_root(key, valstr, True) + for key, valstr in cmd_line_options.items(): + # Due to backwards compatibility we ignore all build-machine options + # when building natively. + if not self.is_cross and key.is_for_build(): + continue + if not key.subproject: + self.set_option_maybe_root(key, valstr, True) - def validate_cmd_line_options(self, cmd_line_options: OptionStringLikeDict) -> None: + def accept_as_pending_option(self, key: OptionKey, known_subprojects: T.Optional[T.Container[str]] = None, + first_invocation: bool = False) -> bool: + # Fail on unknown options that we can know must exist at this point in time. + # Subproject and compiler options are resolved later. + # + # Some base options (sanitizers etc) might get added later. + # Permitting them all is not strictly correct. 
+ if key.subproject: + if known_subprojects is None or key.subproject not in known_subprojects: + return True + if self.is_compiler_option(key): + return True + if first_invocation and self.is_backend_option(key): + return True + return (self.is_base_option(key) and + key.evolve(subproject=None, machine=MachineChoice.HOST) in COMPILER_BASE_OPTIONS) + + def validate_cmd_line_options(self, cmd_line_options: OptionDict) -> None: unknown_options = [] - for keystr, valstr in cmd_line_options.items(): - if isinstance(keystr, str): - key = OptionKey.from_string(keystr) - else: - key = keystr - # Fail on unknown options that we can know must exist at this point in time. - # Subproject and compiler options are resolved later. - # - # Some base options (sanitizers etc) might get added later. - # Permitting them all is not strictly correct. - if key.subproject is None and not self.is_compiler_option(key) and not self.is_base_option(key) and \ - key in self.pending_options: + for key, valstr in cmd_line_options.items(): + if key in self.pending_options and not self.accept_as_pending_option(key): unknown_options.append(f'"{key}"') if unknown_options: keys = ', '.join(unknown_options) raise MesonException(f'Unknown options: {keys}') - def hacky_mchackface_back_to_list(self, optdict: T.Dict[str, str]) -> T.List[str]: - if isinstance(optdict, dict): - return [f'{k}={v}' for k, v in optdict.items()] - return optdict - def initialize_from_subproject_call(self, subproject: str, - spcall_default_options: T.Union[T.List[str], OptionStringLikeDict], - project_default_options: T.Union[T.List[str], OptionStringLikeDict], - cmd_line_options: T.Union[T.List[str], OptionStringLikeDict]) -> None: - is_first_invocation = True - spcall_default_options = self.hacky_mchackface_back_to_list(spcall_default_options) # type: ignore [arg-type] - project_default_options = self.hacky_mchackface_back_to_list(project_default_options) # type: ignore [arg-type] - if isinstance(spcall_default_options, str): 
- spcall_default_options = [spcall_default_options] - for o in itertools.chain(project_default_options, spcall_default_options): - keystr, valstr = o.split('=', 1) - key = OptionKey.from_string(keystr) - assert key.subproject is None - key = key.evolve(subproject=subproject) - # If the key points to a project option, set the value from that. - # Otherwise set an augment. - if key in self.project_options: - self.set_option(key, valstr, is_first_invocation) - else: - self.pending_options.pop(key, None) - aug_str = f'{subproject}:{keystr}' - self.augments[aug_str] = valstr - # Check for pending options - assert isinstance(cmd_line_options, dict) - for key, valstr in cmd_line_options.items(): # type: ignore [assignment] - if not isinstance(key, OptionKey): - key = OptionKey.from_string(key) - if key.subproject != subproject: - continue + spcall_default_options: OptionDict, + project_default_options: OptionDict, + cmd_line_options: OptionDict, + machine_file_options: OptionDict) -> None: + # pick up pending per-project settings from the toplevel project() invocation + options = {k: v for k, v in self.pending_options.items() if k.subproject == subproject} + + # apply project() and subproject() default_options + for key, valstr in itertools.chain(project_default_options.items(), spcall_default_options.items()): + if key.subproject is None: + key = key.evolve(subproject=subproject) + elif key.subproject == subproject: + without_subp = key.evolve(subproject=None) + raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"') + options[key] = valstr + + # then global settings from machine file and command line + for key, valstr in itertools.chain(machine_file_options.items(), cmd_line_options.items()): + if key.subproject is None: + subp_key = key.evolve(subproject=subproject) + self.pending_options.pop(subp_key, None) + options.pop(subp_key, None) + + # then finally per project augments from machine file and command line + 
for key, valstr in itertools.chain(machine_file_options.items(), cmd_line_options.items()): + if key.subproject == subproject: + options[key] = valstr + + # merge everything that has been computed above, while giving self.augments priority + for key, valstr in options.items(): self.pending_options.pop(key, None) - if key in self.options: - self.set_option(key, valstr, is_first_invocation) + valstr = self.augments.pop(key, valstr) + if key in self.project_options: + self.set_option(key, valstr, True) else: - self.augments[str(key)] = valstr + self.augments[key] = valstr def update_project_options(self, project_options: MutableKeyedOptionDictType, subproject: SubProject) -> None: for key, value in project_options.items(): diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 919bd38..4c2fb11 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -10,23 +10,29 @@ from __future__ import annotations from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstPrinter -from mesonbuild.mesonlib import MesonException, setup_vsenv +from .ast.interpreter import IntrospectionBuildTarget, IntrospectionDependency, _symbol +from .interpreterbase import UnknownValue, TV_func +from .interpreterbase.helpers import flatten +from mesonbuild.mesonlib import MesonException, setup_vsenv, relpath from . 
import mlog, environment from functools import wraps -from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, StringNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, SymbolNode -import json, os, re, sys +from .mparser import Token, ArrayNode, ArgumentNode, ArithmeticNode, AssignmentNode, BaseNode, StringNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, PlusAssignmentNode +from .mintro import IntrospectionEncoder +import json, os, re, sys, codecs import typing as T +from pathlib import Path if T.TYPE_CHECKING: - from argparse import ArgumentParser, HelpFormatter - from .mparser import BaseNode + import argparse + from argparse import ArgumentParser, _FormatterClass + from .mlog import AnsiDecorator class RewriterException(MesonException): pass # Note: when adding arguments, please also add them to the completion # scripts in $MESONSRC/data/shell-completions/ -def add_arguments(parser: ArgumentParser, formatter: T.Callable[[str], HelpFormatter]) -> None: +def add_arguments(parser: ArgumentParser, formatter: _FormatterClass) -> None: parser.add_argument('-s', '--sourcedir', type=str, default='.', metavar='SRCDIR', help='Path to source directory.') parser.add_argument('-V', '--verbose', action='store_true', default=False, help='Enable verbose output') parser.add_argument('-S', '--skip-errors', dest='skip', action='store_true', default=False, help='Skip errors instead of aborting') @@ -62,12 +68,14 @@ def add_arguments(parser: ArgumentParser, formatter: T.Callable[[str], HelpForma cmd_parser.add_argument('json', help='JSON string or file to execute') class RequiredKeys: - def __init__(self, keys): + keys: T.Dict[str, T.Any] + + def __init__(self, keys: T.Dict[str, T.Any]): self.keys = keys - def __call__(self, f): + def __call__(self, f: TV_func) -> TV_func: @wraps(f) - def wrapped(*wrapped_args, **wrapped_kwargs): + def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any: assert len(wrapped_args) >= 2 cmd = wrapped_args[1] for 
key, val in self.keys.items(): @@ -90,12 +98,11 @@ class RequiredKeys: .format(key, choices, cmd[key])) return f(*wrapped_args, **wrapped_kwargs) - return wrapped - -def _symbol(val: str) -> SymbolNode: - return SymbolNode(Token('', '', 0, 0, 0, (0, 0), val)) + return T.cast('TV_func', wrapped) class MTypeBase: + node: BaseNode + def __init__(self, node: T.Optional[BaseNode] = None): if node is None: self.node = self.new_node() @@ -107,30 +114,30 @@ class MTypeBase: self.node_type = i @classmethod - def new_node(cls, value=None): + def new_node(cls, value: T.Any = None) -> BaseNode: # Overwrite in derived class raise RewriterException('Internal error: new_node of MTypeBase was called') @classmethod - def supported_nodes(cls): + def supported_nodes(cls) -> T.List[type]: # Overwrite in derived class return [] - def can_modify(self): + def can_modify(self) -> bool: return self.node_type is not None - def get_node(self): + def get_node(self) -> BaseNode: return self.node - def add_value(self, value): + def add_value(self, value: T.Any) -> None: # Overwrite in derived class mlog.warning('Cannot add a value of type', mlog.bold(type(self).__name__), '--> skipping') - def remove_value(self, value): + def remove_value(self, value: T.Any) -> None: # Overwrite in derived class mlog.warning('Cannot remove a value of type', mlog.bold(type(self).__name__), '--> skipping') - def remove_regex(self, value): + def remove_regex(self, value: T.Any) -> None: # Overwrite in derived class mlog.warning('Cannot remove a regex in type', mlog.bold(type(self).__name__), '--> skipping') @@ -139,13 +146,13 @@ class MTypeStr(MTypeBase): super().__init__(node) @classmethod - def new_node(cls, value=None): + def new_node(cls, value: T.Optional[str] = None) -> BaseNode: if value is None: value = '' return StringNode(Token('string', '', 0, 0, 0, None, str(value))) @classmethod - def supported_nodes(cls): + def supported_nodes(cls) -> T.List[type]: return [StringNode] class MTypeBool(MTypeBase): @@ 
-153,11 +160,11 @@ class MTypeBool(MTypeBase): super().__init__(node) @classmethod - def new_node(cls, value=None): + def new_node(cls, value: T.Optional[str] = None) -> BaseNode: return BooleanNode(Token('', '', 0, 0, 0, None, bool(value))) @classmethod - def supported_nodes(cls): + def supported_nodes(cls) -> T.List[type]: return [BooleanNode] class MTypeID(MTypeBase): @@ -165,21 +172,23 @@ class MTypeID(MTypeBase): super().__init__(node) @classmethod - def new_node(cls, value=None): + def new_node(cls, value: T.Optional[str] = None) -> BaseNode: if value is None: value = '' return IdNode(Token('', '', 0, 0, 0, None, str(value))) @classmethod - def supported_nodes(cls): + def supported_nodes(cls) -> T.List[type]: return [IdNode] class MTypeList(MTypeBase): + node: ArrayNode + def __init__(self, node: T.Optional[BaseNode] = None): super().__init__(node) @classmethod - def new_node(cls, value=None): + def new_node(cls, value: T.Optional[T.List[T.Any]] = None) -> ArrayNode: if value is None: value = [] elif not isinstance(value, list): @@ -189,50 +198,52 @@ class MTypeList(MTypeBase): return ArrayNode(_symbol('['), args, _symbol(']')) @classmethod - def _new_element_node(cls, value): + def _new_element_node(cls, value: T.Any) -> BaseNode: # Overwrite in derived class raise RewriterException('Internal error: _new_element_node of MTypeList was called') - def _ensure_array_node(self): + def _ensure_array_node(self) -> None: if not isinstance(self.node, ArrayNode): tmp = self.node self.node = self.new_node() self.node.args.arguments = [tmp] @staticmethod - def _check_is_equal(node, value) -> bool: + def _check_is_equal(node: BaseNode, value: str) -> bool: # Overwrite in derived class return False @staticmethod - def _check_regex_matches(node, regex: str) -> bool: + def _check_regex_matches(node: BaseNode, regex: str) -> bool: # Overwrite in derived class return False - def get_node(self): + def get_node(self) -> BaseNode: if isinstance(self.node, ArrayNode): if 
len(self.node.args.arguments) == 1: return self.node.args.arguments[0] return self.node @classmethod - def supported_element_nodes(cls): + def supported_element_nodes(cls) -> T.List[T.Type]: # Overwrite in derived class return [] @classmethod - def supported_nodes(cls): + def supported_nodes(cls) -> T.List[T.Type]: return [ArrayNode] + cls.supported_element_nodes() - def add_value(self, value): + def add_value(self, value: T.Any) -> None: if not isinstance(value, list): value = [value] self._ensure_array_node() for i in value: + assert hasattr(self.node, 'args') # For mypy + assert isinstance(self.node.args, ArgumentNode) # For mypy self.node.args.arguments += [self._new_element_node(i)] - def _remove_helper(self, value, equal_func): - def check_remove_node(node): + def _remove_helper(self, value: T.Any, equal_func: T.Callable[[T.Any, T.Any], bool]) -> None: + def check_remove_node(node: BaseNode) -> bool: for j in value: if equal_func(i, j): return True @@ -241,16 +252,18 @@ class MTypeList(MTypeBase): if not isinstance(value, list): value = [value] self._ensure_array_node() + assert hasattr(self.node, 'args') # For mypy + assert isinstance(self.node.args, ArgumentNode) # For mypy removed_list = [] for i in self.node.args.arguments: if not check_remove_node(i): removed_list += [i] self.node.args.arguments = removed_list - def remove_value(self, value): + def remove_value(self, value: T.Any) -> None: self._remove_helper(value, self._check_is_equal) - def remove_regex(self, regex: str): + def remove_regex(self, regex: str) -> None: self._remove_helper(regex, self._check_regex_matches) class MTypeStrList(MTypeList): @@ -258,23 +271,23 @@ class MTypeStrList(MTypeList): super().__init__(node) @classmethod - def _new_element_node(cls, value): + def _new_element_node(cls, value: str) -> StringNode: return StringNode(Token('string', '', 0, 0, 0, None, str(value))) @staticmethod - def _check_is_equal(node, value) -> bool: + def _check_is_equal(node: BaseNode, value: str) 
-> bool: if isinstance(node, StringNode): - return node.value == value + return bool(node.value == value) return False @staticmethod - def _check_regex_matches(node, regex: str) -> bool: + def _check_regex_matches(node: BaseNode, regex: str) -> bool: if isinstance(node, StringNode): return re.match(regex, node.value) is not None return False @classmethod - def supported_element_nodes(cls): + def supported_element_nodes(cls) -> T.List[T.Type]: return [StringNode] class MTypeIDList(MTypeList): @@ -282,26 +295,26 @@ class MTypeIDList(MTypeList): super().__init__(node) @classmethod - def _new_element_node(cls, value): + def _new_element_node(cls, value: str) -> IdNode: return IdNode(Token('', '', 0, 0, 0, None, str(value))) @staticmethod - def _check_is_equal(node, value) -> bool: + def _check_is_equal(node: BaseNode, value: str) -> bool: if isinstance(node, IdNode): - return node.value == value + return bool(node.value == value) return False @staticmethod - def _check_regex_matches(node, regex: str) -> bool: + def _check_regex_matches(node: BaseNode, regex: str) -> bool: if isinstance(node, StringNode): return re.match(regex, node.value) is not None return False @classmethod - def supported_element_nodes(cls): + def supported_element_nodes(cls) -> T.List[T.Type]: return [IdNode] -rewriter_keys = { +rewriter_keys: T.Dict[str, T.Dict[str, T.Any]] = { 'default_options': { 'operation': (str, None, ['set', 'delete']), 'options': (dict, {}, None) @@ -355,13 +368,15 @@ rewriter_func_kwargs = { } class Rewriter: + info_dump: T.Optional[T.Dict[str, T.Dict[str, T.Any]]] + def __init__(self, sourcedir: str, generator: str = 'ninja', skip_errors: bool = False): self.sourcedir = sourcedir self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()]) self.skip_errors = skip_errors - self.modified_nodes = [] - self.to_remove_nodes = [] - self.to_add_nodes = [] + self.modified_nodes: 
T.List[BaseNode] = [] + self.to_remove_nodes: T.List[BaseNode] = [] + self.to_add_nodes: T.List[BaseNode] = [] self.functions = { 'default_options': self.process_default_options, 'kwargs': self.process_kwargs, @@ -369,89 +384,99 @@ class Rewriter: } self.info_dump = None - def analyze_meson(self): + def analyze_meson(self) -> None: mlog.log('Analyzing meson file:', mlog.bold(os.path.join(self.sourcedir, environment.build_filename))) self.interpreter.analyze() mlog.log(' -- Project:', mlog.bold(self.interpreter.project_data['descriptive_name'])) mlog.log(' -- Version:', mlog.cyan(self.interpreter.project_data['version'])) - def add_info(self, cmd_type: str, cmd_id: str, data: dict): + def add_info(self, cmd_type: str, cmd_id: str, data: dict) -> None: if self.info_dump is None: self.info_dump = {} if cmd_type not in self.info_dump: self.info_dump[cmd_type] = {} self.info_dump[cmd_type][cmd_id] = data - def print_info(self): + def print_info(self) -> None: if self.info_dump is None: return - sys.stdout.write(json.dumps(self.info_dump, indent=2)) + sys.stdout.write(json.dumps(self.info_dump, indent=2, cls=IntrospectionEncoder)) - def on_error(self): + def on_error(self) -> T.Tuple[AnsiDecorator, AnsiDecorator]: if self.skip_errors: return mlog.cyan('-->'), mlog.yellow('skipping') return mlog.cyan('-->'), mlog.red('aborting') - def handle_error(self): + def handle_error(self) -> None: if self.skip_errors: return None raise MesonException('Rewriting the meson.build failed') - def find_target(self, target: str): - def check_list(name: str) -> T.List[BaseNode]: - result = [] - for i in self.interpreter.targets: - if name in {i['name'], i['id']}: - result += [i] - return result - - targets = check_list(target) - if targets: - if len(targets) == 1: - return targets[0] - else: - mlog.error('There are multiple targets matching', mlog.bold(target)) - for i in targets: - mlog.error(' -- Target name', mlog.bold(i['name']), 'with ID', mlog.bold(i['id'])) - mlog.error('Please try 
again with the unique ID of the target', *self.on_error()) - self.handle_error() - return None - - # Check the assignments - tgt = None - if target in self.interpreter.assignments: - node = self.interpreter.assignments[target] - if isinstance(node, FunctionNode): - if node.func_name.value in {'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries'}: - tgt = self.interpreter.assign_vals[target] - - return tgt - - def find_dependency(self, dependency: str): - def check_list(name: str): - for i in self.interpreter.dependencies: - if name == i['name']: - return i + def all_assignments(self, varname: str) -> T.List[BaseNode]: + assigned_values = [] + for ass in self.interpreter.all_assignment_nodes[varname]: + if isinstance(ass, PlusAssignmentNode): + continue + assert isinstance(ass, AssignmentNode) + assigned_values.append(ass.value) + return assigned_values + + def find_target(self, target: str) -> T.Optional[IntrospectionBuildTarget]: + for i in self.interpreter.targets: + if target == i.id: + return i + + potential_tgts = [] + for i in self.interpreter.targets: + if target == i.name: + potential_tgts.append(i) + + if not potential_tgts: + potenial_tgts_1 = self.all_assignments(target) + potenial_tgts_1 = [self.interpreter.node_to_runtime_value(el) for el in potenial_tgts_1] + potential_tgts = [el for el in potenial_tgts_1 if isinstance(el, IntrospectionBuildTarget)] + + if not potential_tgts: return None + elif len(potential_tgts) == 1: + return potential_tgts[0] + else: + mlog.error('There are multiple targets matching', mlog.bold(target)) + for i in potential_tgts: + mlog.error(' -- Target name', mlog.bold(i.name), 'with ID', mlog.bold(i.id)) + mlog.error('Please try again with the unique ID of the target', *self.on_error()) + self.handle_error() + return None + + def find_dependency(self, dependency: str) -> T.Optional[IntrospectionDependency]: + potential_deps = [] + for i in self.interpreter.dependencies: + if 
i.name == dependency: + potential_deps.append(i) - dep = check_list(dependency) - if dep is not None: - return dep + checking_varnames = len(potential_deps) == 0 - # Check the assignments - if dependency in self.interpreter.assignments: - node = self.interpreter.assignments[dependency] - if isinstance(node, FunctionNode): - if node.func_name.value == 'dependency': - name = self.interpreter.flatten_args(node.args)[0] - dep = check_list(name) + if checking_varnames: + potential_deps1 = self.all_assignments(dependency) + potential_deps = [self.interpreter.node_to_runtime_value(el) for el in potential_deps1 if isinstance(el, FunctionNode) and el.func_name.value == 'dependency'] - return dep + if not potential_deps: + return None + elif len(potential_deps) == 1: + return potential_deps[0] + else: + mlog.error('There are multiple dependencies matching', mlog.bold(dependency)) + for i in potential_deps: + mlog.error(' -- Dependency name', i) + if checking_varnames: + mlog.error('Please try again with the name of the dependency', *self.on_error()) + self.handle_error() + return None @RequiredKeys(rewriter_keys['default_options']) - def process_default_options(self, cmd): + def process_default_options(self, cmd: T.Dict[str, T.Any]) -> None: # First, remove the old values - kwargs_cmd = { + kwargs_cmd: T.Dict[str, T.Any] = { 'function': 'project', 'id': "/", 'operation': 'remove_regex', @@ -495,7 +520,7 @@ class Rewriter: self.process_kwargs(kwargs_cmd) @RequiredKeys(rewriter_keys['kwargs']) - def process_kwargs(self, cmd): + def process_kwargs(self, cmd: T.Dict[str, T.Any]) -> None: mlog.log('Processing function type', mlog.bold(cmd['function']), 'with id', mlog.cyan("'" + cmd['id'] + "'")) if cmd['function'] not in rewriter_func_kwargs: mlog.error('Unknown function type', cmd['function'], *self.on_error()) @@ -516,26 +541,26 @@ class Rewriter: node = self.interpreter.project_node arg_node = node.args elif cmd['function'] == 'target': - tmp = self.find_target(cmd['id']) - 
if tmp: - node = tmp['node'] + tmp_tgt = self.find_target(cmd['id']) + if tmp_tgt: + node = tmp_tgt.node arg_node = node.args elif cmd['function'] == 'dependency': - tmp = self.find_dependency(cmd['id']) - if tmp: - node = tmp['node'] + tmp_dep = self.find_dependency(cmd['id']) + if tmp_dep: + node = tmp_dep.node arg_node = node.args if not node: mlog.error('Unable to find the function node') assert isinstance(node, FunctionNode) assert isinstance(arg_node, ArgumentNode) # Transform the key nodes to plain strings - arg_node.kwargs = {k.value: v for k, v in arg_node.kwargs.items()} + kwargs = {T.cast(IdNode, k).value: v for k, v in arg_node.kwargs.items()} # Print kwargs info if cmd['operation'] == 'info': - info_data = {} - for key, val in sorted(arg_node.kwargs.items()): + info_data: T.Dict[str, T.Any] = {} + for key, val in sorted(kwargs.items()): info_data[key] = None if isinstance(val, ElementaryNode): info_data[key] = val.value @@ -561,21 +586,21 @@ class Rewriter: if cmd['operation'] == 'delete': # Remove the key from the kwargs - if key not in arg_node.kwargs: + if key not in kwargs: mlog.log(' -- Key', mlog.bold(key), 'is already deleted') continue mlog.log(' -- Deleting', mlog.bold(key), 'from the kwargs') - del arg_node.kwargs[key] + del kwargs[key] elif cmd['operation'] == 'set': # Replace the key from the kwargs mlog.log(' -- Setting', mlog.bold(key), 'to', mlog.yellow(str(val))) - arg_node.kwargs[key] = kwargs_def[key].new_node(val) + kwargs[key] = kwargs_def[key].new_node(val) else: # Modify the value from the kwargs - if key not in arg_node.kwargs: - arg_node.kwargs[key] = None - modifier = kwargs_def[key](arg_node.kwargs[key]) + if key not in kwargs: + kwargs[key] = None + modifier = kwargs_def[key](kwargs[key]) if not modifier.can_modify(): mlog.log(' -- Skipping', mlog.bold(key), 'because it is too complex to modify') continue @@ -593,24 +618,251 @@ class Rewriter: modifier.remove_regex(val) # Write back the result - arg_node.kwargs[key] = 
modifier.get_node() + kwargs[key] = modifier.get_node() num_changed += 1 # Convert the keys back to IdNode's - arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in arg_node.kwargs.items()} + arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in kwargs.items()} for k, v in arg_node.kwargs.items(): k.level = v.level if num_changed > 0 and node not in self.modified_nodes: self.modified_nodes += [node] - def find_assignment_node(self, node: BaseNode) -> AssignmentNode: - if node.ast_id and node.ast_id in self.interpreter.reverse_assignment: - return self.interpreter.reverse_assignment[node.ast_id] + def find_assignment_node(self, node: BaseNode) -> T.Optional[AssignmentNode]: + for k, v in self.interpreter.all_assignment_nodes.items(): + for ass in v: + if ass.value == node: + return ass return None + def affects_no_other_targets(self, candidate: BaseNode) -> bool: + affected = self.interpreter.dataflow_dag.reachable({candidate}, False) + affected_targets = [x for x in affected if isinstance(x, FunctionNode) and x.func_name.value in BUILD_TARGET_FUNCTIONS] + return len(affected_targets) == 1 + + def get_relto(self, target_node: BaseNode, node: BaseNode) -> Path: + cwd = Path(os.getcwd()) + all_paths = self.interpreter.dataflow_dag.find_all_paths(node, target_node) + # len(all_paths) == 0 would imply that data does not flow from node to + # target_node. This would imply that adding sources to node would not + # add the source to the target. 
+ assert all_paths + if len(all_paths) > 1: + return None + return (cwd / next(x for x in all_paths[0] if isinstance(x, FunctionNode)).filename).parent + + def add_src_or_extra(self, op: str, target: IntrospectionBuildTarget, newfiles: T.List[str], to_sort_nodes: T.List[T.Union[FunctionNode, ArrayNode]]) -> None: + assert op in {'src_add', 'extra_files_add'} + + if op == 'src_add': + old: T.Set[T.Union[BaseNode, UnknownValue]] = set(target.source_nodes) + elif op == 'extra_files_add': + if target.extra_files is None: + old = set() + else: + old = {target.extra_files} + tgt_function: FunctionNode = target.node + + cwd = Path(os.getcwd()) + target_dir_abs = cwd / os.path.dirname(target.node.filename) + source_root_abs = cwd / self.interpreter.source_root + + candidates1 = self.interpreter.dataflow_dag.reachable(old, True) + # A node is a member of the set `candidates1` exactly if data from this node + # flow into one of the `dest` nodes. We assume that this implies that if we + # add `foo.c` to this node, then 'foo.c' will be added to one of these + # nodes. This assumption is not always true: + # ar = ['a.c', 'b.c'] + # srcs = ar[1] + # executable('name', srcs) + # Data flows from `ar` to `srcs`, but if we add 'foo.c': + # ar = ['a.c', 'b.c', 'foo.c'] + # srcs = ar[1] + # executable('name', srcs) + # this does not add 'foo.c' to `srcs`. This is a known bug/limitation of + # the meson rewriter that could be fixed by replacing `reachable` with a + # more advanced analysis. But this is a lot of work and I think e.g. + # `srcs = ar[1]` is rare in real-world projects, so I will just leave + # this for now. 
+ + candidates2 = {x for x in candidates1 if isinstance(x, (FunctionNode, ArrayNode))} + + # If we have this meson.build file: + # shared = ['shared.c'] + # executable('foo', shared + ['foo.c']) + # executable('bar', shared + ['bar.c']) + # and we are tasked with adding 'new.c' to 'foo', we should do e.g this: + # shared = ['shared.c'] + # executable('foo', shared + ['foo.c', 'new.c']) + # executable('bar', shared + ['bar.c']) + # but never this: + # shared = ['shared.c', 'new.c'] + # executable('foo', shared + ['foo.c']) + # executable('bar', shared + ['bar.c']) + # We do this by removing the `['shared.c']`-node from `candidates2`. + candidates2 = {x for x in candidates2 if self.affects_no_other_targets(x)} + + def path_contains_unknowns(candidate: BaseNode) -> bool: + all_paths = self.interpreter.dataflow_dag.find_all_paths(candidate, target.node) + for path in all_paths: + for el in path: + if isinstance(el, UnknownValue): + return True + return False + + candidates2 = {x for x in candidates2 if not path_contains_unknowns(x)} + + candidates2 = {x for x in candidates2 if self.get_relto(target.node, x) is not None} + + chosen: T.Union[FunctionNode, ArrayNode] = None + new_kwarg_flag = False + if len(candidates2) > 0: + # So that files(['a', 'b']) gets modified to files(['a', 'b', 'c']) instead of files(['a', 'b'], 'c') + if len({x for x in candidates2 if isinstance(x, ArrayNode)}) > 0: + candidates2 = {x for x in candidates2 if isinstance(x, ArrayNode)} + + # We choose one more or less arbitrary candidate + chosen = min(candidates2, key=lambda x: (x.lineno, x.colno)) + elif op == 'src_add': + chosen = target.node + elif op == 'extra_files_add': + chosen = ArrayNode(_symbol('['), ArgumentNode(Token('', tgt_function.filename, 0, 0, 0, None, '[]')), _symbol(']')) + + # this is fundamentally error prone + self.interpreter.dataflow_dag.add_edge(chosen, target.node) + + extra_files_idnode = IdNode(Token('string', tgt_function.filename, 0, 0, 0, None, 'extra_files')) + 
if tgt_function not in self.modified_nodes: + self.modified_nodes += [tgt_function] + new_extra_files_node: BaseNode + if target.node.args.get_kwarg_or_default('extra_files', None) is None: + # Target has no extra_files kwarg, create one + new_kwarg_flag = True + new_extra_files_node = chosen + else: + new_kwarg_flag = True + old_extra_files = target.node.args.get_kwarg_or_default('extra_files', None) + target.node.args.kwargs = {k: v for k, v in target.node.args.kwargs.items() if not (isinstance(k, IdNode) and k.value == 'extra_files')} + new_extra_files_node = ArithmeticNode('add', old_extra_files, _symbol('+'), chosen) + + tgt_function.args.kwargs[extra_files_idnode] = new_extra_files_node + + newfiles_relto = self.get_relto(target.node, chosen) + old_src_list: T.List[T.Any] = flatten([self.interpreter.node_to_runtime_value(sn) for sn in old]) + + if op == 'src_add': + name = 'Source' + elif op == 'extra_files_add': + name = 'Extra file' + # Generate the new String nodes + to_append = [] + added = [] + + old_src_list = [(target_dir_abs / x).resolve() if isinstance(x, str) else x.to_abs_path(source_root_abs) for x in old_src_list if not isinstance(x, UnknownValue)] + for _newf in sorted(set(newfiles)): + newf = Path(_newf) + if os.path.isabs(newf): + newf = Path(newf) + else: + newf = source_root_abs / newf + if newf in old_src_list: + mlog.log(' -- ', name, mlog.green(str(newf)), 'is already defined for the target --> skipping') + continue + + mlog.log(' -- Adding ', name.lower(), mlog.green(str(newf)), 'at', + mlog.yellow(f'{chosen.filename}:{chosen.lineno}')) + added.append(newf) + mocktarget = self.interpreter.funcvals[target.node] + assert isinstance(mocktarget, IntrospectionBuildTarget) + # print("adding ", str(newf), 'to', mocktarget.name) todo: should we write something to stderr? 
+ + path = relpath(newf, newfiles_relto) + path = codecs.encode(path, 'unicode_escape').decode() # Because the StringNode constructor does the inverse + token = Token('string', chosen.filename, 0, 0, 0, None, path) + to_append += [StringNode(token)] + + assert isinstance(chosen, (FunctionNode, ArrayNode)) + arg_node = chosen.args + # Append to the AST at the right place + arg_node.arguments += to_append + + # Mark the node as modified + if chosen not in to_sort_nodes: + to_sort_nodes += [chosen] + # If the extra_files array is newly created, i.e. if new_kwarg_flag is + # True, don't mark it as its parent function node already is, otherwise + # this would cause double modification. + if chosen not in self.modified_nodes and not new_kwarg_flag: + self.modified_nodes += [chosen] + + # Utility function to get a list of the sources from a node + def arg_list_from_node(self, n: BaseNode) -> T.List[BaseNode]: + args = [] + if isinstance(n, FunctionNode): + args = list(n.args.arguments) + if n.func_name.value in BUILD_TARGET_FUNCTIONS: + args.pop(0) + elif isinstance(n, ArrayNode): + args = n.args.arguments + elif isinstance(n, ArgumentNode): + args = n.arguments + return args + + def rm_src_or_extra(self, op: str, target: IntrospectionBuildTarget, to_be_removed: T.List[str], to_sort_nodes: T.List[T.Union[FunctionNode, ArrayNode]]) -> None: + assert op in {'src_rm', 'extra_files_rm'} + cwd = Path(os.getcwd()) + source_root_abs = cwd / self.interpreter.source_root + + # Helper to find the exact string node and its parent + def find_node(src: str) -> T.Tuple[T.Optional[BaseNode], T.Optional[StringNode]]: + if op == 'src_rm': + nodes = self.interpreter.dataflow_dag.reachable(set(target.source_nodes), True).union({target.node}) + elif op == 'extra_files_rm': + nodes = self.interpreter.dataflow_dag.reachable({target.extra_files}, True) + for i in nodes: + if isinstance(i, UnknownValue): + continue + relto = self.get_relto(target.node, i) + if relto is not None: + for j in 
self.arg_list_from_node(i): + if isinstance(j, StringNode): + if os.path.normpath(relto / j.value) == os.path.normpath(source_root_abs / src): + return i, j + return None, None + + if op == 'src_rm': + name = 'source' + elif op == 'extra_files_rm': + name = 'extra file' + + for i in to_be_removed: + # Try to find the node with the source string + root, string_node = find_node(i) + if root is None: + mlog.warning(' -- Unable to find', name, mlog.green(i), 'in the target') + continue + if not self.affects_no_other_targets(string_node): + mlog.warning(' -- Removing the', name, mlog.green(i), 'is too compilicated') + continue + + if not isinstance(root, (FunctionNode, ArrayNode)): + raise NotImplementedError # I'm lazy + + # Remove the found string node from the argument list + arg_node = root.args + mlog.log(' -- Removing', name, mlog.green(i), 'from', + mlog.yellow(f'{string_node.filename}:{string_node.lineno}')) + arg_node.arguments.remove(string_node) + + # Mark the node as modified + if root not in to_sort_nodes: + to_sort_nodes += [root] + if root not in self.modified_nodes: + self.modified_nodes += [root] + @RequiredKeys(rewriter_keys['target']) - def process_target(self, cmd): + def process_target(self, cmd: T.Dict[str, T.Any]) -> None: mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation'])) target = self.find_target(cmd['target']) if target is None and cmd['operation'] != 'target_add': @@ -619,7 +871,7 @@ class Rewriter: # Make source paths relative to the current subdir def rel_source(src: str) -> str: - subdir = os.path.abspath(os.path.join(self.sourcedir, target['subdir'])) + subdir = os.path.abspath(os.path.join(self.sourcedir, target.subdir)) if os.path.isabs(src): return os.path.relpath(src, subdir) elif not os.path.exists(src): @@ -630,180 +882,13 @@ class Rewriter: if target is not None: cmd['sources'] = [rel_source(x) for x in cmd['sources']] - # Utility function to get a list of the sources from a node - def 
arg_list_from_node(n): - args = [] - if isinstance(n, FunctionNode): - args = list(n.args.arguments) - if n.func_name.value in BUILD_TARGET_FUNCTIONS: - args.pop(0) - elif isinstance(n, ArrayNode): - args = n.args.arguments - elif isinstance(n, ArgumentNode): - args = n.arguments - return args - - to_sort_nodes = [] - - if cmd['operation'] == 'src_add': - node = None - if target['sources']: - node = target['sources'][0] - else: - node = target['node'] - assert node is not None - - # Generate the current source list - src_list = [] - for i in target['sources']: - for j in arg_list_from_node(i): - if isinstance(j, StringNode): - src_list += [j.value] - - # Generate the new String nodes - to_append = [] - for i in sorted(set(cmd['sources'])): - if i in src_list: - mlog.log(' -- Source', mlog.green(i), 'is already defined for the target --> skipping') - continue - mlog.log(' -- Adding source', mlog.green(i), 'at', - mlog.yellow(f'{node.filename}:{node.lineno}')) - token = Token('string', node.filename, 0, 0, 0, None, i) - to_append += [StringNode(token)] - - # Append to the AST at the right place - arg_node = None - if isinstance(node, (FunctionNode, ArrayNode)): - arg_node = node.args - elif isinstance(node, ArgumentNode): - arg_node = node - assert arg_node is not None - arg_node.arguments += to_append - - # Mark the node as modified - if arg_node not in to_sort_nodes and not isinstance(node, FunctionNode): - to_sort_nodes += [arg_node] - if node not in self.modified_nodes: - self.modified_nodes += [node] - - elif cmd['operation'] == 'src_rm': - # Helper to find the exact string node and its parent - def find_node(src): - for i in target['sources']: - for j in arg_list_from_node(i): - if isinstance(j, StringNode): - if j.value == src: - return i, j - return None, None - - for i in cmd['sources']: - # Try to find the node with the source string - root, string_node = find_node(i) - if root is None: - mlog.warning(' -- Unable to find source', mlog.green(i), 'in the 
target') - continue - - # Remove the found string node from the argument list - arg_node = None - if isinstance(root, (FunctionNode, ArrayNode)): - arg_node = root.args - elif isinstance(root, ArgumentNode): - arg_node = root - assert arg_node is not None - mlog.log(' -- Removing source', mlog.green(i), 'from', - mlog.yellow(f'{string_node.filename}:{string_node.lineno}')) - arg_node.arguments.remove(string_node) - - # Mark the node as modified - if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode): - to_sort_nodes += [arg_node] - if root not in self.modified_nodes: - self.modified_nodes += [root] - - elif cmd['operation'] == 'extra_files_add': - tgt_function: FunctionNode = target['node'] - mark_array = True - try: - node = target['extra_files'][0] - except IndexError: - # Specifying `extra_files` with a list that flattens to empty gives an empty - # target['extra_files'] list, account for that. - try: - extra_files_key = next(k for k in tgt_function.args.kwargs.keys() if isinstance(k, IdNode) and k.value == 'extra_files') - node = tgt_function.args.kwargs[extra_files_key] - except StopIteration: - # Target has no extra_files kwarg, create one - node = ArrayNode(_symbol('['), ArgumentNode(Token('', tgt_function.filename, 0, 0, 0, None, '[]')), _symbol(']')) - tgt_function.args.kwargs[IdNode(Token('string', tgt_function.filename, 0, 0, 0, None, 'extra_files'))] = node - mark_array = False - if tgt_function not in self.modified_nodes: - self.modified_nodes += [tgt_function] - target['extra_files'] = [node] - if isinstance(node, IdNode): - node = self.interpreter.assignments[node.value] - target['extra_files'] = [node] - if not isinstance(node, ArrayNode): - mlog.error('Target', mlog.bold(cmd['target']), 'extra_files argument must be a list', *self.on_error()) - return self.handle_error() - - # Generate the current extra files list - extra_files_list = [] - for i in target['extra_files']: - for j in arg_list_from_node(i): - if isinstance(j, 
StringNode): - extra_files_list += [j.value] - - # Generate the new String nodes - to_append = [] - for i in sorted(set(cmd['sources'])): - if i in extra_files_list: - mlog.log(' -- Extra file', mlog.green(i), 'is already defined for the target --> skipping') - continue - mlog.log(' -- Adding extra file', mlog.green(i), 'at', - mlog.yellow(f'{node.filename}:{node.lineno}')) - token = Token('string', node.filename, 0, 0, 0, None, i) - to_append += [StringNode(token)] - - # Append to the AST at the right place - arg_node = node.args - arg_node.arguments += to_append - - # Mark the node as modified - if arg_node not in to_sort_nodes: - to_sort_nodes += [arg_node] - # If the extra_files array is newly created, don't mark it as its parent function node already is, - # otherwise this would cause double modification. - if mark_array and node not in self.modified_nodes: - self.modified_nodes += [node] - - elif cmd['operation'] == 'extra_files_rm': - # Helper to find the exact string node and its parent - def find_node(src): - for i in target['extra_files']: - for j in arg_list_from_node(i): - if isinstance(j, StringNode): - if j.value == src: - return i, j - return None, None - - for i in cmd['sources']: - # Try to find the node with the source string - root, string_node = find_node(i) - if root is None: - mlog.warning(' -- Unable to find extra file', mlog.green(i), 'in the target') - continue + to_sort_nodes: T.List[T.Union[FunctionNode, ArrayNode]] = [] - # Remove the found string node from the argument list - arg_node = root.args - mlog.log(' -- Removing extra file', mlog.green(i), 'from', - mlog.yellow(f'{string_node.filename}:{string_node.lineno}')) - arg_node.arguments.remove(string_node) + if cmd['operation'] in {'src_add', 'extra_files_add'}: + self.add_src_or_extra(cmd['operation'], target, cmd['sources'], to_sort_nodes) - # Mark the node as modified - if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode): - to_sort_nodes += [arg_node] - if root 
not in self.modified_nodes: - self.modified_nodes += [root] + elif cmd['operation'] in {'src_rm', 'extra_files_rm'}: + self.rm_src_or_extra(cmd['operation'], target, cmd['sources'], to_sort_nodes) elif cmd['operation'] == 'target_add': if target is not None: @@ -813,7 +898,7 @@ class Rewriter: id_base = re.sub(r'[- ]', '_', cmd['target']) target_id = id_base + '_exe' if cmd['target_type'] == 'executable' else '_lib' source_id = id_base + '_sources' - filename = os.path.join(cmd['subdir'], environment.build_filename) + filename = os.path.join(os.getcwd(), self.interpreter.source_root, cmd['subdir'], environment.build_filename) # Build src list src_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, '')) @@ -838,44 +923,55 @@ class Rewriter: self.to_add_nodes += [src_ass_node, tgt_ass_node] elif cmd['operation'] == 'target_rm': - to_remove = self.find_assignment_node(target['node']) + to_remove: BaseNode = self.find_assignment_node(target.node) if to_remove is None: - to_remove = target['node'] + to_remove = target.node self.to_remove_nodes += [to_remove] mlog.log(' -- Removing target', mlog.green(cmd['target']), 'at', mlog.yellow(f'{to_remove.filename}:{to_remove.lineno}')) elif cmd['operation'] == 'info': # T.List all sources in the target - src_list = [] - for i in target['sources']: - for j in arg_list_from_node(i): - if isinstance(j, StringNode): - src_list += [j.value] - extra_files_list = [] - for i in target['extra_files']: - for j in arg_list_from_node(i): - if isinstance(j, StringNode): - extra_files_list += [j.value] + + cwd = Path(os.getcwd()) + source_root_abs = cwd / self.interpreter.source_root + + src_list = self.interpreter.nodes_to_pretty_filelist(source_root_abs, target.subdir, target.source_nodes) + extra_files_list = self.interpreter.nodes_to_pretty_filelist(source_root_abs, target.subdir, [target.extra_files] if target.extra_files else []) + + src_list = ['unknown' if isinstance(x, UnknownValue) else relpath(x, source_root_abs) for 
x in src_list] + extra_files_list = ['unknown' if isinstance(x, UnknownValue) else relpath(x, source_root_abs) for x in extra_files_list] + test_data = { - 'name': target['name'], + 'name': target.name, 'sources': src_list, 'extra_files': extra_files_list } - self.add_info('target', target['id'], test_data) + self.add_info('target', target.id, test_data) # Sort files for i in to_sort_nodes: - convert = lambda text: int(text) if text.isdigit() else text.lower() - alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)] - path_sorter = lambda key: ([(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))]) + def convert(text: str) -> T.Union[int, str]: + return int(text) if text.isdigit() else text.lower() + + def alphanum_key(key: str) -> T.List[T.Union[int, str]]: + return [convert(c) for c in re.split('([0-9]+)', key)] - unknown = [x for x in i.arguments if not isinstance(x, StringNode)] - sources = [x for x in i.arguments if isinstance(x, StringNode)] + def path_sorter(key: str) -> T.List[T.Tuple[bool, T.List[T.Union[int, str]]]]: + return [(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))] + + if isinstance(i, FunctionNode) and i.func_name.value in BUILD_TARGET_FUNCTIONS: + src_args = i.args.arguments[1:] + target_name = [i.args.arguments[0]] + else: + src_args = i.args.arguments + target_name = [] + unknown: T.List[BaseNode] = [x for x in src_args if not isinstance(x, StringNode)] + sources: T.List[StringNode] = [x for x in src_args if isinstance(x, StringNode)] sources = sorted(sources, key=lambda x: path_sorter(x.value)) - i.arguments = unknown + sources + i.args.arguments = target_name + unknown + T.cast(T.List[BaseNode], sources) - def process(self, cmd): + def process(self, cmd: T.Dict[str, T.Any]) -> None: if 'type' not in cmd: raise RewriterException('Command has no key "type"') if cmd['type'] not in self.functions: @@ -883,7 +979,7 @@ class Rewriter: .format(cmd['type'], 
list(self.functions.keys()))) self.functions[cmd['type']](cmd) - def apply_changes(self): + def apply_changes(self) -> None: assert all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.modified_nodes) assert all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.to_remove_nodes) assert all(isinstance(x, (ArrayNode, FunctionNode)) for x in self.modified_nodes) @@ -891,7 +987,7 @@ class Rewriter: # Sort based on line and column in reversed order work_nodes = [{'node': x, 'action': 'modify'} for x in self.modified_nodes] work_nodes += [{'node': x, 'action': 'rm'} for x in self.to_remove_nodes] - work_nodes = sorted(work_nodes, key=lambda x: (x['node'].lineno, x['node'].colno), reverse=True) + work_nodes = sorted(work_nodes, key=lambda x: (T.cast(BaseNode, x['node']).lineno, T.cast(BaseNode, x['node']).colno), reverse=True) work_nodes += [{'node': x, 'action': 'add'} for x in self.to_add_nodes] # Generating the new replacement string @@ -900,11 +996,11 @@ class Rewriter: new_data = '' if i['action'] == 'modify' or i['action'] == 'add': printer = AstPrinter() - i['node'].accept(printer) + T.cast(BaseNode, i['node']).accept(printer) printer.post_process() new_data = printer.result.strip() data = { - 'file': i['node'].filename, + 'file': T.cast(BaseNode, i['node']).filename, 'str': new_data, 'node': i['node'], 'action': i['action'] @@ -912,11 +1008,11 @@ class Rewriter: str_list += [data] # Load build files - files = {} + files: T.Dict[str, T.Any] = {} for i in str_list: if i['file'] in files: continue - fpath = os.path.realpath(os.path.join(self.sourcedir, i['file'])) + fpath = os.path.realpath(T.cast(str, i['file'])) fdata = '' # Create an empty file if it does not exist if not os.path.exists(fpath): @@ -933,14 +1029,14 @@ class Rewriter: line_offsets += [offset] offset += len(j) - files[i['file']] = { + files[T.cast(str, i['file'])] = { 'path': fpath, 'raw': fdata, 'offsets': line_offsets } # 
Replace in source code - def remove_node(i): + def remove_node(i: T.Dict[str, T.Any]) -> None: offsets = files[i['file']]['offsets'] raw = files[i['file']]['raw'] node = i['node'] @@ -968,7 +1064,7 @@ class Rewriter: if i['action'] in {'modify', 'rm'}: remove_node(i) elif i['action'] == 'add': - files[i['file']]['raw'] += i['str'] + '\n' + files[T.cast(str, i['file'])]['raw'] += T.cast(str, i['str']) + '\n' # Write the files back for key, val in files.items(): @@ -999,7 +1095,7 @@ def list_to_dict(in_list: T.List[str]) -> T.Dict[str, str]: raise TypeError('in_list parameter of list_to_dict must have an even length.') return result -def generate_target(options) -> T.List[dict]: +def generate_target(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]: return [{ 'type': 'target', 'target': options.target, @@ -1009,7 +1105,7 @@ def generate_target(options) -> T.List[dict]: 'target_type': options.tgt_type, }] -def generate_kwargs(options) -> T.List[dict]: +def generate_kwargs(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]: return [{ 'type': 'kwargs', 'function': options.function, @@ -1018,19 +1114,19 @@ def generate_kwargs(options) -> T.List[dict]: 'kwargs': list_to_dict(options.kwargs), }] -def generate_def_opts(options) -> T.List[dict]: +def generate_def_opts(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]: return [{ 'type': 'default_options', 'operation': options.operation, 'options': list_to_dict(options.options), }] -def generate_cmd(options) -> T.List[dict]: +def generate_cmd(options: argparse.Namespace) -> T.List[T.Dict[str, T.Any]]: if os.path.exists(options.json): with open(options.json, encoding='utf-8') as fp: - return json.load(fp) + return T.cast(T.List[T.Dict[str, T.Any]], json.load(fp)) else: - return json.loads(options.json) + return T.cast(T.List[T.Dict[str, T.Any]], json.loads(options.json)) # Map options.type to the actual type name cli_type_map = { @@ -1043,7 +1139,7 @@ cli_type_map = { 'cmd': generate_cmd, } -def 
run(options): +def run(options: argparse.Namespace) -> int: mlog.redirect(True) if not options.verbose: mlog.set_quiet() @@ -1062,12 +1158,22 @@ def run(options): if not isinstance(commands, list): raise TypeError('Command is not a list') - for i in commands: - if not isinstance(i, object): + for i, cmd in enumerate(commands): + if not isinstance(cmd, object): raise TypeError('Command is not an object') - rewriter.process(i) + rewriter.process(cmd) + rewriter.apply_changes() + + if i == len(commands) - 1: # Improves the performance, is not necessary for correctness. + break + + rewriter.modified_nodes = [] + rewriter.to_remove_nodes = [] + rewriter.to_add_nodes = [] + # The AST changed, so we need to update every information that was derived from the AST + rewriter.interpreter = IntrospectionInterpreter(rewriter.sourcedir, '', rewriter.interpreter.backend, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()]) + rewriter.analyze_meson() - rewriter.apply_changes() rewriter.print_info() return 0 except Exception as e: diff --git a/mesonbuild/templates/cpptemplates.py b/mesonbuild/templates/cpptemplates.py index 1bfa2ae..cdfbbf8 100644 --- a/mesonbuild/templates/cpptemplates.py +++ b/mesonbuild/templates/cpptemplates.py @@ -16,7 +16,7 @@ hello_cpp_template = '''#include <iostream> int main(int argc, char **argv) {{ if (argc != 1) {{ - std::cout << argv[0] << "takes no arguments.\\n"; + std::cout << argv[0] << " takes no arguments.\\n"; return 1; }} std::cout << "This is project " << PROJECT_NAME << ".\\n"; diff --git a/mesonbuild/utils/platform.py b/mesonbuild/utils/platform.py index 8e762b6..8fdfee6 100644 --- a/mesonbuild/utils/platform.py +++ b/mesonbuild/utils/platform.py @@ -6,22 +6,30 @@ from __future__ import annotations """base classes providing no-op functionality..""" +import enum import os import typing as T from .. 
import mlog -__all__ = ['BuildDirLock'] +__all__ = ['DirectoryLock', 'DirectoryLockAction', 'DirectoryLockBase'] -# This needs to be inherited by the specific implementations to make type -# checking happy -class BuildDirLock: +class DirectoryLockAction(enum.Enum): + IGNORE = 0 + WAIT = 1 + FAIL = 2 - def __init__(self, builddir: str) -> None: - self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock') +class DirectoryLockBase: + def __init__(self, directory: str, lockfile: str, action: DirectoryLockAction, err: str) -> None: + self.action = action + self.err = err + self.lockpath = os.path.join(directory, lockfile) def __enter__(self) -> None: - mlog.debug('Calling the no-op version of BuildDirLock') + mlog.debug('Calling the no-op version of DirectoryLock') def __exit__(self, *args: T.Any) -> None: pass + +class DirectoryLock(DirectoryLockBase): + pass diff --git a/mesonbuild/utils/posix.py b/mesonbuild/utils/posix.py index e8387ba..a601dee 100644 --- a/mesonbuild/utils/posix.py +++ b/mesonbuild/utils/posix.py @@ -10,23 +10,33 @@ import fcntl import typing as T from .core import MesonException -from .platform import BuildDirLock as BuildDirLockBase +from .platform import DirectoryLockBase, DirectoryLockAction -__all__ = ['BuildDirLock'] +__all__ = ['DirectoryLock', 'DirectoryLockAction'] -class BuildDirLock(BuildDirLockBase): +class DirectoryLock(DirectoryLockBase): def __enter__(self) -> None: - self.lockfile = open(self.lockfilename, 'w', encoding='utf-8') + self.lockfile = open(self.lockpath, 'w+', encoding='utf-8') try: - fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) - except (BlockingIOError, PermissionError): + flags = fcntl.LOCK_EX + if self.action != DirectoryLockAction.WAIT: + flags = flags | fcntl.LOCK_NB + fcntl.flock(self.lockfile, flags) + except BlockingIOError: self.lockfile.close() - raise MesonException('Some other Meson process is already using this build directory. 
Exiting.') + if self.action == DirectoryLockAction.IGNORE: + return + raise MesonException(self.err) + except PermissionError: + self.lockfile.close() + raise MesonException(self.err) except OSError as e: self.lockfile.close() - raise MesonException(f'Failed to lock the build directory: {e.strerror}') + raise MesonException(f'Failed to lock directory {self.lockpath}: {e.strerror}') def __exit__(self, *args: T.Any) -> None: + if self.lockfile is None or self.lockfile.closed: + return fcntl.flock(self.lockfile, fcntl.LOCK_UN) self.lockfile.close() diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py index 5b3f131..4b656a6 100644 --- a/mesonbuild/utils/universal.py +++ b/mesonbuild/utils/universal.py @@ -38,6 +38,7 @@ if T.TYPE_CHECKING: from ..environment import Environment from ..compilers.compilers import Compiler from ..interpreterbase.baseobjects import SubProject + from .. import programs class _EnvPickleLoadable(Protocol): @@ -756,6 +757,20 @@ class VcsData: rev_regex: str dep: str wc_dir: T.Optional[str] = None + repo_can_be_file: bool = False + + def repo_exists(self, curdir: Path) -> bool: + if not shutil.which(self.cmd): + return False + + repo = curdir / self.repo_dir + if repo.is_dir(): + return True + if repo.is_file() and self.repo_can_be_file: + return True + + return False + def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[VcsData]: vcs_systems = [ @@ -766,6 +781,7 @@ def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[VcsData]: get_rev = ['git', 'describe', '--dirty=+', '--always'], rev_regex = '(.*)', dep = '.git/logs/HEAD', + repo_can_be_file=True, ), VcsData( name = 'mercurial', @@ -801,9 +817,7 @@ def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[VcsData]: parent_paths_and_self.appendleft(source_dir) for curdir in parent_paths_and_self: for vcs in vcs_systems: - repodir = vcs.repo_dir - cmd = vcs.cmd - if curdir.joinpath(repodir).is_dir() and shutil.which(cmd): + if vcs.repo_exists(curdir): 
vcs.wc_dir = str(curdir) return vcs return None @@ -1226,7 +1240,7 @@ def do_replacement(regex: T.Pattern[str], line: str, if variable_format == 'meson': return do_replacement_meson(regex, line, confdata) elif variable_format in {'cmake', 'cmake@'}: - return do_replacement_cmake(regex, line, variable_format == 'cmake@', confdata) + return do_replacement_cmake(line, variable_format == 'cmake@', confdata) else: raise MesonException('Invalid variable format') @@ -1261,44 +1275,92 @@ def do_replacement_meson(regex: T.Pattern[str], line: str, return var_str return re.sub(regex, variable_replace, line), missing_variables -def do_replacement_cmake(regex: T.Pattern[str], line: str, at_only: bool, +def do_replacement_cmake(line: str, at_only: bool, confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]: missing_variables: T.Set[str] = set() - def variable_replace(match: T.Match[str]) -> str: - # Pairs of escape characters before '@', '\@', '${' or '\${' - if match.group(0).endswith('\\'): - num_escapes = match.end(0) - match.start(0) - return '\\' * (num_escapes // 2) - # Handle cmake escaped \${} tags - elif not at_only and match.group(0) == '\\${': - return '${' - # \@escaped\@ variables - elif match.groupdict().get('escaped') is not None: - return match.group('escaped')[1:-2]+'@' + character_regex = re.compile(r''' + [^a-zA-Z0-9_/.+\-] + ''', re.VERBOSE) + + def variable_get(varname: str) -> str: + var_str = '' + if varname in confdata: + var, _ = confdata.get(varname) + if isinstance(var, str): + var_str = var + elif isinstance(var, bool): + var_str = str(int(var)) + elif isinstance(var, int): + var_str = str(var) + else: + msg = f'Tried to replace variable {varname!r} value with ' \ + f'something other than a string or int: {var!r}' + raise MesonException(msg) else: - # Template variable to be replaced - varname = match.group('variable') - if not varname: - varname = match.group('cmake_variable') - - var_str = '' 
- if varname in confdata: - var, _ = confdata.get(varname) - if isinstance(var, str): - var_str = var - elif isinstance(var, bool): - var_str = str(int(var)) - elif isinstance(var, int): - var_str = str(var) - else: - msg = f'Tried to replace variable {varname!r} value with ' \ - f'something other than a string or int: {var!r}' + missing_variables.add(varname) + return var_str + + def parse_line(line: str) -> str: + index = 0 + while len(line) > index: + if line[index] == '@': + next_at = line.find("@", index+1) + if next_at > index+1: + varname = line[index+1:next_at] + match = character_regex.search(varname) + + # at substituion doesn't occur if they key isn't valid + # however it also doesn't raise an error + if not match: + value = variable_get(varname) + line = line[:index] + value + line[next_at+1:] + + elif not at_only and line[index:index+2] == '${': + bracket_count = 1 + end_bracket = index + 2 + try: + while bracket_count > 0: + if line[end_bracket:end_bracket+2] == "${": + end_bracket += 2 + bracket_count += 1 + elif line[end_bracket] == "}": + end_bracket += 1 + bracket_count -= 1 + elif line[end_bracket] in {"@", "\n"}: + # these aren't valid variable characters + # but they are inconsequential at this point + end_bracket += 1 + elif character_regex.search(line[end_bracket]): + invalid_character = line[end_bracket] + variable = line[index+2:end_bracket] + msg = f'Found invalid character {invalid_character!r}' \ + f' in variable {variable!r}' + raise MesonException(msg) + else: + end_bracket += 1 + except IndexError: + msg = f'Found incomplete variable {line[index:-1]!r}' raise MesonException(msg) - else: - missing_variables.add(varname) - return var_str - return re.sub(regex, variable_replace, line), missing_variables + + if bracket_count == 0: + varname = parse_line(line[index+2:end_bracket-1]) + match = character_regex.search(varname) + if match: + invalid_character = line[end_bracket-2] + variable = line[index+2:end_bracket-3] + msg = f'Found 
invalid character {invalid_character!r}' \ + f' in variable {variable!r}' + raise MesonException(msg) + + value = variable_get(varname) + line = line[:index] + value + line[end_bracket:] + + index += 1 + + return line + + return parse_line(line), missing_variables def do_define_meson(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', subproject: T.Optional[SubProject] = None) -> str: @@ -1327,12 +1389,12 @@ def do_define_meson(regex: T.Pattern[str], line: str, confdata: 'ConfigurationDa else: raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname) -def do_define_cmake(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', at_only: bool, +def do_define_cmake(line: str, confdata: 'ConfigurationData', at_only: bool, subproject: T.Optional[SubProject] = None) -> str: cmake_bool_define = 'cmakedefine01' in line def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str: - arr = line.split() + arr = line[1:].split() if cmake_bool_define: (v, desc) = confdata.get(arr[1]) @@ -1347,7 +1409,7 @@ def do_define_cmake(regex: T.Pattern[str], line: str, confdata: 'ConfigurationDa define_value += [token] return ' '.join(define_value) - arr = line.split() + arr = line[1:].split() if len(arr) != 2 and subproject is not None: from ..interpreterbase.decorators import FeatureNew @@ -1367,12 +1429,12 @@ def do_define_cmake(regex: T.Pattern[str], line: str, confdata: 'ConfigurationDa result = get_cmake_define(line, confdata) result = f'#define {varname} {result}'.strip() + '\n' - result, _ = do_replacement_cmake(regex, result, at_only, confdata) + result, _ = do_replacement_cmake(result, at_only, confdata) return result def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'meson') -> T.Pattern[str]: # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define - if variable_format in {'meson', 'cmake@'}: + if variable_format == 'meson': # Also allow escaping pairs of '@' with '\@' regex = 
re.compile(r''' (?:\\\\)+(?=\\?@) # Matches multiple backslashes followed by an @ symbol @@ -1381,17 +1443,13 @@ def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'm | # OR (?P<escaped>\\@[-a-zA-Z0-9_]+\\@) # Match an escaped variable enclosed in @ symbols ''', re.VERBOSE) - else: + elif variable_format == 'cmake@': regex = re.compile(r''' - (?:\\\\)+(?=\\?(\$|@)) # Match multiple backslashes followed by a dollar sign or an @ symbol - | # OR - \\\${ # Match a backslash followed by a dollar sign and an opening curly brace - | # OR - \${(?P<cmake_variable>[-a-zA-Z0-9_]+)} # Match a variable enclosed in curly braces and capture the variable name - | # OR (?<!\\)@(?P<variable>[-a-zA-Z0-9_]+)@ # Match a variable enclosed in @ symbols and capture the variable name; no matches beginning with '\@' - | # OR - (?P<escaped>\\@[-a-zA-Z0-9_]+\\@) # Match an escaped variable enclosed in @ symbols + ''', re.VERBOSE) + elif variable_format == "cmake": + regex = re.compile(r''' + \${(?P<variable>[-a-zA-Z0-9_]*)} # Match a variable enclosed in curly braces and capture the variable name ''', re.VERBOSE) return regex @@ -1439,9 +1497,7 @@ def do_conf_str_cmake(src: str, data: T.List[str], confdata: 'ConfigurationData' if at_only: variable_format = 'cmake@' - regex = get_variable_regex(variable_format) - - search_token = '#cmakedefine' + search_token = 'cmakedefine' result: T.List[str] = [] missing_variables: T.Set[str] = set() @@ -1449,13 +1505,15 @@ def do_conf_str_cmake(src: str, data: T.List[str], confdata: 'ConfigurationData' # during substitution so we can warn the user to use the `copy:` kwarg. 
confdata_useless = not confdata.keys() for line in data: - if line.lstrip().startswith(search_token): + stripped_line = line.lstrip() + if len(stripped_line) >= 2 and stripped_line[0] == '#' and stripped_line[1:].lstrip().startswith(search_token): confdata_useless = False - line = do_define_cmake(regex, line, confdata, at_only, subproject) + + line = do_define_cmake(line, confdata, at_only, subproject) else: if '#mesondefine' in line: raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"') - line, missing = do_replacement_cmake(regex, line, at_only, confdata) + line, missing = do_replacement_cmake(line, at_only, confdata) missing_variables.update(missing) if missing: confdata_useless = False @@ -1578,7 +1636,7 @@ def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]: result.append(i) return result -def listify_array_value(value: T.Union[str, T.List[str]], shlex_split_args: bool = False) -> T.List[str]: +def listify_array_value(value: object, shlex_split_args: bool = False) -> T.List[str]: if isinstance(value, str): if value.startswith('['): try: @@ -1738,7 +1796,7 @@ def Popen_safe_logged(args: T.List[str], msg: str = 'Called', **kwargs: T.Any) - return p, o, e -def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]: +def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str | programs.ExternalProgram]) -> T.Optional[str]: ''' Takes each regular expression in @regexiter and tries to search for it in every item in @initer. If there is a match, returns that match. @@ -1754,7 +1812,7 @@ def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T. 
return None -def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None: +def _substitute_values_check_errors(command: T.List[str | programs.ExternalProgram], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None: # Error checking inregex: T.List[str] = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@'] outregex: T.List[str] = ['@OUTPUT([0-9]+)?@', '@OUTDIR@'] @@ -1794,7 +1852,7 @@ def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T. raise MesonException(m.format(match2.group(), len(values['@OUTPUT@']))) -def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str]: +def substitute_values(command: T.List[str | programs.ExternalProgram], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str | programs.ExternalProgram]: ''' Substitute the template strings in the @values dict into the list of strings @command and return a new list. For a full list of the templates, @@ -1821,7 +1879,7 @@ def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.L _substitute_values_check_errors(command, values) # Substitution - outcmd: T.List[str] = [] + outcmd: T.List[str | programs.ExternalProgram] = [] rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')] value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None for vv in command: diff --git a/mesonbuild/utils/win32.py b/mesonbuild/utils/win32.py index 4fcb8ed..22aea86 100644 --- a/mesonbuild/utils/win32.py +++ b/mesonbuild/utils/win32.py @@ -10,20 +10,30 @@ import msvcrt import typing as T from .core import MesonException -from .platform import BuildDirLock as BuildDirLockBase +from .platform import DirectoryLockBase, DirectoryLockAction -__all__ = ['BuildDirLock'] +__all__ = ['DirectoryLock', 'DirectoryLockAction'] -class BuildDirLock(BuildDirLockBase): +class DirectoryLock(DirectoryLockBase): def __enter__(self) -> None: - self.lockfile = 
open(self.lockfilename, 'w', encoding='utf-8') + self.lockfile = open(self.lockpath, 'w+', encoding='utf-8') try: - msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1) - except (BlockingIOError, PermissionError): + mode = msvcrt.LK_LOCK + if self.action != DirectoryLockAction.WAIT: + mode = msvcrt.LK_NBLCK + msvcrt.locking(self.lockfile.fileno(), mode, 1) + except BlockingIOError: self.lockfile.close() - raise MesonException('Some other Meson process is already using this build directory. Exiting.') + if self.action == DirectoryLockAction.IGNORE: + return + raise MesonException(self.err) + except PermissionError: + self.lockfile.close() + raise MesonException(self.err) def __exit__(self, *args: T.Any) -> None: + if self.lockfile is None or self.lockfile.closed: + return msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) self.lockfile.close() diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 9af1f39..c8eff69 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -21,6 +21,7 @@ import time import typing as T import textwrap import json +import gzip from base64 import b64encode from netrc import netrc @@ -29,7 +30,10 @@ from functools import lru_cache from . import WrapMode from .. import coredata -from ..mesonlib import quiet_git, GIT, ProgressBar, MesonException, windows_proof_rmtree, Popen_safe +from ..mesonlib import ( + DirectoryLock, DirectoryLockAction, quiet_git, GIT, ProgressBar, MesonException, + windows_proof_rmtree, Popen_safe +) from ..interpreterbase import FeatureNew from ..interpreterbase import SubProject from .. 
import mesonlib @@ -66,16 +70,23 @@ def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult: raise WrapException(f'WrapDB did not have expected SSL https url, instead got {urlstr}') return url -def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool = False) -> 'http.client.HTTPResponse': +def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool = False, allow_compression: bool = False) -> http.client.HTTPResponse: if have_opt: insecure_msg = '\n\n To allow connecting anyway, pass `--allow-insecure`.' else: insecure_msg = '' + def do_urlopen(url: urllib.parse.ParseResult) -> http.client.HTTPResponse: + headers = {} + if allow_compression: + headers['Accept-Encoding'] = 'gzip' + req = urllib.request.Request(urllib.parse.urlunparse(url), headers=headers) + return T.cast('http.client.HTTPResponse', urllib.request.urlopen(req, timeout=REQ_TIMEOUT)) + url = whitelist_wrapdb(urlstring) if has_ssl: try: - return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT)) + return do_urlopen(url) except OSError as excp: msg = f'WrapDB connection failed to {urlstring} with error {excp}.' 
if isinstance(excp, urllib.error.URLError) and isinstance(excp.reason, ssl.SSLCertVerificationError): @@ -92,15 +103,24 @@ def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool mlog.warning(f'SSL module not available in {sys.executable}: WrapDB traffic not authenticated.', once=True) # If we got this far, allow_insecure was manually passed - nossl_url = url._replace(scheme='http') try: - return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT)) + return do_urlopen(url._replace(scheme='http')) except OSError as excp: raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}') +def read_and_decompress(resp: http.client.HTTPResponse) -> bytes: + data = resp.read() + encoding = resp.headers['Content-Encoding'] + if encoding == 'gzip': + return gzip.decompress(data) + elif encoding: + raise WrapException(f'Unexpected Content-Encoding for {resp.url}: {encoding}') + else: + return data + def get_releases_data(allow_insecure: bool) -> bytes: - url = open_wrapdburl('https://wrapdb.mesonbuild.com/v2/releases.json', allow_insecure, True) - return url.read() + url = open_wrapdburl('https://wrapdb.mesonbuild.com/v2/releases.json', allow_insecure, True, True) + return read_and_decompress(url) @lru_cache(maxsize=None) def get_releases(allow_insecure: bool) -> T.Dict[str, T.Any]: @@ -109,9 +129,9 @@ def get_releases(allow_insecure: bool) -> T.Dict[str, T.Any]: def update_wrap_file(wrapfile: str, name: str, new_version: str, new_revision: str, allow_insecure: bool) -> None: url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{new_version}-{new_revision}/{name}.wrap', - allow_insecure, True) + allow_insecure, True, True) with open(wrapfile, 'wb') as f: - f.write(url.read()) + f.write(read_and_decompress(url)) def parse_patch_url(patch_url: str) -> T.Tuple[str, str]: u = urllib.parse.urlparse(patch_url) @@ -384,10 +404,10 @@ class Resolver: 
self.check_can_download() latest_version = info['versions'][0] version, revision = latest_version.rsplit('-', 1) - url = urllib.request.urlopen(f'https://wrapdb.mesonbuild.com/v2/{subp_name}_{version}-{revision}/{subp_name}.wrap') + url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{subp_name}_{version}-{revision}/{subp_name}.wrap', allow_compression=True) fname = Path(self.subdir_root, f'{subp_name}.wrap') with fname.open('wb') as f: - f.write(url.read()) + f.write(read_and_decompress(url)) mlog.log(f'Installed {subp_name} version {version} revision {revision}') wrap = PackageDefinition.from_wrap_file(str(fname)) self.wraps[wrap.name] = wrap @@ -432,7 +452,7 @@ class Resolver: return wrap_name return None - def resolve(self, packagename: str, force_method: T.Optional[Method] = None) -> T.Tuple[str, Method]: + def _resolve(self, packagename: str, force_method: T.Optional[Method] = None) -> T.Tuple[str, Method]: wrap = self.wraps.get(packagename) if wrap is None: wrap = self.get_from_wrapdb(packagename) @@ -530,6 +550,15 @@ class Resolver: self.wrap.update_hash_cache(self.dirname) return rel_path, method + def resolve(self, packagename: str, force_method: T.Optional[Method] = None) -> T.Tuple[str, Method]: + try: + with DirectoryLock(self.subdir_root, '.wraplock', + DirectoryLockAction.WAIT, + 'Failed to lock subprojects directory'): + return self._resolve(packagename, force_method) + except FileNotFoundError: + raise WrapNotFoundException('Attempted to resolve subproject without subprojects directory present.') + def check_can_download(self) -> None: # Don't download subproject data based on wrap file if requested. # Git submodules are ok (see above)! 
diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py index 5486a26..6f97fe2 100644 --- a/mesonbuild/wrap/wraptool.py +++ b/mesonbuild/wrap/wraptool.py @@ -9,8 +9,8 @@ import shutil import typing as T from glob import glob -from .wrap import (open_wrapdburl, WrapException, get_releases, get_releases_data, - parse_patch_url) +from .wrap import (open_wrapdburl, read_and_decompress, WrapException, get_releases, + get_releases_data, parse_patch_url) from pathlib import Path from .. import mesonlib, msubprojects @@ -99,9 +99,9 @@ def install(options: 'argparse.Namespace') -> None: if os.path.exists(wrapfile): raise SystemExit('Wrap file already exists.') (version, revision) = get_latest_version(name, options.allow_insecure) - url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{version}-{revision}/{name}.wrap', options.allow_insecure, True) + url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{version}-{revision}/{name}.wrap', options.allow_insecure, True, True) with open(wrapfile, 'wb') as f: - f.write(url.read()) + f.write(read_and_decompress(url)) print(f'Installed {name} version {version} revision {revision}') def get_current_version(wrapfile: str) -> T.Tuple[str, str, str, str, T.Optional[str]]: diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index f9faca9..7265d3e 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -143,7 +143,7 @@ class CommandTests(unittest.TestCase): os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH'] self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)]) # Fix importlib-metadata by appending all dirs in pylibdir - PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir()] + PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir() if x.name.endswith('.egg')] PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS] os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS) # Check that all the files were installed 
correctly diff --git a/run_mypy.py b/run_mypy.py index d7d3aaa..a8b21ed 100755 --- a/run_mypy.py +++ b/run_mypy.py @@ -13,7 +13,7 @@ from mesonbuild.mesonlib import version_compare modules = [ # fully typed submodules - # 'mesonbuild/ast/', + 'mesonbuild/ast/', 'mesonbuild/cargo/', 'mesonbuild/cmake/', 'mesonbuild/compilers/', @@ -26,10 +26,6 @@ modules = [ 'mesonbuild/wrap/', # specific files - 'mesonbuild/ast/introspection.py', - 'mesonbuild/ast/printer.py', - 'mesonbuild/ast/postprocess.py', - 'mesonbuild/ast/visitor.py', 'mesonbuild/arglist.py', 'mesonbuild/backend/backends.py', 'mesonbuild/backend/nonebackend.py', @@ -83,6 +79,7 @@ modules = [ 'mesonbuild/optinterpreter.py', 'mesonbuild/options.py', 'mesonbuild/programs.py', + 'mesonbuild/rewriter.py', ] additional = [ 'run_mypy.py', diff --git a/run_project_tests.py b/run_project_tests.py index fa7c8a6..ce6e5c2 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -997,7 +997,7 @@ def have_working_compiler(lang: str, use_tmp: bool) -> bool: return False if not compiler: return False - env.coredata.process_compiler_options(lang, compiler, env, '') + env.coredata.process_compiler_options(lang, compiler, '') try: compiler.sanity_check(env.get_scratch_dir(), env) except mesonlib.MesonException: diff --git a/test cases/cmake/13 system includes/main2.cpp b/test cases/cmake/13 system includes/main2.cpp new file mode 100644 index 0000000..a94a116 --- /dev/null +++ b/test cases/cmake/13 system includes/main2.cpp @@ -0,0 +1,5 @@ +#include <triggerWarn.hpp> + +int main(void) { + return 0; +} diff --git a/test cases/cmake/13 system includes/meson.build b/test cases/cmake/13 system includes/meson.build index 1265d46..fe71580 100644 --- a/test cases/cmake/13 system includes/meson.build +++ b/test cases/cmake/13 system includes/meson.build @@ -13,6 +13,10 @@ endif cm = import('cmake') sub_pro = cm.subproject('cmMod') sub_dep = sub_pro.dependency('cmModLib') +sub_inc = sub_pro.include_directories('cmModLib') exe1 = 
executable('main1', ['main.cpp'], dependencies: [sub_dep]) test('test1', exe1) + +exe2 = executable('main2', ['main2.cpp'], include_directories: sub_inc) +test('test2', exe2) diff --git a/test cases/common/14 configure file/CMakeLists.txt b/test cases/common/14 configure file/CMakeLists.txt new file mode 100644 index 0000000..6a894b0 --- /dev/null +++ b/test cases/common/14 configure file/CMakeLists.txt @@ -0,0 +1,10 @@ +cmake_minimum_required(VERSION 3.12) + +project("configure file test" LANGUAGES C) + +set("var1" "foo") +set("var2" "bar") +configure_file("config7.h.in" "config7.h") + +set("var" "foo") +configure_file("config10.h.in" "config10.h") diff --git a/test cases/common/14 configure file/config7.h.in b/test cases/common/14 configure file/config7.h.in index edd0bb3..5180c2f 100644 --- a/test cases/common/14 configure file/config7.h.in +++ b/test cases/common/14 configure file/config7.h.in @@ -1,16 +1,11 @@ -/* No escape */ +/* cmake substitutions cannot be escaped */ #define MESSAGE1 "${var1}" - -/* Single escape means no replace */ #define MESSAGE2 "\${var1}" - -/* Replace pairs of escapes before '@' or '\@' with escape characters - * (note we have to double number of pairs due to C string escaping) - */ #define MESSAGE3 "\\\\${var1}" - -/* Pairs of escapes and then single escape to avoid replace */ #define MESSAGE4 "\\\\\${var1}" +#define MESSAGE5 "@var1@" +#define MESSAGE6 "\\@var1@" +#define MESSAGE7 "\\\\@var1@" -/* Check escape character outside variables */ -#define MESSAGE5 "\\ ${ \${ \\\\${ \\\\\${" +/* backslash is an invalid variable character */ +#define MESSAGE8 "@var1\@" diff --git a/test cases/common/14 configure file/prog7.c b/test cases/common/14 configure file/prog7.c index 802bc46..900522c 100644 --- a/test cases/common/14 configure file/prog7.c +++ b/test cases/common/14 configure file/prog7.c @@ -3,8 +3,12 @@ int main(void) { return strcmp(MESSAGE1, "foo") - || strcmp(MESSAGE2, "${var1}") - || strcmp(MESSAGE3, "\\foo") - ||
strcmp(MESSAGE4, "\\${var1}") - || strcmp(MESSAGE5, "\\ ${ ${ \\${ \\${"); + || strcmp(MESSAGE2, "\foo") + || strcmp(MESSAGE3, "\\\\foo") + || strcmp(MESSAGE4, "\\\\\foo") + || strcmp(MESSAGE5, "foo") + || strcmp(MESSAGE6, "\\foo") + || strcmp(MESSAGE7, "\\\\foo") + || strcmp(MESSAGE8, "@var1\@") + || 0; } diff --git a/test cases/common/223 persubproject options/meson.build b/test cases/common/223 persubproject options/meson.build index 25a0100..abe3bee 100644 --- a/test cases/common/223 persubproject options/meson.build +++ b/test cases/common/223 persubproject options/meson.build @@ -1,5 +1,5 @@ project('persubproject options', 'c', 'cpp', - default_options : ['werror=true', + default_options : ['werror=true', 'default_library=both', 'warning_level=3', 'cpp_std=c++11']) diff --git a/test cases/common/223 persubproject options/test.json b/test cases/common/223 persubproject options/test.json index ccfa9ff..ef50ab5 100644 --- a/test cases/common/223 persubproject options/test.json +++ b/test cases/common/223 persubproject options/test.json @@ -1,7 +1,3 @@ { - "matrix": { - "options": { - "default_library": [ { "val": "both" } ] - } - } + "skip_on_env": ["MESON_TEST_DDEFAULT_LIBRARY"] } diff --git a/test cases/common/247 deprecated option/test.json b/test cases/common/247 deprecated option/test.json index a644b04..4b9a475 100644 --- a/test cases/common/247 deprecated option/test.json +++ b/test cases/common/247 deprecated option/test.json @@ -26,77 +26,77 @@ }, "stdout": [ { - "line": ".*DEPRECATION: Option 'o1' is deprecated", + "line": ".*DEPRECATION: Option \"o1\" is deprecated", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'o2' value 'a' is deprecated", + "line": ".*DEPRECATION: Option \"o2\" value 'a' is deprecated", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'o3' value 'a' is replaced by 'c'", + "line": ".*DEPRECATION: Option \"o3\" value 'a' is replaced by 'c'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: 
Option 'o4' value 'true' is replaced by 'enabled'", + "line": ".*DEPRECATION: Option \"o4\" value 'true' is replaced by 'enabled'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'o5' value 'auto' is replaced by 'false'", + "line": ".*DEPRECATION: Option \"o5\" value 'auto' is replaced by 'false'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'p1' is deprecated", + "line": ".*DEPRECATION: Option \"p1\" is deprecated", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'p2' value 'a' is deprecated", + "line": ".*DEPRECATION: Option \"p2\" value 'a' is deprecated", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'p3' value 'a' is replaced by 'c'", + "line": ".*DEPRECATION: Option \"p3\" value 'a' is replaced by 'c'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'p4' value 'true' is replaced by 'enabled'", + "line": ".*DEPRECATION: Option \"p4\" value 'true' is replaced by 'enabled'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'p5' value 'auto' is replaced by 'false'", + "line": ".*DEPRECATION: Option \"p5\" value 'auto' is replaced by 'false'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'c1' is deprecated", + "line": ".*DEPRECATION: Option \"c1\" is deprecated", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'c2' value 'a' is deprecated", + "line": ".*DEPRECATION: Option \"c2\" value 'a' is deprecated", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'c3' value 'a' is replaced by 'c'", + "line": ".*DEPRECATION: Option \"c3\" value 'a' is replaced by 'c'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'c4' value 'true' is replaced by 'enabled'", + "line": ".*DEPRECATION: Option \"c4\" value 'true' is replaced by 'enabled'", "match": "re", "count": 1 }, { - "line": ".*DEPRECATION: Option 'c5' value 'auto' is replaced by 'false'", + "line": ".*DEPRECATION: Option \"c5\" value 'auto' is replaced 
by 'false'", "match": "re", "count": 1 } diff --git a/test cases/common/280 pkgconfig-gen/meson.build b/test cases/common/280 pkgconfig-gen/meson.build index 3f15888..42ce128 100644 --- a/test cases/common/280 pkgconfig-gen/meson.build +++ b/test cases/common/280 pkgconfig-gen/meson.build @@ -1,4 +1,4 @@ -project('pkgconfig-get', 'c') +project('pkgconfig-get', 'c', meson_version: '>=1.9.0') pkgg = import('pkgconfig') @@ -16,4 +16,5 @@ pkgg.generate( filebase : 'simple', description : 'A simple demo library.', libraries: [lib_dep], -)
\ No newline at end of file + license: 'Apache-2.0', +) diff --git a/test cases/common/281 subproj options/meson.build b/test cases/common/281 subproj options/meson.build index 55fb109..d450004 100644 --- a/test cases/common/281 subproj options/meson.build +++ b/test cases/common/281 subproj options/meson.build @@ -1,4 +1,3 @@ -project('pkg_opt_test') +project('pkg_opt_test', default_options: ['werror=false', 'sub:from_toplevel=true', 'sub:werror=true']) -subproject('sub') -subproject('sub2') +subproject('sub', default_options: ['sub2:from_subp=true']) diff --git a/test cases/common/281 subproj options/subprojects/sub/meson.build b/test cases/common/281 subproj options/subprojects/sub/meson.build index 82cd386..6cc4906 100644 --- a/test cases/common/281 subproj options/subprojects/sub/meson.build +++ b/test cases/common/281 subproj options/subprojects/sub/meson.build @@ -1,8 +1,12 @@ project('subproject', 'c') assert(get_option('bar') == true) +assert(get_option('werror') == true) +assert(get_option('from_toplevel') == true) # b_lto is only initialized if used, see test "common/40 options" cc = meson.get_compiler('c') if cc.get_id() in ['gcc', 'clang', 'clang-cl'] assert(get_option('b_lto') == true) endif + +subproject('sub2') diff --git a/test cases/common/281 subproj options/subprojects/sub/meson_options.txt b/test cases/common/281 subproj options/subprojects/sub/meson_options.txt index 129a7d4..7f94d02 100644 --- a/test cases/common/281 subproj options/subprojects/sub/meson_options.txt +++ b/test cases/common/281 subproj options/subprojects/sub/meson_options.txt @@ -1 +1,2 @@ option('bar', type: 'boolean', value: false) +option('from_toplevel', type: 'boolean', value: false) diff --git a/test cases/common/281 subproj options/subprojects/sub2/meson.build b/test cases/common/281 subproj options/subprojects/sub2/meson.build index 3b0ed92..65f3e5a 100644 --- a/test cases/common/281 subproj options/subprojects/sub2/meson.build +++ b/test cases/common/281 subproj 
options/subprojects/sub2/meson.build @@ -1,5 +1,7 @@ project('subproject', 'c') +assert(get_option('from_subp') == true) + # b_lto is only initialized if used, see test "common/40 options" cc = meson.get_compiler('c') if cc.get_id() in ['gcc', 'clang', 'clang-cl'] diff --git a/test cases/common/281 subproj options/subprojects/sub2/meson_options.txt b/test cases/common/281 subproj options/subprojects/sub2/meson_options.txt new file mode 100644 index 0000000..d645182 --- /dev/null +++ b/test cases/common/281 subproj options/subprojects/sub2/meson_options.txt @@ -0,0 +1 @@ +option('from_subp', type: 'boolean', value: false) diff --git a/test cases/common/282 test args and depends in path/libs/a/lib_a.c b/test cases/common/282 test args and depends in path/libs/a/lib_a.c new file mode 100644 index 0000000..7191a69 --- /dev/null +++ b/test cases/common/282 test args and depends in path/libs/a/lib_a.c @@ -0,0 +1,5 @@ +char +func_a (void) +{ + return 'a'; +} diff --git a/test cases/common/282 test args and depends in path/libs/a/lib_a.def b/test cases/common/282 test args and depends in path/libs/a/lib_a.def new file mode 100644 index 0000000..4af3bdb --- /dev/null +++ b/test cases/common/282 test args and depends in path/libs/a/lib_a.def @@ -0,0 +1,3 @@ +LIBRARY LIBA +EXPORTS + func_a diff --git a/test cases/common/282 test args and depends in path/libs/a/meson.build b/test cases/common/282 test args and depends in path/libs/a/meson.build new file mode 100644 index 0000000..0b4b6a4 --- /dev/null +++ b/test cases/common/282 test args and depends in path/libs/a/meson.build @@ -0,0 +1,5 @@ +lib_a = shared_library('a', + ['lib_a.c'], + name_prefix: 'lib', + gnu_symbol_visibility: 'default', + vs_module_defs: 'lib_a.def') diff --git a/test cases/common/282 test args and depends in path/libs/b/lib_b.c b/test cases/common/282 test args and depends in path/libs/b/lib_b.c new file mode 100644 index 0000000..17e5730 --- /dev/null +++ b/test cases/common/282 test args and depends 
in path/libs/b/lib_b.c @@ -0,0 +1,5 @@ +char +func_b (void) +{ + return 'b'; +} diff --git a/test cases/common/282 test args and depends in path/libs/b/lib_b.def b/test cases/common/282 test args and depends in path/libs/b/lib_b.def new file mode 100644 index 0000000..403a731 --- /dev/null +++ b/test cases/common/282 test args and depends in path/libs/b/lib_b.def @@ -0,0 +1,3 @@ +LIBRARY LIBB +EXPORTS + func_b diff --git a/test cases/common/282 test args and depends in path/libs/b/meson.build b/test cases/common/282 test args and depends in path/libs/b/meson.build new file mode 100644 index 0000000..766125d --- /dev/null +++ b/test cases/common/282 test args and depends in path/libs/b/meson.build @@ -0,0 +1,5 @@ +lib_b = shared_library('b', + ['lib_b.c'], + name_prefix: 'lib', + gnu_symbol_visibility: 'default', + vs_module_defs: 'lib_b.def') diff --git a/test cases/common/282 test args and depends in path/libs/meson.build b/test cases/common/282 test args and depends in path/libs/meson.build new file mode 100644 index 0000000..b00ea8a --- /dev/null +++ b/test cases/common/282 test args and depends in path/libs/meson.build @@ -0,0 +1,2 @@ +subdir('a') +subdir('b') diff --git a/test cases/common/282 test args and depends in path/meson.build b/test cases/common/282 test args and depends in path/meson.build new file mode 100644 index 0000000..d9dd9ad --- /dev/null +++ b/test cases/common/282 test args and depends in path/meson.build @@ -0,0 +1,19 @@ +project('test-args-and-depends-in-path', 'c') + +subdir('libs') + +dl_dep = dependency('dl', required: false) + +fs = import('fs') + +test_exe = executable('test-exe', + c_args: [ + '-DLIBA=' + fs.name(lib_a.full_path()), + '-DLIBB=' + fs.name(lib_b.full_path()), + ], + sources: ['test.c'], + dependencies: [dl_dep]) + +test ('test', test_exe, + args: [lib_a], + depends: [lib_b]) diff --git a/test cases/common/282 test args and depends in path/test.c b/test cases/common/282 test args and depends in path/test.c new file 
mode 100644 index 0000000..82452ba --- /dev/null +++ b/test cases/common/282 test args and depends in path/test.c @@ -0,0 +1,67 @@ +#include <stdlib.h> +#include <stddef.h> +#include <stdio.h> +#include <assert.h> + +#ifndef _WIN32 +#include <dlfcn.h> +#else +#include <windows.h> +#endif + +typedef struct { + const char *library_name; + const char *func_name; + char expected_result; +} test_t; + +static void +load (test_t *test) +{ +#ifndef _WIN32 + void *h = dlopen (test->library_name, RTLD_NOW | RTLD_LOCAL); + if (h == NULL) { + fprintf (stderr, "dlopen (%s) failed: %s\n", + test->library_name, dlerror ()); + exit (EXIT_FAILURE); + } + + typedef char (*func_t)(void); + func_t func = (func_t) dlsym (h, test->func_name); + assert (func != NULL); + + assert (func () == test->expected_result); + dlclose (h); +#else /* _WIN32 */ + HMODULE h = LoadLibraryA (test->library_name); + if (h == NULL) { + fprintf (stderr, "LoadLibrary (%s) failed with error code %u\n", + test->library_name, (unsigned int) GetLastError ()); + exit (EXIT_FAILURE); + } + + typedef char (*func_t)(void); + func_t func = (func_t) GetProcAddress (h, test->func_name); + assert (func != NULL); + + assert (func () == test->expected_result); + FreeLibrary (h); +#endif +} + +#define STRINGIFY_HELPER(x) #x +#define STRINGIFY(x) STRINGIFY_HELPER(x) + +int +main (void) +{ + test_t tests[] = { + {STRINGIFY (LIBA), "func_a", 'a'}, + {STRINGIFY (LIBB), "func_b", 'b'}, + }; + + for (size_t i = 0; i < sizeof (tests) / sizeof (tests[0]); i++) + load (&tests[i]); + + return 0; +} diff --git a/test cases/common/40 options/meson.build b/test cases/common/40 options/meson.build index 3849d54..f41265a 100644 --- a/test cases/common/40 options/meson.build +++ b/test cases/common/40 options/meson.build @@ -40,8 +40,7 @@ if get_option('integer_opt') != 3 endif negint = get_option('neg_int_opt') - -if negint != -3 and negint != -10 +if negint not in [-2, -3, -10] error('Incorrect value @0@ in negative integer 
option.'.format(negint)) endif diff --git a/test cases/common/98 subproject subdir/meson.build b/test cases/common/98 subproject subdir/meson.build index d2bafed..5d92772 100644 --- a/test cases/common/98 subproject subdir/meson.build +++ b/test cases/common/98 subproject subdir/meson.build @@ -83,7 +83,7 @@ d = dependency('subsubsub') assert(d.found(), 'Should be able to fallback to sub-sub-subproject') # Verify that `static: true` implies 'default_library=static'. -d = dependency('sub_static', static: true) +d = dependency('sub_static', static: true, default_options: ['bar=true']) assert(d.found()) # Verify that when not specifying static kwarg we can still get fallback dep. d = dependency('sub_static') diff --git a/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build b/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build index 6c00623..8de7cb4 100644 --- a/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build +++ b/test cases/common/98 subproject subdir/subprojects/sub_static/meson.build @@ -1,6 +1,7 @@ project('sub_static') assert(get_option('default_library') == 'static') +assert(get_option('bar') == true) meson.override_dependency('sub_static', declare_dependency()) meson.override_dependency('sub_static2', declare_dependency(), static: true) meson.override_dependency('sub_static3', declare_dependency(variables: {'static': 'true'}), static: true) diff --git a/test cases/common/98 subproject subdir/subprojects/sub_static/meson_options.txt b/test cases/common/98 subproject subdir/subprojects/sub_static/meson_options.txt new file mode 100644 index 0000000..129a7d4 --- /dev/null +++ b/test cases/common/98 subproject subdir/subprojects/sub_static/meson_options.txt @@ -0,0 +1 @@ +option('bar', type: 'boolean', value: false) diff --git a/test cases/cuda/10 cuda dependency/modules/meson.build b/test cases/cuda/10 cuda dependency/modules/meson.build index 0da43f2..b934c6b 100644 --- a/test cases/cuda/10 cuda 
dependency/modules/meson.build +++ b/test cases/cuda/10 cuda dependency/modules/meson.build @@ -1,2 +1,2 @@ -exe = executable('prog', 'prog.cc', dependencies: dependency('cuda', modules: ['cublas'])) +exe = executable('prog', 'prog.cc', dependencies: dependency('cuda', modules: ['cublas', 'nvidia-ml'])) test('cudatest', exe) diff --git a/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build b/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build index c0fed83..e36d877 100644 --- a/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build +++ b/test cases/cuda/11 cuda dependency (nvcc)/modules/meson.build @@ -1,2 +1,2 @@ -exe = executable('prog', 'prog.cu', dependencies: dependency('cuda', modules: ['cublas'])) +exe = executable('prog', 'prog.cu', dependencies: dependency('cuda', modules: ['cublas', 'nvidia-ml'])) test('cudatest', exe) diff --git a/test cases/cuda/12 cuda dependency (mixed)/meson.build b/test cases/cuda/12 cuda dependency (mixed)/meson.build index 44a49db..4519b28 100644 --- a/test cases/cuda/12 cuda dependency (mixed)/meson.build +++ b/test cases/cuda/12 cuda dependency (mixed)/meson.build @@ -1,4 +1,4 @@ project('cuda dependency', 'cpp', 'cuda') -exe = executable('prog', 'prog.cpp', 'kernel.cu', dependencies: dependency('cuda', modules: ['cublas']), link_language: 'cpp') +exe = executable('prog', 'prog.cpp', 'kernel.cu', dependencies: dependency('cuda', modules: ['cublas', 'nvidia-ml']), link_language: 'cpp') test('cudatest', exe) diff --git a/test cases/failing/33 non-root subproject/test.json b/test cases/failing/33 non-root subproject/test.json index 52baf6a..d379a43 100644 --- a/test cases/failing/33 non-root subproject/test.json +++ b/test cases/failing/33 non-root subproject/test.json @@ -1,7 +1,7 @@ { "stdout": [ { - "line": "test cases/failing/33 non-root subproject/some/meson.build:1:0: ERROR: Neither a subproject directory nor a someproj.wrap file was found." 
+ "line": "test cases/failing/33 non-root subproject/some/meson.build:1:0: ERROR: Attempted to resolve subproject without subprojects directory present." } ] } diff --git a/test cases/failing/73 subproj dependency not-found and required/test.json b/test cases/failing/73 subproj dependency not-found and required/test.json index 11ab031..e79e284 100644 --- a/test cases/failing/73 subproj dependency not-found and required/test.json +++ b/test cases/failing/73 subproj dependency not-found and required/test.json @@ -1,7 +1,7 @@ { "stdout": [ { - "line": "test cases/failing/73 subproj dependency not-found and required/meson.build:2:10: ERROR: Neither a subproject directory nor a missing.wrap file was found." + "line": "test cases/failing/73 subproj dependency not-found and required/meson.build:2:10: ERROR: Attempted to resolve subproject without subprojects directory present." } ] } diff --git a/test cases/failing/95 invalid option file/test.json b/test cases/failing/95 invalid option file/test.json index 073ac67..debb4a1 100644 --- a/test cases/failing/95 invalid option file/test.json +++ b/test cases/failing/95 invalid option file/test.json @@ -1,7 +1,7 @@ { "stdout": [ { - "line": "test cases/failing/95 invalid option file/meson_options.txt:1:0: ERROR: lexer" + "line": "test cases/failing/95 invalid option file/meson_options.txt:1:0: ERROR: lexer: unrecognized token \"'\"" } ] } diff --git a/test cases/fortran/23 preprocess/main.f90 b/test cases/fortran/23 preprocess/main.f90 index 7cbc11c..8251741 100644 --- a/test cases/fortran/23 preprocess/main.f90 +++ b/test cases/fortran/23 preprocess/main.f90 @@ -1,4 +1,14 @@ #define MYDEF program MYDEF foo - write (*,*) 'Hello, world!' + character(20) :: str +#ifdef CORRECT + str = 'Hello, ' // 'world!' +#else + str = 'Preprocessing error!' +#endif + if (str /= 'Hello, world!') then + print *, 'Preprocessing failed.' 
+ error stop 1 + end if + stop 0 end MYDEF foo diff --git a/test cases/fortran/23 preprocess/meson.build b/test cases/fortran/23 preprocess/meson.build index b776940..88077d3 100644 --- a/test cases/fortran/23 preprocess/meson.build +++ b/test cases/fortran/23 preprocess/meson.build @@ -1,7 +1,12 @@ -project('preprocess', 'fortran') +project('preprocess', 'fortran', meson_version: '>1.3.2') fc = meson.get_compiler('fortran') -pp_files = fc.preprocess('main.f90', output: '@PLAINNAME@') +pp_files = fc.preprocess( + 'main.f90', + compile_args: ['-DCORRECT=true'], + output: '@PLAINNAME@') -library('foo', pp_files) +t = executable('foo', pp_files) + +test('check_result', t) diff --git a/test cases/frameworks/15 llvm/test.json b/test cases/frameworks/15 llvm/test.json index fa883b1..b9cdc20 100644 --- a/test cases/frameworks/15 llvm/test.json +++ b/test cases/frameworks/15 llvm/test.json @@ -2,9 +2,9 @@ "matrix": { "options": { "method": [ - { "val": "config-tool", "expect_skip_on_jobname": ["msys2-gcc"] }, - { "val": "cmake", "expect_skip_on_jobname": ["msys2-gcc"] }, - { "val": "combination", "expect_skip_on_jobname": ["msys2-gcc"] } + { "val": "config-tool" }, + { "val": "cmake" }, + { "val": "combination" } ], "link-static": [ { "val": true, "expect_skip_on_jobname": ["arch", "opensuse", "linux-gentoo-gcc"] }, diff --git a/test cases/frameworks/17 mpi/test.json b/test cases/frameworks/17 mpi/test.json index 3a46657..cbd1686 100644 --- a/test cases/frameworks/17 mpi/test.json +++ b/test cases/frameworks/17 mpi/test.json @@ -2,10 +2,8 @@ "matrix": { "options": { "method": [ - { "val": "auto", - "expect_skip_on_jobname": ["ubuntu"] }, - { "val": "pkg-config", - "expect_skip_on_jobname": ["ubuntu"] }, + { "val": "auto" }, + { "val": "pkg-config" }, { "val": "config-tool", "expect_skip_on_jobname": ["fedora"] }, { diff --git a/test cases/frameworks/18 vulkan/meson.build b/test cases/frameworks/18 vulkan/meson.build index 5cfe89f..ab9f291 100644 --- a/test 
cases/frameworks/18 vulkan/meson.build +++ b/test cases/frameworks/18 vulkan/meson.build @@ -1,6 +1,8 @@ project('vulkan test', 'c') -vulkan_dep = dependency('vulkan', required : false) +method = get_option('method') + +vulkan_dep = dependency('vulkan', required : false, method : method) if not vulkan_dep.found() error('MESON_SKIP_TEST: vulkan not found.') endif diff --git a/test cases/frameworks/18 vulkan/meson.options b/test cases/frameworks/18 vulkan/meson.options new file mode 100644 index 0000000..962fbe2 --- /dev/null +++ b/test cases/frameworks/18 vulkan/meson.options @@ -0,0 +1,6 @@ +option( + 'method', + type : 'combo', + choices : ['auto', 'pkg-config', 'system'], + value : 'auto', +) diff --git a/test cases/frameworks/18 vulkan/test.json b/test cases/frameworks/18 vulkan/test.json index 66afb97..820f075 100644 --- a/test cases/frameworks/18 vulkan/test.json +++ b/test cases/frameworks/18 vulkan/test.json @@ -1,3 +1,14 @@ { + "env": { + "VULKAN_SDK": "/usr" + }, + "matrix": { + "options": { + "method": [ + { "val": "pkg-config" }, + { "val": "system" } + ] + } + }, "expect_skip_on_jobname": ["azure", "cygwin", "macos", "msys2"] } diff --git a/test cases/frameworks/38 gettext extractor/meson.build b/test cases/frameworks/38 gettext extractor/meson.build index 9a54df5..a31c87d 100644 --- a/test cases/frameworks/38 gettext extractor/meson.build +++ b/test cases/frameworks/38 gettext extractor/meson.build @@ -9,6 +9,10 @@ if not find_program('xgettext', required: false).found() error('MESON_SKIP_TEST xgettext command not found') endif +if host_machine.system() == 'darwin' + error('MESON_SKIP_TEST test is unstable on macOS for unknown reasons') +endif + i18n = import('i18n') xgettext_args = ['-ktr', '--add-comments=TRANSLATOR:', '--from-code=UTF-8'] diff --git a/test cases/frameworks/38 gettext extractor/test.json b/test cases/frameworks/38 gettext extractor/test.json index c5952ff..032698e 100644 --- a/test cases/frameworks/38 gettext extractor/test.json +++ 
b/test cases/frameworks/38 gettext extractor/test.json @@ -2,5 +2,5 @@ "installed": [ { "type": "file", "file": "usr/intl/main.pot" } ], - "expect_skip_on_jobname": ["azure", "cygwin"] + "expect_skip_on_jobname": ["azure", "cygwin", "macos"] } diff --git a/test cases/frameworks/7 gnome/gdbus/meson.build b/test cases/frameworks/7 gnome/gdbus/meson.build index fdb3896..22896e0 100644 --- a/test cases/frameworks/7 gnome/gdbus/meson.build +++ b/test cases/frameworks/7 gnome/gdbus/meson.build @@ -52,6 +52,23 @@ assert(gdbus_src.length() == 3, 'expected 3 targets') assert(gdbus_src[0].full_path().endswith('.c'), 'expected 1 c source file') assert(gdbus_src[1].full_path().endswith('.h'), 'expected 1 c header file') +if not pretend_glib_old and glib.version().version_compare('>=2.75.2') + gdbus_src_docs = gnome.gdbus_codegen('generated-gdbus-docs', + sources : files('data/com.example.Sample.xml'), + interface_prefix : 'com.example.', + namespace : 'Sample', + docbook : 'generated-gdbus-docs-doc', + rst : 'generated-gdbus-docs-rst', + markdown : 'generated-gdbus-docs-md', + ) + assert(gdbus_src_docs.length() == 5, 'expected 5 targets') + assert(gdbus_src_docs[0].full_path().endswith('.c'), 'expected 1 c source file') + assert(gdbus_src_docs[1].full_path().endswith('.h'), 'expected 1 c header file') + assert('generated-gdbus-docs-doc' in gdbus_src_docs[2].full_path(), 'expected 1 docbook file') + assert('generated-gdbus-docs-rst' in gdbus_src_docs[3].full_path(), 'expected 1 reStructuredText file') + assert('generated-gdbus-docs-md' in gdbus_src_docs[4].full_path(), 'expected 1 markdown file') +endif + if not pretend_glib_old and glib.version().version_compare('>=2.51.3') includes = [] else diff --git a/test cases/frameworks/7 gnome/meson.build b/test cases/frameworks/7 gnome/meson.build index f75ca93..37934b7 100644 --- a/test cases/frameworks/7 gnome/meson.build +++ b/test cases/frameworks/7 gnome/meson.build @@ -1,4 +1,4 @@ -project('gobject-introspection', 'c', 
meson_version: '>= 1.2.0') +project('gobject-introspection', 'c', meson_version: '>= 1.9.0') copyfile = find_program('copyfile.py') copyfile_gen = generator(copyfile, diff --git a/test cases/native/9 override with exe/subprojects/sub/meson.build b/test cases/native/9 override with exe/subprojects/sub/meson.build index f0343b2..74deaea 100644 --- a/test cases/native/9 override with exe/subprojects/sub/meson.build +++ b/test cases/native/9 override with exe/subprojects/sub/meson.build @@ -1,3 +1,11 @@ -project('sub', 'c', version : '1.0') +project('sub', 'c', version : '1.0', meson_version: '>= 1.9.0') foobar = executable('foobar', 'foobar.c', native : true) meson.override_find_program('foobar', foobar) + +found_foobar = find_program('foobar') +if found_foobar.version() != meson.project_version() + error('Overriden Executable had incorrect version: got @0@, expected @1@'.format(found_foobar.version(), meson.project_version())) +endif + +test('foobar executable', foobar, args : [ meson.current_build_dir() / 'test-output.c' ]) +test('overriden foobar executable', found_foobar, args : [ meson.current_build_dir() / 'test-output.c' ]) diff --git a/test cases/python/11 script path/gen b/test cases/python/11 script path/gen new file mode 100755 index 0000000..3d31694 --- /dev/null +++ b/test cases/python/11 script path/gen @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + + +if __name__ == '__main__': + with open('x.c', 'w', encoding='utf-8') as f: + f.write('int main() { return 0; }\n') + exit(0) diff --git a/test cases/python/11 script path/meson.build b/test cases/python/11 script path/meson.build new file mode 100644 index 0000000..c913ca4 --- /dev/null +++ b/test cases/python/11 script path/meson.build @@ -0,0 +1,19 @@ +project('11 script path', 'c') + +if meson.backend() != 'ninja' + error('MESON_SKIP_TEST: Ninja backend required') +endif + +run = find_program('run.py') + +gen = find_program('gen') + +src = custom_target( + 'src', + command: [run, gen], + output: 'x.c', +) + 
+exe = executable('e', + src, +) diff --git a/test cases/python/11 script path/run.py b/test cases/python/11 script path/run.py new file mode 100755 index 0000000..a8e6011 --- /dev/null +++ b/test cases/python/11 script path/run.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +import sys +import subprocess + +if __name__ == '__main__': + subprocess.check_call(sys.argv[1:]) diff --git a/test cases/rewrite/1 basic/addSrc.json b/test cases/rewrite/1 basic/addSrc.json index b8bc439..52603f6 100644 --- a/test cases/rewrite/1 basic/addSrc.json +++ b/test cases/rewrite/1 basic/addSrc.json @@ -43,6 +43,24 @@ }, { "type": "target", + "target": "trivialprog10", + "operation": "src_add", + "sources": ["fileA.cpp", "fileB.cpp", "a1.cpp"] + }, + { + "type": "target", + "target": "trivialprog11", + "operation": "src_add", + "sources": ["fileA.cpp", "a1.cpp"] + }, + { + "type": "target", + "target": "trivialprog12", + "operation": "src_add", + "sources": ["fileA.cpp", "fileB.cpp", "a1.cpp"] + }, + { + "type": "target", "target": "trivialprog0", "operation": "info" }, @@ -90,5 +108,25 @@ "type": "target", "target": "trivialprog9", "operation": "info" + }, + { + "type": "target", + "target": "trivialprog10", + "operation": "info" + }, + { + "type": "target", + "target": "trivialprog11", + "operation": "info" + }, + { + "type": "target", + "target": "trivialprog12", + "operation": "info" + }, + { + "type": "target", + "target": "rightName", + "operation": "info" } ] diff --git a/test cases/rewrite/1 basic/addTgt.json b/test cases/rewrite/1 basic/addTgt.json index 2f4e7e2..02d600a 100644 --- a/test cases/rewrite/1 basic/addTgt.json +++ b/test cases/rewrite/1 basic/addTgt.json @@ -1,7 +1,7 @@ [ { "type": "target", - "target": "trivialprog10", + "target": "trivialprog13", "operation": "target_add", "sources": ["new1.cpp", "new2.cpp"], "target_type": "shared_library" diff --git a/test cases/rewrite/1 basic/expected_dag.txt b/test cases/rewrite/1 basic/expected_dag.txt new file mode 100644 
index 0000000..c5025b4 --- /dev/null +++ b/test cases/rewrite/1 basic/expected_dag.txt @@ -0,0 +1,129 @@ +Data flowing to FunctionNode(1:0): + StringNode(1:8) + StringNode(1:23) +Data flowing to ArrayNode(3:7): + StringNode(3:8) + StringNode(3:20) +Data flowing to FunctionNode(4:7): + ArrayNode(4:13) +Data flowing to ArrayNode(4:13): + StringNode(4:14) + StringNode(4:27) +Data flowing to IdNode(5:7): + ArrayNode(3:7) +Data flowing to ArrayNode(6:7): + IdNode(6:8) +Data flowing to IdNode(6:8): + IdNode(5:7) +Data flowing to ArithmeticNode(7:7): + ArrayNode(7:7) + ArrayNode(8:8) +Data flowing to ArrayNode(7:7): + StringNode(7:8) + StringNode(7:20) +Data flowing to ArrayNode(8:8): + StringNode(8:9) +Data flowing to ArrayNode(9:7): + StringNode(9:8) + StringNode(9:20) +Data flowing to FunctionNode(10:7): + IdNode(10:13) +Data flowing to IdNode(10:13): + ArrayNode(9:7) +Data flowing to ArrayNode(11:7): + StringNode(11:8) + StringNode(11:20) +Data flowing to IdNode(12:7): + ArrayNode(11:7) +Data flowing to ArrayNode(13:7): + StringNode(13:8) + StringNode(13:21) +Data flowing to FunctionNode(15:13): + StringNode(14:7) + StringNode(15:26) +Data flowing to FunctionNode(20:7): + StringNode(20:18) + ArithmeticNode(20:34) +Data flowing to ArithmeticNode(20:34): + IdNode(20:34) + IdNode(20:41) +Data flowing to IdNode(20:34): + ArrayNode(3:7) +Data flowing to IdNode(20:41): + FunctionNode(4:7) +Data flowing to FunctionNode(21:7): + StringNode(21:18) + IdNode(21:34) +Data flowing to IdNode(21:34): + ArrayNode(3:7) +Data flowing to FunctionNode(22:7): + StringNode(22:18) + ArrayNode(22:34) +Data flowing to ArrayNode(22:34): + IdNode(22:35) +Data flowing to IdNode(22:35): + FunctionNode(4:7) +Data flowing to FunctionNode(23:7): + StringNode(23:18) + ArrayNode(23:34) +Data flowing to ArrayNode(23:34): + StringNode(23:35) + StringNode(23:47) +Data flowing to FunctionNode(24:7): + StringNode(24:18) + ArrayNode(24:34) +Data flowing to ArrayNode(24:34): + StringNode(24:35) + 
ArrayNode(24:47) +Data flowing to ArrayNode(24:47): + StringNode(24:48) +Data flowing to FunctionNode(25:7): + StringNode(25:18) + ArrayNode(25:34) +Data flowing to ArrayNode(25:34): + IdNode(25:35) + StringNode(25:41) +Data flowing to IdNode(25:35): + FunctionNode(4:7) +Data flowing to FunctionNode(26:7): + StringNode(26:18) + StringNode(26:34) + StringNode(26:46) +Data flowing to FunctionNode(27:7): + StringNode(27:18) + StringNode(27:34) + FunctionNode(27:47) + StringNode(27:69) +Data flowing to FunctionNode(27:47): + ArrayNode(3:7) + StringNode(27:60) +Data flowing to FunctionNode(28:7): + StringNode(28:18) + IdNode(28:34) +Data flowing to IdNode(28:34): + IdNode(5:7) +Data flowing to FunctionNode(29:0): + StringNode(29:11) + IdNode(29:27) +Data flowing to IdNode(29:27): + ArrayNode(6:7) +Data flowing to FunctionNode(30:0): + StringNode(30:11) + IdNode(30:28) +Data flowing to IdNode(30:28): + ArithmeticNode(7:7) +Data flowing to FunctionNode(31:0): + StringNode(31:11) + IdNode(31:28) +Data flowing to IdNode(31:28): + FunctionNode(10:7) +Data flowing to FunctionNode(32:0): + StringNode(32:11) + IdNode(32:28) +Data flowing to IdNode(32:28): + IdNode(12:7) +Data flowing to FunctionNode(33:0): + IdNode(33:11) + StringNode(33:23) +Data flowing to IdNode(33:11): + FunctionNode(15:13) diff --git a/test cases/rewrite/1 basic/info.json b/test cases/rewrite/1 basic/info.json index 0f1a3bd..9977f5a 100644 --- a/test cases/rewrite/1 basic/info.json +++ b/test cases/rewrite/1 basic/info.json @@ -53,5 +53,25 @@ "type": "target", "target": "trivialprog10", "operation": "info" + }, + { + "type": "target", + "target": "trivialprog11", + "operation": "info" + }, + { + "type": "target", + "target": "trivialprog12", + "operation": "info" + }, + { + "type": "target", + "target": "trivialprog13", + "operation": "info" + }, + { + "type": "target", + "target": "rightName", + "operation": "info" } ] diff --git a/test cases/rewrite/1 basic/meson.build b/test cases/rewrite/1 
basic/meson.build index 0f87c45..5fe9527 100644 --- a/test cases/rewrite/1 basic/meson.build +++ b/test cases/rewrite/1 basic/meson.build @@ -4,6 +4,16 @@ src1 = ['main.cpp', 'fileA.cpp'] src2 = files(['fileB.cpp', 'fileC.cpp']) src3 = src1 src4 = [src3] +src5 = ['main.cpp', 'fileA.cpp'] +src5 += ['fileB.cpp'] +src6 = ['main.cpp', 'fileA.cpp'] +src6 = files(src6) +src7 = ['main.cpp', 'fileA.cpp'] +src8 = src7 +src7 = ['fileB.cpp', 'fileC.cpp'] +name = 'rightName' +trickyName = get_variable('name') +name = 'wrongName' # Magic comment @@ -14,6 +24,10 @@ exe3 = executable('trivialprog3', ['main.cpp', 'fileA.cpp']) exe4 = executable('trivialprog4', ['main.cpp', ['fileA.cpp']]) exe5 = executable('trivialprog5', [src2, 'main.cpp']) exe6 = executable('trivialprog6', 'main.cpp', 'fileA.cpp') -exe7 = executable('trivialprog7', 'fileB.cpp', src1, 'fileC.cpp') +exe7 = executable('trivialprog7', 'fileB.cpp', get_variable('src1'), 'fileC.cpp') exe8 = executable('trivialprog8', src3) executable('trivialprog9', src4) +executable('trivialprog10', src5) +executable('trivialprog11', src6) +executable('trivialprog12', src8) +executable(trickyName, 'main.cpp') diff --git a/test cases/rewrite/1 basic/rmSrc.json b/test cases/rewrite/1 basic/rmSrc.json index 2e7447c..de56bbe 100644 --- a/test cases/rewrite/1 basic/rmSrc.json +++ b/test cases/rewrite/1 basic/rmSrc.json @@ -1,12 +1,6 @@ [ { "type": "target", - "target": "trivialprog1", - "operation": "src_rm", - "sources": ["fileA.cpp"] - }, - { - "type": "target", "target": "trivialprog3", "operation": "src_rm", "sources": ["fileA.cpp"] @@ -21,7 +15,7 @@ "type": "target", "target": "trivialprog5", "operation": "src_rm", - "sources": ["fileB.cpp"] + "sources": ["main.cpp"] }, { "type": "target", @@ -37,6 +31,18 @@ }, { "type": "target", + "target": "trivialprog10", + "operation": "src_rm", + "sources": ["fileA.cpp", "fileB.cpp"] + }, + { + "type": "target", + "target": "trivialprog11", + "operation": "src_rm", + "sources": ["fileA.cpp"] + 
}, + { + "type": "target", "target": "trivialprog0", "operation": "info" }, @@ -84,5 +90,25 @@ "type": "target", "target": "trivialprog9", "operation": "info" + }, + { + "type": "target", + "target": "trivialprog10", + "operation": "info" + }, + { + "type": "target", + "target": "trivialprog11", + "operation": "info" + }, + { + "type": "target", + "target": "trivialprog12", + "operation": "info" + }, + { + "type": "target", + "target": "rightName", + "operation": "info" } ] diff --git a/test cases/rewrite/1 basic/rmTgt.json b/test cases/rewrite/1 basic/rmTgt.json index dbaf025..bc6dc30 100644 --- a/test cases/rewrite/1 basic/rmTgt.json +++ b/test cases/rewrite/1 basic/rmTgt.json @@ -13,5 +13,10 @@ "type": "target", "target": "trivialprog9", "operation": "target_rm" + }, + { + "type": "target", + "target": "rightName", + "operation": "target_rm" } ] diff --git a/test cases/rewrite/7 tricky dataflow/addSrc.json b/test cases/rewrite/7 tricky dataflow/addSrc.json new file mode 100644 index 0000000..17e4292 --- /dev/null +++ b/test cases/rewrite/7 tricky dataflow/addSrc.json @@ -0,0 +1,77 @@ +[ + { + "type": "target", + "target": "tgt1", + "operation": "src_add", + "sources": [ + "new.c" + ] + }, + { + "type": "target", + "target": "tgt2", + "operation": "src_add", + "sources": [ + "new.c" + ] + }, + { + "type": "target", + "target": "tgt3", + "operation": "src_add", + "sources": [ + "new.c" + ] + }, + { + "type": "target", + "target": "tgt5", + "operation": "src_add", + "sources": [ + "new.c" + ] + }, + { + "type": "target", + "target": "tgt6", + "operation": "src_add", + "sources": [ + "new.c" + ] + }, + { + "type": "target", + "target": "tgt1", + "operation": "info" + }, + { + "type": "target", + "target": "tgt2", + "operation": "info" + }, + { + "type": "target", + "target": "tgt3", + "operation": "info" + }, + { + "type": "target", + "target": "tgt4", + "operation": "info" + }, + { + "type": "target", + "target": "tgt5", + "operation": "info" + }, + { + "type": 
"target", + "target": "tgt6", + "operation": "info" + }, + { + "type": "target", + "target": "tgt7", + "operation": "info" + } +] diff --git a/test cases/rewrite/7 tricky dataflow/info.json b/test cases/rewrite/7 tricky dataflow/info.json new file mode 100644 index 0000000..8d4ac55 --- /dev/null +++ b/test cases/rewrite/7 tricky dataflow/info.json @@ -0,0 +1,37 @@ +[ + { + "type": "target", + "target": "tgt1", + "operation": "info" + }, + { + "type": "target", + "target": "tgt2", + "operation": "info" + }, + { + "type": "target", + "target": "tgt3", + "operation": "info" + }, + { + "type": "target", + "target": "tgt4", + "operation": "info" + }, + { + "type": "target", + "target": "tgt5", + "operation": "info" + }, + { + "type": "target", + "target": "tgt6", + "operation": "info" + }, + { + "type": "target", + "target": "tgt7", + "operation": "info" + } +] diff --git a/test cases/rewrite/7 tricky dataflow/meson.build b/test cases/rewrite/7 tricky dataflow/meson.build new file mode 100644 index 0000000..ab572ea --- /dev/null +++ b/test cases/rewrite/7 tricky dataflow/meson.build @@ -0,0 +1,41 @@ +project('rewrite tricky dataflow', 'c') + +# Adding a file to `begin` will add this file to the sources of `tgt1`, but +# not to any other target. But a buggy rewriter might think that adding a file +# to `begin` will also add this file to `end` and will refuse to add a file to +# `begin`. 
+begin = ['foo.c'] +tgt1 = library('tgt1', begin) +distraction = executable('distraction', link_with: tgt1) + + +tgt2_srcs = ['foo.c'] +if meson.host_machine().system() == 'windows' # Some condition that cannot be known statically + tgt2_srcs += ['bar.c'] +endif +executable('tgt2', tgt2_srcs) + + +tgt34_srcs = ['foo.c'] +executable('tgt3', tgt34_srcs) +if meson.host_machine().system() == 'windows' + tgt34_srcs += ['bar.c'] +endif +executable('tgt4', tgt34_srcs) + + +dont_add_here_5 = ['foo.c'] +ct = custom_target('ct', output: 'out.c', input: dont_add_here_5, command: ['placeholder', '@INPUT@', '@OUTPUT@']) +executable('tgt5', ct) + + +dont_add_here_6 = ['foo.c'] +gen = generator(find_program('cp'), output: '@BASENAME@_copy.c', arguments: ['@INPUT@', '@OUTPUT@']) +generated = gen.process(dont_add_here_6) +executable('tgt6', generated) + +if false + # Should produce a warning, but should not crash + var = not_defined_1 + executable('tgt7', not_defined_2, var) +endif diff --git a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml index 8c5351a..dd8525b 100644 --- a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml +++ b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/Cargo.toml @@ -27,6 +27,9 @@ features = ["f1"] [dependencies.libname] version = "1" +[target."cfg(unix)".dependencies.unixdep] +version = "0.1" + [features] default = ["f1"] f1 = ["f2", "f3"] diff --git a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs index 4497dc4..c579815 100644 --- a/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs +++ b/test cases/rust/22 cargo subproject/subprojects/foo-0-rs/lib.rs @@ -8,6 +8,11 @@ extern "C" { #[cfg(feature = "foo")] #[no_mangle] pub extern "C" fn rust_func() -> i32 { + #[cfg(unix)] + { + extern crate unixdep; + assert!(unixdep::only_on_unix() == 0); + } 
assert!(common::common_func() == 0); assert!(libothername::stuff() == 42); let v: i32; diff --git a/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs.wrap b/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs.wrap new file mode 100644 index 0000000..99686e9 --- /dev/null +++ b/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs.wrap @@ -0,0 +1,2 @@ +[wrap-file] +method = cargo diff --git a/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs/Cargo.toml b/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs/Cargo.toml new file mode 100644 index 0000000..d72fb39 --- /dev/null +++ b/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs/Cargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unixdep" +version = "0.1" +edition = "2021" + +[lib] +path = "lib.rs" diff --git a/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs/lib.rs b/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs/lib.rs new file mode 100644 index 0000000..a736e8a --- /dev/null +++ b/test cases/rust/22 cargo subproject/subprojects/unixdep-0.1-rs/lib.rs @@ -0,0 +1,8 @@ +pub fn only_on_unix() -> i32 { + 0 +} + +#[cfg(not(unix))] +pub fn broken() -> i32 { + plop +} diff --git a/test cases/swift/10 mixed cpp/main.swift b/test cases/swift/10 mixed cpp/main.swift new file mode 100644 index 0000000..c055dcd --- /dev/null +++ b/test cases/swift/10 mixed cpp/main.swift @@ -0,0 +1,6 @@ +testCallFromSwift() +testCallWithParam("Calling C++ function from Swift with param is working") + +var test = Test() +var testtwo = Test(1) +test.testCallFromClass() diff --git a/test cases/swift/10 mixed cpp/meson.build b/test cases/swift/10 mixed cpp/meson.build new file mode 100644 index 0000000..94b70f0 --- /dev/null +++ b/test cases/swift/10 mixed cpp/meson.build @@ -0,0 +1,12 @@ +project('mixed cpp', 'cpp', 'swift') + +swiftc = meson.get_compiler('swift') + +# Testing C++ and Swift interoperability requires Swift 5.9 +if not 
swiftc.version().version_compare('>= 5.9') + error('MESON_SKIP_TEST Test requires Swift 5.9') +endif + +lib = static_library('mylib', 'mylib.cpp') +exe = executable('prog', 'main.swift', 'mylib.h', link_with: lib) +test('cpp interface', exe) diff --git a/test cases/swift/10 mixed cpp/mylib.cpp b/test cases/swift/10 mixed cpp/mylib.cpp new file mode 100644 index 0000000..0c61681 --- /dev/null +++ b/test cases/swift/10 mixed cpp/mylib.cpp @@ -0,0 +1,22 @@ +#include "mylib.h" +#include <iostream> + +Test::Test() { + std::cout << "Test initialized" << std::endl; +} + +Test::Test(int param) { + std::cout << "Test initialized with param " << param << std::endl; +} + +void Test::testCallFromClass() { + std::cout << "Calling C++ class function from Swift is working" << std::endl; +} + +void testCallFromSwift() { + std::cout << "Calling this C++ function from Swift is working" << std::endl; +} + +void testCallWithParam(const std::string ¶m) { + std::cout << param << std::endl; +} diff --git a/test cases/swift/10 mixed cpp/mylib.h b/test cases/swift/10 mixed cpp/mylib.h new file mode 100644 index 0000000..c465be4 --- /dev/null +++ b/test cases/swift/10 mixed cpp/mylib.h @@ -0,0 +1,13 @@ +#pragma once +#include <string> + +class Test { +public: + Test(); + Test(int param); + + void testCallFromClass(); +}; + +void testCallFromSwift(); +void testCallWithParam(const std::string ¶m); diff --git a/test cases/swift/11 c std passthrough/header.h b/test cases/swift/11 c std passthrough/header.h new file mode 100644 index 0000000..287cdf4 --- /dev/null +++ b/test cases/swift/11 c std passthrough/header.h @@ -0,0 +1,10 @@ +#pragma once + +// let's just assume the default isn't c18. +#if __STDC_VERSION__ == 201710L +typedef struct Datatype { + int x; +} Datatype; +#else +#error C standard version not set! 
+#endif diff --git a/test cases/swift/11 c std passthrough/main.swift b/test cases/swift/11 c std passthrough/main.swift new file mode 100644 index 0000000..f6358db --- /dev/null +++ b/test cases/swift/11 c std passthrough/main.swift @@ -0,0 +1,3 @@ +let d = Datatype(x: 1) + +precondition(d.x == 1) diff --git a/test cases/swift/11 c std passthrough/meson.build b/test cases/swift/11 c std passthrough/meson.build new file mode 100644 index 0000000..202768f --- /dev/null +++ b/test cases/swift/11 c std passthrough/meson.build @@ -0,0 +1,3 @@ +project('c std passthrough', 'swift', 'c', default_options: {'c_std': 'c18'}) + +executable('program', 'main.swift', 'header.h') diff --git a/test cases/swift/11 mixed objcpp/main.swift b/test cases/swift/11 mixed objcpp/main.swift new file mode 100644 index 0000000..cd6dd2b --- /dev/null +++ b/test cases/swift/11 mixed objcpp/main.swift @@ -0,0 +1,2 @@ +var test: ObjCPPTest = ObjCPPTest() +test.testCallToObjCPP() diff --git a/test cases/swift/11 mixed objcpp/meson.build b/test cases/swift/11 mixed objcpp/meson.build new file mode 100644 index 0000000..69098e2 --- /dev/null +++ b/test cases/swift/11 mixed objcpp/meson.build @@ -0,0 +1,12 @@ +project('mixed objcpp', 'objcpp', 'swift') + +swiftc = meson.get_compiler('swift') + +# Testing Objective-C++ and Swift interoperability requires Swift 5.9 +if not swiftc.version().version_compare('>= 5.9') + error('MESON_SKIP_TEST Test requires Swift 5.9') +endif + +lib = static_library('mylib', 'mylib.mm') +exe = executable('prog', 'main.swift', 'mylib.h', link_with: lib) +test('objcpp interface', exe) diff --git a/test cases/swift/11 mixed objcpp/mylib.h b/test cases/swift/11 mixed objcpp/mylib.h new file mode 100644 index 0000000..1e7b23d --- /dev/null +++ b/test cases/swift/11 mixed objcpp/mylib.h @@ -0,0 +1,17 @@ +#pragma once +#import <Foundation/Foundation.h> + +class Test { +public: + Test(); + + void testCallFromClass(); +}; + +@interface ObjCPPTest: NSObject { + @private Test 
*test; +} +- (id)init; +- (void)dealloc; +- (void)testCallToObjCPP; +@end diff --git a/test cases/swift/11 mixed objcpp/mylib.mm b/test cases/swift/11 mixed objcpp/mylib.mm new file mode 100644 index 0000000..f7e9ab3 --- /dev/null +++ b/test cases/swift/11 mixed objcpp/mylib.mm @@ -0,0 +1,29 @@ +#include "mylib.h" +#include <iostream> + +Test::Test() { + std::cout << "Test initialized" << std::endl; +} + +void Test::testCallFromClass() { + std::cout << "Calling Objective-C++ class function from Swift is working" << std::endl; +} + +@implementation ObjCPPTest +- (id)init { + self = [super init]; + if (self) { + test = new Test(); + } + return self; +} + +- (void)dealloc { + delete test; + [super dealloc]; +} + +- (void)testCallToObjCPP { + test->testCallFromClass(); +} +@end diff --git a/test cases/unit/120 rewrite/meson.build b/test cases/unit/120 rewrite/meson.build index 7d0330b..654b09d 100644 --- a/test cases/unit/120 rewrite/meson.build +++ b/test cases/unit/120 rewrite/meson.build @@ -62,6 +62,7 @@ cppcoro = declare_dependency( ) +cpp_compiler = meson.get_compiler('cpp') if get_option('unicode') #if comment #if comment 2 mfc=cpp_compiler.find_library(get_option('debug')?'mfc140ud':'mfc140u') @@ -80,6 +81,10 @@ assert(not (3 in [1, 2]), '''3 shouldn't be in [1, 2]''') assert('b' in ['a', 'b'], ''''b' should be in ['a', 'b']''') assert('c' not in ['a', 'b'], ''''c' shouldn't be in ['a', 'b']''') +exe1 = 'exe1' +exe2 = 'exe2' +exe3 = 'exe3' + assert(exe1 in [exe1, exe2], ''''exe1 should be in [exe1, exe2]''') assert(exe3 not in [exe1, exe2], ''''exe3 shouldn't be in [exe1, exe2]''') @@ -185,5 +190,11 @@ if a \ debug('help!') endif +if false + # Should produce a warning, but should not crash + foo = not_defined + message(not_defined) +endif + # End of file comment with no linebreak
\ No newline at end of file diff --git a/test cases/unit/129 vala internal glib/lib.vala b/test cases/unit/129 vala internal glib/lib.vala new file mode 100644 index 0000000..e62e632 --- /dev/null +++ b/test cases/unit/129 vala internal glib/lib.vala @@ -0,0 +1,3 @@ +public int func() { + return 42; +} diff --git a/test cases/unit/129 vala internal glib/meson.build b/test cases/unit/129 vala internal glib/meson.build new file mode 100644 index 0000000..9479082 --- /dev/null +++ b/test cases/unit/129 vala internal glib/meson.build @@ -0,0 +1,21 @@ +project('vala internal glib') + +if not add_languages('vala', required: false) + error('MESON_SKIP_TEST valac not installed') +endif + +glib_ver = get_option('glib-version') +if glib_ver == 'unset' + error('Required to set -Dglib-version') +endif + +glib_dep = declare_dependency(version: glib_ver) +meson.override_dependency('glib-2.0', glib_dep) + +named_glib_dep = dependency('glib-2.0') + +assert(named_glib_dep.type_name() == 'internal') +assert(glib_dep == named_glib_dep) + +tgt = static_library('vala-tgt', 'lib.vala', + dependencies: named_glib_dep) diff --git a/test cases/unit/129 vala internal glib/meson.options b/test cases/unit/129 vala internal glib/meson.options new file mode 100644 index 0000000..f8a1ece --- /dev/null +++ b/test cases/unit/129 vala internal glib/meson.options @@ -0,0 +1 @@ +option('glib-version', type: 'string', value: 'unset') diff --git a/test cases/vala/31 generated ui file subdirectory/meson.build b/test cases/vala/31 generated ui file subdirectory/meson.build new file mode 100644 index 0000000..4210581 --- /dev/null +++ b/test cases/vala/31 generated ui file subdirectory/meson.build @@ -0,0 +1,22 @@ +project('demo', 'c', 'vala') + +gnome = import('gnome', required: false) + +if not gnome.found() + error('MESON_SKIP_TEST: gnome module not supported') +endif + +deps = [ + dependency('glib-2.0', version : '>=2.50'), + dependency('gobject-2.0'), + dependency('gtk+-3.0'), +] + +subdir('subdir') 
+ +executable( + 'demo', + 'test.vala', + resources, + dependencies: deps, +) diff --git a/test cases/vala/31 generated ui file subdirectory/subdir/TestBox.ui.in b/test cases/vala/31 generated ui file subdirectory/subdir/TestBox.ui.in new file mode 100644 index 0000000..bf5c831 --- /dev/null +++ b/test cases/vala/31 generated ui file subdirectory/subdir/TestBox.ui.in @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8"?> +<interface> + <requires lib="gtk" version="3.0"/> + <template class="TestBox" parent="GtkBox"> + </template> +</interface> diff --git a/test cases/vala/31 generated ui file subdirectory/subdir/meson.build b/test cases/vala/31 generated ui file subdirectory/subdir/meson.build new file mode 100644 index 0000000..dbe9344 --- /dev/null +++ b/test cases/vala/31 generated ui file subdirectory/subdir/meson.build @@ -0,0 +1,13 @@ +ui_tgt = custom_target( + input: 'TestBox.ui.in', + output: 'TestBox.ui', + command: [find_program('cat')], + feed: true, + capture: true, +) + +resources = gnome.compile_resources('test-resources', + 'test.gresource.xml', + c_name: 'test_res', + dependencies: ui_tgt, +) diff --git a/test cases/vala/31 generated ui file subdirectory/subdir/test.gresource.xml b/test cases/vala/31 generated ui file subdirectory/subdir/test.gresource.xml new file mode 100644 index 0000000..382b951 --- /dev/null +++ b/test cases/vala/31 generated ui file subdirectory/subdir/test.gresource.xml @@ -0,0 +1,6 @@ +<?xml version="1.0" encoding="UTF-8"?> +<gresources> + <gresource prefix="/com/mesonbuild/test"> + <file>TestBox.ui</file> + </gresource> +</gresources> diff --git a/test cases/vala/31 generated ui file subdirectory/test.vala b/test cases/vala/31 generated ui file subdirectory/test.vala new file mode 100644 index 0000000..36f565b --- /dev/null +++ b/test cases/vala/31 generated ui file subdirectory/test.vala @@ -0,0 +1,7 @@ +[GtkTemplate (ui = "/com/mesonbuild/test/TestBox.ui")] +class TestBox: Gtk.Box { +} + +int main() { + return 0; +} diff 
--git a/test cases/windows/21 masm/meson.build b/test cases/windows/21 masm/meson.build index b6b8fbb..5335a0d 100644 --- a/test cases/windows/21 masm/meson.build +++ b/test cases/windows/21 masm/meson.build @@ -1,9 +1,5 @@ project('test-masm', 'c') -if get_option('backend').startswith('vs') - error('MESON_SKIP_TEST: masm is not supported by vs backend') -endif - cc = meson.get_compiler('c') # MASM must be found when using MSVC, otherwise it is optional diff --git a/test cases/windows/25 embed manifest/DPIAware.manifest b/test cases/windows/25 embed manifest/DPIAware.manifest new file mode 100644 index 0000000..f2708ec --- /dev/null +++ b/test cases/windows/25 embed manifest/DPIAware.manifest @@ -0,0 +1,9 @@ +<?xml version="1.0" encoding="UTF-8" standalone="yes"?> +<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3"> + <asmv3:application> + <asmv3:windowsSettings> + <dpiAware xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">true</dpiAware> + <dpiAwareness xmlns="http://schemas.microsoft.com/SMI/2016/WindowsSettings">PerMonitorV2</dpiAwareness> + </asmv3:windowsSettings> + </asmv3:application> +</assembly> diff --git a/test cases/windows/25 embed manifest/meson.build b/test cases/windows/25 embed manifest/meson.build new file mode 100644 index 0000000..0f4c9b4 --- /dev/null +++ b/test cases/windows/25 embed manifest/meson.build @@ -0,0 +1,11 @@ +project('can-manifests-be-embedded', 'c') + +cc = meson.get_compiler('c') + +if cc.get_linker_id() not in ['link', 'lld-link', 'xilink'] # cc.get_linker_argument_syntax() != 'link' + error('MESON_SKIP_TEST: test is only relevant for the Microsoft linker') +endif + +# Ensure that the manifest can be embedded +executable('prog', 'prog.c', + link_args: ['/MANIFEST:EMBED', '/MANIFESTINPUT:' + meson.project_source_root() / 'DPIAware.manifest']) diff --git a/test cases/windows/25 embed manifest/prog.c b/test cases/windows/25 embed manifest/prog.c new 
file mode 100644 index 0000000..b1d9c2c --- /dev/null +++ b/test cases/windows/25 embed manifest/prog.c @@ -0,0 +1,3 @@ +int main(int argc, char *argv[]) { + return 0; +} diff --git a/tools/run_with_cov.py b/tools/run_with_cov.py deleted file mode 100755 index 0d3fba6..0000000 --- a/tools/run_with_cov.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python3 -# SPDX-License-Identifier: Apache-2.0 -# Copyright 2021 The Meson development team - -import subprocess -import coverage -import os -import sys -from pathlib import Path - -root_path = Path(__file__).parent.parent.absolute() - -# Python magic so we can import mesonlib -sys.path.append(root_path.as_posix()) -from mesonbuild import mesonlib - -def generate_coveragerc() -> Path: - i_file = (root_path / 'data' / '.coveragerc.in') - o_file = (root_path / '.coveragerc') - raw = i_file.read_text(encoding='utf-8') - raw = raw.replace('@ROOT@', root_path.as_posix()) - o_file.write_text(raw, encoding='utf-8') - return o_file - -def main() -> int: - # Remove old run data - out_dir = root_path / '.coverage' - mesonlib.windows_proof_rmtree(out_dir.as_posix()) - out_dir.mkdir(parents=True, exist_ok=True) - - # Setup coverage - python_path = (root_path / 'ci').as_posix() - os.environ['PYTHONPATH'] = os.pathsep.join([python_path, os.environ.get('PYTHONPATH', '')]) - os.environ['COVERAGE_PROCESS_START'] = generate_coveragerc().as_posix() - coverage.process_startup() - - # Run the actual command - cmd = mesonlib.python_command + sys.argv[1:] - return subprocess.run(cmd, env=os.environ.copy()).returncode - -if __name__ == '__main__': - raise SystemExit(main()) diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 0618b20..8f56611 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -29,7 +29,7 @@ import mesonbuild.coredata import mesonbuild.machinefile import mesonbuild.modules.gnome from mesonbuild.mesonlib import ( - BuildDirLock, MachineChoice, is_windows, is_osx, 
is_cygwin, is_dragonflybsd, + DirectoryLock, DirectoryLockAction, MachineChoice, is_windows, is_osx, is_cygwin, is_dragonflybsd, is_sunos, windows_proof_rmtree, python_command, version_compare, split_args, quote_arg, relpath, is_linux, git, search_version, do_conf_file, do_conf_str, default_prefix, MesonException, EnvironmentException, @@ -222,6 +222,47 @@ class AllPlatformTests(BasePlatformTests): confdata.values = {'VAR': (['value'], 'description')} self.assertRaises(MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson') + def test_cmake_configuration(self): + if self.backend is not Backend.ninja: + raise SkipTest('ninja backend needed to configure with cmake') + + cmake = ExternalProgram('cmake') + if not cmake.found(): + raise SkipTest('cmake not available') + + cmake_version = cmake.get_version() + if not version_compare(cmake_version, '>=3.13.5'): + raise SkipTest('cmake is too old') + + with tempfile.TemporaryDirectory() as tmpdir: + srcdir = os.path.join(tmpdir, 'src') + + shutil.copytree(os.path.join(self.src_root, 'test cases', 'common', '14 configure file'), srcdir) + self.init(srcdir) + + cmake_builddir = os.path.join(srcdir, "cmake_builddir") + self.assertNotEqual(self.builddir, cmake_builddir) + self._run([cmake.path, '-G', 'Ninja', '-S', srcdir, '-B', cmake_builddir]) + + header_list = [ + 'config7.h', + 'config10.h', + ] + + for header in header_list: + meson_header = "" + cmake_header = "" + + with open(os.path.join(self.builddir, header), encoding='utf-8') as f: + meson_header = f.read() + + cmake_header_path = os.path.join(cmake_builddir, header) + with open(os.path.join(cmake_builddir, header), encoding='utf-8') as f: + cmake_header = f.read() + + self.assertTrue(cmake_header, f'cmake generated header {header} is empty') + self.assertEqual(cmake_header, meson_header) + def test_absolute_prefix_libdir(self): ''' Tests that setting absolute paths for --prefix and --libdir work. 
Can't @@ -1099,110 +1140,144 @@ class AllPlatformTests(BasePlatformTests): for lang, evar in langs: # Detect with evar and do sanity checks on that if evar in os.environ: - ecc = compiler_from_language(env, lang, MachineChoice.HOST) - self.assertTrue(ecc.version) - elinker = detect_static_linker(env, ecc) - # Pop it so we don't use it for the next detection - evalue = os.environ.pop(evar) - # Very rough/strict heuristics. Would never work for actual - # compiler detection, but should be ok for the tests. - ebase = os.path.basename(evalue) - if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')): - self.assertIsInstance(ecc, gnu) - self.assertIsInstance(elinker, ar) - elif 'clang-cl' in ebase: - self.assertIsInstance(ecc, clangcl) - self.assertIsInstance(elinker, lib) - elif 'clang' in ebase: - self.assertIsInstance(ecc, clang) - self.assertIsInstance(elinker, ar) - elif ebase.startswith('ic'): - self.assertIsInstance(ecc, intel) - self.assertIsInstance(elinker, ar) - elif ebase.startswith('cl'): - self.assertIsInstance(ecc, msvc) - self.assertIsInstance(elinker, lib) - else: - raise AssertionError(f'Unknown compiler {evalue!r}') - # Check that we actually used the evalue correctly as the compiler - self.assertEqual(ecc.get_exelist(), split_args(evalue)) + with self.subTest(lang=lang, evar=evar): + try: + ecc = compiler_from_language(env, lang, MachineChoice.HOST) + except EnvironmentException: + # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind + if is_ci(): + self.fail(f'Could not find a compiler for {lang}') + if sys.version_info < (3, 11): + continue + self.skipTest(f'No valid compiler for {lang}.') + finally: + # Pop it so we don't use it for the next detection + evalue = os.environ.pop(evar) + assert ecc is not None, "Something went really wrong" + self.assertTrue(ecc.version) + elinker = detect_static_linker(env, ecc) + # Very rough/strict heuristics. 
Would never work for actual + # compiler detection, but should be ok for the tests. + ebase = os.path.basename(evalue) + if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')): + self.assertIsInstance(ecc, gnu) + self.assertIsInstance(elinker, ar) + elif 'clang-cl' in ebase: + self.assertIsInstance(ecc, clangcl) + self.assertIsInstance(elinker, lib) + elif 'clang' in ebase: + self.assertIsInstance(ecc, clang) + self.assertIsInstance(elinker, ar) + elif ebase.startswith('ic'): + self.assertIsInstance(ecc, intel) + self.assertIsInstance(elinker, ar) + elif ebase.startswith('cl'): + self.assertIsInstance(ecc, msvc) + self.assertIsInstance(elinker, lib) + else: + self.fail(f'Unknown compiler {evalue!r}') + # Check that we actually used the evalue correctly as the compiler + self.assertEqual(ecc.get_exelist(), split_args(evalue)) + # Do auto-detection of compiler based on platform, PATH, etc. - cc = compiler_from_language(env, lang, MachineChoice.HOST) - self.assertTrue(cc.version) - linker = detect_static_linker(env, cc) - # Check compiler type - if isinstance(cc, gnu): - self.assertIsInstance(linker, ar) - if is_osx(): - self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker) - elif is_sunos(): - self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin)) - else: - self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin) - if isinstance(cc, clangcl): - self.assertIsInstance(linker, lib) - self.assertIsInstance(cc.linker, linkers.ClangClDynamicLinker) - if isinstance(cc, clang): - self.assertIsInstance(linker, ar) - if is_osx(): - self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker) - elif is_windows(): - # This is clang, not clang-cl. 
This can be either an - # ld-like linker of link.exe-like linker (usually the - # former for msys2, the latter otherwise) - self.assertIsInstance(cc.linker, (linkers.MSVCDynamicLinker, linkers.GnuLikeDynamicLinkerMixin)) - elif is_sunos(): - self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin)) - else: - self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin) - if isinstance(cc, intel): - self.assertIsInstance(linker, ar) - if is_osx(): - self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker) - elif is_windows(): - self.assertIsInstance(cc.linker, linkers.XilinkDynamicLinker) - else: - self.assertIsInstance(cc.linker, linkers.GnuDynamicLinker) - if isinstance(cc, msvc): - self.assertTrue(is_windows()) - self.assertIsInstance(linker, lib) - self.assertEqual(cc.id, 'msvc') - self.assertTrue(hasattr(cc, 'is_64')) - self.assertIsInstance(cc.linker, linkers.MSVCDynamicLinker) - # If we're on Windows CI, we know what the compiler will be - if 'arch' in os.environ: - if os.environ['arch'] == 'x64': - self.assertTrue(cc.is_64) + with self.subTest(lang=lang): + try: + cc = compiler_from_language(env, lang, MachineChoice.HOST) + except EnvironmentException: + # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind + if is_ci(): + self.fail(f'Could not find a compiler for {lang}') + if sys.version_info < (3, 11): + continue + self.skipTest(f'No valid compiler for {lang}.') + assert cc is not None, "Something went really wrong" + self.assertTrue(cc.version) + linker = detect_static_linker(env, cc) + # Check compiler type + if isinstance(cc, gnu): + self.assertIsInstance(linker, ar) + if is_osx(): + self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker) + elif is_sunos(): + self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin)) else: - self.assertFalse(cc.is_64) + self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin) + if 
isinstance(cc, clangcl): + self.assertIsInstance(linker, lib) + self.assertIsInstance(cc.linker, linkers.ClangClDynamicLinker) + if isinstance(cc, clang): + self.assertIsInstance(linker, ar) + if is_osx(): + self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker) + elif is_windows(): + # This is clang, not clang-cl. This can be either an + # ld-like linker of link.exe-like linker (usually the + # former for msys2, the latter otherwise) + self.assertIsInstance(cc.linker, (linkers.MSVCDynamicLinker, linkers.GnuLikeDynamicLinkerMixin)) + elif is_sunos(): + self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin)) + else: + self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin) + if isinstance(cc, intel): + self.assertIsInstance(linker, ar) + if is_osx(): + self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker) + elif is_windows(): + self.assertIsInstance(cc.linker, linkers.XilinkDynamicLinker) + else: + self.assertIsInstance(cc.linker, linkers.GnuDynamicLinker) + if isinstance(cc, msvc): + self.assertTrue(is_windows()) + self.assertIsInstance(linker, lib) + self.assertEqual(cc.id, 'msvc') + self.assertTrue(hasattr(cc, 'is_64')) + self.assertIsInstance(cc.linker, linkers.MSVCDynamicLinker) + # If we're on Windows CI, we know what the compiler will be + if 'arch' in os.environ: + if os.environ['arch'] == 'x64': + self.assertTrue(cc.is_64) + else: + self.assertFalse(cc.is_64) + # Set evar ourselves to a wrapper script that just calls the same # exelist + some argument. This is meant to test that setting # something like `ccache gcc -pipe` or `distcc ccache gcc` works. 
- wrapper = os.path.join(testdir, 'compiler wrapper.py') - wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG'] - os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc) - - # Check static linker too - wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args() - os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker) - - # Need a new env to re-run environment loading - env = get_fake_env(testdir, self.builddir, self.prefix) - - wcc = compiler_from_language(env, lang, MachineChoice.HOST) - wlinker = detect_static_linker(env, wcc) - # Pop it so we don't use it for the next detection - os.environ.pop('AR') - # Must be the same type since it's a wrapper around the same exelist - self.assertIs(type(cc), type(wcc)) - self.assertIs(type(linker), type(wlinker)) - # Ensure that the exelist is correct - self.assertEqual(wcc.get_exelist(), wrappercc) - self.assertEqual(wlinker.get_exelist(), wrapperlinker) - # Ensure that the version detection worked correctly - self.assertEqual(cc.version, wcc.version) - if hasattr(cc, 'is_64'): - self.assertEqual(cc.is_64, wcc.is_64) + with self.subTest('wrapper script', lang=lang): + wrapper = os.path.join(testdir, 'compiler wrapper.py') + wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG'] + os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc) + + # Check static linker too + wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args() + os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker) + + # Need a new env to re-run environment loading + env = get_fake_env(testdir, self.builddir, self.prefix) + + try: + wcc = compiler_from_language(env, lang, MachineChoice.HOST) + except EnvironmentException: + # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind + if is_ci(): + self.fail(f'Could not find a compiler for {lang}') + if sys.version_info < (3, 11): + continue + 
self.skipTest(f'No valid compiler for {lang}.') + wlinker = detect_static_linker(env, wcc) + del os.environ['AR'] + + # Must be the same type since it's a wrapper around the same exelist + self.assertIs(type(cc), type(wcc)) + self.assertIs(type(linker), type(wlinker)) + + # Ensure that the exelist is correct + self.assertEqual(wcc.get_exelist(), wrappercc) + self.assertEqual(wlinker.get_exelist(), wrapperlinker) + + # Ensure that the version detection worked correctly + self.assertEqual(cc.version, wcc.version) + if hasattr(cc, 'is_64'): + self.assertEqual(cc.is_64, wcc.is_64) def test_always_prefer_c_compiler_for_asm(self): testdir = os.path.join(self.common_test_dir, '133 c cpp and asm') @@ -2499,10 +2574,9 @@ class AllPlatformTests(BasePlatformTests): def test_flock(self): exception_raised = False with tempfile.TemporaryDirectory() as tdir: - os.mkdir(os.path.join(tdir, 'meson-private')) - with BuildDirLock(tdir): + with DirectoryLock(tdir, 'lock', DirectoryLockAction.FAIL, 'failed to lock directory'): try: - with BuildDirLock(tdir): + with DirectoryLock(tdir, 'lock', DirectoryLockAction.FAIL, 'expected failure'): pass except MesonException: exception_raised = True @@ -3612,6 +3686,8 @@ class AllPlatformTests(BasePlatformTests): # Account for differences in output res_wb = [i for i in res_wb if i['type'] != 'custom'] for i in res_wb: + if i['id'] == 'test1@exe': + i['build_by_default'] = 'unknown' i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']] for k in ('install_filename', 'dependencies', 'win_subsystem'): if k in i: @@ -3730,7 +3806,7 @@ class AllPlatformTests(BasePlatformTests): }, { 'name': 'bugDep1', - 'required': True, + 'required': 'unknown', 'version': [], 'has_fallback': False, 'conditional': False @@ -4453,6 +4529,10 @@ class AllPlatformTests(BasePlatformTests): self.assertIn(f'TEST_C="{expected}"', o) self.assertIn('export TEST_C', o) + cmd = self.meson_command + ['devenv', '-C', self.builddir] + python_command + ['-c', 
'import sys; sys.exit(42)'] + result = subprocess.run(cmd, encoding='utf-8') + self.assertEqual(result.returncode, 42) + def test_clang_format_check(self): if self.backend is not Backend.ninja: raise SkipTest(f'Skipping clang-format tests with {self.backend.name} backend') @@ -5217,7 +5297,7 @@ class AllPlatformTests(BasePlatformTests): env = get_fake_env() cc = detect_c_compiler(env, MachineChoice.HOST) has_rsp = cc.linker.id in { - 'ld.bfd', 'ld.gold', 'ld.lld', 'ld.mold', 'ld.qcld', 'ld.wasm', + 'ld.bfd', 'ld.eld', 'ld.gold', 'ld.lld', 'ld.mold', 'ld.qcld', 'ld.wasm', 'link', 'lld-link', 'mwldarm', 'mwldeppc', 'optlink', 'xilink', } self.assertEqual(cc.linker.get_accepts_rsp(), has_rsp) diff --git a/unittests/cargotests.py b/unittests/cargotests.py index d1ac838..eeb676b 100644 --- a/unittests/cargotests.py +++ b/unittests/cargotests.py @@ -101,6 +101,12 @@ class CargoCfgTest(unittest.TestCase): (TokenType.IDENTIFIER, 'unix'), (TokenType.RPAREN, None), ]), + ('cfg(windows)', [ + (TokenType.CFG, None), + (TokenType.LPAREN, None), + (TokenType.IDENTIFIER, 'windows'), + (TokenType.RPAREN, None), + ]), ] for data, expected in cases: with self.subTest(): @@ -130,63 +136,44 @@ class CargoCfgTest(unittest.TestCase): cfg.Equal(cfg.Identifier("target_arch"), cfg.String("x86")), cfg.Equal(cfg.Identifier("target_os"), cfg.String("linux")), ]))), + ('cfg(all(any(target_os = "android", target_os = "linux"), any(custom_cfg)))', + cfg.All([ + cfg.Any([ + cfg.Equal(cfg.Identifier("target_os"), cfg.String("android")), + cfg.Equal(cfg.Identifier("target_os"), cfg.String("linux")), + ]), + cfg.Any([ + cfg.Identifier("custom_cfg"), + ]), + ])), ] for data, expected in cases: with self.subTest(): self.assertEqual(cfg.parse(iter(cfg.lexer(data))), expected) - def test_ir_to_meson(self) -> None: - build = builder.Builder('') - HOST_MACHINE = build.identifier('host_machine') - + def test_eval_ir(self) -> None: + d = { + 'target_os': 'unix', + 'unix': '', + } cases = [ - ('target_os = 
"windows"', - build.equal(build.method('system', HOST_MACHINE), - build.string('windows'))), - ('target_arch = "x86"', - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86'))), - ('target_family = "unix"', - build.equal(build.method('system', HOST_MACHINE), - build.string('unix'))), - ('not(target_arch = "x86")', - build.not_(build.equal( - build.method('cpu_family', HOST_MACHINE), - build.string('x86')))), - ('any(target_arch = "x86", target_arch = "x86_64")', - build.or_( - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86')), - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86_64')))), - ('any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")', - build.or_( - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86')), - build.or_( - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86_64')), - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('aarch64'))))), - ('all(target_arch = "x86", target_arch = "x86_64")', - build.and_( - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86')), - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86_64')))), - ('all(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")', - build.and_( - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86')), - build.and_( - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('x86_64')), - build.equal(build.method('cpu_family', HOST_MACHINE), - build.string('aarch64'))))), + ('target_os = "windows"', False), + ('target_os = "unix"', True), + ('doesnotexist = "unix"', False), + ('not(target_os = "windows")', True), + ('any(target_os = "windows", target_arch = "x86_64")', False), + ('any(target_os = "windows", target_os = "unix")', True), + ('all(target_os = "windows", target_os = "unix")', False), + ('all(not(target_os = "windows"), target_os = 
"unix")', True), + ('any(unix, windows)', True), + ('all()', True), + ('any()', False), + ('cfg(unix)', True), + ('cfg(windows)', False), ] for data, expected in cases: with self.subTest(): - value = cfg.ir_to_meson(cfg.parse(iter(cfg.lexer(data))), build) + value = cfg.eval_cfg(data, d) self.assertEqual(value, expected) class CargoLockTest(unittest.TestCase): diff --git a/unittests/internaltests.py b/unittests/internaltests.py index d7994ee..d803f04 100644 --- a/unittests/internaltests.py +++ b/unittests/internaltests.py @@ -555,9 +555,9 @@ class InternalTests(unittest.TestCase): 'libbar.so.7.10', 'libbar.so.7.9', 'libbar.so.7.9.3']: libpath = Path(tmpdir) / i libpath.write_text('', encoding='utf-8') - found = cc._find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True) + found = cc._find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True, ignore_system_dirs=False) self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0') - found = cc._find_library_real('bar', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True) + found = cc._find_library_real('bar', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True, ignore_system_dirs=False) self.assertEqual(os.path.basename(found[0]), 'libbar.so.7.10') def test_find_library_patterns(self): diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py index 6b896d7..376c395 100644 --- a/unittests/linuxliketests.py +++ b/unittests/linuxliketests.py @@ -986,6 +986,22 @@ class LinuxlikeTests(BasePlatformTests): self.assertEqual(got_rpath, yonder_libdir, rpath_format) @skip_if_not_base_option('b_sanitize') + def test_env_cflags_ldflags(self): + if is_cygwin(): + raise SkipTest('asan not available on Cygwin') + if is_openbsd(): + raise SkipTest('-fsanitize=address is not supported on OpenBSD') + + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = {'CFLAGS': '-fsanitize=address', 'LDFLAGS': '-I.'} + 
self.init(testdir, override_envvars=env) + self.build() + compdb = self.get_compdb() + for i in compdb: + self.assertIn("-fsanitize=address", i["command"]) + self.wipe() + + @skip_if_not_base_option('b_sanitize') def test_pch_with_address_sanitizer(self): if is_cygwin(): raise SkipTest('asan not available on Cygwin') diff --git a/unittests/machinefiletests.py b/unittests/machinefiletests.py index b2839e6..7f88a54 100644 --- a/unittests/machinefiletests.py +++ b/unittests/machinefiletests.py @@ -550,8 +550,8 @@ class NativeFileTests(BasePlatformTests): # into augments. self.assertEqual(found, 2, 'Did not find all two sections') - def test_builtin_options_subprojects_overrides_buildfiles(self): - # If the buildfile says subproject(... default_library: shared), ensure that's overwritten + def test_builtin_options_machinefile_overrides_subproject(self): + # The buildfile says subproject(... default_library: static), the machinefile overrides it testcase = os.path.join(self.common_test_dir, '223 persubproject options') config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}}) @@ -563,11 +563,18 @@ class NativeFileTests(BasePlatformTests): check = cm.exception.stdout self.assertIn(check, 'Parent should override default_library') - def test_builtin_options_subprojects_dont_inherits_parent_override(self): - # If the buildfile says subproject(... default_library: shared), ensure that's overwritten + def test_builtin_options_machinefile_global_overrides_subproject(self): + # The buildfile says subproject(... 
default_library: static), ensure that's overridden testcase = os.path.join(self.common_test_dir, '223 persubproject options') config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}}) - self.init(testcase, extra_args=['--native-file', config]) + + with self.assertRaises((RuntimeError, subprocess.CalledProcessError)) as cm: + self.init(testcase, extra_args=['--native-file', config]) + if isinstance(cm, RuntimeError): + check = str(cm.exception) + else: + check = cm.exception.stdout + self.assertIn(check, 'Parent should override default_library') def test_builtin_options_compiler_properties(self): # the properties section can have lang_args, and those need to be diff --git a/unittests/optiontests.py b/unittests/optiontests.py index 5ed601f..3e87b5c 100644 --- a/unittests/optiontests.py +++ b/unittests/optiontests.py @@ -35,13 +35,29 @@ class OptionTests(unittest.TestCase): optstore.initialize_from_top_level_project_call({OptionKey('someoption'): new_value}, {}, {}) self.assertEqual(optstore.get_value_for(k), new_value) + def test_machine_vs_project(self): + optstore = OptionStore(False) + name = 'backend' + default_value = 'ninja' + proj_value = 'xcode' + mfile_value = 'vs2010' + k = OptionKey(name) + prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr') + optstore.add_system_option('prefix', prefix) + vo = UserStringOption(k.name, 'You know what this is', default_value) + optstore.add_system_option(k.name, vo) + self.assertEqual(optstore.get_value_for(k), default_value) + optstore.initialize_from_top_level_project_call({OptionKey(name): proj_value}, {}, + {OptionKey(name): mfile_value}) + self.assertEqual(optstore.get_value_for(k), mfile_value) + def test_subproject_system_option(self): """Test that subproject system options get their default value from the global option (e.g. 
"sub:b_lto" can be initialized from "b_lto").""" optstore = OptionStore(False) - name = 'someoption' - default_value = 'somevalue' - new_value = 'new_value' + name = 'b_lto' + default_value = 'false' + new_value = 'true' k = OptionKey(name) subk = k.evolve(subproject='sub') optstore.initialize_from_top_level_project_call({}, {}, {OptionKey(name): new_value}) @@ -202,3 +218,97 @@ class OptionTests(unittest.TestCase): optstore = OptionStore(False) value = optstore.get_default_for_b_option(OptionKey('b_vscrt')) self.assertEqual(value, 'from_buildtype') + + def test_b_nonexistent(self): + optstore = OptionStore(False) + self.assertTrue(optstore.accept_as_pending_option(OptionKey('b_ndebug'))) + self.assertFalse(optstore.accept_as_pending_option(OptionKey('b_whatever'))) + + def test_backend_option_pending(self): + optstore = OptionStore(False) + # backend options are known after the first invocation + self.assertTrue(optstore.accept_as_pending_option(OptionKey('backend_whatever'), set(), True)) + self.assertFalse(optstore.accept_as_pending_option(OptionKey('backend_whatever'), set(), False)) + + def test_reconfigure_b_nonexistent(self): + optstore = OptionStore(False) + optstore.set_from_configure_command(['b_ndebug=true'], []) + + def test_subproject_nonexistent(self): + optstore = OptionStore(False) + subprojects = {'found'} + self.assertFalse(optstore.accept_as_pending_option(OptionKey('foo', subproject='found'), subprojects)) + self.assertTrue(optstore.accept_as_pending_option(OptionKey('foo', subproject='whatisthis'), subprojects)) + + def test_subproject_cmdline_override_global(self): + name = 'optimization' + subp = 'subp' + new_value = '0' + + optstore = OptionStore(False) + prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr') + optstore.add_system_option('prefix', prefix) + o = UserComboOption(name, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's']) + optstore.add_system_option(name, o) + + toplevel_proj_default 
= {OptionKey(name): 's'} + subp_proj_default = {OptionKey(name): '3'} + cmd_line = {OptionKey(name): new_value} + + optstore.initialize_from_top_level_project_call(toplevel_proj_default, cmd_line, {}) + optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, cmd_line, {}) + self.assertEqual(optstore.get_value_for(name, subp), new_value) + self.assertEqual(optstore.get_value_for(name), new_value) + + def test_subproject_cmdline_override_global_and_augment(self): + name = 'optimization' + subp = 'subp' + global_value = 's' + new_value = '0' + + optstore = OptionStore(False) + prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr') + optstore.add_system_option('prefix', prefix) + o = UserComboOption(name, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's']) + optstore.add_system_option(name, o) + + toplevel_proj_default = {OptionKey(name): '1'} + subp_proj_default = {OptionKey(name): '3'} + cmd_line = {OptionKey(name): global_value, OptionKey(name, subproject=subp): new_value} + + optstore.initialize_from_top_level_project_call(toplevel_proj_default, cmd_line, {}) + optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, cmd_line, {}) + self.assertEqual(optstore.get_value_for(name, subp), new_value) + self.assertEqual(optstore.get_value_for(name), global_value) + + def test_subproject_cmdline_override_toplevel(self): + name = 'default_library' + subp = 'subp' + toplevel_value = 'both' + subp_value = 'static' + + optstore = OptionStore(False) + prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr') + optstore.add_system_option('prefix', prefix) + o = UserComboOption(name, 'Kind of library', 'both', choices=['shared', 'static', 'both']) + optstore.add_system_option(name, o) + + toplevel_proj_default = {OptionKey(name): 'shared'} + subp_proj_default = {OptionKey(name): subp_value} + cmd_line = {OptionKey(name, subproject=''): toplevel_value} + + 
optstore.initialize_from_top_level_project_call(toplevel_proj_default, cmd_line, {}) + optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, cmd_line, {}) + self.assertEqual(optstore.get_value_for(name, subp), subp_value) + self.assertEqual(optstore.get_value_for(name), toplevel_value) + + def test_deprecated_nonstring_value(self): + # TODO: add a lot more deprecated option tests + optstore = OptionStore(False) + name = 'deprecated' + do = UserStringOption(name, 'An option with some deprecation', '0', + deprecated={'true': '1'}) + optstore.add_system_option(name, do) + optstore.set_option(OptionKey(name), True) + value = optstore.get_value(name) + self.assertEqual(value, '1') diff --git a/unittests/platformagnostictests.py b/unittests/platformagnostictests.py index d6c0078..9c5e2bd 100644 --- a/unittests/platformagnostictests.py +++ b/unittests/platformagnostictests.py @@ -95,6 +95,23 @@ class PlatformAgnosticTests(BasePlatformTests): testdir = os.path.join(self.unit_test_dir, '102 python without pkgconfig') self.init(testdir, override_envvars={'PKG_CONFIG': 'notfound'}) + def test_vala_target_with_internal_glib(self): + testdir = os.path.join(self.unit_test_dir, '129 vala internal glib') + for run in [{ 'version': '2.84.4', 'expected': '2.84'}, { 'version': '2.85.2', 'expected': '2.84' }]: + self.new_builddir() + self.init(testdir, extra_args=[f'-Dglib-version={run["version"]}']) + try: + with open(os.path.join(self.builddir, 'meson-info', 'intro-targets.json'), 'r', encoding='utf-8') as tgt_intro: + intro = json.load(tgt_intro) + target = list(filter(lambda tgt: tgt['name'] == 'vala-tgt', intro)) + self.assertLength(target, 1) + sources = target[0]['target_sources'] + vala_sources = filter(lambda src: src.get('language') == 'vala', sources) + for src in vala_sources: + self.assertIn(('--target-glib', run['expected']), zip(src['parameters'], src['parameters'][1:])) + except FileNotFoundError: + self.skipTest('Current backend does not produce 
introspection data') + def test_debug_function_outputs_to_meson_log(self): testdir = os.path.join(self.unit_test_dir, '104 debug function') log_msg = 'This is an example debug output, should only end up in debug log' @@ -175,7 +192,7 @@ class PlatformAgnosticTests(BasePlatformTests): with self.subTest('Changing the backend'): with self.assertRaises(subprocess.CalledProcessError) as cm: self.setconf('-Dbackend=none') - self.assertIn("ERROR: Tried to modify read only option 'backend'", cm.exception.stdout) + self.assertIn('ERROR: Tried to modify read only option "backend"', cm.exception.stdout) # Check that the new value was not written in the store. with self.subTest('option is stored correctly'): @@ -203,10 +220,10 @@ class PlatformAgnosticTests(BasePlatformTests): # Reconfigure of not empty builddir should work self.new_builddir() Path(self.builddir, 'dummy').touch() - self.init(testdir, extra_args=['--reconfigure']) + self.init(testdir, extra_args=['--reconfigure', '--buildtype=custom']) # Setup a valid builddir should update options but not reconfigure - self.assertEqual(self.getconf('buildtype'), 'debug') + self.assertEqual(self.getconf('buildtype'), 'custom') o = self.init(testdir, extra_args=['-Dbuildtype=release']) self.assertIn('Directory already configured', o) self.assertNotIn('The Meson build system', o) @@ -421,12 +438,12 @@ class PlatformAgnosticTests(BasePlatformTests): with self.subTest('unknown user option'): out = self.init(testdir, extra_args=['-Dnot_an_option=1'], allow_fail=True) - self.assertIn('ERROR: Unknown options: "not_an_option"', out) + self.assertIn('ERROR: Unknown option: "not_an_option"', out) with self.subTest('unknown builtin option'): self.new_builddir() out = self.init(testdir, extra_args=['-Db_not_an_option=1'], allow_fail=True) - self.assertIn('ERROR: Unknown options: "b_not_an_option"', out) + self.assertIn('ERROR: Unknown option: "b_not_an_option"', out) def test_configure_new_option(self) -> None: @@ -451,7 +468,17 @@ class 
PlatformAgnosticTests(BasePlatformTests): f.write(line) with self.assertRaises(subprocess.CalledProcessError) as e: self.setconf('-Dneg_int_opt=0') - self.assertIn('Unknown options: ":neg_int_opt"', e.exception.stdout) + self.assertIn('Unknown option: "neg_int_opt"', e.exception.stdout) + + def test_reconfigure_option(self) -> None: + testdir = self.copy_srcdir(os.path.join(self.common_test_dir, '40 options')) + self.init(testdir) + self.assertEqual(self.getconf('neg_int_opt'), -3) + with self.assertRaises(subprocess.CalledProcessError) as e: + self.init(testdir, extra_args=['--reconfigure', '-Dneg_int_opt=0']) + self.assertEqual(self.getconf('neg_int_opt'), -3) + self.init(testdir, extra_args=['--reconfigure', '-Dneg_int_opt=-2']) + self.assertEqual(self.getconf('neg_int_opt'), -2) def test_configure_option_changed_constraints(self) -> None: """Changing the constraints of an option without reconfiguring should work.""" @@ -491,7 +518,7 @@ class PlatformAgnosticTests(BasePlatformTests): os.unlink(os.path.join(testdir, 'meson_options.txt')) with self.assertRaises(subprocess.CalledProcessError) as e: self.setconf('-Dneg_int_opt=0') - self.assertIn('Unknown options: ":neg_int_opt"', e.exception.stdout) + self.assertIn('Unknown option: "neg_int_opt"', e.exception.stdout) def test_configure_options_file_added(self) -> None: """A new project option file should be detected.""" diff --git a/unittests/rewritetests.py b/unittests/rewritetests.py index 57a6782..767c291 100644 --- a/unittests/rewritetests.py +++ b/unittests/rewritetests.py @@ -46,6 +46,18 @@ class RewriterTests(BasePlatformTests): args = [args] return self.rewrite_raw(directory, ['command'] + args) + # The rewriter sorts the sources alphabetically, but this is very unstable + # and buggy, so we do not test it. 
+ def assertEqualIgnoreOrder(self, a, b): + def deepsort(x): + if isinstance(x, list): + return sorted(deepsort(el) for el in x) + elif isinstance(x, dict): + return {k: deepsort(v) for k,v in x.items()} + else: + return x + self.assertDictEqual(deepsort(a), deepsort(b)) + def test_target_source_list(self): self.prime('1 basic') out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) @@ -61,32 +73,40 @@ class RewriterTests(BasePlatformTests): 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []}, 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp'], 'extra_files': []}, + 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []}, } } - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) def test_target_add_sources(self): self.prime('1 basic') out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json')) expected = { 'target': { - 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []}, - 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}, - 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []}, + 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 
'fileC.cpp'], 'extra_files': []}, + 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp'], 'extra_files': []}, + 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp', 'a7.cpp'], 'extra_files': []}, 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}, - 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []}, - 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []}, + 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'a5.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['fileB.cpp', 'fileC.cpp', 'a3.cpp', 'main.cpp'], 'extra_files': []}, 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp'], 'extra_files': []}, - 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}, - 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}, - 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}, + 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}, + 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a6.cpp' ], 'extra_files': []}, + 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'a1.cpp'], 'extra_files': []}, + 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['a1.cpp', 
'fileA.cpp', 'main.cpp'], 'extra_files': []}, + 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['a1.cpp', 'fileA.cpp', 'fileB.cpp', 'main.cpp'], 'extra_files': []}, + 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []}, } } - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) # Check the written file out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) def test_target_add_sources_abs(self): self.prime('1 basic') @@ -95,7 +115,7 @@ class RewriterTests(BasePlatformTests): inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}]) self.rewrite(self.builddir, add) out = self.rewrite(self.builddir, inf) - expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}}} + expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp'], 'extra_files': []}}} self.assertDictEqual(out, expected) def test_target_remove_sources(self): @@ -103,28 +123,32 @@ class RewriterTests(BasePlatformTests): out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json')) expected = { 'target': { - 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp'], 'extra_files': []}, - 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': []}, - 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp'], 'extra_files': []}, + 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []}, + 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp'], 'extra_files': []}, 
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp'], 'extra_files': []}, 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp'], 'extra_files': []}, - 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp'], 'extra_files': []}, + 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['fileB.cpp', 'fileC.cpp'], 'extra_files': []}, 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': []}, - 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp'], 'extra_files': []}, - 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp'], 'extra_files': []}, - 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp'], 'extra_files': []}, + 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp'], 'extra_files': []}, + 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp'], 'extra_files': []}, + 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []}, } } - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) # Check the written file out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) def test_target_subdir(self): self.prime('2 subdirs') out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json')) - expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c'], 'extra_files': []} + expected = 
{'name': 'something', 'sources': ['third.c', f'sub2{os.path.sep}first.c', f'sub2{os.path.sep}second.c'], 'extra_files': []} self.assertDictEqual(list(out['target'].values())[0], expected) # Check the written file @@ -145,9 +169,12 @@ class RewriterTests(BasePlatformTests): 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []}, 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp'], 'extra_files': []}, + 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, } } - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) def test_target_add(self): self.prime('1 basic') @@ -166,10 +193,14 @@ class RewriterTests(BasePlatformTests): 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []}, 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, - 'trivialprog10@sha': {'name': 'trivialprog10', 'sources': ['new1.cpp', 'new2.cpp'], 'extra_files': []}, + 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp'], 'extra_files': []}, + 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []}, + 'trivialprog13@sha': {'name': 'trivialprog13', 'sources': ['new1.cpp', 
'new2.cpp'], 'extra_files': []}, + 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []}, } } - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) def test_target_remove_subdir(self): self.prime('2 subdirs') @@ -181,7 +212,7 @@ class RewriterTests(BasePlatformTests): self.prime('2 subdirs') self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json')) out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) - expected = {'name': 'something', 'sources': ['first.c', 'second.c'], 'extra_files': []} + expected = {'name': 'something', 'sources': [f'sub2{os.path.sep}first.c', f'sub2{os.path.sep}second.c'], 'extra_files': []} self.assertDictEqual(out['target']['94b671c@@something@exe'], expected) def test_target_source_sorting(self): @@ -228,16 +259,23 @@ class RewriterTests(BasePlatformTests): } } } + for k1, v1 in expected.items(): + for k2, v2 in v1.items(): + for k3, v3 in v2.items(): + if isinstance(v3, list): + for i in range(len(v3)): + v3[i] = v3[i].replace('/', os.path.sep) self.assertDictEqual(out, expected) def test_target_same_name_skip(self): self.prime('4 same name targets') out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json')) out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) - expected = {'name': 'myExe', 'sources': ['main.cpp'], 'extra_files': []} + expected1 = {'name': 'myExe', 'sources': ['main.cpp'], 'extra_files': []} + expected2 = {'name': 'myExe', 'sources': [f'sub1{os.path.sep}main.cpp'], 'extra_files': []} self.assertEqual(len(out['target']), 2) - for val in out['target'].values(): - self.assertDictEqual(expected, val) + self.assertDictEqual(expected1, out['target']['myExe@exe']) + self.assertDictEqual(expected2, out['target']['9a11041@@myExe@exe']) def test_kwargs_info(self): self.prime('3 kwargs') @@ -347,48 +385,68 @@ class RewriterTests(BasePlatformTests): out = self.rewrite(self.builddir, 
os.path.join(self.builddir, 'addExtraFiles.json')) expected = { 'target': { - 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp', 'a7.hpp', 'fileB.hpp', 'fileC.hpp']}, - 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']}, + 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['fileA.hpp', 'main.hpp', 'fileB.hpp', 'fileC.hpp']}, + 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'fileA.hpp', 'main.hpp']}, 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['main.cpp'], 'extra_files': ['a7.hpp', 'fileB.hpp', 'fileC.hpp']}, 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp'], 'extra_files': ['a5.hpp', 'fileA.hpp', 'main.hpp']}, 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp'], 'extra_files': ['a5.hpp', 'main.hpp', 'fileA.hpp']}, - 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['a3.hpp', 'main.hpp', 'a7.hpp', 'fileB.hpp', 'fileC.hpp']}, - 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']}, - 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']}, + 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['a3.hpp', 'main.hpp', 'fileB.hpp', 'fileC.hpp']}, + 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['fileA.hpp', 'main.hpp']}, + 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']}, 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp'], 'extra_files': ['a2.hpp', 'a7.hpp']}, 
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp'], 'extra_files': ['a8.hpp', 'a9.hpp']}, 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a4.hpp']}, } } - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) # Check the written file out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) def test_target_remove_extra_files(self): self.prime('6 extra_files') out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmExtraFiles.json')) expected = { 'target': { - 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileC.hpp']}, - 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']}, - 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['main.cpp'], 'extra_files': ['fileC.hpp']}, + 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp', 'fileB.hpp', 'fileC.hpp']}, + 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp']}, + 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['main.cpp'], 'extra_files': ['fileB.hpp', 'fileC.hpp']}, 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']}, 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']}, - 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileC.hpp']}, - 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']}, - 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']}, + 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['fileB.hpp', 'fileC.hpp', 
'main.hpp']}, + 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp']}, + 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp']}, 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp'], 'extra_files': []}, 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp'], 'extra_files': []}, 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp'], 'extra_files': []}, } } - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) # Check the written file out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) - self.assertDictEqual(out, expected) + self.assertEqualIgnoreOrder(out, expected) + + def test_tricky_dataflow(self): + self.prime('7 tricky dataflow') + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json')) + expected = { + 'target': { + 'tgt1@sha': {'name': 'tgt1', 'sources': ['foo.c', 'new.c'], 'extra_files': []}, + 'tgt2@exe': {'name': 'tgt2', 'sources': ['new.c', 'unknown'], 'extra_files': []}, + 'tgt3@exe': {'name': 'tgt3', 'sources': ['foo.c', 'new.c'], 'extra_files': []}, + 'tgt4@exe': {'name': 'tgt4', 'sources': ['unknown'], 'extra_files': []}, + 'tgt5@exe': {'name': 'tgt5', 'sources': ['unknown', 'new.c'], 'extra_files': []}, + 'tgt6@exe': {'name': 'tgt6', 'sources': ['unknown', 'new.c'], 'extra_files': []}, + 'tgt7@exe': {'name': 'tgt7', 'sources': ['unknown', 'unknown'], 'extra_files': []}, + } + } + self.assertEqualIgnoreOrder(out, expected) + + # Check the written file + out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json')) + self.assertEqualIgnoreOrder(out, expected) def test_raw_printer_is_idempotent(self): test_path = Path(self.unit_test_dir, '120 rewrite') @@ -421,3 +479,24 @@ class RewriterTests(BasePlatformTests): } } self.assertDictEqual(out, expected) + + # Asserts that AstInterpreter.dataflow_dag is what 
it should be + def test_dataflow_dag(self): + test_path = Path(self.rewrite_test_dir, '1 basic') + interpreter = IntrospectionInterpreter(test_path, '', 'ninja') + interpreter.analyze() + + def sortkey(node): + return (node.lineno, node.colno, node.end_lineno, node.end_colno) + + def node_to_str(node): + return f"{node.__class__.__name__}({node.lineno}:{node.colno})" + + dag_as_str = "" + for target in sorted(interpreter.dataflow_dag.tgt_to_srcs.keys(), key=sortkey): + dag_as_str += f"Data flowing to {node_to_str(target)}:\n" + for source in sorted(interpreter.dataflow_dag.tgt_to_srcs[target], key=sortkey): + dag_as_str += f" {node_to_str(source)}\n" + + expected = Path(test_path / "expected_dag.txt").read_text().strip() + self.assertEqual(dag_as_str.strip(), expected) |