diff options
286 files changed, 8721 insertions, 3223 deletions
diff --git a/.appveyor.yml b/.appveyor.yml deleted file mode 100644 index 85725c3..0000000 --- a/.appveyor.yml +++ /dev/null @@ -1,139 +0,0 @@ -version: '{build}' - -os: Visual Studio 2015 - -environment: - matrix: - - arch: x86 - compiler: msys2-mingw - backend: ninja - - - arch: x64 - compiler: msys2-mingw - backend: ninja - - - arch: x64 - compiler: cygwin - backend: ninja - - - arch: x86 - compiler: msvc2015 - backend: ninja - BOOST_ROOT: C:\Libraries\Boost_1_60_0 - - - arch: x86 - compiler: msvc2015 - backend: vs2015 - BOOST_ROOT: C:\Libraries\Boost_1_60_0 - - - arch: x64 - compiler: msvc2017 - backend: ninja - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - BOOST_ROOT: C:\Libraries\Boost_1_64_0 - - - arch: x64 - compiler: msvc2017 - backend: vs2017 - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017 - BOOST_ROOT: C:\Libraries\Boost_1_64_0 - -platform: - - x64 - -branches: - only: - - master - # Release branches - - /^[0-9]+\.[0-9]+$/ - -init: - - ps: | - If($Env:compiler -like 'msvc2015') { - Set-WinSystemLocale de-DE - Start-Sleep -s 5 - Restart-Computer - } - -install: - - ps: | - function DownloadFile([String] $Source, [String] $Destination) { - $retries = 10 - for ($i = 1; $i -le $retries; $i++) { - try { - (New-Object net.webclient).DownloadFile($Source, $Destination) - break # succeeded - } catch [net.WebException] { - if ($i -eq $retries) { - throw # fail on last retry - } - $backoff = (10 * $i) # backoff 10s, 20s, 30s... - echo ('{0}: {1}' -f $Source, $_.Exception.Message) - echo ('Retrying in {0}s...' -f $backoff) - Start-Sleep -m ($backoff * 1000) - } - } - } - - cmd: set "ORIG_PATH=%PATH%" - # Use a Ninja with QuLogic's patch: https://github.com/ninja-build/ninja/issues/1219 - - cmd: set "MESON_FIXED_NINJA=1" - - ps: DownloadFile -Source 'https://github.com/mesonbuild/cidata/raw/master/ninja.exe' -Destination 'C:\projects\meson\ninja.exe' - # Use the x86 python only when building for x86 for the cpython tests. 
- # For all other archs (including, say, arm), use the x64 python. - - cmd: if %arch%==x86 (set MESON_PYTHON_PATH=C:\python35) else (set MESON_PYTHON_PATH=C:\python35-x64) - - # Skip CI requires python - - cmd: python ./skip_ci.py --base-branch-env=APPVEYOR_REPO_BRANCH --is-pull-env=APPVEYOR_PULL_REQUEST_NUMBER - - # Set paths for BOOST dll files - - cmd: if %compiler%==msvc2015 ( if %arch%==x86 ( set "PATH=%PATH%;%BOOST_ROOT%\lib32-msvc-14.0" ) else ( set "PATH=%PATH%;%BOOST_ROOT%\lib64-msvc-14.0" ) ) - - cmd: if %compiler%==msvc2017 ( if %arch%==x86 ( set "PATH=%PATH%;%BOOST_ROOT%\lib32-msvc-14.1" ) else ( set "PATH=%PATH%;%BOOST_ROOT%\lib64-msvc-14.1" ) ) - - # Set paths and config for each build type. - - cmd: if %compiler%==msvc2015 ( call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" %arch% ) - - cmd: if %compiler%==msvc2017 ( call "C:\Program Files (x86)\Microsoft Visual Studio\2017\Community\Common7\Tools\VsDevCmd.bat" -arch=%arch% ) - - cmd: if %compiler%==cygwin ( set PYTHON=python3 ) else ( set PYTHON=python ) - # MinGW setup, lines are split to prevent "The input line is too long." error. 
- - cmd: if %arch%==x86 ( set "PACMAN_ARCH=i686" ) else ( set "PACMAN_ARCH=x86_64" ) - - cmd: if %arch%==x86 ( set "PACMAN_BITS=32" ) else ( set "PACMAN_BITS=64" ) - - cmd: if %compiler%==msys2-mingw ( set "PATH=C:\msys64\mingw%PACMAN_BITS%\bin;%PATH%" ) - - cmd: if %compiler%==msys2-mingw ( set "MESON_PYTHON_PATH=C:\msys64\mingw%PACMAN_BITS%\bin" ) - - cmd: if %compiler%==msys2-mingw ( set "PYTHON=python3" ) - - cmd: if %compiler%==msys2-mingw ( C:\msys64\usr\bin\pacman -S --needed --noconfirm "mingw%PACMAN_BITS%/mingw-w64-%PACMAN_ARCH%-python3" "mingw%PACMAN_BITS%/mingw-w64-%PACMAN_ARCH%-python3-setuptools" ) - # Cygwin - - cmd: if not %compiler%==cygwin ( set "PATH=%cd%;%MESON_PYTHON_PATH%;%PATH%;" ) - - cmd: if %compiler%==cygwin ( set WRAPPER=ci\run-in-cygwin.bat ) - - cmd: if %compiler%==cygwin ( %WRAPPER% which %PYTHON% ) else ( where %PYTHON% ) - - # pkg-config is needed for the pkg-config tests on msvc - - ps: | - If($Env:compiler.StartsWith('msvc')) { - DownloadFile -Source 'http://nirbheek.in/files/binaries/pkg-config/win32/pkg-config.exe' ` - -Destination 'C:\projects\meson\pkg-config.exe' - } - - cmd: if %compiler%==cygwin ( call ci\appveyor-install.bat ) - - ps: | - If($Env:compiler -like 'msvc*') { - DownloadFile -Source "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/msmpisdk.msi" ` - -Destination "C:\projects\msmpisdk.msi" - c:\windows\system32\msiexec.exe /i C:\projects\msmpisdk.msi /quiet - DownloadFile -Source "https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/MSMpiSetup.exe" ` - -Destination "C:\projects\MSMpiSetup.exe" - c:\projects\MSMpiSetup.exe -unattend -full - } - -build_script: - - cmd: echo No build step. - - cmd: if %backend%==ninja ( %WRAPPER% ninja.exe --version ) else ( MSBuild /version & echo. 
) - -test_script: - - cmd: echo Running tests for %arch% and %compiler% with the %backend% backend - - cmd: "%WRAPPER% %PYTHON% run_tests.py --backend=%backend%" - -on_finish: - - set "PATH=%ORIG_PATH%" - - appveyor PushArtifact meson-test-run.txt -DeploymentName "Text test logs" - - appveyor PushArtifact meson-test-run.xml -DeploymentName "XML test logs" - -cache: - - C:\cache diff --git a/.gitattributes b/.gitattributes index de66d2b..7fd80e2 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,5 @@ .gitignore export-ignore .gitattributes export-ignore - +* text eol=lf +*.png binary +*.icns binary diff --git a/.travis.yml b/.travis.yml index dd5cebb..bd8d48c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -41,7 +41,7 @@ matrix: before_install: - python ./skip_ci.py --base-branch-env=TRAVIS_BRANCH --is-pull-env=TRAVIS_PULL_REQUEST - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install qt; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install qt llvm; fi # # Run one macOS build without pkg-config available, and the other (unity=on) with pkg-config - if [[ "$TRAVIS_OS_NAME" == "osx" && "$MESON_ARGS" =~ .*unity=on.* ]]; then brew install pkg-config; fi # Use a Ninja with QuLogic's patch: https://github.com/ninja-build/ninja/issues/1219 @@ -62,4 +62,5 @@ script: withgit \ /bin/sh -c "cd /root && mkdir -p tools; wget -c http://nirbheek.in/files/binaries/ninja/linux-amd64/ninja -O /root/tools/ninja; chmod +x /root/tools/ninja; CC=$CC CXX=$CXX OBJC=$CC OBJCXX=$CXX PATH=/root/tools:$PATH MESON_FIXED_NINJA=1 ./run_tests.py $RUN_TESTS_ARGS -- $MESON_ARGS && chmod -R a+rwX .coverage" fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) CPPFLAGS=-I/usr/local/include LDFLAGS=-L/usr/local/lib OBJC=$CC OBJCXX=$CXX PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH MESON_FIXED_NINJA=1 ./run_tests.py --backend=ninja -- $MESON_ARGS ; fi + # Ensure that llvm is added 
after $PATH, otherwise the clang from that llvm install will be used instead of the native apple clang. + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) CPPFLAGS=-I/usr/local/include LDFLAGS=-L/usr/local/lib OBJC=$CC OBJCXX=$CXX PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH:$(brew --prefix llvm)/bin MESON_FIXED_NINJA=1 ./run_tests.py --backend=ninja -- $MESON_ARGS ; fi @@ -9,7 +9,10 @@ build system. [](https://pypi.python.org/pypi/meson) [](https://travis-ci.org/mesonbuild/meson) [](https://ci.appveyor.com/project/mesonbuild/meson) +[](https://dev.azure.com/jussi0947/jussi/_build/latest?definitionId=1) [](https://codecov.io/gh/mesonbuild/meson/branch/master) +[](https://lgtm.com/projects/g/mesonbuild/meson/context:python) +[](https://lgtm.com/projects/g/mesonbuild/meson/alerts) #### Dependencies @@ -65,7 +68,7 @@ you may need to run this command with sudo. #### Contributing -We love code contributions. See the contributing.txt file for +We love code contributions. See the [contributing.md](contributing.md) file for details. @@ -73,10 +76,13 @@ details. The irc channel for Meson is `#mesonbuild` over at Freenode. +You can use [FreeNode's official webchat](https://webchat.freenode.net/#mesonbuild) +to connect to this channel. + #### Further info More information about the Meson build system can be found at the [project's home page](http://mesonbuild.com). -Meson is a registered trademark of Jussi Pakkanen +Meson is a registered trademark of Jussi Pakkanen. 
diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000..5b2447e --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,159 @@ +name: $(BuildID) + +trigger: + branches: + include: + - 'master' + # Release branches + - '0.*' + +variables: + MESON_FIXED_NINJA: 1 + CI: 1 + +jobs: +- job: vs2015 + pool: + vmImage: vs2015-win2012r2 + + strategy: + matrix: + vc2015x86ninja: + arch: x86 + compiler: msvc2015 + backend: ninja + vc2015x86vs: + arch: x86 + compiler: msvc2015 + backend: vs2015 + + steps: + - template: ci/azure-steps.yml + +- job: vs2017 + pool: + vmImage: VS2017-Win2016 + + strategy: + matrix: + vc2017x64ninja: + arch: x64 + compiler: msvc2017 + backend: ninja + vc2017x64vs: + arch: x64 + compiler: msvc2017 + backend: vs2017 + clangclx64ninja: + arch: x64 + compiler: clang-cl + backend: ninja + + steps: + - template: ci/azure-steps.yml + +- job: cygwin + pool: + vmImage: VS2017-Win2016 + strategy: + matrix: + gccx64ninja: {} + variables: + CYGWIN_ROOT: $(System.Workfolder)\cygwin + CYGWIN_MIRROR: http://cygwin.mirror.constant.com + steps: + - script: | + choco install cygwin --params="/InstallDir:%CYGWIN_ROOT%" + displayName: Install Cygwin + - script: | + %CYGWIN_ROOT%\cygwinsetup.exe -qnNdO -R "%CYGWIN_ROOT%" -s "%CYGWIN_MIRROR%" -g -P ^ + cmake,^ + gcc-objc++,^ + gcc-objc,^ + git,^ + gobject-introspection,^ + libboost-devel,^ + libglib2.0-devel,^ + libgtk3-devel,^ + ninja,^ + python3-pip,^ + vala,^ + zlib-devel + displayName: Install Dependencies + - script: | + set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 + env.exe -- python3 run_tests.py --backend=ninja + displayName: Run Tests + - task: CopyFiles@2 + condition: not(canceled()) + inputs: + contents: 'meson-test-run.*' + targetFolder: $(Build.ArtifactStagingDirectory) + - task: PublishBuildArtifacts@1 + inputs: + artifactName: $(System.JobName) + # publishing artifacts from PRs from a fork is currently blocked + condition: and(eq(variables['system.pullrequest.isfork'], 
false), not(canceled())) + - task: PublishTestResults@2 + condition: not(canceled()) + inputs: + testResultsFiles: meson-test-run.xml + testRunTitle: $(System.JobName) + +- job: msys2_mingw + pool: + vmImage: VS2017-Win2016 + strategy: + matrix: + gccx86ninja: + MSYSTEM: MINGW32 + MSYS2_ARCH: i686 + gccx64ninja: + MSYSTEM: MINGW64 + MSYS2_ARCH: x86_64 + variables: + MSYS2_ROOT: $(System.Workfolder)\msys64 + steps: + - script: | + choco install msys2 --params="/InstallDir:%MSYS2_ROOT% /NoUpdate /NoPath" + displayName: Install MSYS2 + - script: | + set PATH=%MSYS2_ROOT%\usr\bin;%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem + %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syyuu + %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu + displayName: Update MSYS2 + - script: | + set PATH=%MSYS2_ROOT%\usr\bin;%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem + %MSYS2_ROOT%\usr\bin\pacman --noconfirm --needed -S ^ + base-devel ^ + git ^ + mercurial ^ + mingw-w64-$(MSYS2_ARCH)-cmake ^ + mingw-w64-$(MSYS2_ARCH)-python2 ^ + mingw-w64-$(MSYS2_ARCH)-python3 ^ + mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^ + mingw-w64-$(MSYS2_ARCH)-toolchain + displayName: Install Dependencies + - script: | + set PATH=%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem + %MSYS2_ROOT%\usr\bin\bash -lc "wget https://github.com/mesonbuild/cidata/raw/master/ninja.exe; mv ninja.exe /$MSYSTEM/bin" + set PATHEXT=%PATHEXT%;.py + %MSYS2_ROOT%\usr\bin\bash -lc "MSYSTEM= python3 run_tests.py --backend=ninja" + env: + CHERE_INVOKING: yes + displayName: Run Tests + - task: CopyFiles@2 + condition: not(canceled()) + inputs: + contents: 'meson-test-run.*' + targetFolder: $(Build.ArtifactStagingDirectory) + - task: PublishBuildArtifacts@1 + inputs: + artifactName: $(System.JobName) + # publishing artifacts from PRs from a fork is currently blocked + condition: and(eq(variables['system.pullrequest.isfork'], false), not(canceled())) + - task: PublishTestResults@2 + condition: 
not(canceled()) + inputs: + testResultsFiles: meson-test-run.xml + testRunTitle: $(System.JobName) diff --git a/ci/appveyor-install.bat b/ci/appveyor-install.bat deleted file mode 100755 index 1e60179..0000000 --- a/ci/appveyor-install.bat +++ /dev/null @@ -1,22 +0,0 @@ -set CACHE=C:\cache -set CYGWIN_MIRROR=http://cygwin.mirror.constant.com - -if _%arch%_ == _x64_ set SETUP=setup-x86_64.exe && set CYGWIN_ROOT=C:\cygwin64 -if _%arch%_ == _x86_ set SETUP=setup-x86.exe && set CYGWIN_ROOT=C:\cygwin - -if not exist %CACHE% mkdir %CACHE% - -echo Updating Cygwin and installing ninja and test prerequisites -%CYGWIN_ROOT%\%SETUP% -qnNdO -R "%CYGWIN_ROOT%" -s "%CYGWIN_MIRROR%" -l "%CACHE%" -g -P ^ -gcc-objc++,^ -gcc-objc,^ -gobject-introspection,^ -libboost-devel,^ -libglib2.0-devel,^ -libgtk3-devel,^ -ninja,^ -python3-pip,^ -vala,^ -zlib-devel - -echo Install done diff --git a/ci/azure-steps.yml b/ci/azure-steps.yml new file mode 100644 index 0000000..71642f0 --- /dev/null +++ b/ci/azure-steps.yml @@ -0,0 +1,167 @@ +steps: +- powershell: | + python ./skip_ci.py --base-branch-env=SYSTEM_PULLREQUEST_TARGETBRANCH --is-pull-env=SYSTEM_PULLREQUEST_PULLREQUESTID --base-branch-origin + if ($LastExitCode -ne 0) { + exit 0 + } + + # remove MinGW from path, so we don't find gfortran and try to use it + $env:Path = ($env:Path.Split(';') | Where-Object { $_ -notlike '*mingw*' }) -join ';' + + # download and install prerequisites + function DownloadFile([String] $Source, [String] $Destination) { + $retries = 10 + for ($i = 1; $i -le $retries; $i++) { + try { + (New-Object net.webclient).DownloadFile($Source, $Destination) + break # succeeded + } catch [net.WebException] { + if ($i -eq $retries) { + throw # fail on last retry + } + $backoff = (10 * $i) # backoff 10s, 20s, 30s... + echo ('{0}: {1}' -f $Source, $_.Exception.Message) + echo ('Retrying in {0}s...' 
-f $backoff) + Start-Sleep -m ($backoff * 1000) + } + } + } + + DownloadFile -Source 'https://github.com/mesonbuild/cidata/raw/master/ninja.exe' -Destination $(System.WorkFolder)\ninja.exe + DownloadFile -Source 'http://nirbheek.in/files/binaries/pkg-config/win32/pkg-config.exe' -Destination $(System.WorkFolder)\pkg-config.exe + DownloadFile -Source 'https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/msmpisdk.msi' -Destination msmpisdk.msi + DownloadFile -Source 'https://download.microsoft.com/download/D/B/B/DBB64BA1-7B51-43DB-8BF1-D1FB45EACF7A/MSMpiSetup.exe' -Destination MSMpiSetup.exe + if ($env:compiler -ne 'msvc2015') { + Start-Process msiexec.exe -ArgumentList '/i msmpisdk.msi /quiet' -Wait + # installer fails "requires an interactive window station" with vs2015 image + Start-Process .\MSMpiSetup.exe -ArgumentList '-unattend -full' -Wait + } + + # import ms-mpi env vars (set by installer) + foreach ($p in "MSMPI_INC", "MSMPI_LIB32", "MSMPI_LIB64") { + $v = [Environment]::GetEnvironmentVariable($p, "Machine") + Set-Content "env:$p" "$v" + } + + if ($env:compiler -eq 'msvc2015') { + if ($env:arch -eq 'x86') { + $forcex86 = "--forcex86" + } + + # download and install python3 and add to path (since it's not installed in vs2015 image!) 
+ Set-ExecutionPolicy Bypass -Scope Process -Force + iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1')) + choco install python3 -y --no-progress $forcex86 --params "/InstallDir:C:\Python3" + $env:Path = "C:\Python3;$env:Path" + + # add JDK installed in vs2015 image to PATH + $env:Path = "C:\java\jdk\jdk1.8.0_102\bin\;$env:Path" + } + + # install boost (except for clang-cl) + if ($env:arch -eq 'x86') { $boost_bitness = '32' } else { $boost_bitness = '64' } + if ($env:compiler -eq 'msvc2015') { + $boost_version = '1.60.0' ; $boost_abi_tag = '14.0' + } elseif ($env:compiler -eq 'msvc2017') { + $boost_version = '1.64.0' ; $boost_abi_tag = '14.1' + } + if ($boost_version) { + $boost_filename = $boost_version.Replace('.', '_') + Downloadfile -Source "https://sourceforge.net/projects/boost/files/boost-binaries/$boost_version/boost_$boost_filename-msvc-$boost_abi_tag-$boost_bitness.exe" -Destination boost_$boost_filename-msvc-$boost_abi_tag-$boost_bitness.exe + Start-Process "boost_$boost_filename-msvc-$boost_abi_tag-$boost_bitness.exe" -ArgumentList "/dir=$(System.WorkFolder)\boost_$boost_filename /silent" -Wait + $env:BOOST_ROOT = "$(System.WorkFolder)\boost_$boost_filename" + $env:Path = "$env:Path;$env:BOOST_ROOT\lib$boost_bitness-msvc-$boost_abi_tag" + } + + # install D compiler and dub packages + if ($env:backend -eq 'ninja') { + & .\ci\install-dmd.ps1 + $arch = 'x86_mscoff' + if ($Env:arch -eq 'x64') { + $arch = 'x86_64' + } + & dub fetch urld + & dub build urld --compiler=dmd --arch=$arch + & dub fetch dubtestproject + & dub build dubtestproject:test1 --compiler=dmd --arch=$arch + & dub build dubtestproject:test2 --compiler=dmd --arch=$arch + } + + # test_find_program exercises some behaviour which relies on .py being in PATHEXT + $env:PATHEXT += ';.py' + + # add downloads to PATH + $env:Path = "$env:SYSTEM_WORKFOLDER;$env:Path" + + $origPath = $env:Path + # import visual studio variables + if ($env:compiler -eq 'msvc2015') 
{ + $vcvars = "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" + } else { + $vcvars = "C:\Program Files (x86)\Microsoft Visual Studio\2017\Enterprise\VC\Auxiliary\Build\vcvarsall.bat" + } + + ## ask cmd.exe to output the environment table after the batch file completes + $tempFile = [IO.Path]::GetTempFileName() + cmd /c " `"$vcvars`" $env:arch && set > `"$tempFile`" " + + ## go through the environment variables in the temp file. + ## for each of them, set the variable in our local environment. + Get-Content $tempFile | Foreach-Object { + if($_ -match "^(.*?)=(.*)$") { + Set-Content "env:\$($matches[1])" $matches[2] + } + } + Remove-Item $tempFile + + if ($env:compiler -eq 'clang-cl') { + # drop visual studio from PATH + # (but leave INCLUDE, LIB and WindowsSdkDir environment variables set) + $env:Path = $origPath + + # install llvm for clang-cl builds + DownloadFile -Source 'http://releases.llvm.org/7.0.0/LLVM-7.0.0-win64.exe' -Destination LLVM-7.0.0-win64.exe + Start-Process .\LLVM-7.0.0-win64.exe -ArgumentList '/S' -Wait + $env:Path = "C:\Program Files\LLVM\bin;$env:Path" + $env:CC = "clang-cl" + $env:CXX = "clang-cl" + + # and use Windows SDK tools + $env:Path = "$env:WindowsSdkDir\bin\$env:Arch;$env:Path" + } + + # add .NET framework tools to path for resgen for C# tests + # (always use 32-bit tool, as there doesn't seem to be a 64-bit tool) + if ((Get-Command "resgen.exe" -ErrorAction SilentlyContinue) -eq $null) { + $env:Path = "$env:WindowsSDK_ExecutablePath_x86;$env:Path" + } + + if ($env:backend -eq 'ninja') { + ninja --version + } else { + MSBuild /version + } + + where.exe python + python --version + + python run_tests.py --backend $(backend) + +- task: PublishTestResults@2 + inputs: + testResultsFiles: meson-test-run.xml + testRunTitle: $(System.JobName) + publishRunAttachments: true + condition: not(canceled()) + +- task: CopyFiles@2 + inputs: + contents: 'meson-test-run.*' + targetFolder: $(Build.ArtifactStagingDirectory) + 
condition: not(canceled()) + +- task: PublishBuildArtifacts@1 + inputs: + artifactName: $(System.JobName) + # publishing artifacts from PRs from a fork is currently blocked + condition: eq(variables['system.pullrequest.isfork'], false) diff --git a/ci/install-dmd.ps1 b/ci/install-dmd.ps1 new file mode 100644 index 0000000..fc8226c --- /dev/null +++ b/ci/install-dmd.ps1 @@ -0,0 +1,71 @@ +param ( + [string]$Version = $null +) +Set-StrictMode -Version latest +$ErrorActionPreference = "Stop" +$ProgressPreference = "SilentlyContinue" + +# default installation directory +$dmd_install = "C:\D" +$dmd_version_file = "C:\cache\DMD_LATEST" + +#echo "Fetching latest DMD version..." +if (!$Version) { + $dmd_latest_url = "http://downloads.dlang.org/releases/LATEST" + $retries = 10 + for ($i = 1; $i -le $retries; $i++) { + try { + [system.io.directory]::CreateDirectory((Split-Path -parent $dmd_version_file)) > $null + Invoke-WebRequest -URI $dmd_latest_url -OutFile $dmd_version_file + break + } catch [net.WebException] { + if ($i -eq $retries) { + break + } + $backoff = (10 * $i) # backoff 10s, 20s, 30s... + echo ('{0}: {1}' -f $dmd_latest_url, $_.Exception.Message) + echo ('Retrying in {0}s...' -f $backoff) + Start-Sleep -m ($backoff * 1000) + } catch { + throw + } + } + if (Test-Path $dmd_version_file) { + $dmd_version = Get-Content -Path $dmd_version_file + } else { + throw "Failed to resolve latest DMD version" + } +} else { + $dmd_version = $Version +} +$dmd_url = "http://downloads.dlang.org/releases/2.x/$dmd_version/dmd.$dmd_version.windows.zip" +$dmd_filename = [System.IO.Path]::GetFileName($dmd_url) +$dmd_archive = Join-Path ($env:temp) $dmd_filename + +#echo "Downloading $dmd_filename..." +$retries = 10 +for ($i = 1; $i -le $retries; $i++) { + try { + (New-Object net.webclient).DownloadFile($dmd_url, $dmd_archive) + break + } catch [net.WebException] { + if ($i -eq $retries) { + throw # fail on last retry + } + $backoff = (10 * $i) # backoff 10s, 20s, 30s... 
+ echo ('{0}: {1}' -f $dmd_url, $_.Exception.Message) + echo ('Retrying in {0}s...' -f $backoff) + Start-Sleep -m ($backoff * 1000) + } +} + +#echo "Extracting $dmd_filename..." +Expand-Archive $dmd_archive -Force -DestinationPath $dmd_install + +# add to environment path +#echo "Installing DMD..." +$dmd_bin = Join-Path $dmd_install "dmd2\windows\bin" +$Env:Path = $Env:Path + ";" + $dmd_bin + +#echo "Testing DMD..." +& dmd.exe --version 2>&1>$null diff --git a/ci/run-in-cygwin.bat b/ci/run-in-cygwin.bat deleted file mode 100644 index 146d28e..0000000 --- a/ci/run-in-cygwin.bat +++ /dev/null @@ -1,6 +0,0 @@ -if _%arch%_ == _x64_ set CYGWIN_ROOT=C:\cygwin64 -if _%arch%_ == _x86_ set CYGWIN_ROOT=C:\cygwin - -set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 - -env.exe -- %* diff --git a/ciimage/Dockerfile b/ciimage/Dockerfile index 326013b..ac59ca9 100644 --- a/ciimage/Dockerfile +++ b/ciimage/Dockerfile @@ -12,7 +12,8 @@ RUN apt-get -y update && apt-get -y upgrade \ && apt-get -y install qt4-linguist-tools \ && apt-get -y install python-dev \ && apt-get -y install libomp-dev openssh-client \ -&& apt-get -y install -y clang libclang-dev llvm-dev flex \ +&& apt-get -y install clang libclang-dev llvm-dev flex \ +&& apt-get -y install libgcrypt11-dev \ && apt-get -y install gdc ldc \ && python3 -m pip install hotdoc codecov \ && dub fetch urld \ diff --git a/cross/ccrx.txt b/cross/ccrx.txt new file mode 100644 index 0000000..5474bb0 --- /dev/null +++ b/cross/ccrx.txt @@ -0,0 +1,20 @@ +# This file assumes that path to the Renesas CC-RX toolchain is added +# to the environment(PATH) variable, so that Meson can find +# ccrx and rlink while building. +[binaries] +c = 'ccrx' +cpp = 'ccrx' +ar = 'rlink' +strip = 'rlink' + +[properties] +# The '--cpu' option with the appropriate target type should be mentioned +# to cross compile c/c++ code with ccrx,. 
+c_args = ['--cpu=rx600'] +cpp_args = ['--cpu=rx600'] + +[host_machine] +system = 'bare metal' # Update with your system name - bare metal/OS. +cpu_family = 'rx' +cpu = 'rx600' +endian = 'little' diff --git a/docs/.editorconfig b/docs/.editorconfig new file mode 100644 index 0000000..b5276f1 --- /dev/null +++ b/docs/.editorconfig @@ -0,0 +1,2 @@ +[sitemap.txt] +indent_style = tab diff --git a/docs/README.md b/docs/README.md index 18509c7..55fc3ec 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,15 +1,40 @@ -# Building the documentation +# Meson Documentation -1. Get [hotdoc](https://hotdoc.github.io/installing.html) (0.8.9 required) -1. Run hotdoc in the docs/ directory: +## Build dependencies - ../meson/meson.py build/ +Meson uses itself and [hotdoc](https://github.com/hotdoc/hotdoc) for generating documentation. + +Minimum required version of hotdoc is *0.8.9*. + +Instructions on how to install hotdoc are [here](https://hotdoc.github.io/installing.html). + +## Building the documentation + +From the Meson repository root dir: +``` +$ cd docs/ +$ meson built_docs +$ ninja -C built_docs/ upload +``` +Now you should be able to open the documentation locally +``` +built_docs/Meson documentation-doc/html/index.html +``` ## Upload -We are using the git-upload hotdoc plugin which basically +Meson uses the git-upload hotdoc plugin which basically removes the html pages and replaces with the new content. You can simply run: +``` +$ ninja -C built_docs/ upload +``` + +## Contributing to the documentation - ninja -C build/ upload +Commits that only change documentation should have `[skip ci]` in their commit message, so CI is not run (it is quite slow). 
+For example: +``` +A commit message [skip ci] +``` diff --git a/docs/markdown/Adding-arguments.md b/docs/markdown/Adding-arguments.md index 117622b..8dd8488 100644 --- a/docs/markdown/Adding-arguments.md +++ b/docs/markdown/Adding-arguments.md @@ -37,6 +37,19 @@ You should set only the most essential flags with this setting, you should *not* set debug or optimization flags. Instead they should be specified by selecting an appropriate build type. +Project arguments +-- + +Project arguments work similar to global arguments except that they +are valid only within the current subproject. The usage is simple: + +```meson +add_project_arguments('-DMYPROJ=projname', language : 'c') +``` + +This would add the compiler flags to all C sources in the current +project. + Per target arguments -- diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md index 00fd0d7..c79cfd3 100644 --- a/docs/markdown/Adding-new-projects-to-wrapdb.md +++ b/docs/markdown/Adding-new-projects-to-wrapdb.md @@ -1,84 +1,120 @@ -# Adding new projects to wrap +# Adding new projects to WrapDB -**If you don't have permissions to do something on this page, please - open issue against https://github.com/mesonbuild/wrapweb/issues to - let us know that you want to start new project.** -## Overview +## How it works -The wrap provider service is a simple web service that makes it easy -to download build definitions for projects. It works in much the same -way as Debian: we take the unaltered upstream source package and add a -new build system to it as a patch. These build systems are stored as -Git repositories on GitHub. They only contain build definition -files. You may also think of them as an overlay to upstream source. +Each wrap repository has a master branch with only one initial commit and *no* wrap files. +And that is the only commit ever made on that branch. -## Creator script +For every release of a project a new branch is created. 
The new branch is named after the +the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for +this particular release. + +There are two types of wraps on WrapDB - regular wraps and wraps with Meson build +definition patches. A wrap file in a repository on WrapDB must have a name `upstream.wrap`. + +Wraps with Meson build definition patches work in much the same way as Debian: we take the unaltered upstream source package and add a new build system to it as a patch. These build systems are stored as Git repositories on GitHub. They only contain build definition files. You may also think of them as an overlay to upstream source. + +Whenever a new commit is pushed into GitHub's project branch, a new wrap is generated +with an incremented version number. All the old releases remain unaltered. +New commits are always done via GitHub merge requests and must be reviewed by +someone other than the submitter. -The WrapDB repository has a [helper -script](https://github.com/mesonbuild/wrapweb/blob/master/mesonwrap.py) -to generate new repositories, verify them and update them. The documentation below roughly explains -what it does to create a new wrap repository using plain shell commands. +Note that your Git repo with wrap must not contain the subdirectory of the source +release. That gets added automatically by the service. You also must not commit +any source code from the original tarball into the wrap repository. ## Choosing the repository name Wrapped subprojects are used much like external dependencies. Thus -they should have the same name as the upstream projects. If the -project provides a pkg-config file, then the repository name should be +they should have the same name as the upstream projects. + +If the project provides a pkg-config file, then the repository name should be the same as the pkg-config name. Usually this is the name of the project, such as `libpng`. Sometimes it is slightly different, however. 
As an example the libogg project's chosen pkg-config name is `ogg` instead of `libogg`, which is the reason why the repository is named plain `ogg`. -## Adding new project to the Wrap provider service +If there is no a pkg-config file, the name the project uses/promotes should be used, +lowercase only (Catch2 -> catch2). -Each project gets its own repo. It is initialized like this: +## How to contribute a new wrap - git init - git add readme.txt - git add LICENSE.build - git commit -a -m 'Create project foobar' - git remote add origin <repo url> - git push -u origin master +If the project already uses Meson build system, then only a wrap file - `upstream.wrap` +should be provided. In other case a Meson build definition patch - a set of `meson.build` +files - should be also provided. -Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches. +### Request a new repository or branch -Repo names must fully match this regexp: `[a-z0-9._]+`. +Create an issue on the [wrapweb bug tracker](https://github.com/mesonbuild/wrapweb/issues) +using *Title* and *Description* below as a template. -## Adding a new branch to an existing project +*Title:* `new wrap: <project_name>` -Create a new branch whose name matches the upstream release number. +*Description:* +``` +upstream url: <link_to_updastream> +version: <version_you_have_a_wrap_for> +``` - git checkout master - git checkout -b 1.0.0 - git push origin 1.0.0 - (or from GitHub web page, remember to branch from master) +Wait until the new repository or branch is created. A link to the new repository or branch +will be posted in a comment to this issue. -Branch names must fully match this regexp: `[a-z0-9._]+`. +### Add a new wrap -## Adding a new release to an existing branch +First you need to fork the repository to your own page. +Then you can create the first Wrap commit that usually looks something like this. -Here is where the magic happens. 
Whenever a new commit is pushed into GitHub's project branch, a new wrap is generated with an incremented version number. All the old releases remain unaltered. New commits are always done via GitHub merge requests and must be reviewed by someone other than the submitter. +``` +tar xzf libfoo-1.0.0.tar.gz +git clone -b 1.0.0 git@github.com:yourusername/libfoo.git tmpdir +mv tmpdir/.git libfoo-1.0.0 +rm -rf tmpdir +cd libfoo-1.0.0 +git reset --hard +emacs upstream.wrap meson.build +<verify that your project builds and runs> +git add upstream.wrap meson.build +git commit -a -m 'Add wrap files for libfoo-1.0.0' +git push origin 1.0.0 +``` -Note that your Git repo must *not* contain the subdirectory of the source release. That gets added automatically by the service. You also must *not* commit any source code from the original tarball into the wrap repository. +Now you should create a pull request on GitHub. Remember to create it against the +correct branch rather than master (`1.0.0` branch in this example). GitHub should do +this automatically. -First you need to fork the repository to your own page. Then you can create the first Wrap commit that usually looks something like this. +## What is done by WrapDB maintainers - tar xzf libfoo_1.0.0.tar.gz - git clone -b 1.0.0 git@github.com:yourusername/libfoo.git tmpdir - mv tmpdir/.git libfoo-1.0.0 - rm -rf tmpdir - cd libfoo-1.0.0 - git reset --hard - emacs upstream.wrap meson.build - <verify that your project builds and runs> - git add upstream.wrap meson.build - git commit -a -m 'Created wrap files for libfoo-1.0.0.' - git push origin 1.0.0 +### Adding new project to the Wrap provider service -Now you can file a merge request. Remember to file it against branch -1.0.0 rather than master. GitHub should do this automatically. +Each project gets its own repo. 
It is initialized like this: + +``` +git init +git add readme.txt +git add LICENSE.build +git commit -a -m 'Create project foobar' +git remote add origin <repo url> +git push -u origin master +``` + +Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches. + +Repo names must fully match this regexp: `[a-z0-9._]+`. + +### Adding a new branch to an existing project + +Create a new branch whose name matches the upstream release number. + +``` +git checkout master +git checkout -b 1.0.0 +git push origin 1.0.0 +(or from GitHub web page, remember to branch from master) +``` + +Branch names must fully match this regexp: `[a-z0-9._]+`. ## Changes to original source @@ -93,6 +129,12 @@ to functionality. All such changes must be submitted to upstream. You may also host your own Git repo with the changes if you wish. The Wrap system has native support for Git subprojects. +## Creator script + +The WrapDB repository has a +[helper script](https://github.com/mesonbuild/wrapweb/blob/master/mesonwrap.py) +to generate new repositories, verify them and update them. + ## Reviewing wraps See [Wrap review guidelines](Wrap-review-guidelines.md). diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index 55352aa..ce14304 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -82,6 +82,7 @@ platforms or with all compilers: | b_pgo | off | off, generate, use | Use profile guided optimization | | b_sanitize | none | see below | Code sanitizer to use | | b_staticpic | true | true, false | Build static libraries as position independent | +| b_pie | false | true, false | Build position-independent executables (since 0.49.0)| The value of `b_sanitize` can be one of: `none`, `address`, `thread`, `undefined`, `memory`, `address,undefined`. 
@@ -119,4 +120,8 @@ compiler being used: The default values of `c_winlibs` and `cpp_winlibs` are in compiler-specific argument forms, but the libraries are: kernel32, user32, gdi32, winspool, -shell32, ole32, oleaut32, uuid, comdlg32, advapi32 +shell32, ole32, oleaut32, uuid, comdlg32, advapi32. + +c_args, cpp_args, c_link_args, and cpp_link_args only affect native builds, +when cross compiling they will not be applied to binaries or libraries +targeting the host system, only those being run on the build system. diff --git a/docs/markdown/Compiler-properties.md b/docs/markdown/Compiler-properties.md index 1228f42..4f5ebdb 100644 --- a/docs/markdown/Compiler-properties.md +++ b/docs/markdown/Compiler-properties.md @@ -29,9 +29,17 @@ Compiler id == The compiler object has a method called `get_id`, which returns a -lower case string describing the "family" of the compiler. See -[reference tables](Reference-tables.md) for a list of supported -compiler ids. +lower case string describing the "family" of the compiler. + +The compiler object also has a method `get_argument_syntax` which +returns a lower case string of `gcc`, `msvc`, or another undefined string +value; identifying whether the compiler arguments use the same syntax as +either `gcc` or `msvc`, or that its arguments are not like either. This should +only be used to select the syntax of the arguments, such as those to test +with `has_argument`. + +See [reference tables](Reference-tables.md#compiler-ids) for a list of supported compiler +ids and their argument type. Does code compile? == diff --git a/docs/markdown/Configuration.md b/docs/markdown/Configuration.md index 8b79bc6..cd1af14 100644 --- a/docs/markdown/Configuration.md +++ b/docs/markdown/Configuration.md @@ -121,6 +121,38 @@ you to specify which file encoding to use. It is however strongly advised to convert your non utf-8 file to utf-8 whenever possible. 
Supported file encodings are those of python3, see [standard-encodings](https://docs.python.org/3/library/codecs.html#standard-encodings). +## Using dictionaries + +Since *0.49.0* `configuration_data()` takes an optional dictionary as first +argument. If provided, each key/value pair is added into the +`configuration_data` as if `set()` method was called for each of them. +`configure_file()`'s `configuration` kwarg also accepts a dictionary instead of +a configuration_data object. + +Example: +```meson +cdata = configuration_data({ + 'STRING' : '"foo"', + 'INT' : 42, + 'DEFINED' : true, + 'UNDEFINED' : false, +}) + +configure_file(output : 'config1.h', + configuration : cdata, +) + +configure_file(output : 'config2.h', + configuration : { + 'STRING' : '"foo"', + 'INT' : 42, + 'DEFINED' : true, + 'UNDEFINED' : false, + } +) + +``` + # A full example Generating and using a configuration file requires the following steps: diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index c0eea29..2881837 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -156,19 +156,19 @@ subdirectory. They are not run as part of `./run_project_tests.py`. ### Skipping integration tests Meson uses several continuous integration testing systems that have slightly -different interface. To promote consistent naming policy, use: - -- `[skip ci]` in the commit title if you want to disable all integration tests -- `[skip appveyor]` in the commit title if you want to disable Windows-only tests +different interfaces for indicating a commit should be skipped. Continuous integration systems currently used: - -- [Travis-CI](https://docs.travis-ci.com/user/customizing-the-build/#Skipping-a-build) +- [Travis-CI](https://docs.travis-ci.com/user/customizing-the-build#skipping-a-build) allows `[skip ci]` anywhere in the commit messages. 
-- [AppVeyor](https://www.appveyor.com/docs/how-to/filtering-commits/#skip-directive-in-commit-message) - requires `[skip ci]` or `[skip appveyor]` in the commit title. +- [Azure Pipelines](https://docs.microsoft.com/en-us/azure/devops/pipelines/scripts/git-commands?view=vsts&tabs=yaml#how-do-i-avoid-triggering-a-ci-build-when-the-script-pushes) + allows `***NO_CI***` in the commit message. - [Sider](https://sider.review) - runs Flake8 (see below) + runs Flake8 ([see below](#python-coding-style)) + +To promote consistent naming policy, use: + + - `[skip ci]` in the commit title if you want to disable all integration tests ## Documentation @@ -291,4 +291,3 @@ Environment variables are like global variables, except that they are also hidden by default. Envvars should be avoided whenever possible, all functionality should be exposed in better ways such as command line switches. - diff --git a/docs/markdown/Custom-build-targets.md b/docs/markdown/Custom-build-targets.md index 30a16e3..9a0f2a1 100644 --- a/docs/markdown/Custom-build-targets.md +++ b/docs/markdown/Custom-build-targets.md @@ -27,6 +27,8 @@ This would generate the binary `output.bin` and install it to `${prefix}/subdir/output.bin`. Variable substitution works just like it does for source generation. +See [Generating Sources](Generating-sources.md) for more information on this topic. + ## Details on compiler invocations Meson only permits you to specify one command to run. 
This is by diff --git a/docs/markdown/D.md b/docs/markdown/D.md index 15de2f7..2b0eaac 100644 --- a/docs/markdown/D.md +++ b/docs/markdown/D.md @@ -14,15 +14,21 @@ project('myapp', 'd') executable('myapp', 'app.d') ``` -## Compiling different versions +## [Conditional compilation](https://dlang.org/spec/version.html) -If you are using the [version()](https://dlang.org/spec/version.html) feature for conditional compilation, +If you are using the [version()](https://dlang.org/spec/version.html#version-specification) feature for conditional compilation, you can use it using the `d_module_versions` target property: ```meson project('myapp', 'd') executable('myapp', 'app.d', d_module_versions: ['Demo', 'FeatureA']) ``` +For debugging, [debug()](https://dlang.org/spec/version.html#debug) conditions are compiled automatically in debug builds, and extra identifiers can be added with the `d_debug` argument: +```meson +project('myapp', 'd') +executable('myapp', 'app.d', d_debug: [3, 'DebugFeatureA']) +``` + ## Using embedded unittests If you are using embedded [unittest functions](https://dlang.org/spec/unittest.html), your source code needs diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md index 08ff1e2..e3fedc4 100644 --- a/docs/markdown/Dependencies.md +++ b/docs/markdown/Dependencies.md @@ -114,13 +114,40 @@ of all the work behind the scenes to make this work. You can use the keyword `method` to let meson know what method to use when searching for the dependency. The default value is `auto`. -Aditional dependencies methods are `pkg-config`, `config-tool`, +Additional dependency methods are `pkg-config`, `config-tool`, `cmake`, `system`, `sysconfig`, `qmake`, `extraframework` and `dub`. ```meson cups_dep = dependency('cups', method : 'pkg-config') ``` +The dependency method order for `auto` is: + + 1. `pkg-config` + 2. `cmake` + 3.
`extraframework` (OSX only) + +## CMake + +Meson can use the CMake `find_package()` function to detect +dependencies with the builtin `Find<NAME>.cmake` modules and exported +project configurations (usually in `/usr/lib/cmake`). Meson is able +to use both the old-style `<NAME>_LIBRARIES` variables as well as +imported targets. + +It is possible to manually specify a list of CMake targets that should +be used with the `modules` property. However, this step is optional +since meson tries to automatically guess the correct target based on the +name of the dependency. + +Depending on the dependency it may be necessary to explicitly specify +a CMake target with the `modules` property if meson is unable to guess +it automatically. + +```meson + cmake_dep = dependency('ZLIB', method : 'cmake', modules : ['ZLIB::ZLIB']) +``` + ### Some notes on Dub Please understand that meson is only able to find dependencies that @@ -165,15 +192,16 @@ wmf_dep = dependency('libwmf', method : 'config-tool') ## Dependencies using config tools [CUPS](#cups), [LLVM](#llvm), [pcap](#pcap), [WxWidgets](#wxwidgets), -[libwmf](#libwmf), and GnuStep either do not provide pkg-config +[libwmf](#libwmf), [GCrypt](#libgcrypt), and GnuStep either do not provide pkg-config modules or additionally can be detected via a config tool -(cups-config, llvm-config, etc). Meson has native support for these +(cups-config, llvm-config, libgcrypt-config, etc). Meson has native support for these tools, and they can be found like other dependencies: ```meson pcap_dep = dependency('pcap', version : '>=1.0') cups_dep = dependency('cups', version : '>=1.4') llvm_dep = dependency('llvm', version : '>=4.0') +libgcrypt_dep = dependency('libgcrypt', version: '>= 1.8') ``` ## AppleFrameworks @@ -217,7 +245,7 @@ libraries that have been compiled for single-threaded use instead. ## CUPS -`method` may be `auto`, `config-tool`, `pkg-config` or `extraframework`.
+`method` may be `auto`, `config-tool`, `pkg-config`, `cmake` or `extraframework`. ## GL @@ -309,6 +337,12 @@ The `language` keyword may used. `method` may be `auto`, `config-tool` or `pkg-config`. +## libgcrypt + +*(added 0.49.0)* + +`method` may be `auto`, `config-tool` or `pkg-config`. + ## Python3 Python3 is handled specially by meson: diff --git a/docs/markdown/Generating-sources.md b/docs/markdown/Generating-sources.md index cbe6c0d..306bee3 100644 --- a/docs/markdown/Generating-sources.md +++ b/docs/markdown/Generating-sources.md @@ -46,10 +46,79 @@ file individually by index. Then you just put that in your program and you're done. +### Generating headers + +Adding a generated header to a source list will ensure that the header is +generated and that the proper include paths are created for the target: + ```meson -executable('program', 'main.c', gen_src) +prog_python = import('python').find_installation('python3') + +foo_c = custom_target( + 'foo.c', + output : 'foo.c', + input : 'my_gen.py', + command : [prog_python, '@INPUT@', '--code', '@OUTPUT@'], +) + +foo_h = custom_target( + 'foo.h', + output : 'foo.h', + input : 'my_gen.py', + command : [prog_python, '@INPUT@', '--header', '@OUTPUT@'], +) + +libfoo = static_library('foo', [foo_c, foo_h]) + +executable('myexe', ['main.c', foo_h], link_with : libfoo) ``` +Each target that depends on a generated header should add that header to its sources, +as seen above with `libfoo` and `myexe`. This is because there is no way for +meson or the backend to know that `myexe` depends on `foo.h` just because +`libfoo` does, it could be a private header. + +### Generating multiple files at a time + +Sometimes it makes sense for a single generator to create two or more files at +a time, (perhaps a header and source file), meson has this case covered as +well. `custom_target`s can be indexed like a list to get each output file +separately.
The order is the same as the order of the output argument to +`custom_target` + +```meson +prog_python = import('python').find_installation('python3') + +foo_ch = custom_target( + 'foo.[ch]', + output : ['foo.c', 'foo.h'], + input : 'my_gen.py', + command : [prog_python, '@INPUT@', '@OUTPUT@'], +) + +libfoo = static_library('foo', [foo_ch]) + +executable('myexe', ['main.c', foo_ch[1]], link_with : libfoo) +``` + +In this case `libfoo` depends on both `foo.c` and `foo.h` but `myexe` only +depends on `foo.h`, the second output. + +### Using dependencies to manage generated resources + +In some cases it might be easier to use `declare_dependency` to "bundle" the header +and library dependency, especially if there are many generated headers: + +```meson +idep_foo = declare_dependency( + sources : [foo_h, bar_h], + link_with : [libfoo], +) +``` + +See [dependencies](Dependencies.md#declaring-your-own), and +[reference](Reference-manual.md#declare_dependency) for more information. + ## Using generator() Generators are similar to custom targets, except that we define a diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index cc85d87..dcd843f 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -323,6 +323,7 @@ of the module. * `dependencies`: a list of dependencies * `fixxref_args`: a list of arguments to pass to `gtkdoc-fixxref` * `gobject_typesfile`: a list of type files +* `include_directories`: extra include paths to pass to `gtkdoc-scangobj` * `ignore_headers`: a list of header files to ignore * `html_assets`: a list of assets for the HTML pages * `html_args` a list of arguments to pass to `gtkdoc-mkhtml` @@ -332,8 +333,11 @@ of the module.
* `main_xml`: specifies the main XML file * `main_sgml`: equal to `main_xml` * `mkdb_args`: a list of arguments to pass to `gtkdoc-mkdb` +* `namespace`: specifies the name space to pass to `gtkdoc-mkdb` +* `module_version`: the version of the module, affects the installed location and the devhelp2 file location * `scan_args`: a list of arguments to pass to `gtkdoc-scan` * `scanobjs_args`: a list of arguments to pass to `gtkdoc-scangobj` +* `c_args`: (*Added 0.48.0*) additional compile arguments to pass * `src_dir`: include_directories to include This creates a `$module-doc` target that can be ran to build docs and diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index 2ce4b78..5f0c0a6 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -30,12 +30,48 @@ In order to make code completion work, you need the compiler flags for each comp Note that if the target has dependencies (such as generated sources), then the commands for those show up in this list as well, so you need to do some filtering. Alternatively you can grab every command invocation in the [Clang tools db](https://clang.llvm.org/docs/JSONCompilationDatabase.html) format that is written to a file called `compile_commands.json` in the build directory. +## Build Options + The next thing to display is the list of options that can be set. These include build type and so on. Here's how to extract them. meson introspect --buildoptions +This command returns a list of all supported buildoptions with the format: + +```json +{ + "name": "name of the option", + "description": "the description", + "type": "type ID", + "value": "value depends on type", + "section": "section ID" +} +``` + +The supported types are: + + - string + - boolean + - combo + - integer + - array + +For the type `combo` the key `choices` is also present. Here all valid values for the option are stored. 
+ +The possible values for `section` are: + + - core + - backend + - base + - compiler + - directory + - user + - test + To set the options, use the `meson configure` command. +## Tests + Compilation and unit tests are done as usual by running the `ninja` and `ninja test` commands. A JSON formatted result log can be found in `workspace/project/builddir/meson-logs/testlog.json`. When these tests fail, the user probably wants to run the failing test in a debugger. To make this as integrated as possible, extract the test test setups with this command. diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md index 8348d4a..1aa444a 100644 --- a/docs/markdown/Installing.md +++ b/docs/markdown/Installing.md @@ -26,7 +26,7 @@ Other install commands are the following. ```meson install_headers('header.h', subdir : 'projname') # -> include/projname/header.h -install_man('foo.1') # -> share/man/man1/foo.1.gz +install_man('foo.1') # -> share/man/man1/foo.1 install_data('datafile.dat', install_dir : join_paths(get_option('datadir'), 'progname')) # -> share/progname/datafile.dat ``` diff --git a/docs/markdown/Native-environments.md b/docs/markdown/Native-environments.md new file mode 100644 index 0000000..af7edd2 --- /dev/null +++ b/docs/markdown/Native-environments.md @@ -0,0 +1,76 @@ +--- +short-description: Setting up native compilation +... + +# Persistent native environments + +New in 0.49.0 + +Meson has [cross files for describing cross compilation environments](Cross-compilation.md), +for describing native environments it has equivalent "native files". + +Natives describe the *build machine*, and can be used to override properties of +non-cross builds, as well as properties that are marked as "native" in a cross +build. 
+ +There are a couple of reasons you might want to use a native file to keep a +persistent environment: + +* To build with a non-default native tool chain (such as clang instead of gcc) +* To use a non-default version of another binary, such as yacc, or llvm-config + + +## Changing native file settings + +All of the rules about cross files and changed settings apply to native files +as well, see [here](Cross-compilation.md#Changing-cross-file-settings) + + +## Defining the environment + +### Binaries + +Currently the only use of native files is to override native binaries. This +includes the compilers and binaries collected with `find_program`, and those +used by dependencies that use a config-tool instead of pkgconfig for detection, +like `llvm-config` + +```ini +[binaries] +c = '/usr/local/bin/clang' +cpp = '/usr/local/bin/clang++' +rust = '/usr/local/bin/rust' +llvm-config = '/usr/local/llvm-svn/bin/llvm-config' +``` + +## Loading multiple native files + +Unlike cross files, native files allow layering. More than one native file can be +loaded, with values from a previous file being overridden by the next. The +intention of this is not overriding, but to allow composing native files. + +For example, if there is a project using C and C++, python 3.4-3.7, and LLVM +5-7, and it needs to build with clang 5, 6, and 7, and gcc 5.x, 6.x, and 7.x; +expressing all of these configurations in monolithic configurations would +result in 81 different native files. By layering them, it can be expressed by +just 12 native files.
+ + +## Native file locations + +Like cross files, native files may be installed to user or system wide +locations, defined as: + - $XDG_DATA_DIRS/meson/native + (/usr/local/share/meson/native:/usr/share/meson/native if $XDG_DATA_DIRS is + undefined) + - $XDG_DATA_HOME/meson/native ($HOME/.local/share/meson/native if + $XDG_DATA_HOME is undefined) + +The order of locations tried is as follows: + - A file relative to the local dir + - The user local location + - The system wide locations in order + +These files are not intended to be shipped by distributions, unless they are +specifically for distribution packaging, they are mainly intended for +developers. diff --git a/docs/markdown/Pkg-config-files.md b/docs/markdown/Pkg-config-files.md index 0427b0e..305a6d8 100644 --- a/docs/markdown/Pkg-config-files.md +++ b/docs/markdown/Pkg-config-files.md @@ -18,4 +18,4 @@ pkg.generate(libraries : libs, This causes a file called `simple.pc` to be created and placed into the install directory during the install phase. -More infromation on the pkg-config module and the parameters can be found on the [pkgconfig-module](http://mesonbuild.com/Pkgconfig-module.html) page. +More information on the pkg-config module and the parameters can be found on the [pkgconfig-module](Pkgconfig-module.md) page. diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md index 77db809..9a34e14 100644 --- a/docs/markdown/Pkgconfig-module.md +++ b/docs/markdown/Pkgconfig-module.md @@ -14,16 +14,16 @@ can, of course, replace the name `pkg` with anything else. The generated file's properties are specified with the following keyword arguments.
-- `description` a string describing the library +- `description` a string describing the library, used to set the `Description:` field - `extra_cflags` a list of extra compiler flags to be added to the `Cflags` field after the header search path -- `filebase`, the base name to use for the pkg-config file, as an - example the value of `libfoo` would produce a pkg-config file called +- `filebase` the base name to use for the pkg-config file; as an + example, the value of `libfoo` would produce a pkg-config file called `libfoo.pc` - `install_dir` the directory to install to, defaults to the value of option `libdir` followed by `/pkgconfig` - `libraries` a list of built libraries (usually results of - shared_library) that the user needs to link against. Arbitraty strings can + shared_library) that the user needs to link against. Arbitrary strings can also be provided and they will be added into the `Libs` field. Since 0.45.0 dependencies of built libraries will be automatically added to `Libs.private` field. If a dependency is provided by pkg-config then it will be added in @@ -33,7 +33,7 @@ keyword arguments. - `libraries_private` list of built libraries or strings to put in the `Libs.private` field. Since 0.45.0 it can also contain dependency objects, their `link_args` will be added to `Libs.private`. -- `name` the name of this library +- `name` the name of this library, used to set the `Name:` field - `subdirs` which subdirs of `include` should be added to the header search path, for example if you install headers into `${PREFIX}/include/foobar-1`, the correct value for this argument @@ -48,7 +48,8 @@ keyword arguments. reference other pkgconfig variables, e.g. `datadir=${prefix}/share`. The names `prefix`, `libdir` and `installdir` are reserved and may not be used. -- `version` a string describing the version of this library +- `version` a string describing the version of this library, used to set the + `Version:` field. 
Defaults to the project version if unspecified. - `d_module_versions` a list of module version flags used when compiling D sources referred to by this pkg-config file diff --git a/docs/markdown/Python-3-module.md b/docs/markdown/Python-3-module.md index 7dda672..50ec4e4 100644 --- a/docs/markdown/Python-3-module.md +++ b/docs/markdown/Python-3-module.md @@ -3,14 +3,19 @@ This module provides support for dealing with Python 3. It has the following methods. +This module is deprecated and replaced by the [python](Python-module.md) module. + ## find_python This is a cross platform way of finding the Python 3 executable, which may have a different name on different operating systems. Returns an -[external program](Reference-manual.html#external-program-object) object. +[external program](Reference-manual.md#external-program-object) object. *Added 0.38.0* +Deprecated, replaced by [`find_installation`](Python-module.md#find_installation) +function from `python` module. + ## extension_module Creates a `shared_module` target that is named according to the naming @@ -24,15 +29,24 @@ need to add `dependencies : dependency('python3')`, see *Added 0.38.0* +Deprecated, replaced by [`extension_module`](Python-module.md#extension_module) +method from `python` module. + ## language_version Returns a string with the Python language version such as `3.5`. *Added 0.40.0* +Deprecated, replaced by [`language_version`](Python-module.md#language_version) +method from `python` module. + ## sysconfig_path Returns the Python sysconfig path without prefix, such as `lib/python3.6/site-packages`. *Added 0.40.0* + +Deprecated, replaced by [`get_path`](Python-module.md#get_path) +method from `python` module. diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md index 51721f0..a50a33d 100644 --- a/docs/markdown/Python-module.md +++ b/docs/markdown/Python-module.md @@ -45,7 +45,12 @@ Keyword arguments are the following: abort if no python installation can be found. 
If `required` is set to `false`, Meson will continue even if no python installation was found. You can then use the `.found()` method on the returned object to check - whether it was found or not. + whether it was found or not. Since *0.48.0* the value of a + [`feature`](Build-options.md#features) option can also be passed to the + `required` keyword argument. +- `disabler`: if `true` and no python installation can be found, return a + [disabler object](Reference-manual.md#disabler-object) instead of a not-found object. + *Since 0.49.0* **Returns**: a [python installation][`python_installation` object] diff --git a/docs/markdown/Qt5-module.md b/docs/markdown/Qt5-module.md index 46fbd84..3a51954 100644 --- a/docs/markdown/Qt5-module.md +++ b/docs/markdown/Qt5-module.md @@ -9,6 +9,9 @@ This method takes the following keyword arguments: - `moc_headers`, `moc_sources`, `ui_files`, `qresources`, which define the files that require preprocessing with `moc`, `uic` and `rcc` - `include_directories`, the directories to add to header search path for `moc` (optional) - `moc_extra_arguments`, any additional arguments to `moc` (optional). Available since v0.44.0. + - `uic_extra_arguments`, any additional arguments to `uic` (optional). Available since v0.49.0. + - `rcc_extra_arguments`, any additional arguments to `rcc` (optional). Available since v0.49.0. + - `dependencies`, dependency objects needed by moc. Available since v0.48.0. It returns an opaque object that should be passed to a main build target. 
@@ -39,7 +42,8 @@ qt5_dep = dependency('qt5', modules: ['Core', 'Gui']) inc = include_directories('includes') moc_files = qt5.preprocess(moc_headers : 'myclass.h', moc_extra_arguments: ['-DMAKES_MY_MOC_HEADER_COMPILE'], - include_directories: inc) + include_directories: inc, + dependencies: qt5_dep) translations = qt5.compile_translations(ts_files : 'myTranslation_fr.ts', build_by_default : true) executable('myprog', 'main.cpp', 'myclass.cpp', moc_files, include_directories: inc, diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index b616d3e..a723b03 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -105,6 +105,9 @@ the following: - `exe_wrapper` a list containing the wrapper command or script followed by the arguments to it - `gdb` if `true`, the tests are also run under `gdb` - `timeout_multiplier` a number to multiply the test timeout with +- `is_default` a bool to set whether this is the default test setup. + If `true`, the setup will be used whenever `meson test` is run + without the `--setup` option. Since 0.49.0 To use the test setup, run `meson test --setup=*name*` inside the build dir. @@ -112,6 +115,14 @@ Note that all these options are also available while running the `meson test` script for running tests instead of `ninja test` or `msbuild RUN_TESTS.vcxproj`, etc depending on the backend. +### assert() + +``` meson + void assert(*condition*, *message*) +``` + +Abort with an error message if `condition` evaluates to `false`. + ### benchmark() ``` meson @@ -167,13 +178,17 @@ methods section](#build-target-object) below. ### configuration_data() ``` meson - configuration_data_object = configuration_data() + configuration_data_object = configuration_data(...) ``` Creates an empty configuration object. You should add your configuration with [its method calls](#configuration-data-object) and finally use it in a call to `configure_file`. 
+Since *0.49.0* takes an optional dictionary as first argument. If provided, each +key/value pair is added into the `configuration_data` as if `set()` method was +called for each of them. + ### configure_file() ``` meson @@ -187,7 +202,8 @@ When a [`configuration_data()`](#configuration_data) object is passed to the `configuration:` keyword argument, it takes a template file as the `input:` (optional) and produces the `output:` (required) by substituting values from the configuration data as detailed in [the -configuration file documentation](Configuration.md). +configuration file documentation](Configuration.md). Since *0.49.0* a dictionary +can be passed instead of a [`configuration_data()`](#configuration_data) object. When a list of strings is passed to the `command:` keyword argument, it takes any source or configured file as the `input:` and assumes @@ -242,7 +258,7 @@ Create a custom top level build target. The only positional argument is the name of this target and the keyword arguments are the following. -- `build_by_default` *(added 0.38.0)* causes, when set to true, to +- `build_by_default` *(added 0.38)* causes, when set to true, to have this target be built by default, that is, when invoking plain `ninja`; the default value is false - `build_always` (deprecated) if `true` this target is always considered out of @@ -256,7 +272,7 @@ following. this argument is set to true, Meson captures `stdout` and writes it to the target file. Note that your command argument list may not contain `@OUTPUT@` when capture mode is active. -- `console` keyword argument conflicts with `capture`, and is meant +- `console` *(added 0.48)* keyword argument conflicts with `capture`, and is meant for commands that are resource-intensive and take a long time to finish. With the Ninja backend, setting this will add this target to [Ninja's `console` pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool), @@ -349,10 +365,10 @@ keyword arguments. 
``` Finds an external dependency (usually a library installed on your -system) with the given name with `pkg-config` if possible, as a -framework (OSX only), and with +system) with the given name with `pkg-config` and [with CMake](Dependencies.md#CMake) +if `pkg-config` fails. Additionally, frameworks (OSX only) and [library-specific fallback detection logic](Dependencies.md#dependencies-with-custom-lookup-functionality) -otherwise. This function supports the following keyword arguments: +are also supported. This function supports the following keyword arguments: - `default_options` *(added 0.37.0)* an array of default option values that override those set in the subproject's `meson_options.txt` @@ -395,6 +411,9 @@ otherwise. This function supports the following keyword arguments: [library-specific](Dependencies.md#dependencies-with-custom-lookup-functionality) keywords may also be accepted (e.g. `modules` specifies submodules to use for dependencies such as Qt5 or Boost. ) +- `disabler` if `true` and the dependency couldn't be found, return a + [disabler object](#disabler-object) instead of a not-found dependency. + *Since 0.49.0* If dependency_name is `''`, the dependency is always not found. So with `required: false`, this always returns a dependency object for which the @@ -537,7 +556,9 @@ be passed to [shared and static libraries](#library). 
- `d_import_dirs` list of directories to look in for string imports used in the D programming language - `d_unittest`, when set to true, the D modules are compiled in debug mode -- `d_module_versions` list of module versions set when compiling D sources +- `d_module_versions` list of module version identifiers set when compiling D sources +- `d_debug` list of module debug identifiers set when compiling D sources +- `pie` *(added 0.49.0)* build a position-independent executable The list of `sources`, `objects`, and `dependencies` is always flattened, which means you can freely nest and add lists while @@ -585,6 +606,10 @@ Keyword arguments are the following: defined there, then from the system. If set to `true`, the cross file is ignored and the program is only searched from the system. +- `disabler` if `true` and the program couldn't be found, return a + [disabler object](#disabler-object) instead of a not-found object. + *Since 0.49.0* + Meson will also autodetect scripts with a shebang line and run them with the executable/interpreter specified in it both on Windows (because the command invocator will reject the command otherwise) and @@ -679,6 +704,7 @@ argument also accepts the following: - `@OUTPUT@`: the full path to the output file - `@INPUT@`: the full path to the input file +- `@DEPFILE@`: the full path to the depfile - `@SOURCE_DIR@`: the full path to the root of the source tree - `@CURRENT_SOURCE_DIR@`: this is the directory where the currently processed meson.build is located in - `@BUILD_DIR@`: the full path to the root of the build dir where the output will be placed @@ -893,8 +919,7 @@ An example value could be `['rwxr-sr-x', 'root', 'root']`. Installs the specified man files from the source tree into system's man directory during the install step. This directory can be overridden by specifying it with the `install_dir` keyword -argument. All man pages are compressed during installation and -installed with a `.gz` suffix. +argument. 
The `install_mode` argument can be used to specify the file mode in symbolic format and optionally the owner/uid and group/gid for the installed files. @@ -983,7 +1008,7 @@ the jar with `java -jar file.jar`. ### join_paths() ``` meson - string join_paths(string1, string2, ...) +string join_paths(string1, string2, ...) ``` Joins the given strings into a file system path segment. For example @@ -993,6 +1018,15 @@ dropped. That means that `join_paths('foo', '/bar')` returns `/bar`. *Added 0.36.0* +Since 0.49.0 using the`/` operator on strings is equivalent to calling +`join_paths`. + +```meson +# res1 and res2 will have identical values +res1 = join_paths(foo, bar) +res2 = foo / bar +``` + ### library() ``` meson @@ -1172,6 +1206,9 @@ and subdirectory the target was defined in, respectively. Assigns a value to the given variable name. Calling `set_variable('foo', bar)` is equivalent to `foo = bar`. +**Note:** Prior to v0.46.1, the `value` parameter could not be an +array type, due to flattening of the function parameters. + ### shared_library() ``` meson @@ -1405,7 +1442,7 @@ be up to date on every build. Keywords are similar to `custom_target`. Meson will read the contents of `input`, substitute the `replace_string` with the detected revision number, and write the -result to `output`. This method returns a +result to `output`. This method returns a [`custom_target`](#custom_target) object that (as usual) should be used to signal dependencies if other targets use the file outputted by this. @@ -1428,14 +1465,14 @@ The `meson` object allows you to introspect various properties of the system. This object is always mapped in the `meson` variable. It has the following methods. -- `add_dist_script` causes the script given as argument to run during +- `add_dist_script(script_name, arg1, arg, ...)` causes the script given as argument to run during `dist` operation after the distribution source has been generated but before it is archived. 
Note that this runs the script file that is in the _staging_ directory, not the one in the source directory. If the script file can not be found in the staging directory, it is a hard error. This command can only invoked from the main project, calling it from a subproject is a hard - error. Available since 0.48.0. + error. Available since 0.48.0. Before 0.49.0, the function only accepted a single argument. - `add_install_script(script_name, arg1, arg2, ...)` causes the script given as an argument to be run during the install step, this script @@ -1530,7 +1567,7 @@ the following methods. compilation to reduce build time) and `false` otherwise. - `override_find_program(progname, program)` [*(Added - 0.46.0)*](Release-notes-for-0-46-0.html#can-override-find_program) + 0.46.0)*](Release-notes-for-0.46.0.md#can-override-find_program) specifies that whenever `find_program` is used to find a program named `progname`, Meson should not not look it up on the system but instead return `program`, which may either be the result of @@ -1564,7 +1601,7 @@ methods: etc. - `system()` returns the operating system name. [This - table](Reference-tables.html#operating-system-names) Lists all of + table](Reference-tables.md#operating-system-names) Lists all of the currently known Operating System names, these are guaranteed to continue working. @@ -1622,6 +1659,9 @@ the following methods: the positional argument, you can specify external dependencies to use with `dependencies` keyword argument. +- `cmd_array()` returns an array containing the command arguments for + the current compiler. + - `compiles(code)` returns true if the code fragment given in the positional argument compiles, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a @@ -1645,6 +1685,9 @@ the following methods: argument, which can be either a string or a list of strings. 
Since *0.47.0* the value of a [`feature`](Build-options.md#features) option can also be passed to the `required` keyword argument. + *Since 0.49.0* if the keyword argument `disabler` is `true` and the + dependency couldn't be found, return a [disabler object](#disabler-object) + instead of a not-found dependency. - `first_supported_argument(list_of_strings)`, given a list of strings, returns the first argument that passes the `has_argument` @@ -1660,7 +1703,14 @@ the following methods: the compiler would. E.g. `"a" "b"` will become `"ab"`. - `get_id()` returns a string identifying the compiler. For example, - `gcc`, `msvc`, [and more](Reference-tables.html#compiler-ids). + `gcc`, `msvc`, [and more](Reference-tables.md#compiler-ids). + +- `get_argument_syntax()` *(new in 0.49.0)* returns a string identifying the type + of arguments the compiler takes. Can be one of `gcc`, `msvc`, or an undefined + string value. This method is useful for identifying compilers that are not + gcc or msvc, but use the same argument syntax as one of those two compilers + such as clang or icc, especially when they use different syntax on different + operating systems. - `get_supported_arguments(list_of_string)` *(added 0.43.0)* returns an array containing only the arguments supported by the compiler, @@ -1759,7 +1809,7 @@ the following methods: compiler supports the GNU style (`__attribute__(...)`) `name`. This is preferable to manual compile checks as it may be optimized for compilers that do not support such attributes. - [This table](Reference-tables.html#gcc-attribute-support) Lists all of the + [This table](Reference-tables.md#gcc-__attribute__) lists all of the supported attributes. - `get_supported_function_attributes(list_of_names)` *(added in 0.48.0) @@ -1773,7 +1823,7 @@ The following keyword arguments can be used: required to find the header or symbol. For example, you might need to pass the include path `-Isome/path/to/header` if a header is not in the default include path. 
In versions newer than 0.38.0 you - should use the `include_directories` keyword described above. You + should use the `include_directories` keyword described below. You may also want to pass a library name `-lfoo` for `has_function` to check for a function. Supported by all methods except `get_id`, `version`, and `find_library`. @@ -1786,6 +1836,9 @@ The following keyword arguments can be used: `run()`. If this keyword argument is not passed to those methods, no message will be printed about the check. +- `no_builtin_args` when set to true, the compiler arguments controlled + by built-in configuration options are not added. + - `prefix` can be used to add #includes and other things that are required for the symbol to be declared. System definitions should be passed via compiler args (eg: `_GNU_SOURCE` is often required for @@ -1794,7 +1847,7 @@ The following keyword arguments can be used: `sizeof`, `has_type`, `has_function`, `has_member`, `has_members`, `check_header`, `has_header`, `has_header_symbol`. -**Note:** These compiler checks do not use compiler arguments added with +**Note:** These compiler checks do not use compiler arguments added with `add_*_arguments()`, via `-Dlang_args` on the command-line, or through `CFLAGS`/`LDFLAGS`, etc in the environment. Hence, you can trust that the tests will be fully self-contained, and won't fail because of custom diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index ab79abd..b561e86 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -2,38 +2,41 @@ ## Compiler ids -These are return values of the `get_id` method in a compiler object. 
- -| Value | Compiler family | -| ----- | ---------------- | -| gcc | The GNU Compiler Collection | -| clang | The Clang compiler | -| msvc | Microsoft Visual Studio | -| intel | Intel compiler | -| llvm | LLVM-based compiler (Swift, D) | -| mono | Xamarin C# compiler | -| dmd | D lang reference compiler | -| rustc | Rust compiler | -| valac | Vala compiler | -| pathscale | The Pathscale Fortran compiler | -| pgi | The Portland Fortran compiler | -| sun | Sun Fortran compiler | -| g95 | The G95 Fortran compiler | -| open64 | The Open64 Fortran Compiler | -| nagfor | The NAG Fortran compiler | -| lcc | Elbrus C/C++/Fortran Compiler | -| arm | ARM compiler | -| armclang | ARMCLANG compiler | +These are return values of the `get_id` (Compiler family) and +`get_argument_syntax` (Argument syntax) method in a compiler object. + +| Value | Compiler family | Argument syntax | +| ----- | --------------- | --------------- | +| arm | ARM compiler | | +| armclang | ARMCLANG compiler | | +| ccrx | Renesas RX Family C/C++ compiler | | +| clang | The Clang compiler | gcc | +| clang-cl | The Clang compiler (MSVC compatible driver) | msvc | +| dmd | D lang reference compiler | | +| g95 | The G95 Fortran compiler | | +| gcc | The GNU Compiler Collection | gcc | +| intel | Intel compiler | msvc on windows, otherwise gcc | +| lcc | Elbrus C/C++/Fortran Compiler | | +| llvm | LLVM-based compiler (Swift, D) | | +| mono | Xamarin C# compiler | | +| msvc | Microsoft Visual Studio | msvc | +| nagfor | The NAG Fortran compiler | | +| open64 | The Open64 Fortran Compiler | | +| pathscale | The Pathscale Fortran compiler | | +| pgi | The Portland Fortran compiler | | +| rustc | Rust compiler | | +| sun | Sun Fortran compiler | | +| valac | Vala compiler | | ## Script environment variables | Value | Comment | | ----- | ------- | -| MESON_SOURCE_ROOT | Absolute path to the source dir | -| MESON_BUILD_ROOT | Absolute path to the build dir | | MESONINTROSPECT | Command to run to run the 
introspection command, may be of the form `python /path/to/meson introspect`, user is responsible for splitting the path if necessary. | -| MESON_SUBDIR | Current subdirectory, only set for `run_command` | +| MESON_BUILD_ROOT | Absolute path to the build dir | | MESON_DIST_ROOT | Points to the root of the staging directory, only set when running `dist` scripts | +| MESON_SOURCE_ROOT | Absolute path to the source dir | +| MESON_SUBDIR | Current subdirectory, only set for `run_command` | ## CPU families @@ -44,40 +47,48 @@ set in the cross file. | Value | Comment | | ----- | ------- | -| x86 | 32 bit x86 processor | -| x86_64 | 64 bit x86 processor | -| ia64 | Itanium processor | -| arm | 32 bit ARM processor | | aarch64 | 64 bit ARM processor | +| arc | 32 bit ARC processor | +| arm | 32 bit ARM processor | +| e2k | MCST Elbrus processor | +| ia64 | Itanium processor | | mips | 32 bit MIPS processor | | mips64 | 64 bit MIPS processor | +| parisc | HP PA-RISC processor | | ppc | 32 bit PPC processors | | ppc64 | 64 bit PPC processors | -| e2k | MCST Elbrus processor | -| parisc | HP PA-RISC processor | | riscv32 | 32 bit RISC-V Open ISA| | riscv64 | 64 bit RISC-V Open ISA| +| rx | Renesas RX 32 bit MCU | +| s390x | IBM zSystem s390x | | sparc | 32 bit SPARC | | sparc64 | SPARC v9 processor | -| s390x | IBM zSystem s390x | +| x86 | 32 bit x86 processor | +| x86_64 | 64 bit x86 processor | Any cpu family not listed in the above list is not guaranteed to remain stable in future releases. +Those porting from autotools should note that meson does not add +endianness to the name of the cpu_family. For example, autotools +will call little endian PPC64 "ppc64le", meson will not, you must +also check the `.endian()` value of the machine for this information. + ## Operating system names These are provided by the `.system()` method call. 
| Value | Comment | | ----- | ------- | -| linux | | -| darwin | Either OSX or iOS | -| windows | Any version of Windows | | cygwin | The Cygwin environment for Windows | -| haiku | | -| freebsd | FreeBSD and its derivatives | +| darwin | Either OSX or iOS | | dragonfly | DragonFly BSD | +| freebsd | FreeBSD and its derivatives | +| gnu | GNU Hurd | +| haiku | | +| linux | | | netbsd | | +| windows | Any version of Windows | Any string not listed above is not guaranteed to remain stable in future releases. @@ -100,6 +111,19 @@ These are the parameter names for passing language specific arguments to your bu | Rust | rust_args | | Vala | vala_args | +## Compiler and linker flag environment variables + +These environment variables will be used to modify the compiler and +linker flags. + +| CFLAGS | Flags for the C compiler | +| CXXFLAGS | Flags for the C++ compiler | +| OBJCFLAGS | Flags for the Objective C compiler | +| FFLAGS | Flags for the Fortran compiler | +| DFLAGS | Flags for the D compiler | +| VALAFLAGS | Flags for the Vala compiler | +| RUSTFLAGS | Flags for the Rust compiler | +| LDFLAGS | The linker flags, used for all languages | ## Function Attributes @@ -107,7 +131,7 @@ These are the parameters names that are supported using `compiler.has_function_attribute()` or `compiler.get_supported_function_attributes()` -### GCC __attribute__ +### GCC `__attribute__` These values are supported using the GCC style `__attribute__` annotations, which are supported by GCC, Clang, and other compilers. diff --git a/docs/markdown/Release-notes-for-0.47.0.md b/docs/markdown/Release-notes-for-0.47.0.md index 9736256..a427a72 100644 --- a/docs/markdown/Release-notes-for-0.47.0.md +++ b/docs/markdown/Release-notes-for-0.47.0.md @@ -34,11 +34,10 @@ The environment path should be set properly for the ARM compiler executables. The `--target`, `-mcpu` options with the appropriate values should be mentioned in the cross file as shown in the snippet below. 
-``` +```ini [properties] c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] cpp_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] - ``` Note: @@ -106,7 +105,7 @@ When called without an input file, `configure_file` generates a C header file by default. A keyword argument was added to allow specifying the output format, for example for use with nasm or yasm: -``` +```meson conf = configuration_data() conf.set('FOO', 1) diff --git a/docs/markdown/Release-notes-for-0.48.0.md b/docs/markdown/Release-notes-for-0.48.0.md index cf3db4c..99673a4 100644 --- a/docs/markdown/Release-notes-for-0.48.0.md +++ b/docs/markdown/Release-notes-for-0.48.0.md @@ -1,17 +1,307 @@ --- title: Release 0.48 -short-description: Release notes for 0.48 (preliminary) +short-description: Release notes for 0.48 ... # New features -This page is a placeholder for the eventual release notes. +## Toggles for build type, optimization and vcrt type -Notable new features should come with release note updates. This is -done by creating a file snippet called `snippets/featurename.md` and -whose contents should look like this: +Since the very beginning Meson has provided different project types to +use, such as *debug* and *minsize*. There is also a *plain* type that +adds nothing by default but instead makes it the user's responsibility +to add everything by hand. This works but is a bit tedious. - ## Feature name +In this release we have added new options to manually toggle +e.g. optimization levels and debug info so those can be changed +independently of other options. For example by default the debug +buildtype has no optimization enabled at all. If you wish to use GCC's +`-Og` instead, you could set it with the following command: - A short description explaining the new feature and how it should be used. +``` +meson configure -Doptimization=g +``` +Similarly we have added a toggle option to select the version of +Visual Studio C runtime to use.
By default it uses the debug runtime +DLL for debug builds and release DLL for release builds but this can be +manually changed with the new base option `b_vscrt`. + +## Meson warns if two calls to `configure_file()` write to the same file + +If two calls to [`configure_file()`](Reference-manual.md#configure_file) +write to the same file Meson will print a `WARNING:` message during +configuration. For example: +```meson +project('configure_file', 'cpp') + +configure_file( + input: 'a.in', + output: 'out', + command: ['./foo.sh'] +) +configure_file( + input: 'a.in', + output: 'out', + command: ['./foo.sh'] +) +``` + +This will output: + +``` +The Meson build system +Version: 0.47.0.dev1 +Source dir: /path/to/srctree +Build dir: /path/to/buildtree +Build type: native build +Project name: configure_file +Project version: undefined +Build machine cpu family: x86_64 +Build machine cpu: x86_64 +Configuring out with command +WARNING: Output file out for configure_file overwritten. First time written in line 3 now in line 8 +Configuring out with command +Build targets in project: 0 +Found ninja-1.8.2 at /usr/bin/ninja +``` + +## New kwarg `console` for `custom_target()` + +This keyword argument conflicts with `capture`, and is meant for +commands that are resource-intensive and take a long time to +finish. With the Ninja backend, setting this will add this target to +[Ninja's `console` +pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool), +which has special properties such as not buffering stdout and +serializing all targets in this pool. + +The primary use-case for this is to be able to run external commands +that take a long time to execute. Without setting this, the user does +not receive any feedback about what the program is doing. + +## `dependency(version:)` now applies to all dependency types + +Previously, version constraints were only enforced for dependencies found using +the pkg-config dependency provider.
These constraints now apply to dependencies +found using any dependency provider. + +Some combinations of dependency, host and method do not currently support +discovery of the version. In these cases, the dependency will not be found if a +version constraint is applied, otherwise the `version()` method for the +dependency object will return `'unknown'`. + +(If discovering the version in one of these combinations is important to you, +and a method exists to determine the version in that case, please file an issue +with as much information as possible.) + +## python3 module is deprecated + +A generic module `python` has been added in Meson `0.46.0` and has a superset of +the features implemented by the previous `python3` module. + +In most cases, it is a simple matter of renaming: +```meson +py3mod = import('python3') +python = py3mod.find_python() +``` + +becomes + +```meson +pymod = import('python') +python = pymod.find_installation() +``` + +## Dictionary addition + +Dictionaries can now be added, values from the second dictionary overrides values +from the first + +```meson +d1 = {'a' : 'b'} +d3 = d1 + {'a' : 'c'} +d3 += {'d' : 'e'} +``` + +## Dist scripts + +You can now specify scripts that are run as part of the `dist` +target. An example usage would go like this: + +```meson +project('foo', 'c') + +# other stuff here + +meson.add_dist_script('dist_cleanup.py') +``` + +## Fatal warnings + +A new command line option has been added: `--fatal-meson-warnings`. When enabled, any +warning message printed by Meson will be fatal and raise an exception. It is +intended to be used by developers and CIs to easily catch deprecation warnings, +or any other potential issues. + +## Helper methods added for checking GNU style attributes: `__attribute__(...)` + +A set of new helpers have been added to the C and C++ compiler objects for +checking GNU style function attributes. 
These are not just simpler to use, they +may be optimized to return fast on compilers that don't support these +attributes. Currently this is true for MSVC. + +```meson +cc = meson.get_compiler('c') +if cc.has_function_attribute('aligned') + add_project_arguments('-DHAVE_ALIGNED', language : 'c') +endif +``` + +Would replace code like: + +```meson +if cc.compiles('''into foo(void) __attribute__((aligned(32)))''') + add_project_arguments('-DHAVE_ALIGNED', language : 'c') +endif +``` + +Additionally, a multi argument version has been added: + +```meson +foreach s : cc.get_supported_function_attributes(['hidden', 'alias']) + add_project_arguments('-DHAVE_@0@'.format(s.to_upper()), language : 'c') +endforeach +``` + +## `gnome.generate_gir()` now optionally accepts multiple libraries + +The GNOME module can now generate a single gir for multiple libraries, which +is something `g-ir-scanner` supported, but had not been exposed yet. + +gnome.generate_gir() will now accept multiple positional arguments, if none +of these arguments are an `Executable` instance. + +## Hotdoc module + +A new module has been written to ease generation of +[hotdoc](https://hotdoc.github.io/) based documentation. It supports +complex use cases such as hotdoc subprojects (to create documentation +portals) and makes it straight forward to leverage full capabilities +of hotdoc. + +Simple usage: + +``` meson +hotdoc = import('hotdoc') + +hotdoc.generate_doc( + 'foobar', + c_smart_index: true, + project_version: '0.1', + sitemap: 'sitemap.txt', + index: 'index.md', + c_sources: ['path/to/file.c'], + languages: ['c'], + install: true, +) +``` + +## `i18n.merge_file()` now fully supports variable substitutions defined in `custom_target()` + +Filename substitutions like @BASENAME@ and @PLAINNAME@ were previously +accepted but the name of the build target wasn't altered leading to +colliding target names when using the substitution twice. +i18n.merge_file() now behaves as custom_target() in this regard. 
+ +## Projects args can be set separately for cross and native builds (potentially breaking change) + +It has been a longstanding bug (or let's call it a "delayed bug fix") +that if you do this: + +```meson +add_project_arguments('-DFOO', language : 'c') +``` + +Then the flag is used both in native and cross compilations. This is +very confusing and almost never what you want. To fix this a new +keyword `native` has been added to all functions that add arguments, +namely `add_global_arguments`, `add_global_link_arguments`, +`add_project_arguments` and `add_project_link_arguments` that behaves +like the following: + +```meson +# Added to native builds when compiling natively and to cross +# compilations when doing cross compiles. +add_project_arguments(...) + +# Added only to native compilations, not used in cross compilations. +add_project_arguments(..., native : true) + +# Added only to cross compilations, not used in native compilations. +add_project_arguments(..., native : false) +``` + +Also remember that cross compilation is a property of each +target. There can be targets that are compiled with the native compiler +and some which are compiled with the cross compiler. + +Unfortunately this change is backwards incompatible and may cause some +projects to fail building. However this should be very rare in practice. + +## More flexible `override_find_program()`. + +It is now possible to pass an `executable` to +`override_find_program()` if the overridden program is not used during +configure. + +This is particularly useful for fallback dependencies like Protobuf +that also provide a tool like protoc. + +## `shared_library()` now supports setting dylib compatibility and current version + +Now, by default `shared_library()` sets `-compatibility_version` and +`-current_version` of a macOS dylib using the `soversion`. + +This can be overridden by using the `darwin_versions:` kwarg to +[`shared_library()`](Reference-manual.md#shared_library).
As usual, you can +also pass this kwarg to `library()` or `build_target()` and it will be used in +the appropriate circumstances. + +## Version comparison + +`dependency(version:)` and other version constraints now handle versions +containing non-numeric characters better, comparing versions using the rpmvercmp +algorithm (as using the `pkg-config` autoconf macro `PKG_CHECK_MODULES` does). + +This is a breaking change for exact comparison constraints which rely on the +previous comparison behaviour of extending the compared versions with `'0'` +elements, up to the same length of `'.'`-separated elements. + +For example, a version of `'0.11.0'` would previously match a version constraint +of `'==0.11'`, but no longer does, being instead considered strictly greater. + +Instead, use a version constraint which exactly compares with the precise +version required, e.g. `'==0.11.0'`. + +## Keyword argument for GNU symbol visibility + +Build targets got a new keyword, `gnu_symbol_visibility` that controls +how symbols are exported from shared libraries. This is most commonly +used to hide implementation symbols like this: + +```meson +shared_library('mylib', ... + gnu_symbol_visibility: 'hidden') +``` + +In this case only symbols explicitly marked as visible in the source +files get exported. + +## Git wraps can now clone submodules automatically + +To enable this, the following needs to be added to the `.wrap` file: + +```ini +clone-recursive=true +``` diff --git a/docs/markdown/Release-notes-for-0.49.0.md b/docs/markdown/Release-notes-for-0.49.0.md new file mode 100644 index 0000000..bdf5769 --- /dev/null +++ b/docs/markdown/Release-notes-for-0.49.0.md @@ -0,0 +1,22 @@ +--- +title: Release 0.49 +short-description: Release notes for 0.49 (preliminary) +... + +# New features + +This page is a placeholder for the eventual release notes. + +Notable new features should come with release note updates. 
This is +done by creating a file snippet called `snippets/featurename.md` and +whose contents should look like this: + + ## Feature name + + A short description explaining the new feature and how it should be used. + +## Libgcrypt dependency now supports libgcrypt-config + +Earlier, `dependency('libgcrypt')` could only detect the library with pkg-config +files. Now, if pkg-config files are not found, Meson will look for +`libgcrypt-config` and if it's found, will use that to find the library. diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md index 00b70ed..426e87d 100644 --- a/docs/markdown/Running-Meson.md +++ b/docs/markdown/Running-Meson.md @@ -92,6 +92,22 @@ You can then open the generated solution with Visual Studio and compile it in the usual way. A list of backends can be obtained with `meson --help`. +Environment Variables +-- + +Sometimes you want to add extra compiler flags, this can be done by +passing them in environment variables when calling meson. See [the +reference +tables](Reference-tables.md#compiler-and-linker-flag-envrionment-variables) +for a list of all the environment variables. Be aware however these +environment variables are only used for the native compiler and will +not affect the compiler used for cross-compiling, where the flags +specified in the cross file will be used. + +Furthermore it is possible to stop meson from adding flags itself by +using the `--buildtype=plain` option, in this case you must provide +the full compiler and linker arguments needed. + Building the source == diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md index 80ed3e7..2e3e2ea 100644 --- a/docs/markdown/Subprojects.md +++ b/docs/markdown/Subprojects.md @@ -14,45 +14,46 @@ Meson tries to solve this problem by making it extremely easy to provide both at the same time. 
The way this is done is that Meson allows you to take any other Meson project and make it a part of your build without (in the best case) any changes to its Meson setup. It -becomes a transparent part of the project. The basic idiom goes -something like this. +becomes a transparent part of the project. -```meson -dep = dependency('foo', fallback : [subproject_name, variable_name]) -``` +It should be noted that this only works for subprojects that are built +with Meson. It can not be used with any other build system. The reason +is the simple fact that there is no possible way to do this reliably +with mixed build systems. + +## A subproject example + +Usually dependencies consist of some header files plus a library to link against. +To declare this internal dependency use `declare_dependency` function. As an example, suppose we have a simple project that provides a shared -library. It would be set up like this. +library. It's `meson.build` would look like this. ```meson -project('simple', 'c') -i = include_directories('include') -l = shared_library('simple', 'simple.c', include_directories : i, install : true) -simple_dep = declare_dependency(include_directories : i, - link_with : l) -``` +project('libsimple', 'c') -Then we could use that from a master project. First we generate a -subdirectory called `subprojects` in the root of the master -directory. Then we create a subdirectory called `simple` and put the -subproject in that directory. Now the subproject can be used like -this. 
+inc = include_directories('include') +libsimple = shared_library('simple', + 'simple.c', + include_directories : inc, + install : true) -```meson -project('master', 'c') -dep = dependency('simple', fallback : ['simple', 'simple_dep']) -exe = executable('prog', 'prog.c', - dependencies : dep, install : true) +libsimple_dep = declare_dependency(include_directories : inc, + link_with : libsimple) ``` -With this setup the system dependency is used when it is available, -otherwise we fall back on the bundled version. If you wish to always -use the embedded version, then you would declare it like this: +### Naming convention for dependency variables -```meson -simple_sp = subproject('simple') -dep = simple_sp.get_variable('simple_dep') -``` +Ideally the dependency variable name should be of `<project_name>_dep` form. +This way one can just use it without even looking inside build definitions of that subproject. + +In cases where there are multiple dependencies need to be declared, the default one +should be named as `<project_name>_dep` (e.g. `gtest_dep`), and others can have +`<project_name>_<other>_<name>_dep` form (e.g. `gtest_main_dep` - gtest with main function). + +There may be exceptions to these rules where common sense should be applied. + +### Build options in subproject All Meson features of the subproject, such as project options keep working and can be set in the master project. There are a few @@ -62,17 +63,111 @@ must not set global arguments because there is no way to do that reliably over multiple subprojects. To check whether you are running as a subproject, use the `is_subproject` function. -It should be noted that this only works for subprojects that are built -with Meson. It can not be used with any other build system. The reason -is the simple fact that there is no possible way to do this reliably -with mixed build systems. +## Using a subproject + +All subprojects must be inside `subprojects` directory. 
+The `subprojects` directory must be at the top level of your project. +Subproject declaration must be in your top level `meson.build`. + +### A simple example + +Let's use `libsimple` as a subproject. + +At the top level of your project create `subprojects` directory. +Then copy `libsimple` into `subprojects` directory. + +Your project's `meson.build` should look like this. + +```meson +project('my_project', 'cpp') + +libsimple_proj = subproject('libsimple') +libsimple_dep = libsimple_proj.get_variable('libsimple_dep') + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +Note that the subproject object is *not* used as the dependency, but +rather you need to get the declared dependency from it with +`get_variable` because a subproject may have multiple declared +dependencies. + +### Toggling between system libraries and embedded sources + +When building distro packages it is very important that you do not +embed any sources. Some distros have a rule forbidding embedded +dependencies so your project must be buildable without them or +otherwise the packager will hate you. + +Here's how you would use system libraries and fall back to embedding sources +if the dependency is not available. + +```meson +project('my_project', 'cpp') + +libsimple_dep = dependency('libsimple', required : false) + +if not libsimple_dep.found() + libsimple_proj = subproject('libsimple') + libsimple_dep = libsimple_proj.get_variable('libsimple_dep') +endif + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +Because this is such a common operation, Meson provides a shortcut for +this use case. + +```meson +dep = dependency('foo', fallback : [subproject_name, variable_name]) +``` + +The `fallback` keyword argument takes two items, the name of the +subproject and the name of the variable that holds the dependency. 
If +you need to do something more complicated, such as extract several +different variables, then you need to do it yourself with the manual +method described above. + +Using this shortcut the build definition would look like this. + +```meson +project('my_project', 'cpp') + +libsimple_dep = dependency('libsimple', fallback : ['libsimple', 'libsimple_dep']) + +executable('my_project', + 'my_project.cpp', + dependencies : libsimple_dep, + install : true) +``` + +With this setup when libsimple is provided by the system, we use it. When +that is not the case we use the embedded version (the one from subprojects). + +Note that `libsimple_dep` can point to an external or an internal dependency but +you don't have to worry about their differences. Meson will take care +of the details for you. + +### Subprojects depending on other subprojects Subprojects can use other subprojects, but all subprojects must reside in the top level `subprojects` directory. Recursive use of subprojects is not allowed, though, so you can't have subproject `a` that uses subproject `b` and have `b` also use `a`. -# Command-line options +## Obtaining subprojects + +Meson ships with a dependency system to automatically obtain +dependency subprojects. It is documented in the [Wrap dependency +system manual](Wrap-dependency-system-manual.md). + +## Command-line options The usage of subprojects can be controlled by users and distros with the following command-line options: @@ -101,13 +196,7 @@ the following command-line options: want to specifically build against the library sources provided by your subprojects. -# Obtaining subprojects - -Meson ships with a dependency system to automatically obtain -dependency subprojects. It is documented in the [Wrap dependency -system manual](Wrap-dependency-system-manual.md). - -# Why must all subprojects be inside a single directory? +## Why must all subprojects be inside a single directory? There are several reasons. 
diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index efb12e4..22b8be3 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -283,6 +283,17 @@ Note appending to an array will always create a new array object and assign it to `my_array` instead of modifying the original since all objects in Meson are immutable. +Since 0.49.0, you can check if an array contains an element like this: +```meson +my_array = [1, 2] +if 1 in my_array +# This condition is true +endif +if 1 not in my_array +# This condition is false +endif +``` + #### Array methods The following methods are defined for all arrays: @@ -316,6 +327,20 @@ Dictionaries are available since 0.47.0. Visit the [Reference Manual](Reference-manual.md#dictionary-object) to read about the methods exposed by dictionaries. +Since 0.49.0, you can check if a dictionary contains a key like this: +```meson +my_dict = {'foo': 42, 'foo': 43} +if 'foo' in my_dict +# This condition is true +endif +if 42 in my_dict +# This condition is false +endif +if 'foo' not in my_dict +# This condition is false +endif +``` + Function calls -- @@ -354,11 +379,34 @@ else endif opt = get_option('someoption') -if opt == 'foo' +if opt != 'foo' do_something() endif ``` +Logical operations +-- + +Meson has the standard range of logical operations which can be used in +`if` statements. + +```meson +if a and b + # do something +endif +if c or d + # do something +endif +if not e + # do something +endif +if not (f or g) + # do something +endif +``` + +Logical operations work only on boolean values. + ## Foreach statements To do an operation on all elements of an iterable, use the `foreach` @@ -409,28 +457,24 @@ foreach name, sources : components endforeach ``` -Logical operations --- +### Foreach `break` and `continue` -Meson has the standard range of logical operations. +Since 0.49.0 `break` and `continue` keywords can be used inside foreach loops. 
```meson -if a and b - # do something -endif -if c or d - # do something -endif -if not e - # do something -endif -if not (f or g) - # do something -endif +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach +# result is ['a', 'b'] ``` -Logical operations work only on boolean values. - Comments -- diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md index c555abc..c4436d0 100644 --- a/docs/markdown/Tutorial.md +++ b/docs/markdown/Tutorial.md @@ -9,6 +9,19 @@ definition for a simple project. Then we expand it to use external dependencies to show how easily they can be integrated into your project. +This tutorial has been written mostly for Linux usage. It assumes that +you have GTK development libraries available on the system. On +Debian-derived systems such as Ubuntu they can be installed with the +following command: + +``` +sudo apt install libgtk-3-dev +``` + +It is possible to build the GUI application On other platforms such as +Windows and macOS but it requires for you to install the dependency +libraries using a dependendy provider of your choice. + The humble beginning ----- diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index d328b97..57257f9 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -9,13 +9,14 @@ listed in the [`meson` GitHub topic](https://github.com/topics/meson). 
- [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3 - [Arduino sample project](https://github.com/jpakkane/mesonarduino) - - [bolt](https://gitlab.freedesktop.org/bolt/bolt) Userpsace daemon to enable security levels for Thunderboltâ„¢ 3 on Linux + - [bolt](https://gitlab.freedesktop.org/bolt/bolt) Userspace daemon to enable security levels for Thunderboltâ„¢ 3 on Linux - [Budgie Desktop](https://github.com/budgie-desktop/budgie-desktop), a desktop environment built on GNOME technologies - [casync](https://github.com/systemd/casync), Content-Addressable Data Synchronization Tool - [cinnamon-desktop](https://github.com/linuxmint/cinnamon-desktop), the cinnamon desktop library - [dbus-broker](https://github.com/bus1/dbus-broker), Linux D-Bus Message Broker - [Dpdk](http://dpdk.org/browse/dpdk), Data plane development kit, a set of libraries and drivers for fast packet processing - [DXVK](https://github.com/doitsujin/dxvk), a Vulkan-based Direct3D 11 implementation for Linux using Wine + - [elementary OS](https://github.com/elementary/), Linux desktop oriented distribution - [Emeus](https://github.com/ebassi/emeus), Constraint based layout manager for GTK+ - [ESP8266 Arduino sample project](https://github.com/trilader/arduino-esp8266-meson) Sample project for using the ESP8266 Arduino port with Meson - [Fractal](https://wiki.gnome.org/Apps/Fractal/), a Matrix messaging client for GNOME diff --git a/docs/markdown/Videos.md b/docs/markdown/Videos.md index d9ea34d..65fccfe 100644 --- a/docs/markdown/Videos.md +++ b/docs/markdown/Videos.md @@ -4,9 +4,16 @@ short-description: Videos about Meson # Videos + - [Compiling Multi-Million Line C++ Code Bases Effortlessly with the + Meson Build system](https://www.youtube.com/watch?v=SCZLnopmYBM), + CppCon 2018 + - [The Meson Build System, 4+ years of work to become an overnight success](https://www.youtube.com/watch?v=gHdTzdXkhRY), Linux.conf.au 2018 + - [Power through 
simplicity, using Python in the Meson Build + System](https://youtu.be/3jF3oVsjIEM), Piter.py, 2017 + - [Meson and the changing Linux build landscape](https://media.ccc.de/v/ASG2017-111-meson_and_the_changing_linux_build_landscape), All Systems Go 2017 diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index 38e1ab2..619492a 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -27,10 +27,24 @@ itself in a way that makes it easy to use (usually this means as a static library). To use this kind of a project as a dependency you could just copy and -extract it inside your project's `subprojects` directory. However -there is a simpler way. You can specify a Wrap file that tells Meson -how to download it for you. An example wrap file would look like this -and should be put in `subprojects/foobar.wrap`: +extract it inside your project's `subprojects` directory. + +However there is a simpler way. You can specify a Wrap file that tells Meson +how to download it for you. If you then use this subproject in your build, +Meson will automatically download and extract it during build. This makes +subproject embedding extremely easy. + +All wrap files must have a name of `<project_name>.wrap` form and be in `subprojects` dir. + +Currently Meson has three kinds of wraps: +- wrap-file +- wrap-file with Meson build patch +- wrap-git + +## wrap-file + +An example wrap file for `libfoobar` would have a name `libfoobar.wrap` +and would look like this: ```ini [wrap-file] @@ -41,16 +55,22 @@ source_filename = foobar-1.0.tar.gz source_hash = 5ebeea0dfb75d090ea0e7ff84799b2a7a1550db3fe61eb5f6f61c2e971e57663 ``` -If you then use this subproject in your build, Meson will -automatically download and extract it during build. This makes -subproject embedding extremely easy. +`source_hash` is *sha256sum* of `source_filename`. 
+ +Since *0.49.0* if `source_filename` is found in project's +`subprojects/packagecache` directory, it will be used instead of downloading the +source, even if `--wrap-mode` option is set to `nodownload`. The file's hash will +be checked. + +## wrap-file with Meson build patch Unfortunately most software projects in the world do not build with Meson. Because of this Meson allows you to specify a patch URL. This works in much the same way as Debian's distro patches. That is, they are downloaded and automatically applied to the subproject. These -files contain a Meson build definition for the given subproject. A -wrap file with an additional patch URL would look like this. +files contain a Meson build definition for the given subproject. + +A wrap file with an additional patch URL would look like this: ```ini [wrap-file] @@ -76,7 +96,14 @@ thousands of lines of code. Once you have a working build definition, just zip up the Meson build files (and others you have changed) and put them somewhere where you can download them. -## Branching subprojects directly from git +Since *0.49.0* if `patch_filename` is found in project's +`subprojects/packagecache` directory, it will be used instead of downloading the +patch, even if `--wrap-mode` option is set to `nodownload`. The file's hash will +be checked. + +## wrap-git + +This type of wrap allows branching subprojects directly from git. The above mentioned scheme assumes that your subproject is working off packaged files. Sometimes you want to check code out directly from @@ -85,9 +112,9 @@ slightly different wrap file. ```ini [wrap-git] -directory=samplesubproject -url=https://github.com/jpakkane/samplesubproject.git -revision=head +directory = samplesubproject +url = https://github.com/jpakkane/samplesubproject.git +revision = head ``` The format is straightforward. 
The only thing to note is the revision @@ -106,84 +133,26 @@ these cases you can specify the upload URL by adding the following at the end of your wrap file: ```ini -push-url=git@git.example.com:projects/someproject.git # Supported since version 0.37.0 -``` - -## Using wrapped projects - -To use a subproject simply do this in your top level `meson.build`. - -```meson -foobar_sp = subproject('foobar') +push-url = git@git.example.com:projects/someproject.git # Supported since version 0.37.0 ``` -Usually dependencies consist of some header files plus a library to -link against. To do this you would declare this internal dependency -like this: +If the git repo contains submodules, you can tell Meson to clone them +automatically by adding the following *(since 0.48.0)*: -```meson -foobar_dep = declare_dependency(link_with : mylib, - include_directories : myinc) -``` - -Then in your main project you would use them like this: - -```meson -executable('toplevel_exe', 'prog.c', - dependencies : foobar_sp.get_variable('foobar_dep')) -``` - -Note that the subproject object is *not* used as the dependency, but -rather you need to get the declared dependency from it with -`get_variable` because a subproject may have multiple declared -dependencies. - -## Toggling between distro packages and embedded source - -When building distro packages it is very important that you do not -embed any sources. Some distros have a rule forbidding embedded -dependencies so your project must be buildable without them or -otherwise the packager will hate you. - -Doing this with Meson and Wrap is simple. Here's how you would use -distro packages and fall back to embedding if the dependency is not -available. - -```meson -foobar_dep = dependency('foobar', required : false) - -if not foobar_dep.found() - foobar_subproj = subproject('foobar') - # the subproject defines an internal dependency with - # the command declare_dependency(). 
- foobar_dep = foobar_subproj.get_variable('foobar_dep') -endif - -executable('toplevel_exe', 'prog.c', - dependencies : foobar_dep) +```ini +clone-recursive = true ``` -Because this is such a common operation, Meson provides a shortcut for -this use case. +## Using wrapped projects -```meson -foobar_dep = dependency('foobar', fallback : ['foobar', 'foobar_dep']) -``` +Wraps provide a convenient way of obtaining a project into your subproject directory. +Then you use it as a regular subproject (see [subprojects](Subprojects.md)). -The `fallback` keyword argument takes two items, the name of the -subproject and the name of the variable that holds the dependency. If -you need to do something more complicated, such as extract several -different variables, then you need to do it yourself with the manual -method described above. +## Getting wraps -With this setup when foobar is provided by the system, we use it. When -that is not the case we use the embedded version. Note that -`foobar_dep` can point to an external or an internal dependency but -you don't have to worry about their differences. Meson will take care -of the details for you. +Usually you don't want to write your wraps by hand. -## Getting wraps +There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides +many dependencies ready to use. You can read more about WrapDB [here](Using-the-WrapDB.md). -Usually you don't want to write your wraps by hand. There is an online -repository called [WrapDB](Using-the-WrapDB.md) that provides many -dependencies ready to use. +There is also a Meson subcommand to get and manage wraps (see [using wraptool](Using-wraptool.md)). diff --git a/docs/markdown/index.md b/docs/markdown/index.md index cffd488..e57cd69 100644 --- a/docs/markdown/index.md +++ b/docs/markdown/index.md @@ -33,13 +33,13 @@ developers. The first one is the mailing list, which is hosted at The second way is via IRC. 
The channel to use is `#mesonbuild` at [Freenode](https://freenode.net/). -### [Projects using Meson](http://mesonbuild.com/Users.html) +### [Projects using Meson](Users.md) Many projects out there are using Meson and their communities are also a great resource for learning about what (and what not too!) do when trying to convert to using Meson. -[A short list of Meson users can be found here](http://mesonbuild.com/Users.html) +[A short list of Meson users can be found here](Users.md) but there are many more. We would love to hear about your success stories too and how things could be improved too! diff --git a/docs/markdown/snippets/add_release_note_snippets_here b/docs/markdown/snippets/add_release_note_snippets_here index e69de29..bc4039c 100644 --- a/docs/markdown/snippets/add_release_note_snippets_here +++ b/docs/markdown/snippets/add_release_note_snippets_here @@ -0,0 +1,3 @@ +## Added `cpp_std` option for the Visual Studio C++ compiler +Allows the use of C++17 features and experimental not-yet-standardized +features. Valid options are `c++11`, `c++14`, `c++17`, and `c++latest`. diff --git a/docs/markdown/snippets/buildopts_section.md b/docs/markdown/snippets/buildopts_section.md new file mode 100644 index 0000000..74cf8a1 --- /dev/null +++ b/docs/markdown/snippets/buildopts_section.md @@ -0,0 +1,14 @@ +## New `section` key for the buildoptions introspection + +Meson now has a new `section` key in each build option. This allows +IDEs to group these options similar to `meson configure`. 
+ +The possible values for `section` are: + + - core + - backend + - base + - compiler + - directory + - user + - test diff --git a/docs/markdown/snippets/buildtype_toggles.md b/docs/markdown/snippets/buildtype_toggles.md deleted file mode 100644 index e6ae53d..0000000 --- a/docs/markdown/snippets/buildtype_toggles.md +++ /dev/null @@ -1,21 +0,0 @@ -## Toggles for build type, optimization and vcrt type - -Since the very beginning Meson has provided different project types to -use, such as *debug* and *minsize*. There is also a *plain* type that -adds nothing by default but instead makes it the user's responsibility -to add everything by hand. This works but is a bit tedious. - -In this release we have added new new options to manually toggle -e.g. optimization levels and debug info so those can be changed -independently of other options. For example by default the debug -buildtype has no optmization enabled at all. If you wish to use GCC's -`-Og` instead, you could set it with the following command: - -``` -meson configure -Doptimization=g -``` - -Similarly we have added a toggle option to select the version of -Visual Studio C runtime to use. By default it uses the debug runtime -DLL debug builds and release DLL for release builds but this can be -manually changed with the new base option `b_vscrt`. diff --git a/docs/markdown/snippets/ccrx_toolchain_support.md b/docs/markdown/snippets/ccrx_toolchain_support.md new file mode 100644 index 0000000..6bf7e5b --- /dev/null +++ b/docs/markdown/snippets/ccrx_toolchain_support.md @@ -0,0 +1,16 @@ +## CC-RX compiler for C and CPP + +Cross-compilation is now supported for Renesas RX targets with the CC-RX compiler. + +The environment path should be set properly for the CC-RX compiler executables. +The `-cpu` option with the appropriate value should be mentioned in the cross-file as shown in the snippet below. 
+ +```ini +[properties] +c_args = ['-cpu=rx600'] +cpp_args = ['-cpu=rx600'] +``` + +The default extension of the executable output is `.abs`. +Other target specific arguments to the compiler and linker will need to be added explicitly from the cross-file(`c_args`/`c_link_args`/`cpp_args`/`cpp_link_args`) or some other way. +Refer to the CC-RX User's manual for additional compiler and linker options.
\ No newline at end of file diff --git a/docs/markdown/snippets/cmake.md b/docs/markdown/snippets/cmake.md new file mode 100644 index 0000000..8848c7b --- /dev/null +++ b/docs/markdown/snippets/cmake.md @@ -0,0 +1,19 @@ +## CMake `find_package` dependency backend + +Meson can now use the CMake `find_package` ecosystem to +detect dependencies. Both the old-style `<NAME>_LIBRARIES` +variables as well as imported targets are supported. Meson +can automatically guess the correct CMake target in most +cases but it is also possible to manually specify a target +with the `modules` property. + +```meson +# Implicitly uses CMake as a fallback and guesses a target +dep1 = dependency('KF5TextEditor') + +# Manually specify one or more CMake targets to use +dep2 = dependency('ZLIB', method : 'cmake', modules : ['ZLIB::ZLIB']) +``` + +CMake is automatically used after `pkg-config` fails when +no `method` (or `auto`) was provided in the dependency options. diff --git a/docs/markdown/snippets/compiler_argument_syntax.md b/docs/markdown/snippets/compiler_argument_syntax.md new file mode 100644 index 0000000..6ae32d4 --- /dev/null +++ b/docs/markdown/snippets/compiler_argument_syntax.md @@ -0,0 +1,22 @@ +## new compiler method `get_argument_syntax` + +The compiler object now has `get_argument_syntax` method, which returns a +string value of `gcc`, `msvc`, or an undefined value string value. This can be +used to determine if a compiler uses gcc syntax (`-Wfoo`), msvc syntax +(`/w1234`), or some other kind of arguments. 
+ +```meson +cc = meson.get_compiler('c') + +if cc.get_argument_syntax() == 'msvc' + if cc.has_argument('/w1235') + add_project_arguments('/w1235', language : ['c']) + endif +elif cc.get_argument_syntax() == 'gcc' + if cc.has_argument('-Wfoo') + add_project_arguments('-Wfoo', language : ['c']) + endif +elif cc.get_id() == 'some other compiler' + add_project_arguments('--error-on-foo', language : ['c']) +endif +``` diff --git a/docs/markdown/snippets/configure_file_overwrite_warning.md b/docs/markdown/snippets/configure_file_overwrite_warning.md deleted file mode 100644 index 550407d..0000000 --- a/docs/markdown/snippets/configure_file_overwrite_warning.md +++ /dev/null @@ -1,39 +0,0 @@ -## Meson warns if two calls to configure_file() write to the same file - -If two calls to [`configure_file()`](#Reference-manual.md#configure_file) -write to the same file Meson will print a `WARNING:` message during -configuration. For example: -```meson -project('configure_file', 'cpp') - -configure_file( - input: 'a.in', - output: 'out', - command: ['./foo.sh'] - ) -configure_file( - input: 'a.in', - output: 'out', - command: ['./foo.sh'] -) - -``` - -This will output: - -``` -The Meson build system -Version: 0.47.0.dev1 -Source dir: /path/to/srctree -Build dir: /path/to/buildtree -Build type: native build -Project name: configure_file -Project version: undefined -Build machine cpu family: x86_64 -Build machine cpu: x86_64 -Configuring out with command -WARNING: Output file out for configure_file overwritten. 
First time written in line 3 now in line 8 -Configuring out with command -Build targets in project: 0 -Found ninja-1.8.2 at /usr/bin/ninja -``` diff --git a/docs/markdown/snippets/custom_target_console_pool.md b/docs/markdown/snippets/custom_target_console_pool.md deleted file mode 100644 index 8b9bb34..0000000 --- a/docs/markdown/snippets/custom_target_console_pool.md +++ /dev/null @@ -1,13 +0,0 @@ -## New kwarg `console` for `custom_target()` - -This keyword argument conflicts with `capture`, and is meant for -commands that are resource-intensive and take a long time to -finish. With the Ninja backend, setting this will add this target to -[Ninja's `console` -pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool), -which has special properties such as not buffering stdout and -serializing all targets in this pool. - -The primary use-case for this is to be able to run external commands -that take a long time to exeute. Without setting this, the user does -not receive any feedback about what the program is doing. diff --git a/docs/markdown/snippets/dependency_version.md b/docs/markdown/snippets/dependency_version.md deleted file mode 100644 index 4bbf346..0000000 --- a/docs/markdown/snippets/dependency_version.md +++ /dev/null @@ -1,14 +0,0 @@ -## `dependency(version:)` now applies to all dependency types - -Previously, version constraints were only enforced for dependencies found using -the pkg-config dependency provider. These constraints now apply to dependencies -found using any dependency provider. - -Some combinations of dependency, host and method do not currently support -discovery of the version. In these cases, the dependency will not be found if a -version constraint is applied, otherwise the `version()` method for the -dependency object will return `'unknown'`. 
- -(If discovering the version in one of these combinations is important to you, -and a method exists to determine the version in that case, please file an issue -with as much information as possible.) diff --git a/docs/markdown/snippets/dict_add.md b/docs/markdown/snippets/dict_add.md deleted file mode 100644 index cde5b57..0000000 --- a/docs/markdown/snippets/dict_add.md +++ /dev/null @@ -1,10 +0,0 @@ -## Dictionary addition - -Dictionaries can now be added, values from the second dictionary overrides values -from the first - -```meson -d1 = {'a' : 'b'} -d3 = d1 + {'a' : 'c'} -d3 += {'d' : 'e'} -``` diff --git a/docs/markdown/snippets/disabler.md b/docs/markdown/snippets/disabler.md new file mode 100644 index 0000000..76874f6 --- /dev/null +++ b/docs/markdown/snippets/disabler.md @@ -0,0 +1,6 @@ +## Return `Disabler()` instead of not-found object + +Functions such as `dependency()`, `find_library()`, `find_program()`, and +`python.find_installation()` have a new keyword argument: `disabler`. When set +to `true` those functions return `Disabler()` objects instead of not-found +objects. diff --git a/docs/markdown/snippets/distscript.md b/docs/markdown/snippets/distscript.md deleted file mode 100644 index 37d05fe..0000000 --- a/docs/markdown/snippets/distscript.md +++ /dev/null @@ -1,12 +0,0 @@ -## Dist scripts - -You can now specify scripts that are run as part of the `dist` -target. An example usage would go like this: - -```meson -project('foo', 'c') - -# other stuff here - -meson.add_dist_script('dist_cleanup.py') -``` diff --git a/docs/markdown/snippets/fatal_warnings.md b/docs/markdown/snippets/fatal_warnings.md deleted file mode 100644 index adf3334..0000000 --- a/docs/markdown/snippets/fatal_warnings.md +++ /dev/null @@ -1,6 +0,0 @@ -## Fatal warnings - -A new command line option has been added: `--fatal-meson-warnings`. When enabled, any -warning message printed by Meson will be fatal and raise an exception. 
It is -intended to be used by developers and CIs to easily catch deprecation warnings, -or any other potential issues. diff --git a/docs/markdown/snippets/function_attributes.md b/docs/markdown/snippets/function_attributes.md deleted file mode 100644 index 5514494..0000000 --- a/docs/markdown/snippets/function_attributes.md +++ /dev/null @@ -1,29 +0,0 @@ -## Helper methods added for checking GNU style attributes: __attribute__(...) - -A set of new helpers have been added to the C and C++ compiler objects for -checking GNU style function attributes. These are not just simpler to use, they -may be optimized to return fast on compilers that don't support these -attributes. Currently this is true for MSVC. - -```meson -cc = meson.get_compiler('c') -if cc.has_function_attribute('aligned') - add_project_arguments('-DHAVE_ALIGNED', language : 'c') -endif -``` - -Would replace code like: - -```meson -if cc.compiles('''into foo(void) __attribute__((aligned(32)))''') - add_project_arguments('-DHAVE_ALIGNED', language : 'c') -endif -``` - -Additionally, a multi argument version has been added: - -```meson -foreach s : cc.get_supported_function_attributes(['hidden', 'alias']) - add_project_arguments('-DHAVE_@0@'.format(s.to_upper()), language : 'c') -endforeach -``` diff --git a/docs/markdown/snippets/generate_gir_multiple_libraries.md b/docs/markdown/snippets/generate_gir_multiple_libraries.md deleted file mode 100644 index 3541b71..0000000 --- a/docs/markdown/snippets/generate_gir_multiple_libraries.md +++ /dev/null @@ -1,7 +0,0 @@ -## gnome.generate_gir() now optionally accepts multiple libraries - -The GNOME module can now generate a single gir for multiple libraries, which -is something `g-ir-scanner` supported, but had not been exposed yet. - -gnome.generate_gir() will now accept multiple positional arguments, if none -of these arguments are an `Executable` instance. 
diff --git a/docs/markdown/snippets/hotdoc_module.md b/docs/markdown/snippets/hotdoc_module.md deleted file mode 100644 index 4662ea2..0000000 --- a/docs/markdown/snippets/hotdoc_module.md +++ /dev/null @@ -1,22 +0,0 @@ -## Hotdoc module - -A new module has been written to ease generation of [hotdoc](https://hotdoc.github.io/) based -documentation. It supports complex use cases such as hotdoc subprojects (to create documentation -portals) and makes it straight forward to leverage full capabilities of hotdoc. - -Simple usage: - -``` meson -hotdoc = import('hotdoc') - -hotdoc.generate_doc( - 'foobar', - c_smart_index: true, - project_version: '0.1', - sitemap: 'sitemap.txt', - index: 'index.md', - c_sources: ['path/to/file.c'], - languages: ['c'], - install: true, -) -```
\ No newline at end of file diff --git a/docs/markdown/snippets/i18n_variable_substitution.md b/docs/markdown/snippets/i18n_variable_substitution.md deleted file mode 100644 index b58f62a..0000000 --- a/docs/markdown/snippets/i18n_variable_substitution.md +++ /dev/null @@ -1,4 +0,0 @@ -## i18n.merge_file() now fully supports variable substitutions defined in custom_target() - -Filename substitutions like @BASENAME@ and @PLAINNAME@ were previously accepted but the name of the build target wasn't altered leading to colliding target names when using the substitution twice. -i18n.merge_file() now behaves as custom_target() in this regard. diff --git a/docs/markdown/snippets/introspect_projectinfo.md b/docs/markdown/snippets/introspect_projectinfo.md new file mode 100644 index 0000000..40558b8 --- /dev/null +++ b/docs/markdown/snippets/introspect_projectinfo.md @@ -0,0 +1,35 @@ +## `introspect --projectinfo` can now be used without configured build directory + +This allows IDE integration to get information about the project before the user has configured a build directory. + +Before you could use `meson.py introspect --projectinfo build-directory`. +Now you also can use `meson.py introspect --projectinfo project-dir/meson.build`. + +The output is similiar to the output with a build directory but additionally also includes information from `introspect --buildsystem-files`. 
+ +For example `meson.py introspect --projectinfo test\ cases/common/47\ subproject\ options/meson.build` +This outputs (pretty printed for readability): +``` +{ + "buildsystem_files": [ + "meson_options.txt", + "meson.build" + ], + "name": "suboptions", + "version": null, + "descriptive_name": "suboptions", + "subprojects": [ + { + "buildsystem_files": [ + "subprojects/subproject/meson_options.txt", + "subprojects/subproject/meson.build" + ], + "name": "subproject", + "version": "undefined", + "descriptive_name": "subproject" + } + ] +} +``` + +Both usages now include a new `descriptive_name` property which always shows the name set in the project. diff --git a/docs/markdown/snippets/manpage_compression.md b/docs/markdown/snippets/manpage_compression.md new file mode 100644 index 0000000..8c96807 --- /dev/null +++ b/docs/markdown/snippets/manpage_compression.md @@ -0,0 +1,7 @@ +## Manpages are no longer compressed implicitly + +Earlier, the `install_man` command has automatically compressed installed +manpages into `.gz` format. This collided with manpage compression hooks +already used by various distributions. Now, manpages are installed uncompressed +and distributors are expected to handle compressing them according to their own +compression preferences. diff --git a/docs/markdown/snippets/native_args.md b/docs/markdown/snippets/native_args.md deleted file mode 100644 index 54c6de2..0000000 --- a/docs/markdown/snippets/native_args.md +++ /dev/null @@ -1,34 +0,0 @@ -## Projects args can be set separately for cross and native builds (potentially breaking change) - -It has been a longstanding bug (or let's call it a "delayed bug fix") -that if yo do this: - -```meson -add_project_arguments('-DFOO', language : 'c') -``` - -Then the flag is used both in native and cross compilations. This is -very confusing and almost never what you want. 
To fix this a new -keyword `native` has been added to all functions that add arguments, -namely `add_global_arguments`, `add_global_link_arguments`, -`add_project_arguments` and `add_project_link_arguments` that behaves -like the following: - -``` -## Added to native builds when compiling natively and to cross -## compilations when doing cross compiles. -add_project_arguments(...) - -## Added only to native compilations, not used in cross compilations. -add_project_arguments(..., native : true) - -## Added only to cross compilations, not used in native compilations. -add_project_arguments(..., native : false) -``` - -Also remember that cross compilation is a property of each -target. There can be target that are compiled with the native compiler -and some which are compiled with the cross compiler. - -Unfortunately this change is backwards incompatible and may cause some -projects to fail building. However this should be very rare in practice. diff --git a/docs/markdown/snippets/native_files.md b/docs/markdown/snippets/native_files.md new file mode 100644 index 0000000..7bc3644 --- /dev/null +++ b/docs/markdown/snippets/native_files.md @@ -0,0 +1,15 @@ +## Native config files + +Native files are the counterpart to cross files, and allow specifying +information about the build machine, both when cross compiling and when not. + +Currently the native files only allow specifying the names of binaries, similar +to the cross file, for example: + +```ini +[binaries] +llvm-config = "/opt/llvm-custom/bin/llvm-config" +``` + +Will override the llvm-config used for *native* binaries. Targets for the host +machine will continue to use the cross file. diff --git a/docs/markdown/snippets/new_syntax.md b/docs/markdown/snippets/new_syntax.md new file mode 100644 index 0000000..98eccd0 --- /dev/null +++ b/docs/markdown/snippets/new_syntax.md @@ -0,0 +1,42 @@ +## Foreach `break` and `continue` + +`break` and `continue` keywords can be used inside foreach loops. 
+ +```meson +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach +# result is ['a', 'b'] +``` + +You can check if an array contains an element like this: +```meson +my_array = [1, 2] +if 1 in my_array +# This condition is true +endif +if 1 not in my_array +# This condition is false +endif +``` + +You can check if a dictionary contains a key like this: +```meson +my_dict = {'foo': 42, 'foo': 43} +if 'foo' in my_dict +# This condition is true +endif +if 42 in my_dict +# This condition is false +endif +if 'foo' not in my_dict +# This condition is false +endif +``` diff --git a/docs/markdown/snippets/overrideexe.md b/docs/markdown/snippets/overrideexe.md deleted file mode 100644 index 59213c5..0000000 --- a/docs/markdown/snippets/overrideexe.md +++ /dev/null @@ -1,8 +0,0 @@ -## More flexible `override_find_program()`. - -It is now possible to pass an `executable` to -`override_find_program()` if the overridden program is not used during -configure. - -This is particularly useful for fallback dependencies like Protobuf -that also provide a tool like protoc. diff --git a/docs/markdown/snippets/pathdivision.md b/docs/markdown/snippets/pathdivision.md new file mode 100644 index 0000000..6da6005 --- /dev/null +++ b/docs/markdown/snippets/pathdivision.md @@ -0,0 +1,15 @@ +## Joining paths with / + +Joining two paths has traditionally been done with the `join_paths` function. + +```meson +joined = join_paths('foo', 'bar') +``` + +Now you can use the simpler notation using the `/` operator. + +```meson +joined = 'foo' / 'bar' +``` + +This only works for strings. 
diff --git a/docs/markdown/snippets/pie.md b/docs/markdown/snippets/pie.md new file mode 100644 index 0000000..a9be174 --- /dev/null +++ b/docs/markdown/snippets/pie.md @@ -0,0 +1,6 @@ +## Position-independent executables + +When `b_pie` option, or `executable()`'s `pie` keyword argument is set to +`true`, position-independent executables are built. All their objects are built +with `-fPIE` and the executable is linked with `-pie`. Any static library they +link must be built with `pic` set to `true` (see `b_staticpic` option). diff --git a/docs/markdown/snippets/shared_library_darwin_versions.md b/docs/markdown/snippets/shared_library_darwin_versions.md deleted file mode 100644 index ad137f3..0000000 --- a/docs/markdown/snippets/shared_library_darwin_versions.md +++ /dev/null @@ -1,9 +0,0 @@ -## `shared_library()` now supports setting dylib compatibility and current version - -Now, by default `shared_library()` sets `-compatibility_version` and -`-current_version` of a macOS dylib using the `soversion`. - -This can be overriden by using the `darwin_versions:` kwarg to -[`shared_library()`](Reference-manual.md#shared_library). As usual, you can -also pass this kwarg to `library()` or `build_target()` and it will be used in -the appropriate circumstances. diff --git a/docs/markdown/snippets/test_setup_is_default.md b/docs/markdown/snippets/test_setup_is_default.md new file mode 100644 index 0000000..2274dc9 --- /dev/null +++ b/docs/markdown/snippets/test_setup_is_default.md @@ -0,0 +1,14 @@ +## New keyword argument `is_default` to `add_test_setup()` + +The keyword argument `is_default` may be used to set whether the test +setup should be used by default whenever `meson test` is run without +the `--setup` option. + +```meson +add_test_setup('default', is_default: true, env: 'G_SLICE=debug-blocks') +add_test_setup('valgrind', env: 'G_SLICE=always-malloc', ...) 
+test('mytest', exe) +``` + +For the example above, running `meson test` and `meson test +--setup=default` is now equivalent. diff --git a/docs/markdown/snippets/version_comparison.md b/docs/markdown/snippets/version_comparison.md deleted file mode 100644 index 861a3ee..0000000 --- a/docs/markdown/snippets/version_comparison.md +++ /dev/null @@ -1,15 +0,0 @@ -## Version comparison - -`dependency(version:)` and other version constraints now handle versions -containing non-numeric characters better, comparing versions using the rpmvercmp -algorithm (as using the `pkg-config` autoconf macro `PKG_CHECK_MODULES` does). - -This is a breaking change for exact comparison constraints which rely on the -previous comparison behaviour of extending the compared versions with `'0'` -elements, up to the same length of `'.'`-separated elements. - -For example, a version of `'0.11.0'` would previously match a version constraint -of `'==0.11'`, but no longer does, being instead considered strictly greater. - -Instead, use a version constraint which exactly compares with the precise -version required, e.g. `'==0.11.0'`. diff --git a/docs/markdown/snippets/visibility.md b/docs/markdown/snippets/visibility.md deleted file mode 100644 index bbb99f1..0000000 --- a/docs/markdown/snippets/visibility.md +++ /dev/null @@ -1,13 +0,0 @@ -## Keyword argument for GNU symbol visibility - -Build targets got a new keyword, `gnu_symbol_visibility` that controls -how symbols are exported from shared libraries. This is most commonly -used to hide implementation symbols like this: - -```meson -shared_library('mylib', ... - gnu_symbol_visibility: 'hidden') -``` - -In this case only symbols explicitly marked as visible in the source -files get exported. 
diff --git a/docs/sitemap.txt b/docs/sitemap.txt index 4ba1b90..f79eb05 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -9,6 +9,7 @@ index.md Using-with-Visual-Studio.md Meson-sample.md Syntax.md + Native-environments.md Build-targets.md Include-directories.md Installing.md @@ -69,6 +70,7 @@ index.md Shipping-prebuilt-binaries-as-wraps.md fallback-wraptool.md Release-notes.md + Release-notes-for-0.49.0.md Release-notes-for-0.48.0.md Release-notes-for-0.47.0.md Release-notes-for-0.46.0.md diff --git a/man/meson.1 b/man/meson.1 index 747def8..a171b0b 100644 --- a/man/meson.1 +++ b/man/meson.1 @@ -1,4 +1,4 @@ -.TH MESON "1" "July 2018" "meson 0.47.0" "User Commands" +.TH MESON "1" "September 2018" "meson 0.48.0" "User Commands" .SH NAME meson - a high productivity build system .SH DESCRIPTION diff --git a/manual tests/4 standalone binaries/myapp.iss b/manual tests/4 standalone binaries/myapp.iss index dda1537..2bd441d 100644 --- a/manual tests/4 standalone binaries/myapp.iss +++ b/manual tests/4 standalone binaries/myapp.iss @@ -1,18 +1,18 @@ -; Innosetup file for My app.
-
-[Setup]
-AppName=My App
-AppVersion=1.0
-DefaultDirName={pf}\My App
-DefaultGroupName=My App
-UninstallDisplayIcon={app}\myapp.exe
-Compression=lzma2
-SolidCompression=yes
-OutputDir=.
-
-[Files]
-Source: "myapp.exe"; DestDir: "{app}"
-Source: "SDL2.dll"; DestDir: "{app}"
-
-;[Icons]
-;Name: "{group}\My App"; Filename: "{app}\myapp.exe"
+; Innosetup file for My app. + +[Setup] +AppName=My App +AppVersion=1.0 +DefaultDirName={pf}\My App +DefaultGroupName=My App +UninstallDisplayIcon={app}\myapp.exe +Compression=lzma2 +SolidCompression=yes +OutputDir=. + +[Files] +Source: "myapp.exe"; DestDir: "{app}" +Source: "SDL2.dll"; DestDir: "{app}" + +;[Icons] +;Name: "{group}\My App"; Filename: "{app}\myapp.exe" diff --git a/mesonbuild/astinterpreter.py b/mesonbuild/astinterpreter.py index 32d0845..a447a55 100644 --- a/mesonbuild/astinterpreter.py +++ b/mesonbuild/astinterpreter.py @@ -18,7 +18,7 @@ from . import interpreterbase, mlog, mparser, mesonlib from . import environment -from .interpreterbase import InterpreterException, InvalidArguments +from .interpreterbase import InterpreterException, InvalidArguments, BreakRequest, ContinueRequest import os, sys @@ -46,7 +46,6 @@ REMOVE_SOURCE = 1 class AstInterpreter(interpreterbase.InterpreterBase): def __init__(self, source_root, subdir): super().__init__(source_root, subdir) - self.asts = {} self.funcs.update({'project': self.func_do_nothing, 'test': self.func_do_nothing, 'benchmark': self.func_do_nothing, @@ -76,7 +75,72 @@ class AstInterpreter(interpreterbase.InterpreterBase): 'vcs_tag': self.func_do_nothing, 'add_languages': self.func_do_nothing, 'declare_dependency': self.func_do_nothing, - 'files': self.func_files, + 'files': self.func_do_nothing, + 'executable': self.func_do_nothing, + 'static_library': self.func_do_nothing, + 'shared_library': self.func_do_nothing, + 'library': self.func_do_nothing, + 'build_target': self.func_do_nothing, + 'custom_target': self.func_do_nothing, + 'run_target': self.func_do_nothing, + 'subdir': self.func_do_nothing, + 'set_variable': self.func_do_nothing, + 'get_variable': self.func_do_nothing, + 'is_variable': self.func_do_nothing, + }) + + def func_do_nothing(self, node, args, kwargs): + return True + + def method_call(self, node): + return True + + def evaluate_arithmeticstatement(self, cur): + return 0 + + def 
evaluate_plusassign(self, node): + return 0 + + def evaluate_indexing(self, node): + return 0 + + def unknown_function_called(self, func_name): + pass + + def reduce_arguments(self, args): + assert(isinstance(args, mparser.ArgumentNode)) + if args.incorrect_order(): + raise InvalidArguments('All keyword arguments must be after positional arguments.') + return args.arguments, args.kwargs + + def evaluate_comparison(self, node): + return False + + def evaluate_foreach(self, node): + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + pass + except BreakRequest: + pass + + def evaluate_if(self, node): + for i in node.ifs: + self.evaluate_codeblock(i.block) + if not isinstance(node.elseblock, mparser.EmptyNode): + self.evaluate_codeblock(node.elseblock) + + def get_variable(self, varname): + return 0 + + def assignment(self, node): + pass + +class RewriterInterpreter(AstInterpreter): + def __init__(self, source_root, subdir): + super().__init__(source_root, subdir) + self.asts = {} + self.funcs.update({'files': self.func_files, 'executable': self.func_executable, 'static_library': self.func_static_lib, 'shared_library': self.func_shared_lib, @@ -90,12 +154,6 @@ class AstInterpreter(interpreterbase.InterpreterBase): 'is_variable': self.func_is_variable, }) - def func_do_nothing(self, node, args, kwargs): - return True - - def method_call(self, node): - return True - def func_executable(self, node, args, kwargs): if args[0] == self.targetname: if self.operation == ADD_SOURCE: @@ -147,21 +205,6 @@ class AstInterpreter(interpreterbase.InterpreterBase): return [args] return args - def evaluate_arithmeticstatement(self, cur): - return 0 - - def evaluate_plusassign(self, node): - return 0 - - def evaluate_indexing(self, node): - return 0 - - def reduce_arguments(self, args): - assert(isinstance(args, mparser.ArgumentNode)) - if args.incorrect_order(): - raise InvalidArguments('All keyword arguments must be after positional arguments.') - return 
args.arguments, args.kwargs - def transform(self): self.load_root_meson_file() self.asts[''] = self.ast @@ -181,9 +224,6 @@ class AstInterpreter(interpreterbase.InterpreterBase): self.filename = filename self.transform() - def unknown_function_called(self, func_name): - mlog.warning('Unknown function called: ' + func_name) - def add_source_to_target(self, node, args, kwargs): namespan = node.args.arguments[0].bytespan buildfilename = os.path.join(self.source_root, self.subdir, environment.build_filename) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 0e7e8e0..4040251 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -13,6 +13,7 @@ # limitations under the License. import os, pickle, re +import textwrap from .. import build from .. import dependencies from .. import mesonlib @@ -22,15 +23,11 @@ import subprocess from ..mesonlib import MesonException, OrderedSet from ..mesonlib import classify_unity_sources from ..mesonlib import File -from ..compilers import CompilerArgs, get_macos_dylib_install_name +from ..compilers import CompilerArgs, VisualStudioCCompiler from collections import OrderedDict import shlex from functools import lru_cache -@lru_cache(maxsize=None) -def get_target_macos_dylib_install_name(ld): - return get_macos_dylib_install_name(ld.prefix, ld.name, ld.suffix, ld.soversion) - class CleanTrees: ''' @@ -134,8 +131,8 @@ class Backend: self.build = build self.environment = build.environment self.processed_targets = {} - self.build_to_src = os.path.relpath(self.environment.get_source_dir(), - self.environment.get_build_dir()) + self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), + self.environment.get_build_dir()) def get_target_filename(self, t): if isinstance(t, build.CustomTarget): @@ -214,6 +211,7 @@ class Backend: def get_target_private_dir_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) + 
@lru_cache(maxsize=None) def get_target_generated_dir(self, target, gensrc, src): """ Takes a BuildTarget, a generator source (CustomTarget or GeneratedList), @@ -361,6 +359,7 @@ class Backend: @staticmethod def _libdir_is_system(libdir, compilers, env): + libdir = os.path.normpath(libdir) for cc in compilers.values(): if libdir in cc.get_library_dirs(env): return True @@ -444,7 +443,13 @@ class Backend: sources.append(File(True, dirpart, fnamepart)) # Filter out headers and all non-source files - sources = [s for s in sources if self.environment.is_source(s) and not self.environment.is_header(s)] + filtered_sources = [] + for s in sources: + if self.environment.is_source(s) and not self.environment.is_header(s): + filtered_sources.append(s) + elif self.environment.is_object(s): + result.append(s.relative_name()) + sources = filtered_sources # extobj could contain only objects and no sources if not sources: @@ -486,7 +491,7 @@ class Backend: return args extra_args = [] # Compiler-specific escaping is needed for -D args but not for any others - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): # MSVC needs escaping when a -D argument ends in \ or \" for arg in args: if arg.startswith('-D') or arg.startswith('/D'): @@ -553,6 +558,8 @@ class Backend: # Set -fPIC for static libraries by default unless explicitly disabled if isinstance(target, build.StaticLibrary) and target.pic: commands += compiler.get_pic_args() + if isinstance(target, build.Executable) and target.pie: + commands += compiler.get_pie_args() # Add compile args needed to find external dependencies. Link args are # added while generating the link command. 
# NOTE: We must preserve the order in which external deps are @@ -600,6 +607,8 @@ class Backend: continue if compiler.get_language() == 'd': arg = '-Wl,' + arg + else: + arg = compiler.get_linker_lib_prefix() + arg args.append(arg) return args @@ -731,10 +740,11 @@ class Backend: def exe_object_to_cmd_array(self, exe): if self.environment.is_cross_build() and \ isinstance(exe, build.BuildTarget) and exe.is_cross: - if self.environment.exe_wrapper is None: - s = 'Can not use target %s as a generator because it is cross-built\n' - s += 'and no exe wrapper is defined. You might want to set it to native instead.' - s = s % exe.name + if self.environment.exe_wrapper is None and self.environment.cross_info.need_exe_wrapper(): + s = textwrap.dedent(''' + Can not use target {} as a generator because it is cross-built + and no exe wrapper is defined or needs_exe_wrapper is true. + You might want to set it to native instead.'''.format(exe.name)) raise MesonException(s) if isinstance(exe, build.BuildTarget): exe_arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(exe))] @@ -969,61 +979,11 @@ class Backend: with open(install_data_file, 'wb') as ofile: pickle.dump(d, ofile) - def get_target_install_dirs(self, t): - # Find the installation directory. 
- if isinstance(t, build.SharedModule): - default_install_dir = self.environment.get_shared_module_dir() - elif isinstance(t, build.SharedLibrary): - default_install_dir = self.environment.get_shared_lib_dir() - elif isinstance(t, build.StaticLibrary): - default_install_dir = self.environment.get_static_lib_dir() - elif isinstance(t, build.Executable): - default_install_dir = self.environment.get_bindir() - elif isinstance(t, build.CustomTarget): - default_install_dir = None - else: - assert(isinstance(t, build.BuildTarget)) - # XXX: Add BuildTarget-specific install dir cases here - default_install_dir = self.environment.get_libdir() - outdirs = t.get_custom_install_dir() - if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True: - # Either the value is set to a non-default value, or is set to - # False (which means we want this specific output out of many - # outputs to not be installed). - custom_install_dir = True - else: - custom_install_dir = False - outdirs[0] = default_install_dir - return outdirs, custom_install_dir - - def get_target_link_deps_mappings(self, t, prefix): - ''' - On macOS, we need to change the install names of all built libraries - that a target depends on using install_name_tool so that the target - continues to work after installation. For this, we need a dictionary - mapping of the install_name value to the new one, so we can change them - on install. 
- ''' - result = {} - if isinstance(t, build.StaticLibrary): - return result - for ld in t.get_all_link_deps(): - if ld is t or not isinstance(ld, build.SharedLibrary): - continue - old = get_target_macos_dylib_install_name(ld) - if old in result: - continue - fname = ld.get_filename() - outdirs, _ = self.get_target_install_dirs(ld) - new = os.path.join(prefix, outdirs[0], fname) - result.update({old: new}) - return result - def generate_target_install(self, d): for t in self.build.get_targets().values(): if not t.should_install(): continue - outdirs, custom_install_dir = self.get_target_install_dirs(t) + outdirs, custom_install_dir = t.get_install_dir(self.environment) # Sanity-check the outputs and install_dirs num_outdirs, num_out = len(outdirs), len(t.get_outputs()) if num_outdirs != 1 and num_outdirs != num_out: @@ -1038,7 +998,7 @@ class Backend: # Install primary build output (library/executable/jar, etc) # Done separately because of strip/aliases/rpath if outdirs[0] is not False: - mappings = self.get_target_link_deps_mappings(t, d.prefix) + mappings = t.get_link_deps_mapping(d.prefix, self.environment) i = TargetInstallData(self.get_target_filename(t), outdirs[0], t.get_aliases(), should_strip, mappings, t.install_rpath, install_mode) @@ -1133,7 +1093,7 @@ class Backend: if subdir is None: subdir = os.path.join(manroot, 'man' + num) srcabs = f.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) - dstabs = os.path.join(subdir, os.path.basename(f.fname) + '.gz') + dstabs = os.path.join(subdir, os.path.basename(f.fname)) i = [srcabs, dstabs, m.get_custom_install_mode()] d.man.append(i) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 6b2a00a..5606c41 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -29,7 +29,7 @@ from .. import build from .. import mlog from .. import dependencies from .. 
import compilers -from ..compilers import CompilerArgs, CCompiler +from ..compilers import CompilerArgs, CCompiler, VisualStudioCCompiler from ..linkers import ArLinker from ..mesonlib import File, MesonException, OrderedSet from ..mesonlib import get_compiler_for_source, has_path_sep @@ -169,7 +169,7 @@ class NinjaBackend(backends.Backend): Detect the search prefix to use.''' for compiler in self.build.compilers.values(): # Have to detect the dependency format - if compiler.id == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): break else: # None of our compilers are MSVC, we're done. @@ -185,7 +185,8 @@ int dummy; # and locale dependent. Any attempt at converting it to # Python strings leads to failure. We _must_ do this detection # in raw byte mode and write the result in raw bytes. - pc = subprocess.Popen(['cl', '/showIncludes', '/c', 'incdetect.c'], + pc = subprocess.Popen([compiler.get_exelist(), + '/showIncludes', '/c', 'incdetect.c'], cwd=self.environment.get_scratch_dir(), stdout=subprocess.PIPE, stderr=subprocess.PIPE) (stdo, _) = pc.communicate() @@ -195,7 +196,7 @@ int dummy; # different locales have different messages with a different # number of colons. Match up to the the drive name 'd:\'. 
matchre = re.compile(rb"^(.*\s)[a-zA-Z]:\\.*stdio.h$") - for line in stdo.split(b'\r\n'): + for line in re.split(rb'\r?\n', stdo): match = matchre.match(line) if match: with open(tempfilename, 'ab') as binfile: @@ -767,6 +768,15 @@ int dummy; main_class = target.get_main_class() if main_class != '': e = 'e' + + # Add possible java generated files to src list + generated_sources = self.get_target_generated_sources(target) + for rel_src, gensrc in generated_sources.items(): + dirpart, fnamepart = os.path.split(rel_src) + raw_src = File(True, dirpart, fnamepart) + if rel_src.endswith('.java'): + src_list.append(raw_src) + for src in src_list: plain_class_path = self.generate_single_java_compile(src, target, compiler, outfile) class_list.append(plain_class_path) @@ -867,6 +877,10 @@ int dummy; def generate_single_java_compile(self, src, target, compiler, outfile): deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] + generated_sources = self.get_target_generated_sources(target) + for rel_src, gensrc in generated_sources.items(): + if rel_src.endswith('.java'): + deps.append(rel_src) args = [] args += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target)) args += self.build.get_global_args(compiler, target.is_cross) @@ -1527,7 +1541,7 @@ rule FORTRAN_DEP_HACK%s command_template = ' command = {executable} $ARGS {cross_args} {output_args} {compile_only_args} $in\n' command = command_template.format( executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]), - cross_args=' '.join(self.get_cross_info_lang_args(compiler.language, is_cross)), + cross_args=' '.join(compiler.get_cross_extra_flags(self.environment, False)) if is_cross else '', output_args=' '.join(compiler.get_output_args('$out')), compile_only_args=' '.join(compiler.get_compile_only_args()) ) @@ -1538,14 +1552,6 @@ rule FORTRAN_DEP_HACK%s outfile.write('\n') self.created_llvm_ir_rule = True - def get_cross_info_lang_args(self, lang, 
is_cross): - if is_cross: - try: - return self.environment.cross_info.config['properties'][lang + '_args'] - except KeyError: - pass - return [] - def generate_compile_rule_for(self, langname, compiler, is_cross, outfile): if langname == 'java': if not is_cross: @@ -1579,7 +1585,11 @@ rule FORTRAN_DEP_HACK%s if d != '$out' and d != '$in': d = quote_func(d) quoted_depargs.append(d) - cross_args = self.get_cross_info_lang_args(langname, is_cross) + + if is_cross: + cross_args = compiler.get_cross_extra_flags(self.environment, False) + else: + cross_args = '' if compiler.can_linker_accept_rsp(): command_template = ''' command = {executable} @$out.rsp rspfile = $out.rsp @@ -1595,7 +1605,7 @@ rule FORTRAN_DEP_HACK%s compile_only_args=' '.join(compiler.get_compile_only_args()) ) description = ' description = Compiling %s object $out.\n' % compiler.get_display_language() - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): deps = ' deps = msvc\n' else: deps = ' deps = gcc\n' @@ -1618,7 +1628,7 @@ rule FORTRAN_DEP_HACK%s cross_args = [] if is_cross: try: - cross_args = self.environment.cross_info.config['properties'][langname + '_args'] + cross_args = compiler.get_cross_extra_flags(self.environment, False) except KeyError: pass @@ -1627,7 +1637,7 @@ rule FORTRAN_DEP_HACK%s if d != '$out' and d != '$in': d = quote_func(d) quoted_depargs.append(d) - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): output = '' else: output = ' '.join(compiler.get_output_args('$out')) @@ -1639,7 +1649,7 @@ rule FORTRAN_DEP_HACK%s compile_only_args=' '.join(compiler.get_compile_only_args()) ) description = ' description = Precompiling header %s.\n' % '$in' - if compiler.get_id() == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): deps = ' deps = msvc\n' else: deps = ' deps = gcc\n' @@ -1830,7 +1840,7 @@ rule FORTRAN_DEP_HACK%s return compiler.get_no_stdinc_args() def get_compile_debugfile_args(self, compiler, target, 
objfile): - if compiler.id != 'msvc': + if not isinstance(compiler, VisualStudioCCompiler): return [] # The way MSVC uses PDB files is documented exactly nowhere so # the following is what we have been able to decipher via @@ -1944,6 +1954,8 @@ rule FORTRAN_DEP_HACK%s if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): continue idir = self.get_target_dir(i) + if not idir: + idir = '.' if idir not in custom_target_include_dirs: custom_target_include_dirs.append(idir) incs = [] @@ -2192,7 +2204,7 @@ rule FORTRAN_DEP_HACK%s ''.format(target.get_basename()) raise InvalidArguments(msg) compiler = target.compilers[lang] - if compiler.id == 'msvc': + if isinstance(compiler, VisualStudioCCompiler): src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1]) (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch) extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) @@ -2243,6 +2255,8 @@ rule FORTRAN_DEP_HACK%s # If implib, and that's significant on this platform (i.e. Windows using either GCC or Visual Studio) if target.import_filename: commands += linker.gen_import_library_args(os.path.join(self.get_target_dir(target), target.import_filename)) + if target.pie: + commands += linker.get_pie_link_args() elif isinstance(target, build.SharedLibrary): if isinstance(target, build.SharedModule): options = self.environment.coredata.base_options @@ -2382,7 +2396,8 @@ rule FORTRAN_DEP_HACK%s # Add buildtype linker args: optimization level, etc. commands += linker.get_buildtype_linker_args(self.get_option_for_target('buildtype', target)) # Add /DEBUG and the pdb filename when using MSVC - commands += self.get_link_debugfile_args(linker, target, outname) + if self.get_option_for_target('debug', target): + commands += self.get_link_debugfile_args(linker, target, outname) # Add link args specific to this BuildTarget type, such as soname args, # PIC, import library generation, etc. 
commands += self.get_target_type_link_args(target, linker) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 2e86ca9..ea52f12 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -26,7 +26,7 @@ from .. import mlog from .. import compilers from ..compilers import CompilerArgs from ..mesonlib import MesonException, File, python_command -from ..environment import Environment +from ..environment import Environment, build_filename def autodetect_vs_version(build): vs_version = os.getenv('VisualStudioVersion', None) @@ -417,7 +417,7 @@ class Vs2010Backend(backends.Backend): pref = ET.SubElement(ig, 'ProjectReference', Include=include) ET.SubElement(pref, 'Project').text = '{%s}' % projid - def create_basic_crap(self, target): + def create_basic_crap(self, target, guid): project_name = target.name root = ET.Element('Project', {'DefaultTargets': "Build", 'ToolsVersion': '4.0', @@ -431,7 +431,7 @@ class Vs2010Backend(backends.Backend): pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = '{%s}' % self.environment.coredata.test_guid + guidelem.text = '{%s}' % guid kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') @@ -460,7 +460,7 @@ class Vs2010Backend(backends.Backend): return root def gen_run_target_vcxproj(self, target, ofname, guid): - root = self.create_basic_crap(target) + root = self.create_basic_crap(target, guid) action = ET.SubElement(root, 'ItemDefinitionGroup') customstep = ET.SubElement(action, 'PostBuildEvent') cmd_raw = [target.command] + target.args @@ -486,7 +486,7 @@ class Vs2010Backend(backends.Backend): self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) def gen_custom_target_vcxproj(self, target, ofname, guid): - root = self.create_basic_crap(target) + root = 
self.create_basic_crap(target, guid) action = ET.SubElement(root, 'ItemDefinitionGroup') customstep = ET.SubElement(action, 'CustomBuildStep') # We need to always use absolute paths because our invocation is always @@ -730,8 +730,9 @@ class Vs2010Backend(backends.Backend): ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'OnlyExplicitInline' elif '/Ob2' in o_flags: ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'AnySuitable' - # Size-preserving flags - if '/Os' in o_flags: + # In modern MSVC parlance "/O1" means size optimization. + # "/Os" has been deprecated. + if '/O1' in o_flags: ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Size' else: ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Speed' @@ -945,7 +946,6 @@ class Vs2010Backend(backends.Backend): ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs) target_defines.append('%(PreprocessorDefinitions)') ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines) - ET.SubElement(clconf, 'MinimalRebuild').text = 'true' ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true' pch_node = ET.SubElement(clconf, 'PrecompiledHeader') # Warning level @@ -1098,6 +1098,9 @@ class Vs2010Backend(backends.Backend): else: raise MesonException('Unsupported Visual Studio target machine: ' + targetmachine) + meson_file_group = ET.SubElement(root, 'ItemGroup') + ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename)) + extra_files = target.extra_files if len(headers) + len(gen_hdrs) + len(extra_files) > 0: inc_hdrs = ET.SubElement(root, 'ItemGroup') @@ -1173,7 +1176,7 @@ class Vs2010Backend(backends.Backend): pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = '{%s}' % self.environment.coredata.test_guid + guidelem.text = '{%s}' % self.environment.coredata.regen_guid kw = 
ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index 9a9f88b..a550d91 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -16,7 +16,8 @@ from . import backends from .. import build from .. import dependencies from .. import mesonlib -import uuid, os +from .. import mlog +import uuid, os, operator from ..mesonlib import MesonException @@ -24,9 +25,9 @@ class XCodeBackend(backends.Backend): def __init__(self, build): super().__init__(build) self.name = 'xcode' - self.project_uid = self.environment.coredata.guid.replace('-', '')[:24] + self.project_uid = self.environment.coredata.lang_guids['default'].replace('-', '')[:24] self.project_conflist = self.gen_id() - self.indent = ' ' + self.indent = '\t' # Recent versions of Xcode uses tabs self.indent_level = 0 self.xcodetypemap = {'c': 'sourcecode.c.c', 'a': 'archive.ar', @@ -43,6 +44,8 @@ class XCodeBackend(backends.Backend): 'inc': 'sourcecode.c.h', 'dylib': 'compiled.mach-o.dylib', 'o': 'compiled.mach-o.objfile', + 's': 'sourcecode.asm', + 'asm': 'sourcecode.asm', } self.maingroup_id = self.gen_id() self.all_id = self.gen_id() @@ -60,6 +63,12 @@ class XCodeBackend(backends.Backend): os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True) return dirname + def target_to_build_root(self, target): + if self.get_target_dir(target) == '': + return '' + directories = os.path.normpath(self.get_target_dir(target)).split(os.sep) + return os.sep.join(['..'] * len(directories)) + def write_line(self, text): self.ofile.write(self.indent * self.indent_level + text) if not text.endswith('\n'): @@ -105,7 +114,11 @@ class XCodeBackend(backends.Backend): self.generate_suffix() def get_xcodetype(self, fname): - return self.xcodetypemap[fname.split('.')[-1]] + xcodetype = 
self.xcodetypemap.get(fname.split('.')[-1].lower()) + if not xcodetype: + xcodetype = 'sourcecode.unknown' + mlog.warning('Unknown file type "%s" fallbacking to "%s". Xcode project might be malformed.' % (fname, xcodetype)) + return xcodetype def generate_filemap(self): self.filemap = {} # Key is source file relative to src root. @@ -202,38 +215,38 @@ class XCodeBackend(backends.Backend): self.source_phase[t] = self.gen_id() def generate_pbx_aggregate_target(self): + target_dependencies = list(map(lambda t: self.pbx_dep_map[t], self.build.targets)) + aggregated_targets = [] + aggregated_targets.append((self.all_id, 'ALL_BUILD', self.all_buildconf_id, [], target_dependencies)) + aggregated_targets.append((self.test_id, 'RUN_TESTS', self.test_buildconf_id, [self.test_command_id], [])) + # Sort objects by ID before writing + sorted_aggregated_targets = sorted(aggregated_targets, key=operator.itemgetter(0)) self.ofile.write('\n/* Begin PBXAggregateTarget section */\n') - self.write_line('%s /* ALL_BUILD */ = {' % self.all_id) - self.indent_level += 1 - self.write_line('isa = PBXAggregateTarget;') - self.write_line('buildConfigurationList = %s;' % self.all_buildconf_id) - self.write_line('buildPhases = (') - self.write_line(');') - self.write_line('dependencies = (') - self.indent_level += 1 - for t in self.build.targets: - self.write_line('%s /* PBXTargetDependency */,' % self.pbx_dep_map[t]) - self.indent_level -= 1 - self.write_line(');') - self.write_line('name = ALL_BUILD;') - self.write_line('productName = ALL_BUILD;') - self.indent_level -= 1 - self.write_line('};') - self.write_line('%s /* RUN_TESTS */ = {' % self.test_id) - self.indent_level += 1 - self.write_line('isa = PBXAggregateTarget;') - self.write_line('buildConfigurationList = %s;' % self.test_buildconf_id) - self.write_line('buildPhases = (') - self.indent_level += 1 - self.write_line('%s /* test run command */,' % self.test_command_id) - self.indent_level -= 1 - self.write_line(');') - 
self.write_line('dependencies = (') - self.write_line(');') - self.write_line('name = RUN_TESTS;') - self.write_line('productName = RUN_TESTS;') - self.indent_level -= 1 - self.write_line('};') + for t in sorted_aggregated_targets: + name = t[1] + buildconf_id = t[2] + build_phases = t[3] + dependencies = t[4] + self.write_line('%s /* %s */ = {' % (t[0], name)) + self.indent_level += 1 + self.write_line('isa = PBXAggregateTarget;') + self.write_line('buildConfigurationList = %s /* Build configuration list for PBXAggregateTarget "%s" */;' % (buildconf_id, name)) + self.write_line('buildPhases = (') + self.indent_level += 1 + for bp in build_phases: + self.write_line('%s /* ShellScript */,' % bp) + self.indent_level -= 1 + self.write_line(');') + self.write_line('dependencies = (') + self.indent_level += 1 + for td in dependencies: + self.write_line('%s /* PBXTargetDependency */,' % td) + self.indent_level -= 1 + self.write_line(');') + self.write_line('name = %s;' % name) + self.write_line('productName = %s;' % name) + self.indent_level -= 1 + self.write_line('};') self.ofile.write('/* End PBXAggregateTarget section */\n') def generate_pbx_build_file(self): @@ -246,7 +259,7 @@ class XCodeBackend(backends.Backend): for dep in t.get_external_deps(): if isinstance(dep, dependencies.AppleFrameworks): for f in dep.frameworks: - self.ofile.write('%s /* %s.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = %s /* %s.framework */; };\n' % (self.native_frameworks[f], f, self.native_frameworks_fileref[f], f)) + self.write_line('%s /* %s.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = %s /* %s.framework */; };\n' % (self.native_frameworks[f], f, self.native_frameworks_fileref[f], f)) for s in t.sources: if isinstance(s, mesonlib.File): @@ -259,17 +272,18 @@ class XCodeBackend(backends.Backend): fileref = self.filemap[s] fullpath2 = fullpath compiler_args = '' - self.ofile.write(templ % (idval, fullpath, fileref, fullpath2, compiler_args)) + 
self.write_line(templ % (idval, fullpath, fileref, fullpath2, compiler_args)) for o in t.objects: o = os.path.join(t.subdir, o) idval = self.buildmap[o] fileref = self.filemap[o] fullpath = os.path.join(self.environment.get_source_dir(), o) fullpath2 = fullpath - self.ofile.write(otempl % (idval, fullpath, fileref, fullpath2)) + self.write_line(otempl % (idval, fullpath, fileref, fullpath2)) self.ofile.write('/* End PBXBuildFile section */\n') def generate_pbx_build_style(self): + # FIXME: Xcode 9 and later does not uses PBXBuildStyle and it gets removed. Maybe we can remove this part. self.ofile.write('\n/* Begin PBXBuildStyle section */\n') for name, idval in self.buildstylemap.items(): self.write_line('%s /* %s */ = {\n' % (idval, name)) @@ -301,19 +315,18 @@ class XCodeBackend(backends.Backend): def generate_pbx_file_reference(self): self.ofile.write('\n/* Begin PBXFileReference section */\n') - for t in self.build.targets.values(): for dep in t.get_external_deps(): if isinstance(dep, dependencies.AppleFrameworks): for f in dep.frameworks: - self.ofile.write('%s /* %s.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = %s.framework; path = System/Library/Frameworks/%s.framework; sourceTree = SDKROOT; };\n' % (self.native_frameworks_fileref[f], f, f, f)) + self.write_line('%s /* %s.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = %s.framework; path = System/Library/Frameworks/%s.framework; sourceTree = SDKROOT; };\n' % (self.native_frameworks_fileref[f], f, f, f)) src_templ = '%s /* %s */ = { isa = PBXFileReference; explicitFileType = "%s"; fileEncoding = 4; name = "%s"; path = "%s"; sourceTree = SOURCE_ROOT; };\n' for fname, idval in self.filemap.items(): fullpath = os.path.join(self.environment.get_source_dir(), fname) xcodetype = self.get_xcodetype(fname) name = os.path.basename(fname) path = fname - self.ofile.write(src_templ % (idval, fullpath, xcodetype, name, path)) + 
self.write_line(src_templ % (idval, fullpath, xcodetype, name, path)) target_templ = '%s /* %s */ = { isa = PBXFileReference; explicitFileType = "%s"; path = %s; refType = %d; sourceTree = BUILT_PRODUCTS_DIR; };\n' for tname, idval in self.target_filemap.items(): t = self.build.targets[tname] @@ -328,13 +341,12 @@ class XCodeBackend(backends.Backend): else: typestr = self.get_xcodetype(fname) path = '"%s"' % t.get_filename() - self.ofile.write(target_templ % (idval, tname, typestr, path, reftype)) + self.write_line(target_templ % (idval, tname, typestr, path, reftype)) self.ofile.write('/* End PBXFileReference section */\n') def generate_pbx_frameworks_buildphase(self): for tname, t in self.build.targets.items(): self.ofile.write('\n/* Begin PBXFrameworksBuildPhase section */\n') - self.indent_level += 1 self.write_line('%s /* %s */ = {\n' % (t.buildphasemap['Frameworks'], 'Frameworks')) self.indent_level += 1 self.write_line('isa = PBXFrameworksBuildPhase;\n') @@ -389,7 +401,7 @@ class XCodeBackend(backends.Backend): self.indent_level -= 1 self.write_line(');') self.write_line('name = Sources;') - self.write_line('sourcetree = "<group>";') + self.write_line('sourceTree = "<group>";') self.indent_level -= 1 self.write_line('};') @@ -524,7 +536,7 @@ class XCodeBackend(backends.Backend): self.write_line('BuildIndependentTargetsInParallel = YES;') self.indent_level -= 1 self.write_line('};') - conftempl = 'buildConfigurationList = %s /* build configuration list for PBXProject "%s"*/;' + conftempl = 'buildConfigurationList = %s /* Build configuration list for PBXProject "%s" */;' self.write_line(conftempl % (self.project_conflist, self.build.project_name)) self.write_line('buildSettings = {') self.write_line('};') @@ -553,7 +565,7 @@ class XCodeBackend(backends.Backend): def generate_pbx_shell_build_phase(self, test_data): self.ofile.write('\n/* Begin PBXShellScriptBuildPhase section */\n') - self.write_line('%s = {' % self.test_command_id) + self.write_line('%s /* 
ShellScript */ = {' % self.test_command_id) self.indent_level += 1 self.write_line('isa = PBXShellScriptBuildPhase;') self.write_line('buildActionMask = 2147483647;') @@ -595,14 +607,20 @@ class XCodeBackend(backends.Backend): self.ofile.write('/* End PBXSourcesBuildPhase section */\n') def generate_pbx_target_dependency(self): - self.ofile.write('\n/* Begin PBXTargetDependency section */\n') + targets = [] for t in self.build.targets: idval = self.pbx_dep_map[t] # VERIFY: is this correct? - self.write_line('%s /* PBXTargetDependency */ = {' % idval) + targets.append((idval, self.native_targets[t], t, self.containerproxy_map[t])) + + # Sort object by ID + sorted_targets = sorted(targets, key=operator.itemgetter(0)) + self.ofile.write('\n/* Begin PBXTargetDependency section */\n') + for t in sorted_targets: + self.write_line('%s /* PBXTargetDependency */ = {' % t[0]) self.indent_level += 1 self.write_line('isa = PBXTargetDependency;') - self.write_line('target = %s /* %s */;' % (self.native_targets[t], t)) - self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % self.containerproxy_map[t]) + self.write_line('target = %s /* %s */;' % (t[1], t[2])) + self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % t[3]) self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXTargetDependency section */\n') @@ -637,7 +655,7 @@ class XCodeBackend(backends.Backend): self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = NO;') self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;') self.write_line('GCC_OPTIMIZATION_LEVEL = 0;') - self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");') + self.write_line('GCC_PREPROCESSOR_DEFINITIONS = "";') self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;') self.write_line('INSTALL_PATH = "";') self.write_line('OTHER_CFLAGS = " ";') @@ -647,7 +665,7 @@ class XCodeBackend(backends.Backend): self.write_line('SECTORDER_FLAGS = "";') self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir()) 
self.write_line('USE_HEADERMAP = NO;') - self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') + self.write_build_setting_line('WARNING_CFLAGS', ['-Wmost', '-Wno-four-char-constants', '-Wno-unknown-pragmas']) self.indent_level -= 1 self.write_line('};') self.write_line('name = "%s";' % buildtype) @@ -665,7 +683,7 @@ class XCodeBackend(backends.Backend): self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = NO;') self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;') self.write_line('GCC_OPTIMIZATION_LEVEL = 0;') - self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");') + self.write_line('GCC_PREPROCESSOR_DEFINITIONS = "";') self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;') self.write_line('INSTALL_PATH = "";') self.write_line('OTHER_CFLAGS = " ";') @@ -675,7 +693,7 @@ class XCodeBackend(backends.Backend): self.write_line('SECTORDER_FLAGS = "";') self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir()) self.write_line('USE_HEADERMAP = NO;') - self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') + self.write_build_setting_line('WARNING_CFLAGS', ['-Wmost', '-Wno-four-char-constants', '-Wno-unknown-pragmas']) self.indent_level -= 1 self.write_line('};') self.write_line('name = "%s";' % buildtype) @@ -721,9 +739,13 @@ class XCodeBackend(backends.Backend): for lang in self.environment.coredata.compilers: if lang not in langnamemap: continue + # Add compile args added using add_project_arguments() + pargs = self.build.projects_args.get(target.subproject, {}).get(lang, []) + # Add compile args added using add_global_arguments() + # These override per-project arguments gargs = self.build.global_args.get(lang, []) targs = target.get_extra_args(lang) - args = gargs + targs + args = pargs + gargs + targs if len(args) > 0: langargs[langnamemap[lang]] = args symroot = os.path.join(self.environment.get_build_dir(), target.subdir) @@ -744,7 +766,20 @@ class 
XCodeBackend(backends.Backend): self.write_line('GCC_GENERATE_DEBUGGING_SYMBOLS = YES;') self.write_line('GCC_INLINES_ARE_PRIVATE_EXTERN = NO;') self.write_line('GCC_OPTIMIZATION_LEVEL = 0;') - self.write_line('GCC_PREPROCESSOR_DEFINITIONS = ("");') + if target.has_pch: + # Xcode uses GCC_PREFIX_HEADER which only allows one file per target/executable. Precompiling various header files and + # applying a particular pch to each source file will require custom scripts (as a build phase) and build flags per each + # file. Since Xcode itself already discourages precompiled headers in favor of modules we don't try much harder here. + pchs = target.get_pch('c') + target.get_pch('cpp') + target.get_pch('objc') + target.get_pch('objcpp') + # Make sure to use headers (other backends require implementation files like *.c *.cpp, etc; these should not be used here) + pchs = [pch for pch in pchs if pch.endswith('.h') or pch.endswith('.hh') or pch.endswith('hpp')] + if pchs: + if len(pchs) > 1: + mlog.warning('Unsupported Xcode configuration: More than 1 precompiled header found "%s". Target "%s" might not compile correctly.' 
% (str(pchs), target.name)) + relative_pch_path = os.path.join(target.get_subdir(), pchs[0]) # Path relative to target so it can be used with "$(PROJECT_DIR)" + self.write_line('GCC_PRECOMPILE_PREFIX_HEADER = YES;') + self.write_line('GCC_PREFIX_HEADER = "$(PROJECT_DIR)/%s";' % relative_pch_path) + self.write_line('GCC_PREPROCESSOR_DEFINITIONS = "";') self.write_line('GCC_SYMBOLS_PRIVATE_EXTERN = NO;') if len(headerdirs) > 0: quotedh = ','.join(['"\\"%s\\""' % i for i in headerdirs]) @@ -754,23 +789,24 @@ class XCodeBackend(backends.Backend): if isinstance(target, build.SharedLibrary): self.write_line('LIBRARY_STYLE = DYNAMIC;') for langname, args in langargs.items(): - argstr = ' '.join(args) - self.write_line('OTHER_%sFLAGS = "%s";' % (langname, argstr)) + self.write_build_setting_line('OTHER_%sFLAGS' % langname, args) self.write_line('OTHER_LDFLAGS = "%s";' % ldstr) self.write_line('OTHER_REZFLAGS = "";') self.write_line('PRODUCT_NAME = %s;' % product_name) self.write_line('SECTORDER_FLAGS = "";') self.write_line('SYMROOT = "%s";' % symroot) + self.write_build_setting_line('SYSTEM_HEADER_SEARCH_PATHS', [self.environment.get_build_dir()]) self.write_line('USE_HEADERMAP = NO;') - self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') + self.write_build_setting_line('WARNING_CFLAGS', ['-Wmost', '-Wno-four-char-constants', '-Wno-unknown-pragmas']) self.indent_level -= 1 self.write_line('};') - self.write_line('name = "%s";' % buildtype) + self.write_line('name = %s;' % buildtype) self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End XCBuildConfiguration section */\n') def generate_xc_configurationList(self): + # FIXME: sort items self.ofile.write('\n/* Begin XCConfigurationList section */\n') self.write_line('%s /* Build configuration list for PBXProject "%s" */ = {' % (self.project_conflist, self.build.project_name)) self.indent_level += 1 @@ -829,11 +865,35 @@ class XCodeBackend(backends.Backend): 
self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') - self.write_line('defaultConfigurationName = "%s";' % typestr) + self.write_line('defaultConfigurationName = %s;' % typestr) self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End XCConfigurationList section */\n') + def write_build_setting_line(self, flag_name, flag_values, explicit=False): + if flag_values: + if len(flag_values) == 1: + value = flag_values[0] + if (' ' in value): + # If path contains spaces surround it with double colon + self.write_line('%s = "\\"%s\\"";' % (flag_name, value)) + else: + self.write_line('%s = "%s";' % (flag_name, value)) + else: + self.write_line('%s = (' % flag_name) + self.indent_level += 1 + for value in flag_values: + if (' ' in value): + # If path contains spaces surround it with double colon + self.write_line('"\\"%s\\"",' % value) + else: + self.write_line('"%s",' % value) + self.indent_level -= 1 + self.write_line(');') + else: + if explicit: + self.write_line('%s = "";' % flag_name) + def generate_prefix(self): self.ofile.write('// !$*UTF8*$!\n{\n') self.indent_level += 1 @@ -847,6 +907,6 @@ class XCodeBackend(backends.Backend): def generate_suffix(self): self.indent_level -= 1 self.write_line('};\n') - self.write_line('rootObject = ' + self.project_uid + ';') + self.write_line('rootObject = ' + self.project_uid + ' /* Project object */;') self.indent_level -= 1 self.write_line('}\n') diff --git a/mesonbuild/build.py b/mesonbuild/build.py index ec6e1e6..e47bd6f 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -15,6 +15,7 @@ import copy, os, re from collections import OrderedDict import itertools, pathlib +import hashlib import pickle from functools import lru_cache @@ -25,7 +26,7 @@ from .mesonlib import File, MesonException, listify, extract_as_list, OrderedSet from .mesonlib import typeslistify, stringlistify, classify_unity_sources from .mesonlib import get_filenames_templates_dict, 
substitute_values from .mesonlib import for_windows, for_darwin, for_cygwin, for_android, has_path_sep -from .compilers import is_object, clink_langs, sort_clink, lang_suffixes +from .compilers import is_object, clink_langs, sort_clink, lang_suffixes, get_macos_dylib_install_name from .interpreterbase import FeatureNew pch_kwargs = set(['c_pch', 'cpp_pch']) @@ -37,6 +38,7 @@ lang_arg_kwargs = set([ 'd_import_dirs', 'd_unittest', 'd_module_versions', + 'd_debug', 'fortran_args', 'java_args', 'objc_args', @@ -83,12 +85,16 @@ known_build_target_kwargs = ( rust_kwargs | cs_kwargs) -known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic'} +known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'} known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'} known_shmod_kwargs = known_build_target_kwargs known_stlib_kwargs = known_build_target_kwargs | {'pic'} known_jar_kwargs = known_exe_kwargs | {'main_class'} +@lru_cache(maxsize=None) +def get_target_macos_dylib_install_name(ld): + return get_macos_dylib_install_name(ld.prefix, ld.name, ld.suffix, ld.soversion) + class InvalidArguments(MesonException): pass @@ -130,9 +136,23 @@ class Build: self.dep_manifest = {} self.cross_stdlibs = {} self.test_setups = {} + self.test_setup_default_name = None self.find_overrides = {} self.searched_programs = set() # The list of all programs that have been searched for. 
+ def copy(self): + other = Build(self.environment) + for k, v in self.__dict__.items(): + if isinstance(v, (list, dict, set, OrderedDict)): + other.__dict__[k] = v.copy() + else: + other.__dict__[k] = v + return other + + def merge(self, other): + for k, v in other.__dict__.items(): + self.__dict__[k] = v + def add_compiler(self, compiler): if self.static_linker is None and compiler.needs_static_linker(): self.static_linker = self.environment.detect_static_linker(compiler) @@ -324,25 +344,58 @@ a hard error in the future.''' % name) self.build_always_stale = False self.option_overrides = {} + def get_install_dir(self, environment): + # Find the installation directory. + default_install_dir = self.get_default_install_dir(environment) + outdirs = self.get_custom_install_dir() + if outdirs[0] is not None and outdirs[0] != default_install_dir and outdirs[0] is not True: + # Either the value is set to a non-default value, or is set to + # False (which means we want this specific output out of many + # outputs to not be installed). + custom_install_dir = True + else: + custom_install_dir = False + outdirs[0] = default_install_dir + return outdirs, custom_install_dir + def get_basename(self): return self.name def get_subdir(self): return self.subdir - def get_id(self): + @staticmethod + def _get_id_hash(target_id): + # We don't really need cryptographic security here. + # Small-digest hash function with unlikely collision is good enough. + h = hashlib.sha256() + h.update(target_id.encode(encoding='utf-8', errors='replace')) + # This ID should be case-insensitive and should work in Visual Studio, + # e.g. it should not start with leading '-'. + return h.hexdigest()[:7] + + @staticmethod + def construct_id_from_path(subdir, name, type_suffix): + """Construct target ID from subdir, name and type suffix. + + This helper function is made public mostly for tests.""" # This ID must also be a valid file name on all OSs. 
# It should also avoid shell metacharacters for obvious # reasons. '@' is not used as often as '_' in source code names. # In case of collisions consider using checksums. # FIXME replace with assert when slash in names is prohibited - name_part = self.name.replace('/', '@').replace('\\', '@') - assert not has_path_sep(self.type_suffix()) - myid = name_part + self.type_suffix() - if self.subdir: - subdir_part = self.subdir.replace('/', '@').replace('\\', '@') - myid = subdir_part + '@@' + myid - return myid + name_part = name.replace('/', '@').replace('\\', '@') + assert not has_path_sep(type_suffix) + my_id = name_part + type_suffix + if subdir: + subdir_part = Target._get_id_hash(subdir) + # preserve myid for better debuggability + return subdir_part + '@@' + my_id + return my_id + + def get_id(self): + return self.construct_id_from_path( + self.subdir, self.name, self.type_suffix()) def process_kwargs(self, kwargs): if 'build_by_default' in kwargs: @@ -363,6 +416,8 @@ a hard error in the future.''' % name) result[k] = v return result + def is_linkable_target(self): + return False class BuildTarget(Target): known_kwargs = known_build_target_kwargs @@ -393,6 +448,8 @@ class BuildTarget(Target): self.generated = [] self.extra_files = [] self.d_features = {} + self.pic = False + self.pie = False # Sources can be: # 1. Pre-existing source files in the source tree # 2. 
Pre-existing sources generated by configure_file in the build tree @@ -676,6 +733,20 @@ class BuildTarget(Target): result += i.get_all_link_deps() return result + def get_link_deps_mapping(self, prefix, environment): + return self.get_transitive_link_deps_mapping(prefix, environment) + + @lru_cache(maxsize=None) + def get_transitive_link_deps_mapping(self, prefix, environment): + result = {} + for i in self.link_targets: + mapping = i.get_link_deps_mapping(prefix, environment) + #we are merging two dictionaries, while keeping the earlier one dominant + result_tmp = mapping.copy() + result_tmp.update(result) + result = result_tmp + return result + @lru_cache(maxsize=None) def get_link_dep_subdirs(self): result = OrderedSet() @@ -684,6 +755,9 @@ class BuildTarget(Target): result.update(i.get_link_dep_subdirs()) return result + def get_default_install_dir(self, environment): + return environment.get_libdir() + def get_custom_install_dir(self): return self.install_dir @@ -735,9 +809,12 @@ just like those detected with the dependency() function.''') dfeature_unittest = kwargs.get('d_unittest', False) if dfeature_unittest: dfeatures['unittest'] = dfeature_unittest - dfeature_versions = kwargs.get('d_module_versions', None) + dfeature_versions = kwargs.get('d_module_versions', []) if dfeature_versions: dfeatures['versions'] = dfeature_versions + dfeature_debug = kwargs.get('d_debug', []) + if dfeature_debug: + dfeatures['debug'] = dfeature_debug if 'd_import_dirs' in kwargs: dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs', unholder=True) for d in dfeature_import_dirs: @@ -828,13 +905,14 @@ This will become a hard error in a future Meson release.''') # since library loading is done differently) if for_darwin(self.is_cross, self.environment) or for_windows(self.is_cross, self.environment): self.pic = True - elif '-fPIC' in clist + cpplist: - mlog.warning("Use the 'pic' kwarg instead of passing -fPIC manually to static library {!r}".format(self.name)) - 
self.pic = True else: - self.pic = kwargs.get('pic', False) - if not isinstance(self.pic, bool): - raise InvalidArguments('Argument pic to static library {!r} must be boolean'.format(self.name)) + self.pic = self._extract_pic_pie(kwargs, 'pic') + if isinstance(self, Executable): + # Executables must be PIE on Android + if for_android(self.is_cross, self.environment): + self.pie = True + else: + self.pie = self._extract_pic_pie(kwargs, 'pie') self.implicit_include_directories = kwargs.get('implicit_include_directories', True) if not isinstance(self.implicit_include_directories, bool): raise InvalidArguments('Implicit_include_directories must be a boolean.') @@ -847,6 +925,18 @@ This will become a hard error in a future Meson release.''') raise InvalidArguments('GNU symbol visibility arg %s not one of: %s', self.symbol_visibility, ', '.join(permitted)) + def _extract_pic_pie(self, kwargs, arg): + # Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags + all_flags = self.extra_args['c'] + self.extra_args['cpp'] + if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags: + mlog.warning("Use the '{}' kwarg instead of passing '{}' manually to {!r}".format(arg, '-f' + arg, self.name)) + return True + + val = kwargs.get(arg, False) + if not isinstance(val, bool): + raise InvalidArguments('Argument {} to {!r} must be boolean'.format(arg, self.name)) + return val + def get_filename(self): return self.filename @@ -1099,13 +1189,10 @@ You probably should put it in link_with instead.''') ''' linker, _ = self.get_clink_dynamic_linker_and_stdlibs() # Mixing many languages with MSVC is not supported yet so ignore stdlibs. 
- if linker and linker.get_id() in ['msvc', 'llvm', 'dmd']: + if linker and linker.get_id() in ['msvc', 'clang-cl', 'llvm', 'dmd']: return True return False - def is_linkable_target(self): - return False - def check_module_linking(self): ''' Warn if shared modules are linked with target: (link_with) #2865 @@ -1269,6 +1356,8 @@ class Executable(BuildTarget): known_kwargs = known_exe_kwargs def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs): + if 'pie' not in kwargs and 'b_pie' in environment.coredata.base_options: + kwargs['pie'] = environment.coredata.base_options['b_pie'].value super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs) # Unless overridden, executables have no suffix or prefix. Except on # Windows and with C#/Mono executables where the suffix is 'exe' @@ -1279,12 +1368,14 @@ class Executable(BuildTarget): if (for_windows(is_cross, environment) or for_cygwin(is_cross, environment) or 'cs' in self.compilers): self.suffix = 'exe' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('arm') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('arm')): + self.suffix = 'axf' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('ccrx') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('ccrx')): + self.suffix = 'abs' else: - if ('c' in self.compilers and self.compilers['c'].get_id().startswith('arm') or - 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('arm')): - self.suffix = 'axf' - else: - self.suffix = '' + self.suffix = '' self.filename = self.name if self.suffix: self.filename += '.' 
+ self.suffix @@ -1324,6 +1415,9 @@ class Executable(BuildTarget): # Only linkwithable if using export_dynamic self.is_linkwithable = self.export_dynamic + def get_default_install_dir(self, environment): + return environment.get_bindir() + def description(self): '''Human friendly description of the executable''' return self.name @@ -1385,6 +1479,12 @@ class StaticLibrary(BuildTarget): self.filename = self.prefix + self.name + '.' + self.suffix self.outputs = [self.filename] + def get_link_deps_mapping(self, prefix, environment): + return {} + + def get_default_install_dir(self, environment): + return environment.get_static_lib_dir() + def type_suffix(self): return "@sta" @@ -1431,6 +1531,21 @@ class SharedLibrary(BuildTarget): self.basic_filename_tpl = '{0.prefix}{0.name}.{0.suffix}' self.determine_filenames(is_cross, environment) + def get_link_deps_mapping(self, prefix, environment): + result = {} + mappings = self.get_transitive_link_deps_mapping(prefix, environment) + old = get_target_macos_dylib_install_name(self) + if old not in mappings: + fname = self.get_filename() + outdirs, _ = self.get_install_dir(self.environment) + new = os.path.join(prefix, outdirs[0], fname) + result.update({old: new}) + mappings.update(result) + return mappings + + def get_default_install_dir(self, environment): + return environment.get_shared_lib_dir() + def determine_filenames(self, is_cross, env): """ See https://github.com/mesonbuild/meson/pull/417 for details. @@ -1455,13 +1570,9 @@ class SharedLibrary(BuildTarget): prefix = '' suffix = '' self.filename_tpl = self.basic_filename_tpl - # If the user already provided the prefix and suffix to us, we don't - # need to do any filename suffix/prefix detection. 
# NOTE: manual prefix/suffix override is currently only tested for C/C++ - if self.prefix is not None and self.suffix is not None: - pass # C# and Mono - elif 'cs' in self.compilers: + if 'cs' in self.compilers: prefix = '' suffix = 'dll' self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' @@ -1470,8 +1581,8 @@ class SharedLibrary(BuildTarget): # For all other targets/platforms import_filename stays None elif for_windows(is_cross, env): suffix = 'dll' - self.vs_import_filename = '{0}.lib'.format(self.name) - self.gcc_import_filename = 'lib{0}.dll.a'.format(self.name) + self.vs_import_filename = '{0}{1}.lib'.format(self.prefix if self.prefix is not None else '', self.name) + self.gcc_import_filename = '{0}{1}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name) if self.get_using_msvc(): # Shared library is of the form foo.dll prefix = '' @@ -1490,7 +1601,7 @@ class SharedLibrary(BuildTarget): self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' elif for_cygwin(is_cross, env): suffix = 'dll' - self.gcc_import_filename = 'lib{0}.dll.a'.format(self.name) + self.gcc_import_filename = '{0}{1}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name) # Shared library is of the form cygfoo.dll # (ld --dll-search-prefix=cyg is the default) prefix = 'cyg' @@ -1702,6 +1813,10 @@ class SharedModule(SharedLibrary): raise MesonException('Shared modules must not specify the soversion kwarg.') super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs) + def get_default_install_dir(self, environment): + return environment.get_shared_module_dir() + + class CustomTarget(Target): known_kwargs = set([ 'input', @@ -1739,6 +1854,9 @@ class CustomTarget(Target): mlog.warning('Unknown keyword arguments in target %s: %s' % (self.name, ', '.join(unknowns))) + def get_default_install_dir(self, environment): + return None + def __lt__(self, other): return self.get_id() < other.get_id() diff --git 
a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py index bb6c9a9..31b7b89 100644 --- a/mesonbuild/compilers/__init__.py +++ b/mesonbuild/compilers/__init__.py @@ -14,16 +14,7 @@ # Public symbols for compilers sub-package when using 'from . import compilers' __all__ = [ - 'CLANG_OSX', - 'CLANG_STANDARD', - 'CLANG_WIN', - 'GCC_CYGWIN', - 'GCC_MINGW', - 'GCC_OSX', - 'GCC_STANDARD', - 'ICC_OSX', - 'ICC_STANDARD', - 'ICC_WIN', + 'CompilerType', 'all_languages', 'base_options', @@ -54,6 +45,8 @@ __all__ = [ 'ClangCPPCompiler', 'ClangObjCCompiler', 'ClangObjCPPCompiler', + 'ClangClCCompiler', + 'ClangClCPPCompiler', 'CompilerArgs', 'CPPCompiler', 'DCompiler', @@ -85,6 +78,9 @@ __all__ = [ 'PathScaleFortranCompiler', 'PGIFortranCompiler', 'RustCompiler', + 'CcrxCCompiler', + 'CcrxCompiler', + 'CcrxCPPCompiler', 'SunFortranCompiler', 'SwiftCompiler', 'ValaCompiler', @@ -94,16 +90,7 @@ __all__ = [ # Bring symbols from each module into compilers sub-package namespace from .compilers import ( - GCC_OSX, - GCC_MINGW, - GCC_CYGWIN, - GCC_STANDARD, - CLANG_OSX, - CLANG_WIN, - CLANG_STANDARD, - ICC_OSX, - ICC_WIN, - ICC_STANDARD, + CompilerType, all_languages, base_options, clib_langs, @@ -126,15 +113,18 @@ from .compilers import ( CompilerArgs, GnuCompiler, IntelCompiler, + CcrxCompiler, ) from .c import ( CCompiler, ArmCCompiler, ArmclangCCompiler, ClangCCompiler, + ClangClCCompiler, GnuCCompiler, ElbrusCCompiler, IntelCCompiler, + CcrxCCompiler, VisualStudioCCompiler, ) from .cpp import ( @@ -142,9 +132,11 @@ from .cpp import ( ArmCPPCompiler, ArmclangCPPCompiler, ClangCPPCompiler, + ClangClCPPCompiler, GnuCPPCompiler, ElbrusCPPCompiler, IntelCPPCompiler, + CcrxCPPCompiler, VisualStudioCPPCompiler, ) from .cs import MonoCompiler, VisualStudioCsCompiler diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 1d531a6..b009645 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -30,7 +30,6 @@ from ..mesonlib import ( from 
.c_function_attributes import C_FUNC_ATTRIBUTES from .compilers import ( - GCC_MINGW, get_largefile_args, gnu_winlibs, msvc_winlibs, @@ -46,6 +45,7 @@ from .compilers import ( ElbrusCompiler, IntelCompiler, RunResult, + CcrxCompiler, ) gnu_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt') @@ -121,8 +121,8 @@ class CCompiler(Compiler): # The default behavior is this, override in MSVC @functools.lru_cache(maxsize=None) def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): - if self.id == 'clang' and self.clang_type == compilers.CLANG_OSX: - return self.build_osx_rpath_args(build_dir, rpath_paths, build_rpath) + if self.compiler_type.is_windows_compiler: + return [] return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath) def get_dependency_gen_args(self, outtarget, outfile): @@ -156,30 +156,6 @@ class CCompiler(Compiler): ''' return self.get_no_optimization_args() - def get_allow_undefined_link_args(self): - ''' - Get args for allowing undefined symbols when linking to a shared library - ''' - if self.id == 'clang': - if self.clang_type == compilers.CLANG_OSX: - # Apple ld - return ['-Wl,-undefined,dynamic_lookup'] - else: - # GNU ld and LLVM lld - return ['-Wl,--allow-shlib-undefined'] - elif self.id == 'gcc': - if self.gcc_type == compilers.GCC_OSX: - # Apple ld - return ['-Wl,-undefined,dynamic_lookup'] - else: - # GNU ld and LLVM lld - return ['-Wl,--allow-shlib-undefined'] - elif self.id == 'msvc': - # link.exe - return ['/FORCE:UNRESOLVED'] - # FIXME: implement other linkers - return [] - def get_output_args(self, target): return ['-o', target] @@ -212,7 +188,9 @@ class CCompiler(Compiler): def _get_search_dirs(self, env): extra_args = ['--print-search-dirs'] stdo = None - with self._build_wrapper('', env, extra_args, None, 'compile', True) as p: + with self._build_wrapper('', env, extra_args=extra_args, + dependencies=None, mode='compile', + want_output=True) as p: stdo = 
p.stdo return stdo @@ -220,9 +198,25 @@ class CCompiler(Compiler): def _split_fetch_real_dirs(pathstr, sep=':'): paths = [] for p in pathstr.split(sep): - p = Path(p) - if p.exists(): - paths.append(p.resolve().as_posix()) + # GCC returns paths like this: + # /usr/lib/gcc/x86_64-linux-gnu/8/../../../../x86_64-linux-gnu/lib + # It would make sense to normalize them to get rid of the .. parts + # Sadly when you are on a merged /usr fs it also kills these: + # /lib/x86_64-linux-gnu + # since /lib is a symlink to /usr/lib. This would mean + # paths under /lib would be considered not a "system path", + # which is wrong and breaks things. Store everything, just to be sure. + pobj = Path(p) + unresolved = pobj.as_posix() + if pobj.exists(): + if unresolved not in paths: + paths.append(unresolved) + try: + resolved = Path(p).resolve().as_posix() + if resolved not in paths: + paths.append(resolved) + except FileNotFoundError: + pass return tuple(paths) def get_compiler_dirs(self, env, name): @@ -236,8 +230,34 @@ class CCompiler(Compiler): return () @functools.lru_cache() - def get_library_dirs(self, env): - return self.get_compiler_dirs(env, 'libraries') + def get_library_dirs(self, env, elf_class = None): + dirs = self.get_compiler_dirs(env, 'libraries') + if elf_class is None or elf_class == 0: + return dirs + + # if we do have an elf class for 32-bit or 64-bit, we want to check that + # the directory in question contains libraries of the appropriate class. Since + # system directories aren't mixed, we only need to check one file for each + # directory and go by that. If we can't check the file for some reason, assume + # the compiler knows what it's doing, and accept the directory anyway. 
+ retval = [] + for d in dirs: + files = [f for f in os.listdir(d) if f.endswith('.so') and os.path.isfile(os.path.join(d, f))] + # if no files, accept directory and move on + if len(files) == 0: + retval.append(d) + continue + file_to_check = os.path.join(d, files[0]) + with open(file_to_check, 'rb') as fd: + header = fd.read(5) + # if file is not an ELF file, it's weird, but accept dir + # if it is elf, and the class matches, accept dir + if header[1:4] != b'ELF' or int(header[4]) == elf_class: + retval.append(d) + # at this point, it's an ELF file which doesn't match the + # appropriate elf_class, so skip this one + pass + return tuple(retval) @functools.lru_cache() def get_program_dirs(self, env): @@ -338,13 +358,14 @@ class CCompiler(Compiler): code = 'int main(int argc, char **argv) { int class=0; return class; }\n' return self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) - def check_header(self, hname, prefix, env, extra_args=None, dependencies=None): + def check_header(self, hname, prefix, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #include <{header}>''' - return self.compiles(code.format(**fargs), env, extra_args, dependencies) + return self.compiles(code.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) - def has_header(self, hname, prefix, env, extra_args=None, dependencies=None): + def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #ifdef __has_include @@ -354,10 +375,10 @@ class CCompiler(Compiler): #else #include <{header}> #endif''' - return self.compiles(code.format(**fargs), env, extra_args, - dependencies, 'preprocess') + return self.compiles(code.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies, mode='preprocess') - def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None): + def 
has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} t = '''{prefix} #include <{header}> @@ -367,13 +388,16 @@ class CCompiler(Compiler): {symbol}; #endif }}''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): if extra_args is None: extra_args = [] - elif isinstance(extra_args, str): - extra_args = [extra_args] + else: + extra_args = listify(extra_args) + extra_args = listify([e(mode) if callable(e) else e for e in extra_args]) + if dependencies is None: dependencies = [] elif not isinstance(dependencies, list): @@ -403,7 +427,13 @@ class CCompiler(Compiler): args += env.coredata.get_external_preprocess_args(self.language) elif mode == 'compile': # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env - args += env.coredata.get_external_args(self.language) + sys_args = env.coredata.get_external_args(self.language) + # Apparently it is a thing to inject linker flags both + # via CFLAGS _and_ LDFLAGS, even though the former are + # also used during linking. These flags can break + # argument checks. Thanks, Autotools. 
+ cleaned_sys_args = self.remove_linkerlike_args(sys_args) + args += cleaned_sys_args elif mode == 'link': # Add LDFLAGS from the env args += env.coredata.get_external_link_args(self.language) @@ -412,7 +442,7 @@ class CCompiler(Compiler): args += extra_args return args - def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'): + def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'): with self._build_wrapper(code, env, extra_args, dependencies, mode) as p: return p.returncode == 0 @@ -420,10 +450,11 @@ class CCompiler(Compiler): args = self._get_compiler_check_args(env, extra_args, dependencies, mode) return self.compile(code, args, mode, want_output=want_output) - def links(self, code, env, extra_args=None, dependencies=None): - return self.compiles(code, env, extra_args, dependencies, mode='link') + def links(self, code, env, *, extra_args=None, dependencies=None): + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode='link') - def run(self, code, env, extra_args=None, dependencies=None): + def run(self, code, env, *, extra_args=None, dependencies=None): if self.is_cross and self.exe_wrapper is None: raise CrossNoRunException('Can not run test applications in this cross environment.') with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p: @@ -453,7 +484,8 @@ class CCompiler(Compiler): t = '''#include <stdio.h> {prefix} int main() {{ static int a[1-2*!({expression})]; a[0]=0; return 0; }}''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def cross_compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies): # Try user's guess first @@ -503,7 +535,7 @@ class CCompiler(Compiler): return low - def compute_int(self, expression, low, high, guess, prefix, env, extra_args=None, 
dependencies=None): + def compute_int(self, expression, low, high, guess, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] if self.is_cross: @@ -515,14 +547,15 @@ class CCompiler(Compiler): printf("%ld\\n", (long)({expression})); return 0; }};''' - res = self.run(t.format(**fargs), env, extra_args, dependencies) + res = self.run(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) if not res.compiled: return -1 if res.returncode != 0: raise EnvironmentException('Could not run compute_int test binary.') return int(res.stdout) - def cross_sizeof(self, typename, prefix, env, extra_args=None, dependencies=None): + def cross_sizeof(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} @@ -531,30 +564,33 @@ class CCompiler(Compiler): int main(int argc, char **argv) {{ {type} something; }}''' - if not self.compiles(t.format(**fargs), env, extra_args, dependencies): + if not self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies): return -1 return self.cross_compute_int('sizeof(%s)' % typename, None, None, None, prefix, env, extra_args, dependencies) - def sizeof(self, typename, prefix, env, extra_args=None, dependencies=None): + def sizeof(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} if self.is_cross: - return self.cross_sizeof(typename, prefix, env, extra_args, dependencies) + return self.cross_sizeof(typename, prefix, env, extra_args=extra_args, + dependencies=dependencies) t = '''#include<stdio.h> {prefix} int main(int argc, char **argv) {{ printf("%ld\\n", (long)(sizeof({type}))); return 0; }};''' - res = self.run(t.format(**fargs), env, extra_args, dependencies) + res = self.run(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) if not 
res.compiled: return -1 if res.returncode != 0: raise EnvironmentException('Could not run sizeof test binary.') return int(res.stdout) - def cross_alignment(self, typename, prefix, env, extra_args=None, dependencies=None): + def cross_alignment(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename} @@ -563,7 +599,8 @@ class CCompiler(Compiler): int main(int argc, char **argv) {{ {type} something; }}''' - if not self.compiles(t.format(**fargs), env, extra_args, dependencies): + if not self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies): return -1 t = '''#include <stddef.h> {prefix} @@ -573,11 +610,12 @@ class CCompiler(Compiler): }};''' return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t.format(**fargs), env, extra_args, dependencies) - def alignment(self, typename, prefix, env, extra_args=None, dependencies=None): + def alignment(self, typename, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] if self.is_cross: - return self.cross_alignment(typename, prefix, env, extra_args, dependencies) + return self.cross_alignment(typename, prefix, env, extra_args=extra_args, + dependencies=dependencies) fargs = {'prefix': prefix, 'type': typename} t = '''#include <stdio.h> #include <stddef.h> @@ -590,7 +628,8 @@ class CCompiler(Compiler): printf("%d", (int)offsetof(struct tmp, target)); return 0; }}''' - res = self.run(t.format(**fargs), env, extra_args, dependencies) + res = self.run(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) if not res.compiled: raise EnvironmentException('Could not compile alignment test.') if res.returncode != 0: @@ -634,7 +673,7 @@ class CCompiler(Compiler): int main(int argc, char *argv[]) {{ printf ("{fmt}", {cast} {f}()); }}'''.format(**fargs) - res = self.run(code, env, extra_args, dependencies) + res = 
self.run(code, env, extra_args=extra_args, dependencies=dependencies) if not res.compiled: m = 'Could not get return value of {}()' raise EnvironmentException(m.format(fname)) @@ -703,7 +742,7 @@ class CCompiler(Compiler): }}''' return head, main - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): """ First, this function looks for the symbol in the default libraries provided by the compiler (stdlib + a few others usually). If that @@ -751,7 +790,8 @@ class CCompiler(Compiler): head, main = self._no_prototype_templ() templ = head + stubs_fail + main - if self.links(templ.format(**fargs), env, extra_args, dependencies): + if self.links(templ.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies): return True # MSVC does not have compiler __builtin_-s. @@ -784,9 +824,10 @@ class CCompiler(Compiler): #endif #endif }}''' - return self.links(t.format(**fargs), env, extra_args, dependencies) + return self.links(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) - def has_members(self, typename, membernames, prefix, env, extra_args=None, dependencies=None): + def has_members(self, typename, membernames, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] fargs = {'prefix': prefix, 'type': typename, 'name': 'foo'} @@ -800,7 +841,8 @@ class CCompiler(Compiler): {type} {name}; {members} }};''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def has_type(self, typename, prefix, env, extra_args, dependencies=None): fargs = {'prefix': prefix, 'type': typename} @@ -808,7 +850,8 @@ class CCompiler(Compiler): void bar() {{ sizeof({type}); }};''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), 
env, extra_args=extra_args, + dependencies=dependencies) def symbols_have_underscore_prefix(self, env): ''' @@ -868,17 +911,17 @@ class CCompiler(Compiler): stlibext = ['a'] # We've always allowed libname to be both `foo` and `libfoo`, # and now people depend on it - if strict and self.id != 'msvc': # lib prefix is not usually used with msvc + if strict and not isinstance(self, VisualStudioCCompiler): # lib prefix is not usually used with msvc prefixes = ['lib'] else: prefixes = ['lib', ''] # Library suffixes and prefixes if for_darwin(env.is_cross_build(), env): - shlibext = ['dylib'] + shlibext = ['dylib', 'so'] elif for_windows(env.is_cross_build(), env): # FIXME: .lib files can be import or static so we should read the # file, figure out which one it is, and reject the wrong kind. - if self.id == 'msvc': + if isinstance(self, VisualStudioCCompiler): shlibext = ['lib'] else: shlibext = ['dll.a', 'lib', 'dll'] @@ -939,6 +982,13 @@ class CCompiler(Compiler): return f return None + @functools.lru_cache() + def output_is_64bit(self, env): + ''' + returns true if the output produced is 64-bit, false if 32-bit + ''' + return self.sizeof('void *', '', env) == 8 + def find_library_real(self, libname, env, extra_dirs, code, libtype): # First try if we can just add the library as -l. # Gcc + co seem to prefer builtin lib dirs to -L dirs. @@ -954,8 +1004,18 @@ class CCompiler(Compiler): # Not found or we want to use a specific libtype? Try to find the # library file itself. patterns = self.get_library_naming(env, libtype) + # try to detect if we are 64-bit or 32-bit. 
If we can't + # detect, we will just skip path validity checks done in + # get_library_dirs() call + try: + if self.output_is_64bit(env): + elf_class = 2 + else: + elf_class = 1 + except: + elf_class = 0 # Search in the specified dirs, and then in the system libraries - for d in itertools.chain(extra_dirs, self.get_library_dirs(env)): + for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)): for p in patterns: trial = self._get_trials_from_pattern(p, d, libname) if not trial: @@ -990,12 +1050,12 @@ class CCompiler(Compiler): return self.find_library_impl(libname, env, extra_dirs, code, libtype) def thread_flags(self, env): - if for_haiku(self.is_cross, env): + if for_haiku(self.is_cross, env) or for_darwin(self.is_cross, env): return [] return ['-pthread'] def thread_link_flags(self, env): - if for_haiku(self.is_cross, env): + if for_haiku(self.is_cross, env) or for_darwin(self.is_cross, env): return [] return ['-pthread'] @@ -1064,9 +1124,9 @@ class CCompiler(Compiler): class ClangCCompiler(ClangCompiler, CCompiler): - def __init__(self, exelist, version, clang_type, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ClangCompiler.__init__(self, clang_type) + ClangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -1092,15 +1152,15 @@ class ClangCCompiler(ClangCompiler, CCompiler): def get_linker_always_args(self): basic = super().get_linker_always_args() - if self.clang_type == compilers.CLANG_OSX: + if self.compiler_type.is_osx_compiler: return basic + ['-Wl,-headerpad_max_install_names'] return basic class ArmclangCCompiler(ArmclangCompiler, CCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, 
compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmclangCompiler.__init__(self) + ArmclangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -1126,9 +1186,9 @@ class ArmclangCCompiler(ArmclangCompiler, CCompiler): class GnuCCompiler(GnuCompiler, CCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - GnuCompiler.__init__(self, gcc_type, defines) + GnuCompiler.__init__(self, compiler_type, defines) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -1140,7 +1200,7 @@ class GnuCCompiler(GnuCompiler, CCompiler): ['none', 'c89', 'c99', 'c11', 'gnu89', 'gnu99', 'gnu11'], 'none')}) - if self.gcc_type == GCC_MINGW: + if self.compiler_type.is_windows_compiler: opts.update({ 'c_winlibs': coredata.UserArrayOption('c_winlibs', 'Standard Win libraries to link against', gnu_winlibs), }) @@ -1154,21 +1214,18 @@ class GnuCCompiler(GnuCompiler, CCompiler): return args def get_option_link_args(self, options): - if self.gcc_type == GCC_MINGW: + if self.compiler_type.is_windows_compiler: return options['c_winlibs'].value[:] return [] - def get_std_shared_lib_link_args(self): - return ['-shared'] - def get_pch_use_args(self, pch_dir, header): return ['-fpch-preprocess', '-include', os.path.basename(header)] class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None, **kwargs): - GnuCCompiler.__init__(self, exelist, version, gcc_type, is_cross, exe_wrapper, defines, **kwargs) - 
ElbrusCompiler.__init__(self, gcc_type, defines) + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs): + GnuCCompiler.__init__(self, exelist, version, compiler_type, is_cross, exe_wrapper, defines, **kwargs) + ElbrusCompiler.__init__(self, compiler_type, defines) # It does support some various ISO standards and c/gnu 90, 9x, 1x in addition to those which GNU CC supports. def get_options(self): @@ -1182,22 +1239,24 @@ class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler): # Elbrus C compiler does not have lchmod, but there is only linker warning, not compiler error. # So we should explicitly fail at this case. - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): if funcname == 'lchmod': return False else: - return super().has_function(funcname, prefix, env, extra_args, dependencies) + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) class IntelCCompiler(IntelCompiler, CCompiler): - def __init__(self, exelist, version, icc_type, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - IntelCompiler.__init__(self, icc_type) + IntelCompiler.__init__(self, compiler_type) self.lang_header = 'c-header' - default_warn_args = ['-Wall', '-w3', '-diag-disable:remark', '-Wpch-messages'] + default_warn_args = ['-Wall', '-w3', '-diag-disable:remark'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], - '3': default_warn_args + ['-Wextra', '-Wpedantic']} + '3': default_warn_args + ['-Wextra']} def get_options(self): opts = CCompiler.get_options(self) @@ -1217,12 +1276,6 @@ class IntelCCompiler(IntelCompiler, CCompiler): args.append('-std=' + std.value) return args - def 
get_std_shared_lib_link_args(self): - return ['-shared'] - - def has_arguments(self, args, env, code, mode): - return super().has_arguments(args + ['-diag-error', '10006'], env, code, mode) - class VisualStudioCCompiler(CCompiler): std_warn_args = ['/W3'] @@ -1266,7 +1319,7 @@ class VisualStudioCCompiler(CCompiler): def get_buildtype_args(self, buildtype): args = compilers.msvc_buildtype_args[buildtype] - if version_compare(self.version, '<18.0'): + if self.id == 'msvc' and version_compare(self.version, '<18.0'): args = [arg for arg in args if arg != '/Gw'] return args @@ -1284,6 +1337,8 @@ class VisualStudioCCompiler(CCompiler): def get_pch_use_args(self, pch_dir, header): base = os.path.basename(header) + if self.id == 'clang-cl': + base = header pchname = self.get_pch_name(header) return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, pchname)] @@ -1311,7 +1366,12 @@ class VisualStudioCCompiler(CCompiler): return [] def get_linker_exelist(self): - return ['link'] # FIXME, should have same path as compiler. + # FIXME, should have same path as compiler. + # FIXME, should be controllable via cross-file. + if self.id == 'clang-cl': + return ['lld-link'] + else: + return ['link'] def get_linker_always_args(self): return ['/nologo'] @@ -1419,6 +1479,8 @@ class VisualStudioCCompiler(CCompiler): # http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t def has_arguments(self, args, env, code, mode): warning_text = '4044' if mode == 'link' else '9002' + if self.id == 'clang-cl' and mode != 'link': + args = args + ['-Werror=unknown-argument'] with self._build_wrapper(code, env, extra_args=args, mode=mode) as p: if p.returncode != 0: return False @@ -1434,7 +1496,7 @@ class VisualStudioCCompiler(CCompiler): # build obviously, which is why we only do this when PCH is on. # This was added in Visual Studio 2013 (MSVC 18.0). 
Before that it was # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx - if pch and version_compare(self.version, '>=18.0'): + if pch and self.id == 'msvc' and version_compare(self.version, '>=18.0'): args = ['/FS'] + args return args @@ -1451,7 +1513,7 @@ class VisualStudioCCompiler(CCompiler): def get_instruction_set_args(self, instruction_set): if self.is_64: return vs64_instruction_set_args.get(instruction_set, None) - if self.version.split('.')[0] == '16' and instruction_set == 'avx': + if self.id == 'msvc' and self.version.split('.')[0] == '16' and instruction_set == 'avx': # VS documentation says that this exists and should work, but # it does not. The headers do not contain AVX intrinsics # and the can not be called. @@ -1459,6 +1521,10 @@ class VisualStudioCCompiler(CCompiler): return vs32_instruction_set_args.get(instruction_set, None) def get_toolset_version(self): + if self.id == 'clang-cl': + # I have no idea + return '14.1' + # See boost/config/compiler/visualc.cpp for up to date mapping try: version = int(''.join(self.version.split('.')[0:2])) @@ -1513,11 +1579,24 @@ class VisualStudioCCompiler(CCompiler): # false without compiling anything return name in ['dllimport', 'dllexport'] + def get_argument_syntax(self): + return 'msvc' + + def get_allow_undefined_link_args(self): + # link.exe + return ['/FORCE:UNRESOLVED'] + + +class ClangClCCompiler(VisualStudioCCompiler): + def __init__(self, exelist, version, is_cross, exe_wrap, is_64): + super().__init__(exelist, version, is_cross, exe_wrap, is_64) + self.id = 'clang-cl' + class ArmCCompiler(ArmCompiler, CCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmCompiler.__init__(self) + ArmCompiler.__init__(self, compiler_type) def get_options(self): opts = CCompiler.get_options(self) 
@@ -1532,3 +1611,45 @@ class ArmCCompiler(ArmCompiler, CCompiler): if std.value != 'none': args.append('--' + std.value) return args + +class CcrxCCompiler(CcrxCompiler, CCompiler): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): + CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) + CcrxCompiler.__init__(self, compiler_type) + + # Override CCompiler.get_always_args + def get_always_args(self): + return ['-nologo'] + + def get_options(self): + opts = CCompiler.get_options(self) + opts.update({'c_std': coredata.UserComboOption('c_std', 'C language standard to use', + ['none', 'c89', 'c99'], + 'none')}) + return opts + + def get_option_compile_args(self, options): + args = [] + std = options['c_std'] + if std.value == 'c89': + args.append('-lang=c') + elif std.value == 'c99': + args.append('-lang=c99') + return args + + def get_compile_only_args(self): + return [] + + def get_no_optimization_args(self): + return ['-optimize=0'] + + def get_output_args(self, target): + return ['-output=obj=%s' % target] + + def get_linker_output_args(self, outputname): + return ['-output=%s' % outputname] + + def get_include_args(self, path, is_system): + if path == '': + path = '.' 
+ return ['-include=' + path] diff --git a/mesonbuild/compilers/c_function_attributes.py b/mesonbuild/compilers/c_function_attributes.py index 9aeaaf2..a522a1a 100644 --- a/mesonbuild/compilers/c_function_attributes.py +++ b/mesonbuild/compilers/c_function_attributes.py @@ -91,10 +91,10 @@ C_FUNC_ATTRIBUTES = { 'used': 'int foo(void) __attribute__((used));', 'visibility': ''' - int foo_def(void) __attribute__((visibility(("default")))); - int foo_hid(void) __attribute__((visibility(("hidden")))); - int foo_int(void) __attribute__((visibility(("internal")))); - int foo_pro(void) __attribute__((visibility(("protected"))));''', + int foo_def(void) __attribute__((visibility("default"))); + int foo_hid(void) __attribute__((visibility("hidden"))); + int foo_int(void) __attribute__((visibility("internal"))); + int foo_pro(void) __attribute__((visibility("protected")));''', 'warning': 'int foo(void) __attribute__((warning("")));', 'warn_unused_result': diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 40d6880..3761433 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -12,14 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -import contextlib, os.path, re, tempfile, shlex +import abc, contextlib, enum, os.path, re, tempfile, shlex import subprocess from ..linkers import StaticLinker from .. import coredata from .. import mlog from .. import mesonlib -from ..mesonlib import EnvironmentException, MesonException, OrderedSet, version_compare, Popen_safe +from ..mesonlib import ( + EnvironmentException, MesonException, OrderedSet, version_compare, + Popen_safe +) """This file contains the data files of all compilers Meson knows about. To support a new compiler, add its information below. 
@@ -127,26 +130,40 @@ gnulike_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': [], 'release': [], - 'minsize': []} + 'minsize': [], + 'custom': [], + } armclang_buildtype_args = {'plain': [], 'debug': ['-O0', '-g'], 'debugoptimized': ['-O1', '-g'], 'release': ['-Os'], - 'minsize': ['-Oz']} + 'minsize': ['-Oz'], + 'custom': [], + } arm_buildtype_args = {'plain': [], 'debug': ['-O0', '--debug'], 'debugoptimized': ['-O1', '--debug'], 'release': ['-O3', '-Otime'], 'minsize': ['-O3', '-Ospace'], + 'custom': [], } +ccrx_buildtype_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], + } + msvc_buildtype_args = {'plain': [], 'debug': ["/ZI", "/Ob0", "/Od", "/RTC1"], 'debugoptimized': ["/Zi", "/Ob1"], 'release': ["/Ob2", "/Gw"], 'minsize': ["/Zi", "/Gw"], + 'custom': [], } apple_buildtype_linker_args = {'plain': [], @@ -154,6 +171,7 @@ apple_buildtype_linker_args = {'plain': [], 'debugoptimized': [], 'release': [], 'minsize': [], + 'custom': [], } gnulike_buildtype_linker_args = {'plain': [], @@ -161,6 +179,7 @@ gnulike_buildtype_linker_args = {'plain': [], 'debugoptimized': [], 'release': ['-Wl,-O1'], 'minsize': [], + 'custom': [], } arm_buildtype_linker_args = {'plain': [], @@ -168,8 +187,17 @@ arm_buildtype_linker_args = {'plain': [], 'debugoptimized': [], 'release': [], 'minsize': [], + 'custom': [], } +ccrx_buildtype_linker_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], + } + msvc_buildtype_linker_args = {'plain': [], 'debug': [], 'debugoptimized': [], @@ -178,6 +206,7 @@ msvc_buildtype_linker_args = {'plain': [], # REF implies ICF. 
'release': ['/OPT:REF'], 'minsize': ['/INCREMENTAL:NO', '/OPT:REF'], + 'custom': [], } java_buildtype_args = {'plain': [], @@ -185,6 +214,7 @@ java_buildtype_args = {'plain': [], 'debugoptimized': ['-g'], 'release': [], 'minsize': [], + 'custom': [], } rust_buildtype_args = {'plain': [], @@ -192,6 +222,7 @@ rust_buildtype_args = {'plain': [], 'debugoptimized': [], 'release': [], 'minsize': [], + 'custom': [], } d_gdc_buildtype_args = {'plain': [], @@ -199,6 +230,7 @@ d_gdc_buildtype_args = {'plain': [], 'debugoptimized': ['-finline-functions'], 'release': ['-frelease', '-finline-functions'], 'minsize': [], + 'custom': [], } d_ldc_buildtype_args = {'plain': [], @@ -206,6 +238,7 @@ d_ldc_buildtype_args = {'plain': [], 'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'], 'release': ['-release', '-enable-inlining', '-Hkeep-all-bodies'], 'minsize': [], + 'custom': [], } d_dmd_buildtype_args = {'plain': [], @@ -213,6 +246,7 @@ d_dmd_buildtype_args = {'plain': [], 'debugoptimized': ['-inline'], 'release': ['-release', '-inline'], 'minsize': [], + 'custom': [], } mono_buildtype_args = {'plain': [], @@ -220,6 +254,7 @@ mono_buildtype_args = {'plain': [], 'debugoptimized': ['-optimize+'], 'release': ['-optimize+'], 'minsize': [], + 'custom': [], } swift_buildtype_args = {'plain': [], @@ -227,6 +262,7 @@ swift_buildtype_args = {'plain': [], 'debugoptimized': [], 'release': [], 'minsize': [], + 'custom': [], } gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32', @@ -246,6 +282,22 @@ clang_color_args = {'auto': ['-Xclang', '-fcolor-diagnostics'], 'never': ['-Xclang', '-fno-color-diagnostics'], } +arm_optimization_args = {'0': ['-O0'], + 'g': ['-g'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': [], + } + +armclang_optimization_args = {'0': ['-O0'], + 'g': ['-g'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Os'] + } + clike_optimization_args = {'0': [], 'g': [], '1': ['-O1'], @@ -262,11 +314,19 @@ gnu_optimization_args = 
{'0': [], 's': ['-Os'], } +ccrx_optimization_args = {'0': ['-optimize=0'], + 'g': ['-optimize=0'], + '1': ['-optimize=1'], + '2': ['-optimize=2'], + '3': ['-optimize=max'], + 's': ['-optimize=2', '-size'] + } + msvc_optimization_args = {'0': [], 'g': ['/O0'], '1': ['/O1'], '2': ['/O2'], - '3': ['/O3'], + '3': ['/O2'], 's': ['/O1'], # Implies /Os. } @@ -276,6 +336,9 @@ clike_debug_args = {False: [], msvc_debug_args = {False: [], True: []} # Fixme! +ccrx_debug_args = {False: [], + True: ['-debug']} + base_options = {'b_pch': coredata.UserBooleanOption('b_pch', 'Use precompiled headers', True), 'b_lto': coredata.UserBooleanOption('b_lto', 'Use link time optimization', False), 'b_sanitize': coredata.UserComboOption('b_sanitize', @@ -298,6 +361,9 @@ base_options = {'b_pch': coredata.UserBooleanOption('b_pch', 'Use precompiled he 'b_staticpic': coredata.UserBooleanOption('b_staticpic', 'Build static libraries as position independent', True), + 'b_pie': coredata.UserBooleanOption('b_pie', + 'Build executables as position independent', + False), 'b_bitcode': coredata.UserBooleanOption('b_bitcode', 'Generate and embed bitcode (only macOS and iOS)', False), @@ -391,9 +457,9 @@ def get_base_compile_args(options, compiler): try: pgo_val = options['b_pgo'].value if pgo_val == 'generate': - args.append('-fprofile-generate') + args.extend(compiler.get_profile_generate_args()) elif pgo_val == 'use': - args.append('-fprofile-use') + args.extend(compiler.get_profile_use_args()) except KeyError: pass try: @@ -437,9 +503,9 @@ def get_base_link_args(options, linker, is_shared_module): try: pgo_val = options['b_pgo'].value if pgo_val == 'generate': - args.append('-fprofile-generate') + args.extend(linker.get_profile_generate_args()) elif pgo_val == 'use': - args.append('-fprofile-use') + args.extend(linker.get_profile_use_args()) except KeyError: pass try: @@ -470,6 +536,31 @@ def get_base_link_args(options, linker, is_shared_module): pass return args +def prepare_rpaths(raw_rpaths, 
build_dir, from_dir): + internal_format_rpaths = [evaluate_rpath(p, build_dir, from_dir) for p in raw_rpaths] + ordered_rpaths = order_rpaths(internal_format_rpaths) + return ordered_rpaths + +def order_rpaths(rpath_list): + # We want rpaths that point inside our build dir to always override + # those pointing to other places in the file system. This is so built + # binaries prefer our libraries to the ones that may lie somewhere + # in the file system, such as /lib/x86_64-linux-gnu. + # + # The correct thing to do here would be C++'s std::stable_partition. + # Python standard library does not have it, so replicate it with + # sort, which is guaranteed to be stable. + return sorted(rpath_list, key=os.path.isabs) + +def evaluate_rpath(p, build_dir, from_dir): + if p == from_dir: + return '' # relpath errors out in this case + elif os.path.isabs(p): + return p # These can be outside of build dir. + else: + return os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir)) + + class CrossNoRunException(MesonException): pass @@ -583,7 +674,6 @@ class CompilerArgs(list): to recursively search for symbols in the libraries. This is not needed with other linkers. ''' - # A standalone argument must never be deduplicated because it is # defined by what comes _after_ it. 
Thus dedupping this: # -D FOO -D BAR @@ -780,10 +870,10 @@ class Compiler: def compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies): raise EnvironmentException('%s does not support compute_int ' % self.get_id()) - def has_members(self, typename, membernames, prefix, env, extra_args=None, dependencies=None): + def has_members(self, typename, membernames, prefix, env, *, extra_args=None, dependencies=None): raise EnvironmentException('%s does not support has_member(s) ' % self.get_id()) - def has_type(self, typename, prefix, env, extra_args, dependencies=None): + def has_type(self, typename, prefix, env, extra_args, *, dependencies=None): raise EnvironmentException('%s does not support has_type ' % self.get_id()) def symbols_have_underscore_prefix(self, env): @@ -810,6 +900,9 @@ class Compiler: def get_linker_always_args(self): return [] + def get_linker_lib_prefix(self): + return '' + def gen_import_library_args(self, implibname): """ Used only on Windows for libraries that need an import library. @@ -1055,46 +1148,30 @@ class Compiler: def get_instruction_set_args(self, instruction_set): return None - def build_osx_rpath_args(self, build_dir, rpath_paths, build_rpath): - # Ensure that there is enough space for large RPATHs and install_name - args = ['-Wl,-headerpad_max_install_names'] - if not rpath_paths and not build_rpath: - return args - # On OSX, rpaths must be absolute. 
- abs_rpaths = [os.path.join(build_dir, p) for p in rpath_paths] - if build_rpath != '': - abs_rpaths.append(build_rpath) - # Need to deduplicate abs_rpaths, as rpath_paths and - # build_rpath are not guaranteed to be disjoint sets - args += ['-Wl,-rpath,' + rp for rp in OrderedSet(abs_rpaths)] - return args - def build_unix_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): if not rpath_paths and not install_rpath and not build_rpath: return [] - # The rpaths we write must be relative, because otherwise - # they have different length depending on the build + args = [] + if mesonlib.is_osx(): + # Ensure that there is enough space for install_name_tool in-place editing of large RPATHs + args.append('-Wl,-headerpad_max_install_names') + # @loader_path is the equivalent of $ORIGIN on macOS + # https://stackoverflow.com/q/26280738 + origin_placeholder = '@loader_path' + else: + origin_placeholder = '$ORIGIN' + # The rpaths we write must be relative if they point to the build dir, + # because otherwise they have different length depending on the build # directory. This breaks reproducible builds. - rel_rpaths = [] - for p in rpath_paths: - if p == from_dir: - relative = '' # relpath errors out in this case - else: - relative = os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir)) - rel_rpaths.append(relative) - paths = ':'.join([os.path.join('$ORIGIN', p) for p in rel_rpaths]) + processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir) + # Need to deduplicate rpaths, as macOS's install_name_tool + # is *very* allergic to duplicate -delete_rpath arguments + # when calling depfixer on installation. + all_paths = OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths]) # Build_rpath is used as-is (it is usually absolute). 
if build_rpath != '': - if paths != '': - paths += ':' - paths += build_rpath - if len(paths) < len(install_rpath): - padding = 'X' * (len(install_rpath) - len(paths)) - if not paths: - paths = padding - else: - paths = paths + ':' + padding - args = [] + all_paths.add(build_rpath) + if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd(): # This argument instructs the compiler to record the value of # ORIGIN in the .dynamic section of the elf. On Linux this is done @@ -1102,7 +1179,23 @@ class Compiler: # $ORIGIN in the runtime path will be undefined and any binaries # linked against local libraries will fail to resolve them. args.append('-Wl,-z,origin') - args.append('-Wl,-rpath,' + paths) + + if mesonlib.is_osx(): + # macOS does not support colon-separated strings in LC_RPATH, + # hence we have to pass each path component individually + args += ['-Wl,-rpath,' + rp for rp in all_paths] + else: + # In order to avoid relinking for RPATH removal, the binary needs to contain just + # enough space in the ELF header to hold the final installation RPATH. + paths = ':'.join(all_paths) + if len(paths) < len(install_rpath): + padding = 'X' * (len(install_rpath) - len(paths)) + if not paths: + paths = padding + else: + paths = paths + ':' + padding + args.append('-Wl,-rpath,' + paths) + if get_compiler_is_linuxlike(self): # Rpaths to use while linking must be absolute. These are not # written to the binary. 
Needed only with GNU ld: @@ -1141,26 +1234,79 @@ class Compiler: raise EnvironmentException( 'Language {} does not support function attributes.'.format(self.get_display_language())) -GCC_STANDARD = 0 -GCC_OSX = 1 -GCC_MINGW = 2 -GCC_CYGWIN = 3 + def get_pic_args(self): + m = 'Language {} does not support position-independent code' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_args(self): + m = 'Language {} does not support position-independent executable' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_pie_link_args(self): + m = 'Language {} does not support position-independent executable' + raise EnvironmentException(m.format(self.get_display_language())) + + def get_argument_syntax(self): + """Returns the argument family type. + + Compilers fall into families if they try to emulate the command line + interface of another compiler. For example, clang is in the GCC family + since it accepts most of the same arguments as GCC. ICL (ICC on + windows) is in the MSVC family since it accepts most of the same + arguments as MSVC. + """ + return 'other' + + def get_profile_generate_args(self): + raise EnvironmentException( + '%s does not support get_profile_generate_args ' % self.get_id()) + + def get_profile_use_args(self): + raise EnvironmentException( + '%s does not support get_profile_use_args ' % self.get_id()) + + def get_undefined_link_args(self): + ''' + Get args for allowing undefined symbols when linking to a shared library + ''' + return [] + + def remove_linkerlike_args(self, args): + return [x for x in args if not x.startswith('-Wl')] + + +@enum.unique +class CompilerType(enum.Enum): + GCC_STANDARD = 0 + GCC_OSX = 1 + GCC_MINGW = 2 + GCC_CYGWIN = 3 + + CLANG_STANDARD = 10 + CLANG_OSX = 11 + CLANG_MINGW = 12 + # Possibly clang-cl? + + ICC_STANDARD = 20 + ICC_OSX = 21 + ICC_WIN = 22 -CLANG_STANDARD = 0 -CLANG_OSX = 1 -CLANG_WIN = 2 -# Possibly clang-cl? 
+ ARM_WIN = 30 -ICC_STANDARD = 0 -ICC_OSX = 1 -ICC_WIN = 2 + CCRX_WIN = 40 + + @property + def is_standard_compiler(self): + return self.name in ('GCC_STANDARD', 'CLANG_STANDARD', 'ICC_STANDARD') + + @property + def is_osx_compiler(self): + return self.name in ('GCC_OSX', 'CLANG_OSX', 'ICC_OSX') + + @property + def is_windows_compiler(self): + return self.name in ('GCC_MINGW', 'GCC_CYGWIN', 'CLANG_MINGW', 'ICC_WIN', 'ARM_WIN', 'CCRX_WIN') -# GNU ld cannot be installed on macOS -# https://github.com/Homebrew/homebrew-core/issues/17794#issuecomment-328174395 -# Hence, we don't need to differentiate between OS and ld -# for the sake of adding as-needed support -GNU_LD_AS_NEEDED = '-Wl,--as-needed' -APPLE_LD_AS_NEEDED = '-Wl,-dead_strip_dylibs' def get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion): install_name = prefix + shlib_name @@ -1169,14 +1315,14 @@ def get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion): install_name += '.dylib' return '@rpath/' + install_name -def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, soversion, darwin_versions, is_shared_module): - if gcc_type == GCC_STANDARD: +def get_gcc_soname_args(compiler_type, prefix, shlib_name, suffix, soversion, darwin_versions, is_shared_module): + if compiler_type.is_standard_compiler: sostr = '' if soversion is None else '.' 
+ soversion return ['-Wl,-soname,%s%s.%s%s' % (prefix, shlib_name, suffix, sostr)] - elif gcc_type in (GCC_MINGW, GCC_CYGWIN): + elif compiler_type.is_windows_compiler: # For PE/COFF the soname argument has no effect with GNU LD return [] - elif gcc_type == GCC_OSX: + elif compiler_type.is_osx_compiler: if is_shared_module: return [] name = get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion) @@ -1188,20 +1334,21 @@ def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, soversion, darwin_ raise RuntimeError('Not implemented yet.') def get_compiler_is_linuxlike(compiler): - if (getattr(compiler, 'gcc_type', None) == GCC_STANDARD) or \ - (getattr(compiler, 'clang_type', None) == CLANG_STANDARD) or \ - (getattr(compiler, 'icc_type', None) == ICC_STANDARD): - return True - return False + compiler_type = getattr(compiler, 'compiler_type', None) + return compiler_type and compiler_type.is_standard_compiler def get_compiler_uses_gnuld(c): # FIXME: Perhaps we should detect the linker in the environment? # FIXME: Assumes that *BSD use GNU ld, but they might start using lld soon - if (getattr(c, 'gcc_type', None) in (GCC_STANDARD, GCC_MINGW, GCC_CYGWIN)) or \ - (getattr(c, 'clang_type', None) in (CLANG_STANDARD, CLANG_WIN)) or \ - (getattr(c, 'icc_type', None) in (ICC_STANDARD, ICC_WIN)): - return True - return False + compiler_type = getattr(c, 'compiler_type', None) + return compiler_type in ( + CompilerType.GCC_STANDARD, + CompilerType.GCC_MINGW, + CompilerType.GCC_CYGWIN, + CompilerType.CLANG_STANDARD, + CompilerType.CLANG_MINGW, + CompilerType.ICC_STANDARD, + CompilerType.ICC_WIN) def get_largefile_args(compiler): ''' @@ -1260,116 +1407,179 @@ def gnulike_default_include_dirs(compiler, lang): mlog.warning('No include directory found parsing "{cmd}" output'.format(cmd=" ".join(cmd))) return paths -class GnuCompiler: - # Functionality that is common to all GNU family compilers. 
- def __init__(self, gcc_type, defines): - self.id = 'gcc' - self.gcc_type = gcc_type - self.defines = defines or {} + +class GnuLikeCompiler(abc.ABC): + """ + GnuLikeCompiler is a common interface to all compilers implementing + the GNU-style commandline interface. This includes GCC, Clang + and ICC. Certain functionality between them is different and requires + that the actual concrete subclass define their own implementation. + """ + def __init__(self, compiler_type): + self.compiler_type = compiler_type self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', - 'b_colorout', 'b_ndebug', 'b_staticpic'] - if self.gcc_type == GCC_OSX: - self.base_options.append('b_bitcode') - else: + 'b_ndebug', 'b_staticpic', 'b_pie'] + if not self.compiler_type.is_osx_compiler and not self.compiler_type.is_windows_compiler: self.base_options.append('b_lundef') - self.base_options.append('b_asneeded') - # All GCC backends can do assembly + if not self.compiler_type.is_windows_compiler: + self.base_options.append('b_asneeded') + # All GCC-like backends can do assembly self.can_compile_suffixes.add('s') - # TODO: centralise this policy more globally, instead - # of fragmenting it into GnuCompiler and ClangCompiler def get_asneeded_args(self): - if self.gcc_type == GCC_OSX: - return APPLE_LD_AS_NEEDED + # GNU ld cannot be installed on macOS + # https://github.com/Homebrew/homebrew-core/issues/17794#issuecomment-328174395 + # Hence, we don't need to differentiate between OS and ld + # for the sake of adding as-needed support + if self.compiler_type.is_osx_compiler: + return '-Wl,-dead_strip_dylibs' else: - return GNU_LD_AS_NEEDED - - def get_colorout_args(self, colortype): - if mesonlib.version_compare(self.version, '>=4.9.0'): - return gnu_color_args[colortype][:] - return [] - - def get_warn_args(self, level): - args = super().get_warn_args(level) - if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args: - # -Wpedantic was added in 4.8.0 - # 
https://gcc.gnu.org/gcc-4.8/changes.html - args[args.index('-Wpedantic')] = '-pedantic' - return args - - def has_builtin_define(self, define): - return define in self.defines - - def get_builtin_define(self, define): - if define in self.defines: - return self.defines[define] + return '-Wl,--as-needed' def get_pic_args(self): - if self.gcc_type in (GCC_CYGWIN, GCC_MINGW, GCC_OSX): + if self.compiler_type.is_osx_compiler or self.compiler_type.is_windows_compiler: return [] # On Window and OS X, pic is always on. return ['-fPIC'] + def get_pie_args(self): + return ['-fPIE'] + + def get_pie_link_args(self): + return ['-pie'] + def get_buildtype_args(self, buildtype): return gnulike_buildtype_args[buildtype] + @abc.abstractmethod def get_optimization_args(self, optimization_level): - return gnu_optimization_args[optimization_level] + raise NotImplementedError("get_optimization_args not implemented") def get_debug_args(self, is_debug): return clike_debug_args[is_debug] def get_buildtype_linker_args(self, buildtype): - if self.gcc_type == GCC_OSX: + if self.compiler_type.is_osx_compiler: return apple_buildtype_linker_args[buildtype] return gnulike_buildtype_linker_args[buildtype] + @abc.abstractmethod def get_pch_suffix(self): - return 'gch' + raise NotImplementedError("get_pch_suffix not implemented") def split_shlib_to_parts(self, fname): return os.path.dirname(fname), fname def get_soname_args(self, *args): - return get_gcc_soname_args(self.gcc_type, *args) + return get_gcc_soname_args(self.compiler_type, *args) def get_std_shared_lib_link_args(self): return ['-shared'] + def get_std_shared_module_link_args(self, options): + if self.compiler_type.is_osx_compiler: + return ['-bundle', '-Wl,-undefined,dynamic_lookup'] + return ['-shared'] + def get_link_whole_for(self, args): + if self.compiler_type.is_osx_compiler: + result = [] + for a in args: + result += ['-Wl,-force_load', a] + return result return ['-Wl,--whole-archive'] + args + ['-Wl,--no-whole-archive'] + def 
get_instruction_set_args(self, instruction_set): + return gnulike_instruction_set_args.get(instruction_set, None) + + def get_default_include_dirs(self): + return gnulike_default_include_dirs(self.exelist, self.language) + + @abc.abstractmethod + def openmp_flags(self): + raise NotImplementedError("openmp_flags not implemented") + + def gnu_symbol_visibility_args(self, vistype): + return gnu_symbol_visibility_args[vistype] + def gen_vs_module_defs_args(self, defsfile): if not isinstance(defsfile, str): raise RuntimeError('Module definitions file should be str') # On Windows targets, .def files may be specified on the linker command # line like an object file. - if self.gcc_type in (GCC_CYGWIN, GCC_MINGW): + if self.compiler_type.is_windows_compiler: return [defsfile] # For other targets, discard the .def file. return [] - def get_gui_app_args(self, value): - if self.gcc_type in (GCC_CYGWIN, GCC_MINGW) and value: - return ['-mwindows'] + def get_argument_syntax(self): + return 'gcc' + + def get_profile_generate_args(self): + return ['-fprofile-generate'] + + def get_profile_use_args(self): + return ['-fprofile-use', '-fprofile-correction'] + + def get_allow_undefined_link_args(self): + if self.compiler_type.is_osx_compiler: + # Apple ld + return ['-Wl,-undefined,dynamic_lookup'] + else: + # GNU ld and LLVM lld + return ['-Wl,--allow-shlib-undefined'] + + +class GnuCompiler(GnuLikeCompiler): + """ + GnuCompiler represents an actual GCC in its many incarnations. + Compilers imitating GCC (Clang/Intel) should use the GnuLikeCompiler ABC. 
+ """ + def __init__(self, compiler_type, defines): + super().__init__(compiler_type) + self.id = 'gcc' + self.defines = defines or {} + self.base_options.append('b_colorout') + + def get_colorout_args(self, colortype): + if mesonlib.version_compare(self.version, '>=4.9.0'): + return gnu_color_args[colortype][:] return [] - def get_instruction_set_args(self, instruction_set): - return gnulike_instruction_set_args.get(instruction_set, None) + def get_warn_args(self, level): + args = super().get_warn_args(level) + if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args: + # -Wpedantic was added in 4.8.0 + # https://gcc.gnu.org/gcc-4.8/changes.html + args[args.index('-Wpedantic')] = '-pedantic' + return args - def get_default_include_dirs(self): - return gnulike_default_include_dirs(self.exelist, self.language) + def has_builtin_define(self, define): + return define in self.defines + + def get_builtin_define(self, define): + if define in self.defines: + return self.defines[define] + + def get_optimization_args(self, optimization_level): + return gnu_optimization_args[optimization_level] + + def get_pch_suffix(self): + return 'gch' + + def get_gui_app_args(self, value): + if self.compiler_type.is_windows_compiler and value: + return ['-mwindows'] + return [] def openmp_flags(self): return ['-fopenmp'] - def gnu_symbol_visibility_args(self, vistype): - return gnu_symbol_visibility_args[vistype] class ElbrusCompiler(GnuCompiler): # Elbrus compiler is nearly like GCC, but does not support # PCH, LTO, sanitizers and color output as of version 1.21.x. 
- def __init__(self, gcc_type, defines): - GnuCompiler.__init__(self, gcc_type, defines) + def __init__(self, compiler_type, defines): + GnuCompiler.__init__(self, compiler_type, defines) self.id = 'lcc' self.base_options = ['b_pgo', 'b_coverage', 'b_ndebug', 'b_staticpic', @@ -1403,50 +1613,23 @@ class ElbrusCompiler(GnuCompiler): break return paths -class ClangCompiler: - def __init__(self, clang_type): + +class ClangCompiler(GnuLikeCompiler): + def __init__(self, compiler_type): + super().__init__(compiler_type) self.id = 'clang' - self.clang_type = clang_type - self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', - 'b_ndebug', 'b_staticpic', 'b_colorout'] - if self.clang_type == CLANG_OSX: + self.base_options.append('b_colorout') + if self.compiler_type.is_osx_compiler: self.base_options.append('b_bitcode') - else: - self.base_options.append('b_lundef') - self.base_options.append('b_asneeded') - # All Clang backends can do assembly and LLVM IR - self.can_compile_suffixes.update(['ll', 's']) - - # TODO: centralise this policy more globally, instead - # of fragmenting it into GnuCompiler and ClangCompiler - def get_asneeded_args(self): - if self.clang_type == CLANG_OSX: - return APPLE_LD_AS_NEEDED - else: - return GNU_LD_AS_NEEDED - - def get_pic_args(self): - if self.clang_type in (CLANG_WIN, CLANG_OSX): - return [] # On Window and OS X, pic is always on. 
- return ['-fPIC'] + # All Clang backends can also do LLVM IR + self.can_compile_suffixes.add('ll') def get_colorout_args(self, colortype): return clang_color_args[colortype][:] - def get_buildtype_args(self, buildtype): - return gnulike_buildtype_args[buildtype] - - def get_buildtype_linker_args(self, buildtype): - if self.clang_type == CLANG_OSX: - return apple_buildtype_linker_args[buildtype] - return gnulike_buildtype_linker_args[buildtype] - def get_optimization_args(self, optimization_level): return clike_optimization_args[optimization_level] - def get_debug_args(self, is_debug): - return clike_debug_args[is_debug] - def get_pch_suffix(self): return 'pch' @@ -1456,17 +1639,6 @@ class ClangCompiler: # so it might change semantics at any time. return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] - def get_soname_args(self, *args): - if self.clang_type == CLANG_STANDARD: - gcc_type = GCC_STANDARD - elif self.clang_type == CLANG_OSX: - gcc_type = GCC_OSX - elif self.clang_type == CLANG_WIN: - gcc_type = GCC_MINGW - else: - raise MesonException('Unreachable code when converting clang type to gcc type.') - return get_gcc_soname_args(gcc_type, *args) - def has_multi_arguments(self, args, env): myargs = ['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument'] if mesonlib.version_compare(self.version, '>=3.6.0'): @@ -1475,35 +1647,17 @@ class ClangCompiler: myargs + args, env) - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): if extra_args is None: extra_args = [] # Starting with XCode 8, we need to pass this to force linker # visibility to obey OS X and iOS minimum version targets with # -mmacosx-version-min, -miphoneos-version-min, etc. 
# https://github.com/Homebrew/homebrew-core/issues/3727 - if self.clang_type == CLANG_OSX and version_compare(self.version, '>=8.0'): + if self.compiler_type.is_osx_compiler and version_compare(self.version, '>=8.0'): extra_args.append('-Wl,-no_weak_imports') - return super().has_function(funcname, prefix, env, extra_args, dependencies) - - def get_std_shared_module_link_args(self, options): - if self.clang_type == CLANG_OSX: - return ['-bundle', '-Wl,-undefined,dynamic_lookup'] - return ['-shared'] - - def get_link_whole_for(self, args): - if self.clang_type == CLANG_OSX: - result = [] - for a in args: - result += ['-Wl,-force_load', a] - return result - return ['-Wl,--whole-archive'] + args + ['-Wl,--no-whole-archive'] - - def get_instruction_set_args(self, instruction_set): - return gnulike_instruction_set_args.get(instruction_set, None) - - def get_default_include_dirs(self): - return gnulike_default_include_dirs(self.exelist, self.language) + return super().has_function(funcname, prefix, env, extra_args=extra_args, + dependencies=dependencies) def openmp_flags(self): if version_compare(self.version, '>=3.8.0'): @@ -1514,11 +1668,9 @@ class ClangCompiler: # Shouldn't work, but it'll be checked explicitly in the OpenMP dependency. 
return [] - def gnu_symbol_visibility_args(self, vistype): - return gnu_symbol_visibility_args[vistype] class ArmclangCompiler: - def __init__(self): + def __init__(self, compiler_type): if not self.is_cross: raise EnvironmentException('armclang supports only cross-compilation.') # Check whether 'armlink.exe' is available in path @@ -1544,6 +1696,7 @@ class ArmclangCompiler: if not version_compare(self.version, '==' + linker_ver): raise EnvironmentException('armlink version does not match with compiler version') self.id = 'armclang' + self.compiler_type = compiler_type self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_colorout'] # Assembly @@ -1590,26 +1743,44 @@ class ArmclangCompiler: def get_linker_exelist(self): return [self.linker_exe] + def get_optimization_args(self, optimization_level): + return armclang_optimization_args[optimization_level] -# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1 -class IntelCompiler: - def __init__(self, icc_type): - self.id = 'intel' - self.icc_type = icc_type - self.lang_header = 'none' - self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', - 'b_colorout', 'b_ndebug', 'b_staticpic', 'b_lundef', 'b_asneeded'] - # Assembly - self.can_compile_suffixes.add('s') + def get_debug_args(self, is_debug): + return clike_debug_args[is_debug] - def get_pic_args(self): - return ['-fPIC'] + def gen_export_dynamic_link_args(self, env): + """ + The args for export dynamic + """ + return ['--export_dynamic'] - def get_buildtype_args(self, buildtype): - return gnulike_buildtype_args[buildtype] + def gen_import_library_args(self, implibname): + """ + The args of the outputted import library - def get_buildtype_linker_args(self, buildtype): - return gnulike_buildtype_linker_args[buildtype] + ArmLinker's symdefs output can be used as implib + """ + return ['--symdefs=' + implibname] + + +# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1, 19.0.0 +class 
IntelCompiler(GnuLikeCompiler): + + def __init__(self, compiler_type): + super().__init__(compiler_type) + # As of 19.0.0 ICC doesn't have sanitizer, color, or lto support. + # + # It does have IPO, which serves much the same purpose as LOT, but + # there is an unfortunate rule for using IPO (you can't control the + # name of the output file) which break assumptions meson makes + self.base_options = ['b_pch', 'b_lundef', 'b_asneeded', 'b_pgo', + 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_pie'] + self.id = 'intel' + self.lang_header = 'none' + + def get_optimization_args(self, optimization_level): + return clike_optimization_args[optimization_level] def get_pch_suffix(self): return 'pchi' @@ -1621,53 +1792,38 @@ class IntelCompiler: def get_pch_name(self, header_name): return os.path.basename(header_name) + '.' + self.get_pch_suffix() - def split_shlib_to_parts(self, fname): - return os.path.dirname(fname), fname - - def get_soname_args(self, *args): - if self.icc_type == ICC_STANDARD: - gcc_type = GCC_STANDARD - elif self.icc_type == ICC_OSX: - gcc_type = GCC_OSX - elif self.icc_type == ICC_WIN: - gcc_type = GCC_MINGW - else: - raise MesonException('Unreachable code when converting icc type to gcc type.') - return get_gcc_soname_args(gcc_type, *args) - - # TODO: centralise this policy more globally, instead - # of fragmenting it into GnuCompiler and ClangCompiler - def get_asneeded_args(self): - if self.icc_type == CLANG_OSX: - return APPLE_LD_AS_NEEDED - else: - return GNU_LD_AS_NEEDED - - def get_std_shared_lib_link_args(self): - # FIXME: Don't know how icc works on OSX - # if self.icc_type == ICC_OSX: - # return ['-bundle'] - return ['-shared'] - - def get_default_include_dirs(self): - return gnulike_default_include_dirs(self.exelist, self.language) - def openmp_flags(self): if version_compare(self.version, '>=15.0.0'): return ['-qopenmp'] else: return ['-openmp'] - def get_link_whole_for(self, args): - return GnuCompiler.get_link_whole_for(self, args) + def 
compiles(self, *args, **kwargs): + # This covers a case that .get('foo', []) doesn't, that extra_args is + # defined and is None + extra_args = kwargs.get('extra_args') or [] + kwargs['extra_args'] = [ + extra_args, + '-diag-error', '10006', # ignoring unknown option + '-diag-error', '10148', # Option not supported + '-diag-error', '1292', # unknown __attribute__ + ] + return super().compiles(*args, **kwargs) + + def get_profile_generate_args(self): + return ['-prof-gen=threadsafe'] + + def get_profile_use_args(self): + return ['-prof-use'] class ArmCompiler: # Functionality that is common to all ARM family compilers. - def __init__(self): + def __init__(self, compiler_type): if not self.is_cross: raise EnvironmentException('armcc supports only cross-compilation.') self.id = 'arm' + self.compiler_type = compiler_type default_warn_args = [] self.warn_args = {'1': default_warn_args, '2': default_warn_args + [], @@ -1730,3 +1886,103 @@ class ArmCompiler: def get_coverage_link_args(self): return [] + + def get_optimization_args(self, optimization_level): + return arm_optimization_args[optimization_level] + + def get_debug_args(self, is_debug): + return clike_debug_args[is_debug] + +class CcrxCompiler: + def __init__(self, compiler_type): + if not self.is_cross: + raise EnvironmentException('ccrx supports only cross-compilation.') + # Check whether 'rlink.exe' is available in path + self.linker_exe = 'rlink.exe' + args = '--version' + try: + p, stdo, stderr = Popen_safe(self.linker_exe, args) + except OSError as e: + err_msg = 'Unknown linker\nRunning "{0}" gave \n"{1}"'.format(' '.join([self.linker_exe] + [args]), e) + raise EnvironmentException(err_msg) + self.id = 'ccrx' + self.compiler_type = compiler_type + # Assembly + self.can_compile_suffixes.update('s') + default_warn_args = [] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + []} + + def can_linker_accept_rsp(self): + return False + + def get_pic_args(self): 
+ # PIC support is not enabled by default for CCRX, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_buildtype_args(self, buildtype): + return ccrx_buildtype_args[buildtype] + + def get_buildtype_linker_args(self, buildtype): + return ccrx_buildtype_linker_args[buildtype] + + # Override CCompiler.get_std_shared_lib_link_args + def get_std_shared_lib_link_args(self): + return [] + + def get_pch_suffix(self): + return 'pch' + + def get_pch_use_args(self, pch_dir, header): + return [] + + # Override CCompiler.get_dependency_gen_args + def get_dependency_gen_args(self, outtarget, outfile): + return [] + + # Override CCompiler.build_rpath_args + def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): + return [] + + def thread_flags(self, env): + return [] + + def thread_link_flags(self, env): + return [] + + def get_linker_exelist(self): + return [self.linker_exe] + + def get_linker_lib_prefix(self): + return '-lib=' + + def get_coverage_args(self): + return [] + + def get_coverage_link_args(self): + return [] + + def get_optimization_args(self, optimization_level): + return ccrx_optimization_args[optimization_level] + + def get_debug_args(self, is_debug): + return ccrx_debug_args[is_debug] + + @classmethod + def unix_args_to_native(cls, args): + result = [] + for i in args: + if i.startswith('-D'): + i = '-define=' + i[2:] + if i.startswith('-I'): + i = '-include=' + i[2:] + if i.startswith('-Wl,-rpath='): + continue + elif i == '--print-search-dirs': + continue + elif i.startswith('-L'): + continue + result.append(i) + return result diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index 2173655..87371c0 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -19,9 +19,8 @@ from .. import coredata from .. 
import mlog from ..mesonlib import MesonException, version_compare -from .c import CCompiler, VisualStudioCCompiler +from .c import CCompiler, VisualStudioCCompiler, ClangClCCompiler from .compilers import ( - GCC_MINGW, gnu_winlibs, msvc_winlibs, ClangCompiler, @@ -30,6 +29,7 @@ from .compilers import ( IntelCompiler, ArmCompiler, ArmclangCompiler, + CcrxCompiler, ) from .c_function_attributes import CXX_FUNC_ATTRIBUTES @@ -61,9 +61,11 @@ class CPPCompiler(CCompiler): # too strict without this and always fails. return super().get_compiler_check_args() + ['-fpermissive'] - def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None): + def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): # Check if it's a C-like symbol - if super().has_header_symbol(hname, symbol, prefix, env, extra_args, dependencies): + if super().has_header_symbol(hname, symbol, prefix, env, + extra_args=extra_args, + dependencies=dependencies): return True # Check if it's a class or a template if extra_args is None: @@ -73,7 +75,8 @@ class CPPCompiler(CCompiler): #include <{header}> using {symbol}; int main () {{ return 0; }}''' - return self.compiles(t.format(**fargs), env, extra_args, dependencies) + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) def _test_cpp_std_arg(self, cpp_std_value): # Test whether the compiler understands a -std=XY argument @@ -126,9 +129,9 @@ class CPPCompiler(CCompiler): class ClangCPPCompiler(ClangCompiler, CPPCompiler): - def __init__(self, exelist, version, cltype, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ClangCompiler.__init__(self, cltype) + ClangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = 
{'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -157,9 +160,9 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler): class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): - def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - ArmclangCompiler.__init__(self) + ArmclangCompiler.__init__(self, compiler_type) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -185,9 +188,9 @@ class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): class GnuCPPCompiler(GnuCompiler, CPPCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrap, defines, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap, defines, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) - GnuCompiler.__init__(self, gcc_type, defines) + GnuCompiler.__init__(self, compiler_type, defines) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -202,7 +205,7 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): 'cpp_debugstl': coredata.UserBooleanOption('cpp_debugstl', 'STL debug mode', False)}) - if self.gcc_type == GCC_MINGW: + if self.compiler_type.is_windows_compiler: opts.update({ 'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Standard Win libraries to link against', gnu_winlibs), }) @@ -218,7 +221,7 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): return args def get_option_link_args(self, options): - if self.gcc_type == GCC_MINGW: + if self.compiler_type.is_windows_compiler: return options['cpp_winlibs'].value[:] return [] @@ -230,9 +233,9 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): class 
ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None, **kwargs): - GnuCPPCompiler.__init__(self, exelist, version, gcc_type, is_cross, exe_wrapper, defines, **kwargs) - ElbrusCompiler.__init__(self, gcc_type, defines) + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs): + GnuCPPCompiler.__init__(self, exelist, version, compiler_type, is_cross, exe_wrapper, defines, **kwargs) + ElbrusCompiler.__init__(self, compiler_type, defines) # It does not support c++/gnu++ 17 and 1z, but still does support 0x, 1y, and gnu++98. def get_options(self): @@ -245,28 +248,33 @@ class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler): # Elbrus C++ compiler does not have lchmod, but there is only linker warning, not compiler error. # So we should explicitly fail at this case. - def has_function(self, funcname, prefix, env, extra_args=None, dependencies=None): + def has_function(self, funcname, prefix, env, *, extra_args=None, dependencies=None): if funcname == 'lchmod': return False else: - return super().has_function(funcname, prefix, env, extra_args, dependencies) + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) class IntelCPPCompiler(IntelCompiler, CPPCompiler): - def __init__(self, exelist, version, icc_type, is_cross, exe_wrap, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) - IntelCompiler.__init__(self, icc_type) + IntelCompiler.__init__(self, compiler_type) self.lang_header = 'c++-header' default_warn_args = ['-Wall', '-w3', '-diag-disable:remark', '-Wpch-messages', '-Wnon-virtual-dtor'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], - '3': default_warn_args + ['-Wextra', '-Wpedantic']} + '3': default_warn_args + ['-Wextra']} 
def get_options(self): opts = CPPCompiler.get_options(self) - c_stds = [] - g_stds = ['gnu++98'] + # Every Unix compiler under the sun seems to accept -std=c++03, + # with the exception of ICC. Instead of preventing the user from + # globally requesting C++03, we transparently remap it to C++98 + c_stds = ['c++98', 'c++03'] + g_stds = ['gnu++98', 'gnu++03'] if version_compare(self.version, '>=15.0.0'): c_stds += ['c++11', 'c++14'] g_stds += ['gnu++11'] @@ -286,7 +294,11 @@ class IntelCPPCompiler(IntelCompiler, CPPCompiler): args = [] std = options['cpp_std'] if std.value != 'none': - args.append('-std=' + std.value) + remap_cpp03 = { + 'c++03': 'c++98', + 'gnu++03': 'gnu++98' + } + args.append('-std=' + remap_cpp03.get(std.value, std.value)) if options['cpp_debugstl'].value: args.append('-D_GLIBCXX_DEBUG=1') return args @@ -302,11 +314,26 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like def get_options(self): + cpp_stds = ['none', 'c++11', 'vc++11'] + if self.id == 'clang-cl': + cpp_stds.extend(['c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest']) + else: + # Visual Studio 2015 and later + if version_compare(self.version, '>=19'): + cpp_stds.extend(['c++14', 'vc++14', 'c++latest', 'vc++latest']) + # Visual Studio 2017 and later + if version_compare(self.version, '>=19.11'): + cpp_stds.extend(['c++17', 'vc++17']) + opts = CPPCompiler.get_options(self) opts.update({'cpp_eh': coredata.UserComboOption('cpp_eh', 'C++ exception handling type.', ['none', 'a', 's', 'sc'], 'sc'), + 'cpp_std': coredata.UserComboOption('cpp_std', + 'C++ language standard to use', + cpp_stds, + 'none'), 'cpp_winlibs': coredata.UserArrayOption('cpp_winlibs', 'Windows libs to link against.', msvc_winlibs)}) @@ -314,9 +341,41 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): def get_option_compile_args(self, options): args = [] - std = options['cpp_eh'] - if std.value != 'none': - 
args.append('/EH' + std.value) + + eh = options['cpp_eh'] + if eh.value != 'none': + args.append('/EH' + eh.value) + + vc_version_map = { + 'none': (True, None), + 'vc++11': (True, 11), + 'vc++14': (True, 14), + 'vc++17': (True, 17), + 'c++11': (False, 11), + 'c++14': (False, 14), + 'c++17': (False, 17)} + + permissive, ver = vc_version_map[options['cpp_std'].value] + + if ver is None: + pass + elif ver == 11: + # Note: there is no explicit flag for supporting C++11; we attempt to do the best we can + # which means setting the C++ standard version to C++14, in compilers that support it + # (i.e., after VS2015U3) + # if one is using anything before that point, one cannot set the standard. + if self.id == 'clang-cl' or version_compare(self.version, '>=19.00.24210'): + mlog.warning('MSVC does not support C++11; ' + 'attempting best effort; setting the standard to C++14') + args.append('/std:c++14') + else: + mlog.warning('This version of MSVC does not support cpp_std arguments') + else: + args.append('/std:c++{}'.format(ver)) + + if not permissive and version_compare(self.version, '>=19.11'): + args.append('/permissive-') + return args def get_option_link_args(self, options): @@ -327,11 +386,15 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): # so just use the plain C args. 
return VisualStudioCCompiler.get_compiler_check_args(self) +class ClangClCPPCompiler(VisualStudioCPPCompiler, ClangClCCompiler): + def __init__(self, exelist, version, is_cross, exe_wrap, is_64): + VisualStudioCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, is_64) + self.id = 'clang-cl' class ArmCPPCompiler(ArmCompiler, CPPCompiler): - def __init__(self, exelist, version, is_cross, exe_wrap=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap=None, **kwargs): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) - ArmCompiler.__init__(self) + ArmCompiler.__init__(self, compiler_type) def get_options(self): opts = CPPCompiler.get_options(self) @@ -354,3 +417,31 @@ class ArmCPPCompiler(ArmCompiler, CPPCompiler): def get_compiler_check_args(self): return [] + + +class CcrxCPPCompiler(CcrxCompiler, CPPCompiler): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap=None, **kwargs): + CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs) + CcrxCompiler.__init__(self, compiler_type) + + # Override CCompiler.get_always_args + def get_always_args(self): + return ['-nologo', '-lang=cpp'] + + def get_option_compile_args(self, options): + return [] + + def get_compile_only_args(self): + return [] + + def get_output_args(self, target): + return ['-output=obj=%s' % target] + + def get_linker_output_args(self, outputname): + return ['-output=%s' % outputname] + + def get_option_link_args(self, options): + return [] + + def get_compiler_check_args(self): + return [] diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index ee9e0c2..2cf0fbd 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -17,8 +17,7 @@ import os.path, subprocess from ..mesonlib import EnvironmentException, version_compare, is_windows, is_osx from .compilers import ( - GCC_STANDARD, - GCC_OSX, + CompilerType, d_dmd_buildtype_args, d_gdc_buildtype_args, 
d_ldc_buildtype_args, @@ -31,14 +30,17 @@ from .compilers import ( ) d_feature_args = {'gcc': {'unittest': '-funittest', + 'debug': '-fdebug', 'version': '-fversion', 'import_dir': '-J' }, 'llvm': {'unittest': '-unittest', + 'debug': '-d-debug', 'version': '-d-version', 'import_dir': '-J' }, 'dmd': {'unittest': '-unittest', + 'debug': '-debug', 'version': '-version', 'import_dir': '-J' } @@ -69,12 +71,12 @@ class DCompiler(Compiler): 'mtd': ['-mscrtlib=libcmtd'], } - def __init__(self, exelist, version, is_cross, is_64, **kwargs): + def __init__(self, exelist, version, is_cross, arch, **kwargs): self.language = 'd' super().__init__(exelist, version, **kwargs) self.id = 'unknown' self.is_cross = is_cross - self.is_64 = is_64 + self.arch = arch def sanity_check(self, work_dir, environment): source_name = os.path.join(work_dir, 'sanity.d') @@ -152,12 +154,12 @@ class DCompiler(Compiler): if is_windows(): return [] elif is_osx(): - soname_args = get_gcc_soname_args(GCC_OSX, *args) + soname_args = get_gcc_soname_args(CompilerType.GCC_OSX, *args) if soname_args: return ['-Wl,' + ','.join(soname_args)] return [] - return get_gcc_soname_args(GCC_STANDARD, *args) + return get_gcc_soname_args(CompilerType.GCC_STANDARD, *args) def get_feature_args(self, kwargs, build_to_src): res = [] @@ -169,16 +171,53 @@ class DCompiler(Compiler): if unittest: res.append(unittest_arg) + if 'debug' in kwargs: + debug_level = -1 + debugs = kwargs.pop('debug') + if not isinstance(debugs, list): + debugs = [debugs] + + debug_arg = d_feature_args[self.id]['debug'] + if not debug_arg: + raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' 
% self.name_string()) + + # Parse all debug identifiers and the largest debug level identifier + for d in debugs: + if isinstance(d, int): + if d > debug_level: + debug_level = d + elif isinstance(d, str) and d.isdigit(): + if int(d) > debug_level: + debug_level = int(d) + else: + res.append('{0}={1}'.format(debug_arg, d)) + + if debug_level >= 0: + res.append('{0}={1}'.format(debug_arg, debug_level)) + if 'versions' in kwargs: + version_level = -1 versions = kwargs.pop('versions') if not isinstance(versions, list): versions = [versions] version_arg = d_feature_args[self.id]['version'] if not version_arg: - raise EnvironmentException('D compiler %s does not support the "feature versions" feature.' % self.name_string()) + raise EnvironmentException('D compiler %s does not support conditional version identifiers.' % self.name_string()) + + # Parse all version identifiers and the largest version level identifier for v in versions: - res.append('{0}={1}'.format(version_arg, v)) + if isinstance(v, int): + if v > version_level: + version_level = v + elif isinstance(v, str) and v.isdigit(): + if int(v) > version_level: + version_level = int(v) + else: + res.append('{0}={1}'.format(version_arg, v)) + + if version_level >= 0: + res.append('{0}={1}'.format(version_arg, version_level)) if 'import_dirs' in kwargs: import_dirs = kwargs.pop('import_dirs') @@ -235,6 +274,8 @@ class DCompiler(Compiler): return ['-Wl,-rpath,{}'.format(paths)] def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): + if callable(extra_args): + extra_args = extra_args(mode) if extra_args is None: extra_args = [] elif isinstance(extra_args, str): @@ -262,7 +303,7 @@ class DCompiler(Compiler): args += extra_args return args - def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'): + def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'): args = self._get_compiler_check_args(env, extra_args, dependencies, mode) with 
self.compile(code, args, mode) as p: @@ -275,7 +316,7 @@ class DCompiler(Compiler): # LDC2 on Windows targets to current OS architecture, but # it should follow the target specified by the MSVC toolchain. if is_windows(): - if self.is_64: + if self.arch == 'x86_64': return ['-m64'] return ['-m32'] return [] @@ -307,7 +348,7 @@ class DCompiler(Compiler): for la in linkargs: dcargs.append('-L=' + la.strip()) continue - elif arg.startswith('-link-defaultlib') or arg.startswith('-linker'): + elif arg.startswith(('-link-defaultlib', '-linker', '-link-internally', '-linkonce-templates', '-lib')): # these are special arguments to the LDC linker call, # arguments like "-link-defaultlib-shared" do *not* # denote a library to be linked, but change the default @@ -340,8 +381,12 @@ class DCompiler(Compiler): dcargs.append('-L=' + arg) continue - - dcargs.append(arg) + elif not arg.startswith('-') and arg.endswith(('.a', '.lib')): + # ensure static libraries are passed through to the linker + dcargs.append('-L=' + arg) + continue + else: + dcargs.append(arg) return dcargs @@ -379,7 +424,11 @@ class DCompiler(Compiler): return args def get_debug_args(self, is_debug): - return clike_debug_args[is_debug] + ddebug_args = [] + if is_debug: + ddebug_args = [d_feature_args[self.id]['debug']] + + return clike_debug_args[is_debug] + ddebug_args def get_crt_args(self, crt_val, buildtype): if not is_windows(): @@ -410,9 +459,12 @@ class DCompiler(Compiler): def get_crt_link_args(self, crt_val, buildtype): return [] + def thread_link_flags(self, env): + return ['-pthread'] + class GnuDCompiler(DCompiler): - def __init__(self, exelist, version, is_cross, is_64, **kwargs): - DCompiler.__init__(self, exelist, version, is_cross, is_64, **kwargs) + def __init__(self, exelist, version, is_cross, arch, **kwargs): + DCompiler.__init__(self, exelist, version, is_cross, arch, **kwargs) self.id = 'gcc' default_warn_args = ['-Wall', '-Wdeprecated'] self.warn_args = {'1': default_warn_args, @@ -466,8 
+518,8 @@ class GnuDCompiler(DCompiler): return gnu_optimization_args[optimization_level] class LLVMDCompiler(DCompiler): - def __init__(self, exelist, version, is_cross, is_64, **kwargs): - DCompiler.__init__(self, exelist, version, is_cross, is_64, **kwargs) + def __init__(self, exelist, version, is_cross, arch, **kwargs): + DCompiler.__init__(self, exelist, version, is_cross, arch, **kwargs) self.id = 'llvm' self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt'] @@ -502,11 +554,10 @@ class LLVMDCompiler(DCompiler): class DmdDCompiler(DCompiler): - def __init__(self, exelist, version, is_cross, is_64, **kwargs): - DCompiler.__init__(self, exelist, version, is_cross, is_64, **kwargs) + def __init__(self, exelist, version, is_cross, arch, **kwargs): + DCompiler.__init__(self, exelist, version, is_cross, arch, **kwargs) self.id = 'dmd' self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt'] - self.is_msvc = 'VCINSTALLDIR' in os.environ def get_colorout_args(self, colortype): if colortype == 'always': @@ -522,9 +573,9 @@ class DmdDCompiler(DCompiler): if is_windows(): # DMD links against D runtime only when main symbol is found, # so these needs to be inserted when linking static D libraries. - if self.is_64: + if self.arch == 'x86_64': return ['phobos64.lib'] - elif self.is_msvc: + elif self.arch == 'x86_mscoff': return ['phobos32mscoff.lib'] return ['phobos.lib'] return [] @@ -537,9 +588,9 @@ class DmdDCompiler(DCompiler): # Force the target to 64-bit in order to stay consistent # across the different platforms. 
if is_windows(): - if self.is_64: + if self.arch == 'x86_64': return ['-m64'] - elif self.is_msvc: + elif self.arch == 'x86_mscoff': return ['-m32mscoff'] return ['-m32'] return [] diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index 3c7c2f9..75db26d 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -14,7 +14,7 @@ from .c import CCompiler from .compilers import ( - ICC_STANDARD, + CompilerType, apple_buildtype_linker_args, gnulike_buildtype_args, gnulike_buildtype_linker_args, @@ -144,9 +144,6 @@ end program prog def get_compiler_check_args(self): return CCompiler.get_compiler_check_args(self) - def get_allow_undefined_link_args(self): - return CCompiler.get_allow_undefined_link_args(self) - def get_output_args(self, target): return CCompiler.get_output_args(self, target) @@ -172,7 +169,7 @@ end program prog return ('-I', ) def get_module_outdir_args(self, path): - return ['-module' + path] + return ['-module', path] def module_name_to_filename(self, module_name): return module_name.lower() + '.mod' @@ -210,16 +207,18 @@ end program prog def _get_compiler_check_args(self, env, extra_args, dependencies, mode='compile'): return CCompiler._get_compiler_check_args(self, env, extra_args, dependencies, mode='compile') - def compiles(self, code, env, extra_args=None, dependencies=None, mode='compile'): - return CCompiler.compiles(self, code, env, extra_args, dependencies, mode) + def compiles(self, code, env, *, extra_args=None, dependencies=None, mode='compile'): + return CCompiler.compiles(self, code, env, extra_args=extra_args, + dependencies=dependencies, mode=mode) def _build_wrapper(self, code, env, extra_args, dependencies=None, mode='compile', want_output=False): return CCompiler._build_wrapper(self, code, env, extra_args, dependencies, mode, want_output) - def links(self, code, env, extra_args=None, dependencies=None): - return CCompiler.links(self, code, env, extra_args, dependencies) + def 
links(self, code, env, *, extra_args=None, dependencies=None): + return CCompiler.links(self, code, env, extra_args=extra_args, + dependencies=dependencies) - def run(self, code, env, extra_args=None, dependencies=None): + def run(self, code, env, *, extra_args=None, dependencies=None): return CCompiler.run(self, code, env, extra_args, dependencies) def _get_patterns(self, *args, **kwargs): @@ -257,9 +256,9 @@ end program prog class GnuFortranCompiler(GnuCompiler, FortranCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None, **kwargs): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs): FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs) - GnuCompiler.__init__(self, gcc_type, defines) + GnuCompiler.__init__(self, compiler_type, defines) default_warn_args = ['-Wall'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], @@ -279,9 +278,9 @@ class GnuFortranCompiler(GnuCompiler, FortranCompiler): class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None, **kwargs): - GnuFortranCompiler.__init__(self, exelist, version, gcc_type, is_cross, exe_wrapper, defines, **kwargs) - ElbrusCompiler.__init__(self, gcc_type, defines) + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs): + GnuFortranCompiler.__init__(self, exelist, version, compiler_type, is_cross, exe_wrapper, defines, **kwargs) + ElbrusCompiler.__init__(self, compiler_type, defines) class G95FortranCompiler(FortranCompiler): def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags): @@ -330,7 +329,7 @@ class IntelFortranCompiler(IntelCompiler, FortranCompiler): FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags) # FIXME: Add support for OS X and Windows in 
detect_fortran_compiler so # we are sent the type of compiler - IntelCompiler.__init__(self, ICC_STANDARD) + IntelCompiler.__init__(self, CompilerType.ICC_STANDARD) self.id = 'intel' default_warn_args = ['-warn', 'general', '-warn', 'truncated_source'] self.warn_args = {'1': default_warn_args, @@ -340,6 +339,15 @@ class IntelFortranCompiler(IntelCompiler, FortranCompiler): def get_preprocess_only_args(self): return ['-cpp', '-EP'] + def get_always_args(self): + """Ifort doesn't have -pipe.""" + val = super().get_always_args() + val.remove('-pipe') + return val + + def language_stdlib_only_link_flags(self): + return ['-lifcore', '-limf'] + class PathScaleFortranCompiler(FortranCompiler): def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags): diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py index 388e83b..5b2b517 100644 --- a/mesonbuild/compilers/objc.py +++ b/mesonbuild/compilers/objc.py @@ -51,17 +51,21 @@ class ObjCCompiler(CCompiler): class GnuObjCCompiler(GnuCompiler, ObjCCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None): ObjCCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) - GnuCompiler.__init__(self, gcc_type, defines) + GnuCompiler.__init__(self, compiler_type, defines) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} -class ClangObjCCompiler(ClangCompiler, GnuObjCCompiler): - def __init__(self, exelist, version, cltype, is_cross, exe_wrapper=None): - GnuObjCCompiler.__init__(self, exelist, version, cltype, is_cross, exe_wrapper) - ClangCompiler.__init__(self, cltype) +class ClangObjCCompiler(ClangCompiler, ObjCCompiler): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None): + 
ObjCCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) + ClangCompiler.__init__(self, compiler_type) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage'] diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py index c2e4647..e1b7a7d 100644 --- a/mesonbuild/compilers/objcpp.py +++ b/mesonbuild/compilers/objcpp.py @@ -52,17 +52,21 @@ class ObjCPPCompiler(CPPCompiler): class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler): - def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None): ObjCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) - GnuCompiler.__init__(self, gcc_type, defines) + GnuCompiler.__init__(self, compiler_type, defines) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'1': default_warn_args, '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} -class ClangObjCPPCompiler(ClangCompiler, GnuObjCPPCompiler): - def __init__(self, exelist, version, cltype, is_cross, exe_wrapper=None): - GnuObjCPPCompiler.__init__(self, exelist, version, cltype, is_cross, exe_wrapper) - ClangCompiler.__init__(self, cltype) +class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler): + def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None): + ObjCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) + ClangCompiler.__init__(self, compiler_type) + default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} self.base_options = ['b_pch', 'b_lto', 
'b_pgo', 'b_sanitize', 'b_coverage'] diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 4da5a6d..ae37576 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -22,8 +22,9 @@ from .mesonlib import default_libdir, default_libexecdir, default_prefix from .wrap import WrapMode import ast import argparse +import configparser -version = '0.47.999' +version = '0.48.999' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'xcode'] default_yielding = False @@ -182,6 +183,7 @@ class UserArrayOption(UserOption): ', '.join(bad), ', '.join(self.choices))) return newvalue + class UserFeatureOption(UserComboOption): static_choices = ['enabled', 'disabled', 'auto'] @@ -197,6 +199,72 @@ class UserFeatureOption(UserComboOption): def is_auto(self): return self.value == 'auto' + +def load_configs(filenames): + """Load native files.""" + def gen(): + for f in filenames: + f = os.path.expanduser(os.path.expandvars(f)) + if os.path.exists(f): + yield f + continue + elif sys.platform != 'win32': + f = os.path.basename(f) + paths = [ + os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')), + ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':') + for path in paths: + path_to_try = os.path.join(path, 'meson', 'native', f) + if os.path.isfile(path_to_try): + yield path_to_try + break + else: + raise MesonException('Cannot find specified native file: ' + f) + continue + + raise MesonException('Cannot find specified native file: ' + f) + + config = configparser.SafeConfigParser() + config.read(gen()) + return config + + +def _get_section(config, section): + if config.has_section(section): + final = {} + for k, v in config.items(section): + # Windows paths... 
+ v = v.replace('\\', '\\\\') + try: + final[k] = ast.literal_eval(v) + except SyntaxError: + raise MesonException( + 'Malformed value in native file variable: {}'.format(v)) + return final + return {} + + +class ConfigData: + + """Contains configuration information provided by the user for the build.""" + + def __init__(self, config=None): + if config: + self.binaries = _get_section(config, 'binaries') + # global is a keyword and globals is a builtin, rather than mangle it, + # use a similar word + self.universal = _get_section(config, 'globals') + self.subprojects = {s: _get_section(config, s) for s in config.sections() + if s not in {'binaries', 'globals'}} + else: + self.binaries = {} + self.universal = {} + self.subprojects = {} + + def get_binaries(self, name): + return self.binaries.get(name, None) + + # This class contains all data that must persist over multiple # invocations of Meson. It is roughly the same thing as # cmakecache. @@ -223,12 +291,20 @@ class CoreData: self.base_options = {} self.external_preprocess_args = {} # CPPFLAGS only self.cross_file = self.__load_cross_file(options.cross_file) - self.wrap_mode = options.wrap_mode if options.wrap_mode is not None else WrapMode.default self.compilers = OrderedDict() self.cross_compilers = OrderedDict() self.deps = OrderedDict() # Only to print a warning if it changes between Meson invocations. 
self.pkgconf_envvar = os.environ.get('PKG_CONFIG_PATH', '') + self.config_files = self.__load_config_files(options.native_file) + + @staticmethod + def __load_config_files(filenames): + if not filenames: + return [] + filenames = [os.path.abspath(os.path.expanduser(os.path.expanduser(f))) + for f in filenames] + return filenames @staticmethod def __load_cross_file(filename): @@ -338,7 +414,10 @@ class CoreData: def get_builtin_option(self, optname): if optname in self.builtins: - return self.builtins[optname].value + v = self.builtins[optname] + if optname == 'wrap_mode': + return WrapMode.from_string(v.value) + return v.value raise RuntimeError('Tried to get unknown builtin option %s.' % optname) def set_builtin_option(self, optname, value): @@ -457,6 +536,51 @@ class CoreData: sub = 'In subproject {}: '.format(subproject) if subproject else '' mlog.warning('{}Unknown options: "{}"'.format(sub, unknown_options)) +class CmdLineFileParser(configparser.ConfigParser): + def __init__(self): + # We don't want ':' as key delimiter, otherwise it would break when + # storing subproject options like "subproject:option=value" + super().__init__(delimiters=['=']) + +def get_cmd_line_file(build_dir): + return os.path.join(build_dir, 'meson-private', 'cmd_line.txt') + +def read_cmd_line_file(build_dir, options): + filename = get_cmd_line_file(build_dir) + config = CmdLineFileParser() + config.read(filename) + + # Do a copy because config is not really a dict. options.cmd_line_options + # overrides values from the file. 
+ d = dict(config['options']) + d.update(options.cmd_line_options) + options.cmd_line_options = d + + properties = config['properties'] + if options.cross_file is None: + options.cross_file = properties.get('cross_file', None) + +def write_cmd_line_file(build_dir, options): + filename = get_cmd_line_file(build_dir) + config = CmdLineFileParser() + + properties = {} + if options.cross_file is not None: + properties['cross_file'] = options.cross_file + + config['options'] = options.cmd_line_options + config['properties'] = properties + with open(filename, 'w') as f: + config.write(f) + +def update_cmd_line_file(build_dir, options): + filename = get_cmd_line_file(build_dir) + config = CmdLineFileParser() + config.read(filename) + config['options'].update(options.cmd_line_options) + with open(filename, 'w') as f: + config.write(f) + def load(build_dir): filename = os.path.join(build_dir, 'meson-private', 'coredata.dat') load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename) @@ -468,7 +592,8 @@ def load(build_dir): if not isinstance(obj, CoreData): raise MesonException(load_fail_msg) if obj.version != version: - raise MesonException('Build directory has been generated with Meson version %s, which is incompatible with current version %s.\nPlease delete this build directory AND create a new one.' 
% + raise MesonException('Build directory has been generated with Meson version %s, ' + 'which is incompatible with current version %s.\n' % (obj.version, version)) return obj @@ -590,33 +715,37 @@ def parse_cmd_line_options(args): delattr(args, name) builtin_options = { - 'buildtype': [UserComboOption, 'Build type to use.', ['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'], 'debug'], - 'strip': [UserBooleanOption, 'Strip targets on install.', False], - 'unity': [UserComboOption, 'Unity build.', ['on', 'off', 'subprojects'], 'off'], - 'prefix': [UserStringOption, 'Installation prefix.', default_prefix()], - 'libdir': [UserStringOption, 'Library directory.', default_libdir()], - 'libexecdir': [UserStringOption, 'Library executable directory.', default_libexecdir()], - 'bindir': [UserStringOption, 'Executable directory.', 'bin'], - 'sbindir': [UserStringOption, 'System executable directory.', 'sbin'], - 'includedir': [UserStringOption, 'Header file directory.', 'include'], - 'datadir': [UserStringOption, 'Data file directory.', 'share'], - 'mandir': [UserStringOption, 'Manual page directory.', 'share/man'], - 'infodir': [UserStringOption, 'Info page directory.', 'share/info'], - 'localedir': [UserStringOption, 'Locale data directory.', 'share/locale'], - 'sysconfdir': [UserStringOption, 'Sysconf data directory.', 'etc'], - 'localstatedir': [UserStringOption, 'Localstate data directory.', 'var'], - 'sharedstatedir': [UserStringOption, 'Architecture-independent data directory.', 'com'], - 'werror': [UserBooleanOption, 'Treat warnings as errors.', False], - 'warning_level': [UserComboOption, 'Compiler warning level to use.', ['1', '2', '3'], '1'], - 'layout': [UserComboOption, 'Build directory layout.', ['mirror', 'flat'], 'mirror'], - 'default_library': [UserComboOption, 'Default library type.', ['shared', 'static', 'both'], 'shared'], - 'backend': [UserComboOption, 'Backend to use.', backendlist, 'ninja'], - 'stdsplit': [UserBooleanOption, 'Split 
stdout and stderr in test logs.', True], - 'errorlogs': [UserBooleanOption, "Whether to print the logs from failing tests.", True], - 'install_umask': [UserUmaskOption, 'Default umask to apply on permissions of installed files.', '022'], - 'auto_features': [UserFeatureOption, "Override value of all 'auto' features.", 'auto'], + 'buildtype': [UserComboOption, 'Build type to use', ['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'], 'debug'], + 'strip': [UserBooleanOption, 'Strip targets on install', False], + 'unity': [UserComboOption, 'Unity build', ['on', 'off', 'subprojects'], 'off'], + 'prefix': [UserStringOption, 'Installation prefix', default_prefix()], + 'libdir': [UserStringOption, 'Library directory', default_libdir()], + 'libexecdir': [UserStringOption, 'Library executable directory', default_libexecdir()], + 'bindir': [UserStringOption, 'Executable directory', 'bin'], + 'sbindir': [UserStringOption, 'System executable directory', 'sbin'], + 'includedir': [UserStringOption, 'Header file directory', 'include'], + 'datadir': [UserStringOption, 'Data file directory', 'share'], + 'mandir': [UserStringOption, 'Manual page directory', 'share/man'], + 'infodir': [UserStringOption, 'Info page directory', 'share/info'], + 'localedir': [UserStringOption, 'Locale data directory', 'share/locale'], + 'sysconfdir': [UserStringOption, 'Sysconf data directory', 'etc'], + 'localstatedir': [UserStringOption, 'Localstate data directory', 'var'], + 'sharedstatedir': [UserStringOption, 'Architecture-independent data directory', 'com'], + 'werror': [UserBooleanOption, 'Treat warnings as errors', False], + 'warning_level': [UserComboOption, 'Compiler warning level to use', ['1', '2', '3'], '1'], + 'layout': [UserComboOption, 'Build directory layout', ['mirror', 'flat'], 'mirror'], + 'default_library': [UserComboOption, 'Default library type', ['shared', 'static', 'both'], 'shared'], + 'backend': [UserComboOption, 'Backend to use', backendlist, 'ninja'], + 
'stdsplit': [UserBooleanOption, 'Split stdout and stderr in test logs', True], + 'errorlogs': [UserBooleanOption, "Whether to print the logs from failing tests", True], + 'install_umask': [UserUmaskOption, 'Default umask to apply on permissions of installed files', '022'], + 'auto_features': [UserFeatureOption, "Override value of all 'auto' features", 'auto'], 'optimization': [UserComboOption, 'Optimization level', ['0', 'g', '1', '2', '3', 's'], '0'], - 'debug': [UserBooleanOption, 'Debug', True] + 'debug': [UserBooleanOption, 'Debug', True], + 'wrap_mode': [UserComboOption, 'Wrap mode', ['default', + 'nofallback', + 'nodownload', + 'forcefallback'], 'default'], } # Special prefix-dependent defaults for installation directories that reside in diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py index 00b6fa2..afe2a3b 100644 --- a/mesonbuild/dependencies/__init__.py +++ b/mesonbuild/dependencies/__init__.py @@ -16,9 +16,9 @@ from .boost import BoostDependency from .base import ( # noqa: F401 Dependency, DependencyException, DependencyMethods, ExternalProgram, EmptyExternalProgram, NonExistingExternalProgram, ExternalDependency, NotFoundDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency, - PkgConfigDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language) + PkgConfigDependency, CMakeDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language) from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency -from .misc import (MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency) +from .misc import (MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency) from .platform import AppleFrameworks from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, 
SDL2Dependency, WxDependency, VulkanDependency @@ -39,6 +39,7 @@ packages.update({ 'pcap': PcapDependency, 'cups': CupsDependency, 'libwmf': LibWmfDependency, + 'libgcrypt': LibGCryptDependency, # From platform: 'appleframeworks': AppleFrameworks, diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index a34ebf7..9855b20 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -25,6 +25,8 @@ import shlex import shutil import textwrap import platform +import itertools +import ctypes from enum import Enum from pathlib import PurePath @@ -48,6 +50,7 @@ class DependencyMethods(Enum): AUTO = 'auto' PKGCONFIG = 'pkg-config' QMAKE = 'qmake' + CMAKE = 'cmake' # Just specify the standard link arguments, assuming the operating system provides the library. SYSTEM = 'system' # This is only supported on OSX - search the frameworks directory by name. @@ -174,7 +177,7 @@ class Dependency: parent (if any) and the requested values of any dependencies will be added as well. """ - RuntimeError('Unreachable code in partial_dependency called') + raise RuntimeError('Unreachable code in partial_dependency called') class InternalDependency(Dependency): @@ -399,6 +402,8 @@ class ConfigToolDependency(ExternalDependency): 'Falling back to searching PATH. This may find a ' 'native version of {0}!'.format(self.tool_name)) tools = self.tools + elif self.tool_name in self.env.config_info.binaries: + tools = [self.env.config_info.binaries[self.tool_name]] else: tools = self.tools @@ -448,6 +453,9 @@ class ConfigToolDependency(ExternalDependency): def get_config_value(self, args, stage): p, out, err = Popen_safe([self.config] + args) + # This is required to keep shlex from stripping path separators on + # Windows. Also, don't put escape sequences in config values, okay? 
+ out = out.replace('\\', '\\\\') if p.returncode != 0: if self.required: raise DependencyException( @@ -497,10 +505,10 @@ class PkgConfigDependency(ExternalDependency): if self.required: raise DependencyException('Pkg-config binary missing from cross file') else: - potential_pkgbin = ExternalProgram.from_cross_info(environment.cross_info, 'pkgconfig') + potential_pkgbin = ExternalProgram.from_bin_list( + environment.cross_info.config['binaries'], 'pkgconfig') if potential_pkgbin.found(): self.pkgbin = potential_pkgbin - PkgConfigDependency.class_pkgbin = self.pkgbin else: mlog.debug('Cross pkg-config %s not found.' % potential_pkgbin.name) # Only search for the native pkg-config the first time and @@ -843,6 +851,613 @@ class PkgConfigDependency(ExternalDependency): def log_tried(self): return self.type_name +class CMakeTraceLine: + def __init__(self, file, line, func, args): + self.file = file + self.line = line + self.func = func.lower() + self.args = args + + def __repr__(self): + s = 'CMake TRACE: {0}:{1} {2}({3})' + return s.format(self.file, self.line, self.func, self.args) + +class CMakeTarget: + def __init__(self, name, type, properies = {}): + self.name = name + self.type = type + self.properies = properies + + def __repr__(self): + s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- properies: {{\n{} }}' + propSTR = '' + for i in self.properies: + propSTR += " '{}': {}\n".format(i, self.properies[i]) + return s.format(self.name, self.type, propSTR) + +class CMakeDependency(ExternalDependency): + # The class's copy of the CMake path. Avoids having to search for it + # multiple times in the same Meson invocation. 
+ class_cmakebin = None + class_cmakevers = None + # We cache all pkg-config subprocess invocations to avoid redundant calls + cmake_cache = {} + # Version string for the minimum CMake version + class_cmake_version = '>=3.4' + # CMake generators to try (empty for no generator) + class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010'] + + def _gen_exception(self, msg): + return DependencyException('Dependency {} not found: {}'.format(self.name, msg)) + + def __init__(self, name, environment, kwargs, language=None): + super().__init__('cmake', environment, language, kwargs) + self.name = name + self.is_libtool = False + # Store a copy of the CMake path on the object itself so it is + # stored in the pickled coredata and recovered. + self.cmakebin = None + self.cmakevers = None + + # Dict of CMake variables: '<var_name>': ['list', 'of', 'values'] + self.vars = {} + + # Dict of CMakeTarget + self.targets = {} + + # Where all CMake "build dirs" are located + self.cmake_root_dir = environment.scratch_dir + + # When finding dependencies for cross-compiling, we don't care about + # the 'native' CMake binary + # TODO: Test if this works as expected + if self.want_cross: + if 'cmake' not in environment.cross_info.config['binaries']: + if self.required: + raise self._gen_exception('CMake binary missing from cross file') + else: + potential_cmake = ExternalProgram.from_cross_info(environment.cross_info, 'cmake') + if potential_cmake.found(): + self.cmakebin = potential_cmake + CMakeDependency.class_cmakebin = self.cmakebin + else: + mlog.debug('Cross CMake %s not found.' 
% potential_cmake.name) + # Only search for the native CMake the first time and + # store the result in the class definition + elif CMakeDependency.class_cmakebin is None: + self.cmakebin, self.cmakevers = self.check_cmake() + CMakeDependency.class_cmakebin = self.cmakebin + CMakeDependency.class_cmakevers = self.cmakevers + else: + self.cmakebin = CMakeDependency.class_cmakebin + self.cmakevers = CMakeDependency.class_cmakevers + + if not self.cmakebin: + if self.required: + raise self._gen_exception('CMake not found.') + return + + modules = kwargs.get('modules', []) + if not isinstance(modules, list): + modules = [modules] + self._detect_dep(name, modules) + + def __repr__(self): + s = '<{0} {1}: {2} {3}>' + return s.format(self.__class__.__name__, self.name, self.is_found, + self.version_reqs) + + def _detect_dep(self, name, modules): + # Detect a dependency with CMake using the '--find-package' mode + # and the trace output (stderr) + # + # When the trace output is enabled CMake prints all functions with + # parameters to stderr as they are executed. Since CMake 3.4.0 + # variables ("${VAR}") are also replaced in the trace output. 
+ mlog.debug('\nDetermining dependency {!r} with CMake executable ' + '{!r}'.format(name, self.cmakebin.get_path())) + + # Try different CMake generators since specifying no generator may fail + # in cygwin for some reason + for i in CMakeDependency.class_cmake_generators: + mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto')) + + # Prepare options + cmake_opts = ['--trace-expand', '-DNAME={}'.format(name), '.'] + if len(i) > 0: + cmake_opts = ['-G', i] + cmake_opts + + # Run CMake + ret1, out1, err1 = self._call_cmake(cmake_opts) + + # Current generator was successful + if ret1 == 0: + break + + mlog.debug('CMake failed for generator {} and package {} with error code {}'.format(i, name, ret1)) + mlog.debug('OUT:\n{}\n\n\nERR:\n{}\n\n'.format(out1, err1)) + + # Check if any generator succeeded + if ret1 != 0: + return + + try: + # First parse the trace + lexer1 = self._lex_trace(err1) + + # All supported functions + functions = { + 'set': self._cmake_set, + 'unset': self._cmake_unset, + 'add_executable': self._cmake_add_executable, + 'add_library': self._cmake_add_library, + 'add_custom_target': self._cmake_add_custom_target, + 'set_property': self._cmake_set_property, + 'set_target_properties': self._cmake_set_target_properties + } + + # Primary pass -- parse everything + for l in lexer1: + # "Execute" the CMake function if supported + fn = functions.get(l.func, None) + if(fn): + fn(l) + + except DependencyException as e: + if self.required: + raise + else: + self.compile_args = [] + self.link_args = [] + self.is_found = False + self.reason = e + return + + # Whether the package is found or not is always stored in PACKAGE_FOUND + self.is_found = self._var_to_bool('PACKAGE_FOUND') + if not self.is_found: + return + + # Try to detect the version + vers_raw = self.get_first_cmake_var_of(['PACKAGE_VERSION']) + + if len(vers_raw) > 0: + self.version = vers_raw[0] + self.version.strip('"\' ') + + # Try guessing a CMake target if none is provided + 
if len(modules) == 0: + for i in self.targets: + tg = i.lower() + lname = name.lower() + if '{}::{}'.format(lname, lname) == tg or lname == tg.replace('::', ''): + mlog.debug('Guessed CMake target \'{}\''.format(i)) + modules = [i] + break + + # Failed to guess a target --> try the old-style method + if len(modules) == 0: + incDirs = self.get_first_cmake_var_of(['PACKAGE_INCLUDE_DIRS']) + libs = self.get_first_cmake_var_of(['PACKAGE_LIBRARIES']) + + # Try to use old style variables if no module is specified + if len(libs) > 0: + self.compile_args = list(map(lambda x: '-I{}'.format(x), incDirs)) + self.link_args = libs + mlog.debug('using old-style CMake variables for dependency {}'.format(name)) + return + + # Even the old-style approach failed. Nothing else we can do here + self.is_found = False + raise self._gen_exception('CMake: failed to guess a CMake target for {}.\n' + 'Try to explicitly specify one or more targets with the "modules" property.\n' + 'Valid targets are:\n{}'.format(name, list(self.targets.keys()))) + + # Set dependencies with CMake targets + processed_targets = [] + incDirs = [] + compileDefinitions = [] + compileOptions = [] + libraries = [] + for i in modules: + if i not in self.targets: + raise self._gen_exception('CMake: invalid CMake target {} for {}.\n' + 'Try to explicitly specify one or more targets with the "modules" property.\n' + 'Valid targets are:\n{}'.format(i, name, list(self.targets.keys()))) + + targets = [i] + while len(targets) > 0: + curr = targets.pop(0) + + # Skip already processed targets + if curr in processed_targets: + continue + + tgt = self.targets[curr] + cfgs = [] + cfg = '' + otherDeps = [] + mlog.debug(tgt) + + if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properies: + incDirs += tgt.properies['INTERFACE_INCLUDE_DIRECTORIES'] + + if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properies: + tempDefs = list(tgt.properies['INTERFACE_COMPILE_DEFINITIONS']) + tempDefs = list(map(lambda x: '-D{}'.format(re.sub('^-D', '', x)), 
tempDefs)) + compileDefinitions += tempDefs + + if 'INTERFACE_COMPILE_OPTIONS' in tgt.properies: + compileOptions += tgt.properies['INTERFACE_COMPILE_OPTIONS'] + + if 'IMPORTED_CONFIGURATIONS' in tgt.properies: + cfgs = tgt.properies['IMPORTED_CONFIGURATIONS'] + cfg = cfgs[0] + + if 'RELEASE' in cfgs: + cfg = 'RELEASE' + + if 'IMPORTED_LOCATION_{}'.format(cfg) in tgt.properies: + libraries += tgt.properies['IMPORTED_LOCATION_{}'.format(cfg)] + elif 'IMPORTED_LOCATION' in tgt.properies: + libraries += tgt.properies['IMPORTED_LOCATION'] + + if 'INTERFACE_LINK_LIBRARIES' in tgt.properies: + otherDeps += tgt.properies['INTERFACE_LINK_LIBRARIES'] + + if 'IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg) in tgt.properies: + otherDeps += tgt.properies['IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg)] + elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properies: + otherDeps += tgt.properies['IMPORTED_LINK_DEPENDENT_LIBRARIES'] + + for j in otherDeps: + if j in self.targets: + targets += [j] + + processed_targets += [curr] + + # Make sure all elements in the lists are unique and sorted + incDirs = list(sorted(list(set(incDirs)))) + compileDefinitions = list(sorted(list(set(compileDefinitions)))) + compileOptions = list(sorted(list(set(compileOptions)))) + libraries = list(sorted(list(set(libraries)))) + + mlog.debug('Include Dirs: {}'.format(incDirs)) + mlog.debug('Compiler Definitions: {}'.format(compileDefinitions)) + mlog.debug('Compiler Options: {}'.format(compileOptions)) + mlog.debug('Libraries: {}'.format(libraries)) + + self.compile_args = compileOptions + compileDefinitions + list(map(lambda x: '-I{}'.format(x), incDirs)) + self.link_args = libraries + + def get_first_cmake_var_of(self, var_list): + # Return the first found CMake variable in list var_list + for i in var_list: + if i in self.vars: + return self.vars[i] + + return [] + + def get_cmake_var(self, var): + # Return the value of the CMake variable var or an empty list if var does not exist + for var in 
self.vars: + return self.vars[var] + + return [] + + def _var_to_bool(self, var): + if var not in self.vars: + return False + + if len(self.vars[var]) < 1: + return False + + if self.vars[var][0].upper() in ['1', 'ON', 'TRUE']: + return True + return False + + def _cmake_set(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/set.html + + # 1st remove PARENT_SCOPE and CACHE from args + args = [] + for i in tline.args: + if i == 'PARENT_SCOPE' or len(i) == 0: + continue + + # Discard everything after the CACHE keyword + if i == 'CACHE': + break + + args.append(i) + + if len(args) < 1: + raise self._gen_exception('CMake: set() requires at least one argument\n{}'.format(tline)) + + if len(args) == 1: + # Same as unset + if args[0] in self.vars: + del self.vars[args[0]] + else: + values = list(itertools.chain(*map(lambda x: x.split(';'), args[1:]))) + self.vars[args[0]] = values + + def _cmake_unset(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/unset.html + if len(tline.args) < 1: + raise self._gen_exception('CMake: unset() requires at least one argument\n{}'.format(tline)) + + if tline.args[0] in self.vars: + del self.vars[tline.args[0]] + + def _cmake_add_executable(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/add_executable.html + args = list(tline.args) # Make a working copy + + # Make sure the exe is imported + if 'IMPORTED' not in args: + raise self._gen_exception('CMake: add_executable() non imported executables are not supported\n{}'.format(tline)) + + args.remove('IMPORTED') + + if len(args) < 1: + raise self._gen_exception('CMake: add_executable() requires at least 1 argument\n{}'.format(tline)) + + self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}) + + def _cmake_add_library(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/add_library.html + args = list(tline.args) # Make a working copy + + # Make sure the 
lib is imported + if 'IMPORTED' not in args: + raise self._gen_exception('CMake: add_library() non imported libraries are not supported\n{}'.format(tline)) + + args.remove('IMPORTED') + + # No only look at the first two arguments (target_name and target_type) and ignore the rest + if len(args) < 2: + raise self._gen_exception('CMake: add_library() requires at least 2 arguments\n{}'.format(tline)) + + self.targets[args[0]] = CMakeTarget(args[0], args[1], {}) + + def _cmake_add_custom_target(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/add_custom_target.html + # We only the first parameter (the target name) is interesting + if len(tline.args) < 1: + raise self._gen_exception('CMake: add_custom_target() requires at least one argument\n{}'.format(tline)) + + self.targets[tline.args[0]] = CMakeTarget(tline.args[0], 'CUSTOM', {}) + + def _cmake_set_property(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/set_property.html + args = list(tline.args) + + # We only care for TARGET properties + if args.pop(0) != 'TARGET': + return + + append = False + targets = [] + while len(args) > 0: + curr = args.pop(0) + if curr == 'APPEND' or curr == 'APPEND_STRING': + append = True + continue + + if curr == 'PROPERTY': + break + + targets.append(curr) + + if len(args) == 1: + # Tries to set property to nothing so nothing has to be done + return + + if len(args) < 2: + raise self._gen_exception('CMake: set_property() faild to parse argument list\n{}'.format(tline)) + + propName = args[0] + propVal = list(itertools.chain(*map(lambda x: x.split(';'), args[1:]))) + propVal = list(filter(lambda x: len(x) > 0, propVal)) + + if len(propVal) == 0: + return + + for i in targets: + if i not in self.targets: + raise self._gen_exception('CMake: set_property() TARGET {} not found\n{}'.format(i, tline)) + + if propName not in self.targets[i].properies: + self.targets[i].properies[propName] = [] + + if append: + 
self.targets[i].properies[propName] += propVal + else: + self.targets[i].properies[propName] = propVal + + def _cmake_set_target_properties(self, tline: CMakeTraceLine): + # DOC: https://cmake.org/cmake/help/latest/command/set_target_properties.html + args = list(tline.args) + + targets = [] + while len(args) > 0: + curr = args.pop(0) + if curr == 'PROPERTIES': + break + + targets.append(curr) + + if (len(args) % 2) != 0: + raise self._gen_exception('CMake: set_target_properties() uneven number of property arguments\n{}'.format(tline)) + + while len(args) > 0: + propName = args.pop(0) + propVal = args.pop(0).split(';') + propVal = list(filter(lambda x: len(x) > 0, propVal)) + + if len(propVal) == 0: + continue + + for i in targets: + if i not in self.targets: + raise self._gen_exception('CMake: set_target_properties() TARGET {} not found\n{}'.format(i, tline)) + + self.targets[i].properies[propName] = propVal + + def _lex_trace(self, trace): + # The trace format is: '<file>(<line>): <func>(<args -- can contain \n> )\n' + reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) 
?\)\s*\n', re.MULTILINE) + reg_other = re.compile(r'[^\n]*\n') + reg_genexp = re.compile(r'\$<.*>') + loc = 0 + while loc < len(trace): + mo_file_line = reg_tline.match(trace, loc) + if not mo_file_line: + skip_match = reg_other.match(trace, loc) + if not skip_match: + print(trace[loc:]) + raise self._gen_exception('Failed to parse CMake trace') + + loc = skip_match.end() + continue + + loc = mo_file_line.end() + + file = mo_file_line.group(1) + line = mo_file_line.group(3) + func = mo_file_line.group(4) + args = mo_file_line.group(5).split(' ') + args = list(map(lambda x: x.strip(), args)) + args = list(map(lambda x: reg_genexp.sub('', x), args)) # Remove generator expressions + + yield CMakeTraceLine(file, line, func, args) + + def _reset_cmake_cache(self, build_dir): + with open('{}/CMakeCache.txt'.format(build_dir), 'w') as fp: + fp.write('CMAKE_PLATFORM_INFO_INITIALIZED:INTERNAL=1\n') + + def _setup_compiler(self, build_dir): + comp_dir = '{}/CMakeFiles/{}'.format(build_dir, self.cmakevers) + os.makedirs(comp_dir, exist_ok=True) + + c_comp = '{}/CMakeCCompiler.cmake'.format(comp_dir) + cxx_comp = '{}/CMakeCXXCompiler.cmake'.format(comp_dir) + + if not os.path.exists(c_comp): + with open(c_comp, 'w') as fp: + fp.write('''# Fake CMake file to skip the boring and slow stuff +set(CMAKE_C_COMPILER "{}") # Just give CMake a valid full path to any file +set(CMAKE_C_COMPILER_ID "GNU") # Pretend we have found GCC +set(CMAKE_COMPILER_IS_GNUCC 1) +set(CMAKE_C_COMPILER_LOADED 1) +set(CMAKE_C_COMPILER_WORKS TRUE) +set(CMAKE_C_ABI_COMPILED TRUE) +set(CMAKE_SIZEOF_VOID_P "{}") +'''.format(os.path.realpath(__file__), ctypes.sizeof(ctypes.c_voidp))) + + if not os.path.exists(cxx_comp): + with open(cxx_comp, 'w') as fp: + fp.write('''# Fake CMake file to skip the boring and slow stuff +set(CMAKE_CXX_COMPILER "{}") # Just give CMake a valid full path to any file +set(CMAKE_CXX_COMPILER_ID "GNU") # Pretend we have found GCC +set(CMAKE_COMPILER_IS_GNUCXX 1) 
+set(CMAKE_CXX_COMPILER_LOADED 1) +set(CMAKE_CXX_COMPILER_WORKS TRUE) +set(CMAKE_CXX_ABI_COMPILED TRUE) +set(CMAKE_SIZEOF_VOID_P "{}") +'''.format(os.path.realpath(__file__), ctypes.sizeof(ctypes.c_voidp))) + + def _setup_cmake_dir(self): + # Setup the CMake build environment and return the "build" directory + build_dir = '{}/cmake_{}'.format(self.cmake_root_dir, self.name) + os.makedirs(build_dir, exist_ok=True) + + # Copy the CMakeLists.txt + cmake_lists = '{}/CMakeLists.txt'.format(build_dir) + if not os.path.exists(cmake_lists): + dir_path = os.path.dirname(os.path.realpath(__file__)) + src_cmake = '{}/data/CMakeLists.txt'.format(dir_path) + shutil.copyfile(src_cmake, cmake_lists) + + self._setup_compiler(build_dir) + self._reset_cmake_cache(build_dir) + return build_dir + + def _call_cmake_real(self, args, env): + build_dir = self._setup_cmake_dir() + cmd = self.cmakebin.get_command() + args + p, out, err = Popen_safe(cmd, env=env, cwd=build_dir) + rc = p.returncode + call = ' '.join(cmd) + mlog.debug("Called `{}` in {} -> {}".format(call, build_dir, rc)) + + return rc, out, err + + def _call_cmake(self, args, env=None): + if env is None: + fenv = env + env = os.environ + else: + fenv = frozenset(env.items()) + targs = tuple(args) + + # First check if cached, if not call the real cmake function + cache = CMakeDependency.cmake_cache + if (self.cmakebin, targs, fenv) not in cache: + cache[(self.cmakebin, targs, fenv)] = self._call_cmake_real(args, env) + return cache[(self.cmakebin, targs, fenv)] + + @staticmethod + def get_methods(): + return [DependencyMethods.CMAKE] + + def check_cmake(self): + evar = 'CMAKE' + if evar in os.environ: + cmakebin = os.environ[evar].strip() + else: + cmakebin = 'cmake' + cmakebin = ExternalProgram(cmakebin, silent=True) + cmvers = None + invalid_version = False + if cmakebin.found(): + try: + p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2] + if p.returncode != 0: + mlog.warning('Found CMake {!r} but couldn\'t 
run it' + ''.format(' '.join(cmakebin.get_command()))) + # Set to False instead of None to signify that we've already + # searched for it and not found it + cmakebin = False + except (FileNotFoundError, PermissionError): + cmakebin = False + + cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip() + if not version_compare(cmvers, CMakeDependency.class_cmake_version): + invalid_version = True + else: + cmakebin = False + if not self.silent: + if cmakebin and invalid_version: + mlog.log('Found CMake:', mlog.red('NO'), '(version of', mlog.bold(cmakebin.get_path()), + 'is', mlog.bold(cmvers), 'but version', mlog.bold(CMakeDependency.class_cmake_version), + 'is required)') + elif cmakebin: + mlog.log('Found CMake:', mlog.bold(cmakebin.get_path()), + '(%s)' % cmvers) + else: + mlog.log('Found CMake:', mlog.red('NO')) + + if invalid_version: + cmakebin = False + cmvers = None + + return cmakebin, cmvers + + def log_tried(self): + return self.type_name + class DubDependency(ExternalDependency): class_dubbin = None @@ -870,11 +1485,8 @@ class DubDependency(ExternalDependency): mlog.debug('Determining dependency {!r} with DUB executable ' '{!r}'.format(name, self.dubbin.get_path())) - # we need to know the correct architecture on Windows - if self.compiler.is_64: - arch = 'x86_64' - else: - arch = 'x86' + # we need to know the target architecture + arch = self.compiler.arch # Ask dub for the package ret, res = self._call_dubbin(['describe', name, '--arch=' + arch]) @@ -885,8 +1497,8 @@ class DubDependency(ExternalDependency): comp = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc') packages = [] - j = json.loads(res) - for package in j['packages']: + description = json.loads(res) + for package in description['packages']: packages.append(package['name']) if package['name'] == name: self.is_found = True @@ -897,14 +1509,24 @@ class DubDependency(ExternalDependency): not_lib = False if not_lib: - mlog.error(mlog.bold(name), 'found but it 
isn\'t a library') + mlog.error(mlog.bold(name), "found but it isn't a library") self.is_found = False return - self.module_path = self._find_right_lib_path(package['path'], comp, j, True, package['targetFileName']) - + self.module_path = self._find_right_lib_path(package['path'], comp, description, True, package['targetFileName']) if not os.path.exists(self.module_path): - mlog.error(mlog.bold(name), 'found but it wasn\'t compiled with', mlog.bold(comp)) + # check if the dependency was built for other archs + archs = [['x86_64'], ['x86'], ['x86', 'x86_mscoff']] + for a in archs: + description_a = copy.deepcopy(description) + description_a['architecture'] = a + arch_module_path = self._find_right_lib_path(package['path'], comp, description_a, True, package['targetFileName']) + if arch_module_path: + mlog.error(mlog.bold(name), "found but it wasn't compiled for", mlog.bold(arch)) + self.is_found = False + return + + mlog.error(mlog.bold(name), "found but it wasn't compiled with", mlog.bold(comp)) self.is_found = False return @@ -943,26 +1565,29 @@ class DubDependency(ExternalDependency): for arg in pkgdep.get_link_args(raw=True): self.raw_link_args.append(arg) - for target in j['targets']: + for target in description['targets']: if target['rootPackage'] in packages: add_lib_args('libs', target) add_lib_args('libs-{}'.format(platform.machine()), target) for file in target['buildSettings']['linkerFiles']: - self.link_args.append(self._find_right_lib_path(file, comp, j)) + lib_path = self._find_right_lib_path(file, comp, description) + if lib_path: + self.link_args.append(lib_path) + else: + self.is_found = False def get_compiler(self): return self.compiler - def _find_right_lib_path(self, default_path, comp, j, folder_only=False, file_name=''): - path = '' - - module_build_path = lib_file_name = '' + def _find_right_lib_path(self, default_path, comp, description, folder_only=False, file_name=''): + module_path = lib_file_name = '' if folder_only: - module_build_path = 
default_path + module_path = default_path lib_file_name = file_name else: - module_build_path = os.path.dirname(default_path) + module_path = os.path.dirname(default_path) lib_file_name = os.path.basename(default_path) + module_build_path = os.path.join(module_path, '.dub', 'build') # Get D version implemented in the compiler # gdc doesn't support this @@ -970,7 +1595,6 @@ class DubDependency(ExternalDependency): if ret != 0: mlog.error('Failed to run {!r}', mlog.bold(comp)) - self.is_found = False return d_ver = re.search('v[0-9].[0-9][0-9][0-9].[0-9]', res) # Ex.: v2.081.2 @@ -979,19 +1603,21 @@ class DubDependency(ExternalDependency): else: d_ver = '' # gdc + if not os.path.isdir(module_build_path): + return '' + # Ex.: library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA - build_name = 'library-{}-{}-{}-{}_{}'.format(j['buildType'], '.'.join(j['platform']), j['architecture'][0], comp, d_ver) - for entry in os.listdir(os.path.join(module_build_path, '.dub', 'build')): + build_name = 'library-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver) + for entry in os.listdir(module_build_path): if entry.startswith(build_name): - for file in os.listdir(os.path.join(module_build_path, '.dub', 'build', entry)): + for file in os.listdir(os.path.join(module_build_path, entry)): if file == lib_file_name: if folder_only: - path = os.path.join(module_build_path, '.dub', 'build', entry) + return os.path.join(module_build_path, entry) else: - path = os.path.join(module_build_path, '.dub', 'build', entry, lib_file_name) - break + return os.path.join(module_build_path, entry, lib_file_name) - return path + return '' def _call_dubbin(self, args, env=None): p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2] @@ -1063,10 +1689,10 @@ class ExternalProgram: return ' '.join(self.command) @staticmethod - def from_cross_info(cross_info, name): - if name not in 
cross_info.config['binaries']: + def from_bin_list(bins, name): + if name not in bins: return NonExistingExternalProgram() - command = cross_info.config['binaries'][name] + command = bins[name] if not isinstance(command, (list, str)): raise MesonException('Invalid type {!r} for binary {!r} in cross file' ''.format(command, name)) @@ -1225,8 +1851,8 @@ class ExternalProgram: class NonExistingExternalProgram(ExternalProgram): "A program that will never exist" - def __init__(self): - self.name = 'nonexistingprogram' + def __init__(self, name='nonexistingprogram'): + self.name = name self.command = [None] self.path = None @@ -1308,6 +1934,11 @@ class ExtraFrameworkDependency(ExternalDependency): self.link_args = ['-F' + self.path, '-framework', self.name.split('.')[0]] def detect(self, name, path): + # should use the compiler to look for frameworks, rather than peering at + # the filesystem, so we can also find them when cross-compiling + if self.want_cross: + return + lname = name.lower() if path is None: paths = ['/System/Library/Frameworks', '/Library/Frameworks'] @@ -1445,7 +2076,8 @@ def find_external_dependency(name, env, kwargs): # we have a list of failed ExternalDependency objects, so we can report # the methods we tried to find the dependency - raise DependencyException('Dependency "%s" not found, tried %s' % (name, tried)) + raise DependencyException('Dependency "%s" not found' % (name) + + (', tried %s' % (tried) if tried else '')) # return the last failed dependency object if pkgdep: @@ -1456,6 +2088,10 @@ def find_external_dependency(name, env, kwargs): def _build_external_dependency_list(name, env, kwargs): + # First check if the method is valid + if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]: + raise DependencyException('method {!r} is invalid'.format(kwargs['method'])) + # Is there a specific dependency detector for this dependency? 
lname = name.lower() if lname in packages: @@ -1474,15 +2110,26 @@ def _build_external_dependency_list(name, env, kwargs): if 'dub' == kwargs.get('method', ''): candidates.append(functools.partial(DubDependency, name, env, kwargs)) return candidates - # TBD: other values of method should control what method(s) are used - # Otherwise, just use the pkgconfig dependency detector - candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs)) + # If it's explicitly requested, use the pkgconfig detection method (only) + if 'pkg-config' == kwargs.get('method', ''): + candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs)) + return candidates + + # If it's explicitly requested, use the CMake detection method (only) + if 'cmake' == kwargs.get('method', ''): + candidates.append(functools.partial(CMakeDependency, name, env, kwargs)) + return candidates - # On OSX, also try framework dependency detector - if mesonlib.is_osx(): - candidates.append(functools.partial(ExtraFrameworkDependency, name, - False, None, env, None, kwargs)) + # Otherwise, just use the pkgconfig and cmake dependency detector + if 'auto' == kwargs.get('method', 'auto'): + candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs)) + candidates.append(functools.partial(CMakeDependency, name, env, kwargs)) + + # On OSX, also try framework dependency detector + if mesonlib.is_osx(): + candidates.append(functools.partial(ExtraFrameworkDependency, name, + False, None, env, None, kwargs)) return candidates diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index b06f62d..6a8050d 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -288,7 +288,7 @@ class BoostDependency(ExternalDependency): tag = None compiler = self.env.detect_cpp_compiler(self.want_cross) if mesonlib.for_windows(self.want_cross, self.env): - if compiler.get_id() == 'msvc': + if compiler.get_id() in ['msvc', 'clang-cl']: 
comp_ts_version = compiler.get_toolset_version() compiler_ts = comp_ts_version.split('.') # FIXME - what about other compilers? @@ -320,7 +320,7 @@ class BoostDependency(ExternalDependency): def arch_tag(self): # currently only applies to windows msvc installed binaries - if self.env.detect_cpp_compiler(self.want_cross).get_id() != 'msvc': + if self.env.detect_cpp_compiler(self.want_cross).get_id() not in ['msvc', 'clang-cl']: return '' # pre-compiled binaries only added arch tag for versions > 1.64 if float(self.version) < 1.65: @@ -443,7 +443,7 @@ class BoostDependency(ExternalDependency): if self.libdir: libdirs = [self.libdir] elif self.boost_root is None: - libdirs = mesonlib.get_library_dirs(self.env) + libdirs = mesonlib.get_library_dirs() else: libdirs = [os.path.join(self.boost_root, 'lib')] for libdir in libdirs: diff --git a/mesonbuild/dependencies/data/CMakeLists.txt b/mesonbuild/dependencies/data/CMakeLists.txt new file mode 100644 index 0000000..144ffda --- /dev/null +++ b/mesonbuild/dependencies/data/CMakeLists.txt @@ -0,0 +1,93 @@ +cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} ) + +# Inspired by CMakeDetermineCompilerABI.cmake to set CMAKE_LIBRARY_ARCHITECTURE +if(CMAKE_LIBRARY_ARCHITECTURE_REGEX) + if(NOT DEFINED CMAKE_LIBRARY_ARCHITECTURE) + file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* ) + foreach(dir ${implicit_dirs}) + if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}") + set(CMAKE_LIBRARY_ARCHITECTURE "${dir}") + break() + endif() + endforeach() + endif() +endif() + +find_package("${NAME}" QUIET) + +set(PACKAGE_FOUND FALSE) +set(_packageName "${NAME}") +string(TOUPPER "${_packageName}" PACKAGE_NAME) + +if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND) + set(PACKAGE_FOUND TRUE) + + # Check the following variables: + # FOO_VERSION + # Foo_VERSION + # FOO_VERSION_STRING + # Foo_VERSION_STRING + if(NOT DEFINED PACKAGE_VERSION) + if(DEFINED ${_packageName}_VERSION) + 
set(PACKAGE_VERSION "${${_packageName}_VERSION}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}") + elseif(DEFINED ${_packageName}_VERSION_STRING) + set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}") + endif() + endif() + + # Check the following variables: + # FOO_LIBRARIES + # Foo_LIBRARIES + # FOO_LIBS + # Foo_LIBS + set(libs) + if(DEFINED ${_packageName}_LIBRARIES) + set(libs ${_packageName}_LIBRARIES) + elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES) + set(libs ${PACKAGE_NAME}_LIBRARIES) + elseif(DEFINED ${_packageName}_LIBS) + set(libs ${_packageName}_LIBS) + elseif(DEFINED ${PACKAGE_NAME}_LIBS) + set(libs ${PACKAGE_NAME}_LIBS) + endif() + + # Check the following variables: + # FOO_INCLUDE_DIRS + # Foo_INCLUDE_DIRS + # FOO_INCLUDES + # Foo_INCLUDES + # FOO_INCLUDE_DIR + # Foo_INCLUDE_DIR + set(includes) + if(DEFINED ${_packageName}_INCLUDE_DIRS) + set(includes ${_packageName}_INCLUDE_DIRS) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS) + set(includes ${PACKAGE_NAME}_INCLUDE_DIRS) + elseif(DEFINED ${_packageName}_INCLUDES) + set(includes ${_packageName}_INCLUDES) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDES) + set(includes ${PACKAGE_NAME}_INCLUDES) + elseif(DEFINED ${_packageName}_INCLUDE_DIR) + set(includes ${_packageName}_INCLUDE_DIR) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR) + set(includes ${PACKAGE_NAME}_INCLUDE_DIR) + endif() + + # Check the following variables: + # FOO_DEFINITIONS + # Foo_DEFINITIONS + set(definitions) + if(DEFINED ${_packageName}_DEFINITIONS) + set(definitions ${_packageName}_DEFINITIONS) + elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS) + set(definitions ${PACKAGE_NAME}_DEFINITIONS) + endif() + + set(PACKAGE_INCLUDE_DIRS "${${includes}}") + set(PACKAGE_DEFINITIONS "${${definitions}}") + set(PACKAGE_LIBRARIES "${${libs}}") +endif() diff --git a/mesonbuild/dependencies/dev.py 
b/mesonbuild/dependencies/dev.py index 5ee8b09..47beb4e 100644 --- a/mesonbuild/dependencies/dev.py +++ b/mesonbuild/dependencies/dev.py @@ -16,6 +16,7 @@ # development purposes, such as testing, debugging, etc.. import functools +import glob import os import re @@ -27,6 +28,17 @@ from .base import ( ) +def get_shared_library_suffix(environment, native): + """This is only gauranteed to work for languages that compile to machine + code, not for languages like C# that use a bytecode and always end in .dll + """ + if mesonlib.for_windows(native, environment): + return '.dll' + elif mesonlib.for_darwin(native, environment): + return '.dylib' + return '.so' + + class GTestDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('gtest', environment, 'cpp', kwargs) @@ -196,10 +208,11 @@ class LLVMDependency(ConfigToolDependency): # newest back to oldest (3.5 is arbitrary), and finally the devel version. # Please note that llvm-config-6.0 is a development snapshot and it should # not be moved to the beginning of the list. The only difference between - # llvm-config-7 and llvm-config-devel is that the former is used by + # llvm-config-8 and llvm-config-devel is that the former is used by # Debian and the latter is used by FreeBSD. 
tools = [ 'llvm-config', # base + 'llvm-config-7', 'llvm-config70', 'llvm-config-6.0', 'llvm-config60', 'llvm-config-5.0', 'llvm-config50', 'llvm-config-4.0', 'llvm-config40', @@ -208,7 +221,7 @@ class LLVMDependency(ConfigToolDependency): 'llvm-config-3.7', 'llvm-config37', 'llvm-config-3.6', 'llvm-config36', 'llvm-config-3.5', 'llvm-config35', - 'llvm-config-7', 'llvm-config-devel', # development snapshot + 'llvm-config-8', 'llvm-config-devel', # development snapshot ] tool_name = 'llvm-config' __cpp_blacklist = {'-DNDEBUG'} @@ -234,7 +247,7 @@ class LLVMDependency(ConfigToolDependency): self.compile_args = list(cargs.difference(self.__cpp_blacklist)) if version_compare(self.version, '>= 3.9'): - self._set_new_link_args() + self._set_new_link_args(environment) else: self._set_old_link_args() self.link_args = strip_system_libdirs(environment, self.link_args) @@ -257,18 +270,66 @@ class LLVMDependency(ConfigToolDependency): new_args.append(arg) return new_args - def _set_new_link_args(self): + def __check_libfiles(self, shared): + """Use llvm-config's --libfiles to check if libraries exist.""" + mode = '--link-shared' if shared else '--link-static' + + # Set self.required to true to force an exception in get_config_value + # if the returncode != 0 + restore = self.required + self.required = True + + try: + # It doesn't matter what the stage is, the caller needs to catch + # the exception anyway. 
+ self.link_args = self.get_config_value(['--libfiles', mode], '') + finally: + self.required = restore + + def _set_new_link_args(self, environment): """How to set linker args for LLVM versions >= 3.9""" - if ((mesonlib.is_dragonflybsd() or mesonlib.is_freebsd()) and not - self.static and version_compare(self.version, '>= 4.0')): - # llvm-config on DragonFly BSD and FreeBSD for versions 4.0, 5.0, - # and 6.0 have an error when generating arguments for shared mode - # linking, even though libLLVM.so is installed, because for some - # reason the tool expects to find a .so for each static library. - # This works around that. - self.link_args = self.get_config_value(['--ldflags'], 'link_args') - self.link_args.append('-lLLVM') - return + mode = self.get_config_value(['--shared-mode'], 'link_args')[0] + if not self.static and mode == 'static': + # If llvm is configured with LLVM_BUILD_LLVM_DYLIB but not with + # LLVM_LINK_LLVM_DYLIB and not LLVM_BUILD_SHARED_LIBS (which + # upstreams doesn't recomend using), then llvm-config will lie to + # you about how to do shared-linking. It wants to link to a a bunch + # of individual shared libs (which don't exist because llvm wasn't + # built with LLVM_BUILD_SHARED_LIBS. + # + # Therefore, we'll try to get the libfiles, if the return code is 0 + # or we get an empty list, then we'll try to build a working + # configuration by hand. 
+ try: + self.__check_libfiles(True) + except DependencyException: + lib_ext = get_shared_library_suffix(environment, self.native) + libdir = self.get_config_value(['--libdir'], 'link_args')[0] + # Sort for reproducability + matches = sorted(glob.iglob(os.path.join(libdir, 'libLLVM*{}'.format(lib_ext)))) + if not matches: + if self.required: + raise + self.is_found = False + return + + self.link_args = self.get_config_value(['--ldflags'], 'link_args') + libname = os.path.basename(matches[0]).rstrip(lib_ext).lstrip('lib') + self.link_args.append('-l{}'.format(libname)) + return + elif self.static and mode == 'shared': + # If, however LLVM_BUILD_SHARED_LIBS is true # (*cough* gentoo *cough*) + # then this is correct. Building with LLVM_BUILD_SHARED_LIBS has a side + # effect, it stops the generation of static archives. Therefore we need + # to check for that and error out on static if this is the case + try: + self.__check_libfiles(False) + except DependencyException: + if self.required: + raise + self.is_found = False + return + link_args = ['--link-static', '--system-libs'] if self.static else ['--link-shared'] self.link_args = self.get_config_value( ['--libs', '--ldflags'] + link_args + list(self.required_modules), diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index 5164512..9e0a65a 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -29,7 +29,7 @@ from ..environment import detect_cpu_family from .base import ( DependencyException, DependencyMethods, ExternalDependency, ExternalProgram, ExtraFrameworkDependency, PkgConfigDependency, - ConfigToolDependency, + CMakeDependency, ConfigToolDependency, ) @@ -234,6 +234,8 @@ class MPIDependency(ExternalDependency): class OpenMPDependency(ExternalDependency): # Map date of specification release (which is the macro value) to a version. 
VERSIONS = { + '201811': '5.0', + '201611': '5.0-revision1', # This is supported by ICC 19.x '201511': '4.5', '201307': '4.0', '201107': '3.1', @@ -278,11 +280,14 @@ class ThreadDependency(ExternalDependency): class Python3Dependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('python3', environment, None, kwargs) + + if self.want_cross: + return + self.name = 'python3' self.static = kwargs.get('static', False) # We can only be sure that it is Python 3 at this point self.version = '3' - self.pkgdep = None self._find_libpy3_windows(environment) @classmethod @@ -434,6 +439,11 @@ class PcapDependency(ExternalDependency): @staticmethod def get_pcap_lib_version(ctdep): + # Since we seem to need to run a program to discover the pcap version, + # we can't do that when cross-compiling + if ctdep.want_cross: + return None + v = ctdep.clib_compiler.get_return_value('pcap_lib_version', 'string', '#include <pcap.h>', ctdep.env, [], [ctdep]) v = re.sub(r'libpcap version ', '', v) @@ -465,6 +475,9 @@ class CupsDependency(ExternalDependency): ExtraFrameworkDependency, 'cups', False, None, environment, kwargs.get('language', None), kwargs)) + if DependencyMethods.CMAKE in methods: + candidates.append(functools.partial(CMakeDependency, 'Cups', environment, kwargs)) + return candidates @staticmethod @@ -475,9 +488,9 @@ class CupsDependency(ExternalDependency): @staticmethod def get_methods(): if mesonlib.is_osx(): - return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK] + return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE] else: - return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] + return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.CMAKE] class LibWmfDependency(ExternalDependency): @@ -506,3 +519,34 @@ class LibWmfDependency(ExternalDependency): @staticmethod def 
get_methods(): return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] + + +class LibGCryptDependency(ExternalDependency): + def __init__(self, environment, kwargs): + super().__init__('libgcrypt', environment, None, kwargs) + + @classmethod + def _factory(cls, environment, kwargs): + methods = cls._process_method_kw(kwargs) + candidates = [] + + if DependencyMethods.PKGCONFIG in methods: + candidates.append(functools.partial(PkgConfigDependency, 'libgcrypt', environment, kwargs)) + + if DependencyMethods.CONFIG_TOOL in methods: + candidates.append(functools.partial(ConfigToolDependency.factory, + 'libgcrypt', environment, None, kwargs, ['libgcrypt-config'], + 'libgcrypt-config', + LibGCryptDependency.tool_finish_init)) + + return candidates + + @staticmethod + def tool_finish_init(ctdep): + ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') + ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') + ctdep.version = ctdep.get_config_value(['--version'], 'version')[0] + + @staticmethod + def get_methods(): + return [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL] diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py index 5b2003f..c78ebed 100644 --- a/mesonbuild/dependencies/platform.py +++ b/mesonbuild/dependencies/platform.py @@ -33,7 +33,7 @@ class AppleFrameworks(ExternalDependency): for f in self.frameworks: self.link_args += ['-framework', f] - self.is_found = mesonlib.is_osx() + self.is_found = mesonlib.for_darwin(self.want_cross, self.env) def log_tried(self): return 'framework' diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index a46afbb..e8fba91 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -30,7 +30,7 @@ from ..mesonlib import ( from ..environment import detect_cpu from .base import DependencyException, DependencyMethods -from .base import ExternalDependency, ExternalProgram +from .base import 
ExternalDependency, ExternalProgram, NonExistingExternalProgram from .base import ExtraFrameworkDependency, PkgConfigDependency from .base import ConfigToolDependency @@ -39,13 +39,13 @@ class GLDependency(ExternalDependency): def __init__(self, environment, kwargs): super().__init__('gl', environment, None, kwargs) - if mesonlib.is_osx(): + if mesonlib.for_darwin(self.want_cross, self.env): self.is_found = True # FIXME: Use AppleFrameworks dependency self.link_args = ['-framework', 'OpenGL'] # FIXME: Detect version using self.clib_compiler return - if mesonlib.is_windows(): + if mesonlib.for_windows(self.want_cross, self.env): self.is_found = True # FIXME: Use self.clib_compiler.find_library() self.link_args = ['-lopengl32'] @@ -230,21 +230,48 @@ class QtBaseDependency(ExternalDependency): self.from_text = mlog.format_list(methods) self.version = None - def compilers_detect(self): + def compilers_detect(self, interp_obj): "Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH" - if self.bindir or for_windows(self.env.is_cross_build(), self.env): - moc = ExternalProgram(os.path.join(self.bindir, 'moc'), silent=True) - uic = ExternalProgram(os.path.join(self.bindir, 'uic'), silent=True) - rcc = ExternalProgram(os.path.join(self.bindir, 'rcc'), silent=True) - lrelease = ExternalProgram(os.path.join(self.bindir, 'lrelease'), silent=True) - else: - # We don't accept unsuffixed 'moc', 'uic', and 'rcc' because they - # are sometimes older, or newer versions. 
- moc = ExternalProgram('moc-' + self.name, silent=True) - uic = ExternalProgram('uic-' + self.name, silent=True) - rcc = ExternalProgram('rcc-' + self.name, silent=True) - lrelease = ExternalProgram('lrelease-' + self.name, silent=True) - return moc, uic, rcc, lrelease + # It is important that this list does not change order as the order of + # the returned ExternalPrograms will change as well + bins = ['moc', 'uic', 'rcc', 'lrelease'] + found = {b: NonExistingExternalProgram(name='{}-{}'.format(b, self.name)) + for b in bins} + + def gen_bins(): + for b in bins: + if self.bindir: + yield os.path.join(self.bindir, b), b, False + yield '{}-{}'.format(b, self.name), b, False + yield b, b, self.required + + for b, name, required in gen_bins(): + if found[name].found(): + continue + + # prefer the <tool>-qt<version> of the tool to the plain one, as we + # don't know what the unsuffixed one points to without calling it. + p = interp_obj.find_program_impl([b], silent=True, required=required).held_object + if not p.found(): + continue + + if name == 'lrelease': + arg = ['-version'] + elif mesonlib.version_compare(self.version, '>= 5'): + arg = ['--version'] + else: + arg = ['-v'] + + # Ensure that the version of qt and each tool are the same + _, out, err = mesonlib.Popen_safe(p.get_command() + arg) + if b.startswith('lrelease') or not self.version.startswith('4'): + care = out + else: + care = err + if mesonlib.version_compare(self.version, '== {}'.format(care.split(' ')[-1])): + found[name] = p + + return tuple([found[b] for b in bins]) def _pkgconfig_detect(self, mods, kwargs): # We set the value of required to False so that we can try the @@ -302,8 +329,15 @@ class QtBaseDependency(ExternalDependency): def _find_qmake(self, qmake): # Even when cross-compiling, if a cross-info qmake is not specified, we # fallback to using the qmake in PATH because that's what we used to do - if self.env.is_cross_build() and 'qmake' in self.env.cross_info.config['binaries']: - return 
ExternalProgram.from_cross_info(self.env.cross_info, 'qmake') + if self.env.is_cross_build(): + if 'qmake' in self.env.cross_info.config['binaries']: + return ExternalProgram.from_bin_list(self.env.cross_info.config['binaries'], 'qmake') + elif self.env.config_info: + # Prefer suffixed to unsuffixed version + p = ExternalProgram.from_bin_list(self.env.config_info.binaries, 'qmake-' + self.name) + if p.found(): + return p + return ExternalProgram.from_bin_list(self.env.config_info.binaries, 'qmake') return ExternalProgram(qmake, silent=True) def _qmake_detect(self, mods, kwargs): @@ -526,7 +560,7 @@ class SDL2Dependency(ExternalDependency): class WxDependency(ConfigToolDependency): - tools = ['wx-config-3.0', 'wx-config'] + tools = ['wx-config-3.0', 'wx-config', 'wx-config-gtk3'] tool_name = 'wx-config' def __init__(self, environment, kwargs): diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 3a1e1e6..01a7c51 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -12,24 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -import configparser, os, platform, re, shlex, shutil, subprocess +import configparser, os, platform, re, sys, shlex, shutil, subprocess from . import coredata -from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker +from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker from . import mesonlib -from .mesonlib import EnvironmentException, Popen_safe +from .mesonlib import MesonException, EnvironmentException, PerMachine, Popen_safe from . import mlog from . 
import compilers from .compilers import ( - CLANG_OSX, - CLANG_STANDARD, - CLANG_WIN, - GCC_CYGWIN, - GCC_MINGW, - GCC_OSX, - GCC_STANDARD, - ICC_STANDARD, + CompilerType, is_assembly, is_header, is_library, @@ -46,6 +39,8 @@ from .compilers import ( ClangCPPCompiler, ClangObjCCompiler, ClangObjCPPCompiler, + ClangClCCompiler, + ClangClCPPCompiler, G95FortranCompiler, GnuCCompiler, GnuCPPCompiler, @@ -66,6 +61,8 @@ from .compilers import ( PathScaleFortranCompiler, PGIFortranCompiler, RustCompiler, + CcrxCCompiler, + CcrxCPPCompiler, SunFortranCompiler, ValaCompiler, VisualStudioCCompiler, @@ -76,6 +73,7 @@ build_filename = 'meson.build' known_cpu_families = ( 'aarch64', + 'arc', 'arm', 'e2k', 'ia64', @@ -86,6 +84,7 @@ known_cpu_families = ( 'ppc64', 'riscv32', 'riscv64', + 'rx', 's390x', 'sparc', 'sparc64', @@ -121,7 +120,7 @@ def find_coverage_tools(): return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe def detect_ninja(version='1.5', log=False): - for n in ['ninja', 'ninja-build']: + for n in ['ninja', 'ninja-build', 'samu']: try: p, found = Popen_safe([n, '--version'])[0:2] except (FileNotFoundError, PermissionError): @@ -196,6 +195,8 @@ def detect_windows_arch(compilers): platform = os.environ.get('Platform', 'x86').lower() if platform == 'x86': return platform + if compiler.id == 'clang-cl' and not compiler.is_64: + return 'x86' if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'): return 'x86' return os_arch @@ -217,6 +218,19 @@ def detect_cpu_family(compilers): return 'arm' if trial.startswith('ppc64'): return 'ppc64' + if trial == 'powerpc': + # FreeBSD calls both ppc and ppc64 "powerpc". + # https://github.com/mesonbuild/meson/issues/4397 + try: + p, stdo, _ = Popen_safe(['uname', '-p']) + except (FileNotFoundError, PermissionError): + # Not much to go on here. 
+ if sys.maxsize > 2**32: + return 'ppc64' + return 'ppc' + if 'powerpc64' in stdo: + return 'ppc64' + return 'ppc' if trial in ('amd64', 'x64'): trial = 'x86_64' if trial == 'x86_64': @@ -306,26 +320,50 @@ class Environment: self.coredata = coredata.load(self.get_build_dir()) self.first_invocation = False except FileNotFoundError: - # WARNING: Don't use any values from coredata in __init__. It gets - # re-initialized with project options by the interpreter during - # build file parsing. - self.coredata = coredata.CoreData(options) - # Used by the regenchecker script, which runs meson - self.coredata.meson_command = mesonlib.meson_command - self.first_invocation = True - self.cross_info = None + self.create_new_coredata(options) + except MesonException as e: + # If we stored previous command line options, we can recover from + # a broken/outdated coredata. + if os.path.isfile(coredata.get_cmd_line_file(self.build_dir)): + mlog.warning('Regenerating configuration from scratch.') + mlog.log('Reason:', mlog.red(str(e))) + coredata.read_cmd_line_file(self.build_dir, options) + self.create_new_coredata(options) + else: + raise e self.exe_wrapper = None + + self.machines = MachineInfos() + # Will be fully initialized later using compilers later. 
+ self.machines.detect_build() if self.coredata.cross_file: self.cross_info = CrossBuildInfo(self.coredata.cross_file) if 'exe_wrapper' in self.cross_info.config['binaries']: from .dependencies import ExternalProgram - self.exe_wrapper = ExternalProgram.from_cross_info(self.cross_info, 'exe_wrapper') + self.exe_wrapper = ExternalProgram.from_bin_list( + self.cross_info.config['binaries'], 'exe_wrapper') + if 'host_machine' in self.cross_info.config: + self.machines.host = MachineInfo.from_literal( + self.cross_info.config['host_machine']) + if 'target_machine' in self.cross_info.config: + self.machines.target = MachineInfo.from_literal( + self.cross_info.config['target_machine']) + else: + self.cross_info = None + self.machines.default_missing() + + if self.coredata.config_files: + self.config_info = coredata.ConfigData( + coredata.load_configs(self.coredata.config_files)) + else: + self.config_info = coredata.ConfigData() + self.cmd_line_options = options.cmd_line_options.copy() # List of potential compilers. 
if mesonlib.is_windows(): - self.default_c = ['cl', 'cc', 'gcc', 'clang'] - self.default_cpp = ['cl', 'c++', 'g++', 'clang++'] + self.default_c = ['cl', 'cc', 'gcc', 'clang', 'clang-cl'] + self.default_cpp = ['cl', 'c++', 'g++', 'clang++', 'clang-cl'] else: self.default_c = ['cc', 'gcc', 'clang'] self.default_cpp = ['c++', 'g++', 'clang++'] @@ -339,6 +377,7 @@ class Environment: self.default_rust = ['rustc'] self.default_static_linker = ['ar'] self.vs_static_linker = ['lib'] + self.clang_cl_static_linker = ['llvm-lib'] self.gcc_static_linker = ['gcc-ar'] self.clang_static_linker = ['llvm-ar'] @@ -359,10 +398,20 @@ class Environment: self.object_suffix = 'o' self.win_libdir_layout = False if 'STRIP' in os.environ: - self.native_strip_bin = shlex.split(os.environ['STRIP']) + self.native_strip_bin = shlex.split( + os.environ[BinaryTable.evarMap['strip']]) else: self.native_strip_bin = ['strip'] + def create_new_coredata(self, options): + # WARNING: Don't use any values from coredata in __init__. It gets + # re-initialized with project options by the interpreter during + # build file parsing. 
+ self.coredata = coredata.CoreData(options) + # Used by the regenchecker script, which runs meson + self.coredata.meson_command = mesonlib.meson_command + self.first_invocation = True + def is_cross_build(self): return self.cross_info is not None @@ -381,7 +430,7 @@ class Environment: def get_build_command(self, unbuffered=False): cmd = mesonlib.meson_command[:] - if unbuffered and 'python' in cmd[0]: + if unbuffered and 'python' in os.path.basename(cmd[0]): cmd.insert(1, '-u') return cmd @@ -451,56 +500,44 @@ class Environment: def get_gnu_compiler_type(defines): # Detect GCC type (Apple, MinGW, Cygwin, Unix) if '__APPLE__' in defines: - return GCC_OSX + return CompilerType.GCC_OSX elif '__MINGW32__' in defines or '__MINGW64__' in defines: - return GCC_MINGW + return CompilerType.GCC_MINGW elif '__CYGWIN__' in defines: - return GCC_CYGWIN - return GCC_STANDARD - - def warn_about_lang_pointing_to_cross(self, compiler_exe, evar): - evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME') - if evar_str == compiler_exe: - mlog.warning('''Env var %s seems to point to the cross compiler. -This is probably wrong, it should always point to the native compiler.''' % evar) + return CompilerType.GCC_CYGWIN + return CompilerType.GCC_STANDARD - def _get_compilers(self, lang, evar, want_cross): + def _get_compilers(self, lang, want_cross): ''' The list of compilers is detected in the exact same way for C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here. 
''' + is_cross = False + exe_wrap = None + evar = BinaryTable.evarMap[lang] + if self.is_cross_build() and want_cross: if lang not in self.cross_info.config['binaries']: raise EnvironmentException('{!r} compiler binary not defined in cross file'.format(lang)) - compilers = mesonlib.stringlistify(self.cross_info.config['binaries'][lang]) - # Ensure ccache exists and remove it if it doesn't - if compilers[0] == 'ccache': - compilers = compilers[1:] - ccache = self.detect_ccache() - else: - ccache = [] - self.warn_about_lang_pointing_to_cross(compilers[0], evar) + compilers, ccache = BinaryTable.parse_entry( + mesonlib.stringlistify(self.cross_info.config['binaries'][lang])) + BinaryTable.warn_about_lang_pointing_to_cross(compilers[0], evar) # Return value has to be a list of compiler 'choices' compilers = [compilers] is_cross = True exe_wrap = self.get_exe_wrapper() elif evar in os.environ: - compilers = shlex.split(os.environ[evar]) - # Ensure ccache exists and remove it if it doesn't - if compilers[0] == 'ccache': - compilers = compilers[1:] - ccache = self.detect_ccache() - else: - ccache = [] + compilers, ccache = BinaryTable.parse_entry( + shlex.split(os.environ[evar])) # Return value has to be a list of compiler 'choices' compilers = [compilers] - is_cross = False - exe_wrap = None + elif lang in self.config_info.binaries: + compilers, ccache = BinaryTable.parse_entry( + mesonlib.stringlistify(self.config_info.binaries[lang])) + compilers = [compilers] else: compilers = getattr(self, 'default_' + lang) - ccache = self.detect_ccache() - is_cross = False - exe_wrap = None + ccache = BinaryTable.detect_ccache() return compilers, ccache, is_cross, exe_wrap def _handle_exceptions(self, exceptions, binaries, bintype='compiler'): @@ -511,13 +548,13 @@ This is probably wrong, it should always point to the native compiler.''' % evar errmsg += '\nRunning "{0}" gave "{1}"'.format(c, e) raise EnvironmentException(errmsg) - def _detect_c_or_cpp_compiler(self, lang, evar, 
want_cross): + def _detect_c_or_cpp_compiler(self, lang, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, evar, want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] - if 'cl' in compiler or 'cl.exe' in compiler: + if not set(['cl', 'cl.exe', 'clang-cl', 'clang-cl.exe']).isdisjoint(compiler): # Watcom C provides it's own cl.exe clone that mimics an older # version of Microsoft's compiler. Since Watcom's cl.exe is # just a wrapper, we skip using it if we detect its presence @@ -539,6 +576,8 @@ This is probably wrong, it should always point to the native compiler.''' % evar arg = '/?' elif 'armcc' in compiler[0]: arg = '--vsn' + elif 'ccrx' in compiler[0]: + arg = '-v' else: arg = '--version' try: @@ -546,8 +585,12 @@ This is probably wrong, it should always point to the native compiler.''' % evar except OSError as e: popen_exceptions[' '.join(compiler + [arg])] = e continue - version = search_version(out) + + if 'ccrx' in compiler[0]: + out = err + full_version = out.split('\n', 1)[0] + version = search_version(out) guess_gcc_or_lcc = False if 'Free Software Foundation' in out: @@ -560,14 +603,14 @@ This is probably wrong, it should always point to the native compiler.''' % evar if not defines: popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' continue - gtype = self.get_gnu_compiler_type(defines) + compiler_type = self.get_gnu_compiler_type(defines) if guess_gcc_or_lcc == 'lcc': version = self.get_lcc_version_from_defines(defines) cls = ElbrusCCompiler if lang == 'c' else ElbrusCPPCompiler else: version = self.get_gnu_version_from_defines(defines) cls = GnuCCompiler if lang == 'c' else GnuCPPCompiler - return cls(ccache + compiler, version, gtype, is_cross, exe_wrap, defines, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, 
defines, full_version=full_version) if 'armclang' in out: # The compiler version is not present in the first line of output, @@ -583,17 +626,30 @@ This is probably wrong, it should always point to the native compiler.''' % evar # Override previous values version = search_version(arm_ver_str) full_version = arm_ver_str + compiler_type = CompilerType.ARM_WIN cls = ArmclangCCompiler if lang == 'c' else ArmclangCPPCompiler - return cls(ccache + compiler, version, is_cross, exe_wrap, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) + if 'CL.EXE COMPATIBILITY' in out: + # if this is clang-cl masquerading as cl, detect it as cl, not + # clang + arg = '--version' + try: + p, out, err = Popen_safe(compiler + [arg]) + except OSError as e: + popen_exceptions[' '.join(compiler + [arg])] = e + version = search_version(out) + is_64 = 'Target: x86_64' in out + cls = ClangClCCompiler if lang == 'c' else ClangClCPPCompiler + return cls(compiler, version, is_cross, exe_wrap, is_64) if 'clang' in out: if 'Apple' in out or mesonlib.for_darwin(want_cross, self): - cltype = CLANG_OSX + compiler_type = CompilerType.CLANG_OSX elif 'windows' in out or mesonlib.for_windows(want_cross, self): - cltype = CLANG_WIN + compiler_type = CompilerType.CLANG_MINGW else: - cltype = CLANG_STANDARD + compiler_type = CompilerType.CLANG_STANDARD cls = ClangCCompiler if lang == 'c' else ClangCPPCompiler - return cls(ccache + compiler, version, cltype, is_cross, exe_wrap, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) if 'Microsoft' in out or 'Microsoft' in err: # Latest versions of Visual Studio print version # number to stderr but earlier ones print version @@ -609,24 +665,35 @@ This is probably wrong, it should always point to the native compiler.''' % evar cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler return 
cls(compiler, version, is_cross, exe_wrap, is_64) if '(ICC)' in out: - # TODO: add microsoft add check OSX - inteltype = ICC_STANDARD + if mesonlib.for_darwin(want_cross, self): + compiler_type = CompilerType.ICC_OSX + elif mesonlib.for_windows(want_cross, self): + # TODO: fix ICC on Windows + compiler_type = CompilerType.ICC_WIN + else: + compiler_type = CompilerType.ICC_STANDARD cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler - return cls(ccache + compiler, version, inteltype, is_cross, exe_wrap, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) if 'ARM' in out: + compiler_type = CompilerType.ARM_WIN cls = ArmCCompiler if lang == 'c' else ArmCPPCompiler - return cls(ccache + compiler, version, is_cross, exe_wrap, full_version=full_version) + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) + if 'RX Family' in out: + compiler_type = CompilerType.CCRX_WIN + cls = CcrxCCompiler if lang == 'c' else CcrxCPPCompiler + return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version) + self._handle_exceptions(popen_exceptions, compilers) def detect_c_compiler(self, want_cross): - return self._detect_c_or_cpp_compiler('c', 'CC', want_cross) + return self._detect_c_or_cpp_compiler('c', want_cross) def detect_cpp_compiler(self, want_cross): - return self._detect_c_or_cpp_compiler('cpp', 'CXX', want_cross) + return self._detect_c_or_cpp_compiler('cpp', want_cross) def detect_fortran_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('fortran', 'FC', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('fortran', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -651,14 +718,14 @@ This is probably wrong, it should always point to the native compiler.''' % evar if not defines: 
popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' continue - gtype = self.get_gnu_compiler_type(defines) + compiler_type = self.get_gnu_compiler_type(defines) if guess_gcc_or_lcc == 'lcc': version = self.get_lcc_version_from_defines(defines) cls = ElbrusFortranCompiler else: version = self.get_gnu_version_from_defines(defines) cls = GnuFortranCompiler - return cls(compiler, version, gtype, is_cross, exe_wrap, defines, full_version=full_version) + return cls(compiler, version, compiler_type, is_cross, exe_wrap, defines, full_version=full_version) if 'G95' in out: return G95FortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version) @@ -688,7 +755,7 @@ This is probably wrong, it should always point to the native compiler.''' % evar def detect_objc_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('objc', 'OBJC', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('objc', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -704,18 +771,18 @@ This is probably wrong, it should always point to the native compiler.''' % evar if not defines: popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' continue - gtype = self.get_gnu_compiler_type(defines) + compiler_type = self.get_gnu_compiler_type(defines) version = self.get_gnu_version_from_defines(defines) - return GnuObjCCompiler(ccache + compiler, version, gtype, is_cross, exe_wrap, defines) + return GnuObjCCompiler(ccache + compiler, version, compiler_type, is_cross, exe_wrap, defines) if out.startswith('Apple LLVM'): - return ClangObjCCompiler(ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap) + return ClangObjCCompiler(ccache + compiler, version, CompilerType.CLANG_OSX, is_cross, exe_wrap) if out.startswith('clang'): - return ClangObjCCompiler(ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap) + return ClangObjCCompiler(ccache 
+ compiler, version, CompilerType.CLANG_STANDARD, is_cross, exe_wrap) self._handle_exceptions(popen_exceptions, compilers) def detect_objcpp_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('objcpp', 'OBJCXX', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('objcpp', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -731,28 +798,32 @@ This is probably wrong, it should always point to the native compiler.''' % evar if not defines: popen_exceptions[' '.join(compiler)] = 'no pre-processor defines' continue - gtype = self.get_gnu_compiler_type(defines) + compiler_type = self.get_gnu_compiler_type(defines) version = self.get_gnu_version_from_defines(defines) - return GnuObjCPPCompiler(ccache + compiler, version, gtype, is_cross, exe_wrap, defines) + return GnuObjCPPCompiler(ccache + compiler, version, compiler_type, is_cross, exe_wrap, defines) if out.startswith('Apple LLVM'): - return ClangObjCPPCompiler(ccache + compiler, version, CLANG_OSX, is_cross, exe_wrap) + return ClangObjCPPCompiler(ccache + compiler, version, CompilerType.CLANG_OSX, is_cross, exe_wrap) if out.startswith('clang'): - return ClangObjCPPCompiler(ccache + compiler, version, CLANG_STANDARD, is_cross, exe_wrap) + return ClangObjCPPCompiler(ccache + compiler, version, CompilerType.CLANG_STANDARD, is_cross, exe_wrap) self._handle_exceptions(popen_exceptions, compilers) def detect_java_compiler(self): - exelist = ['javac'] + if 'java' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['java']) + else: + exelist = ['javac'] + try: p, out, err = Popen_safe(exelist + ['-version']) except OSError: raise EnvironmentException('Could not execute Java compiler "%s"' % ' '.join(exelist)) - version = search_version(err) if 'javac' in out or 'javac' in err: + version = search_version(err if 'javac' in err else out) return JavaCompiler(exelist, 
version) raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') def detect_cs_compiler(self): - compilers, ccache, is_cross, exe_wrap = self._get_compilers('cs', 'CSC', False) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('cs', False) popen_exceptions = {} for comp in compilers: if not isinstance(comp, list): @@ -774,6 +845,8 @@ This is probably wrong, it should always point to the native compiler.''' % evar def detect_vala_compiler(self): if 'VALAC' in os.environ: exelist = shlex.split(os.environ['VALAC']) + elif 'vala' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['vala']) else: exelist = ['valac'] try: @@ -787,7 +860,7 @@ This is probably wrong, it should always point to the native compiler.''' % evar def detect_rust_compiler(self, want_cross): popen_exceptions = {} - compilers, ccache, is_cross, exe_wrap = self._get_compilers('rust', 'RUSTC', want_cross) + compilers, ccache, is_cross, exe_wrap = self._get_compilers('rust', want_cross) for compiler in compilers: if isinstance(compiler, str): compiler = [compiler] @@ -818,6 +891,8 @@ This is probably wrong, it should always point to the native compiler.''' % evar elif self.is_cross_build() and want_cross: exelist = mesonlib.stringlistify(self.cross_info.config['binaries']['d']) is_cross = True + elif 'd' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['d']) elif shutil.which("ldc2"): exelist = ['ldc2'] elif shutil.which("ldc"): @@ -836,26 +911,29 @@ This is probably wrong, it should always point to the native compiler.''' % evar version = search_version(out) full_version = out.split('\n', 1)[0] - # Detect which MSVC build environment is currently active. - is_64 = False + # Detect the target architecture, required for proper architecture handling on Windows. 
c_compiler = {} - if mesonlib.is_windows() and 'VCINSTALLDIR' in os.environ: - # MSVC compiler is required for correct platform detection. - c_compiler = {'c': self.detect_c_compiler(want_cross)} + is_msvc = mesonlib.is_windows() and 'VCINSTALLDIR' in os.environ + if is_msvc: + c_compiler = {'c': self.detect_c_compiler(want_cross)} # MSVC compiler is required for correct platform detection. - if detect_cpu_family(c_compiler) == 'x86_64': - is_64 = True + arch = detect_cpu_family(c_compiler) + if is_msvc and arch == 'x86': + arch = 'x86_mscoff' if 'LLVM D compiler' in out: - return compilers.LLVMDCompiler(exelist, version, is_cross, is_64, full_version=full_version) + return compilers.LLVMDCompiler(exelist, version, is_cross, arch, full_version=full_version) elif 'gdc' in out: - return compilers.GnuDCompiler(exelist, version, is_cross, is_64, full_version=full_version) + return compilers.GnuDCompiler(exelist, version, is_cross, arch, full_version=full_version) elif 'The D Language Foundation' in out or 'Digital Mars' in out: - return compilers.DmdDCompiler(exelist, version, is_cross, is_64, full_version=full_version) + return compilers.DmdDCompiler(exelist, version, is_cross, arch, full_version=full_version) raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"') def detect_swift_compiler(self): - exelist = ['swiftc'] + if 'swift' in self.config_info.binaries: + exelist = mesonlib.stringlistify(self.config_info.binaries['swift']) + else: + exelist = ['swiftc'] try: p, _, err = Popen_safe(exelist + ['-v']) except OSError: @@ -872,11 +950,11 @@ This is probably wrong, it should always point to the native compiler.''' % evar linker = [linker] linkers = [linker] else: - evar = 'AR' + evar = BinaryTable.evarMap['ar'] if evar in os.environ: linkers = [shlex.split(os.environ[evar])] elif isinstance(compiler, compilers.VisualStudioCCompiler): - linkers = [self.vs_static_linker] + linkers = [self.vs_static_linker, self.clang_cl_static_linker] elif 
isinstance(compiler, compilers.GnuCompiler): # Use gcc-ar if available; needed for LTO linkers = [self.gcc_static_linker, self.default_static_linker] @@ -886,14 +964,14 @@ This is probably wrong, it should always point to the native compiler.''' % evar elif isinstance(compiler, compilers.DCompiler): # Prefer static linkers over linkers used by D compilers if mesonlib.is_windows(): - linkers = [self.vs_static_linker, compiler.get_linker_exelist()] + linkers = [self.vs_static_linker, self.clang_cl_static_linker, compiler.get_linker_exelist()] else: linkers = [self.default_static_linker, compiler.get_linker_exelist()] else: linkers = [self.default_static_linker] popen_exceptions = {} for linker in linkers: - if 'lib' in linker or 'lib.exe' in linker: + if not set(['lib', 'lib.exe', 'llvm-lib', 'llvm-lib.exe']).isdisjoint(linker): arg = '/?' else: arg = '--version' @@ -902,34 +980,27 @@ This is probably wrong, it should always point to the native compiler.''' % evar except OSError as e: popen_exceptions[' '.join(linker + [arg])] = e continue - if '/OUT:' in out or '/OUT:' in err: + if '/OUT:' in out.upper() or '/OUT:' in err.upper(): return VisualStudioLinker(linker) if p.returncode == 0 and ('armar' in linker or 'armar.exe' in linker): return ArmarLinker(linker) if 'DMD32 D Compiler' in out or 'DMD64 D Compiler' in out: - return DLinker(linker, compiler.is_64, compiler.is_msvc) + return DLinker(linker, compiler.arch) if 'LDC - the LLVM D compiler' in out: - return DLinker(linker, compiler.is_64, compiler.is_msvc) + return DLinker(linker, compiler.arch) if 'GDC' in out and ' based on D ' in out: - return DLinker(linker, compiler.is_64, compiler.is_msvc) + return DLinker(linker, compiler.arch) + if err.startswith('Renesas') and ('rlink' in linker or 'rlink.exe' in linker): + return CcrxLinker(linker) if p.returncode == 0: return ArLinker(linker) if p.returncode == 1 and err.startswith('usage'): # OSX return ArLinker(linker) + if p.returncode == 1 and 
err.startswith('Usage'): # AIX + return ArLinker(linker) self._handle_exceptions(popen_exceptions, linkers, 'linker') raise EnvironmentException('Unknown static linker "%s"' % ' '.join(linkers)) - def detect_ccache(self): - try: - has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except OSError: - has_ccache = 1 - if has_ccache == 0: - cmdlist = ['ccache'] - else: - cmdlist = [] - return cmdlist - def get_source_dir(self): return self.source_dir @@ -1038,9 +1109,6 @@ class CrossBuildInfo: except Exception: raise EnvironmentException('Malformed value in cross file variable %s.' % entry) - if entry == 'cpu_family' and res not in known_cpu_families: - mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % value) - if self.ok_type(res): self.config[s][entry] = res elif isinstance(res, list): @@ -1097,10 +1165,203 @@ class CrossBuildInfo: return False return True - class MachineInfo: def __init__(self, system, cpu_family, cpu, endian): self.system = system self.cpu_family = cpu_family self.cpu = cpu self.endian = endian + + def __eq__(self, other): + if self.__class__ is not other.__class__: + return NotImplemented + return \ + self.system == other.system and \ + self.cpu_family == other.cpu_family and \ + self.cpu == other.cpu and \ + self.endian == other.endian + + def __ne__(self, other): + if self.__class__ is not other.__class__: + return NotImplemented + return not self.__eq__(other) + + @staticmethod + def detect(compilers = None): + """Detect the machine we're running on + + If compilers are not provided, we cannot know as much. None out those + fields to avoid accidentally depending on partial knowledge. The + underlying ''detect_*'' method can be called to explicitly use the + partial information. 
+ """ + return MachineInfo( + detect_system(), + detect_cpu_family(compilers) if compilers is not None else None, + detect_cpu(compilers) if compilers is not None else None, + sys.byteorder) + + @staticmethod + def from_literal(literal): + minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'} + if set(literal) < minimum_literal: + raise EnvironmentException( + 'Machine info is currently {}\n'.format(literal) + + 'but is missing {}.'.format(minimum_literal - set(literal))) + + cpu_family = literal['cpu_family'] + if cpu_family not in known_cpu_families: + mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family) + + endian = literal['endian'] + if endian not in ('little', 'big'): + mlog.warning('Unknown endian %s' % endian) + + return MachineInfo( + literal['system'], + cpu_family, + literal['cpu'], + endian) + + def is_windows(self): + """ + Machine is windows? + """ + return self.system == 'windows' + + def is_cygwin(self): + """ + Machine is cygwin? + """ + return self.system == 'cygwin' + + def is_linux(self): + """ + Machine is linux? + """ + return self.system == 'linux' + + def is_darwin(self): + """ + Machine is Darwin (iOS/OS X)? + """ + return self.system in ('darwin', 'ios') + + def is_android(self): + """ + Machine is Android? + """ + return self.system == 'android' + + def is_haiku(self): + """ + Machine is Haiku? + """ + return self.system == 'haiku' + + def is_openbsd(self): + """ + Machine is OpenBSD? + """ + return self.system == 'openbsd' + + # Various prefixes and suffixes for import libraries, shared libraries, + # static libraries, and executables. + # Versioning is added to these names in the backends as-needed. 
+ + def get_exe_suffix(self): + if self.is_windows() or self.is_cygwin(): + return 'exe' + else: + return '' + + def get_object_suffix(self): + if self.is_windows(): + return 'obj' + else: + return 'o' + + def libdir_layout_is_win(self): + return self.is_windows() \ + or self.is_cygwin() + +class MachineInfos(PerMachine): + def __init__(self): + super().__init__(None, None, None) + + def default_missing(self): + """Default host to buid and target to host. + + This allows just specifying nothing in the native case, just host in the + cross non-compiler case, and just target in the native-built + cross-compiler case. + """ + if self.host is None: + self.host = self.build + if self.target is None: + self.target = self.host + + def miss_defaulting(self): + """Unset definition duplicated from their previous to None + + This is the inverse of ''default_missing''. By removing defaulted + machines, we can elaborate the original and then redefault them and thus + avoid repeating the elaboration explicitly. + """ + if self.target == self.host: + self.target = None + if self.host == self.build: + self.host = None + + def detect_build(self, compilers = None): + self.build = MachineInfo.detect(compilers) + +class BinaryTable: + # Map from language identifiers to environment variables. 
+ evarMap = { + # Compilers + 'c': 'CC', + 'cpp': 'CXX', + 'cs': 'CSC', + 'd': 'DC', + 'fortran': 'FC', + 'objc': 'OBJC', + 'objcpp': 'OBJCXX', + 'rust': 'RUSTC', + 'vala': 'VALAC', + + # Binutils + 'strip': 'STRIP', + 'ar': 'AR', + } + + @classmethod + def detect_ccache(cls): + try: + has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except OSError: + has_ccache = 1 + if has_ccache == 0: + cmdlist = ['ccache'] + else: + cmdlist = [] + return cmdlist + + @classmethod + def warn_about_lang_pointing_to_cross(cls, compiler_exe, evar): + evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME') + if evar_str == compiler_exe: + mlog.warning('''Env var %s seems to point to the cross compiler. +This is probably wrong, it should always point to the native compiler.''' % evar) + + @classmethod + def parse_entry(cls, entry): + compiler = mesonlib.stringlistify(entry) + # Ensure ccache exists and remove it if it doesn't + if compiler[0] == 'ccache': + compiler = compiler[1:] + ccache = cls.detect_ccache() + else: + ccache = [] + # Return value has to be a list of compiler 'choices' + return compiler, ccache diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 1d6d670..4f09c0f 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -27,16 +27,18 @@ from .dependencies import InternalDependency, Dependency, NotFoundDependency, De from .interpreterbase import InterpreterBase from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest -from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler +from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs +from 
.interpreterbase import ObjectHolder from .modules import ModuleReturnValue -import os, sys, shutil, uuid +import os, shutil, uuid import re, shlex import subprocess from collections import namedtuple from pathlib import PurePath import traceback +import functools import importlib @@ -57,14 +59,6 @@ def stringifyUserArguments(args): raise InvalidArguments('Function accepts only strings, integers, lists and lists thereof.') -class ObjectHolder: - def __init__(self, obj, subproject=None): - self.held_object = obj - self.subproject = subproject - - def __repr__(self): - return '<Holder: {!r}>'.format(self.held_object) - class FeatureOptionHolder(InterpreterObject, ObjectHolder): def __init__(self, env, option): InterpreterObject.__init__(self) @@ -92,13 +86,15 @@ class FeatureOptionHolder(InterpreterObject, ObjectHolder): def auto_method(self, args, kwargs): return self.held_object.is_auto() -def extract_required_kwarg(kwargs, subproject): +def extract_required_kwarg(kwargs, subproject, feature_check=None): val = kwargs.get('required', True) disabled = False required = False feature = None if isinstance(val, FeatureOptionHolder): - FeatureNew('User option "feature"', '0.47.0').use(subproject) + if not feature_check: + feature_check = FeatureNew('User option "feature"', '0.47.0') + feature_check.use(subproject) option = val.held_object feature = val.name if option.is_disabled(): @@ -571,57 +567,11 @@ class GeneratedListHolder(InterpreterObject, ObjectHolder): def add_file(self, a): self.held_object.add_file(a) -class BuildMachine(InterpreterObject, ObjectHolder): - def __init__(self, compilers): - self.compilers = compilers - InterpreterObject.__init__(self) - held_object = environment.MachineInfo(environment.detect_system(), - environment.detect_cpu_family(self.compilers), - environment.detect_cpu(self.compilers), - sys.byteorder) - ObjectHolder.__init__(self, held_object) - self.methods.update({'system': self.system_method, - 'cpu_family': self.cpu_family_method, - 
'cpu': self.cpu_method, - 'endian': self.endian_method, - }) - - @noPosargs - @permittedKwargs({}) - def cpu_family_method(self, args, kwargs): - return self.held_object.cpu_family - - @noPosargs - @permittedKwargs({}) - def cpu_method(self, args, kwargs): - return self.held_object.cpu - - @noPosargs - @permittedKwargs({}) - def system_method(self, args, kwargs): - return self.held_object.system - - @noPosargs - @permittedKwargs({}) - def endian_method(self, args, kwargs): - return self.held_object.endian - -# This class will provide both host_machine and -# target_machine -class CrossMachineInfo(InterpreterObject, ObjectHolder): - def __init__(self, cross_info): +# A machine that's statically known from the cross file +class MachineHolder(InterpreterObject, ObjectHolder): + def __init__(self, machine_info): InterpreterObject.__init__(self) - minimum_cross_info = {'cpu', 'cpu_family', 'endian', 'system'} - if set(cross_info) < minimum_cross_info: - raise InterpreterException( - 'Machine info is currently {}\n'.format(cross_info) + - 'but is missing {}.'.format(minimum_cross_info - set(cross_info))) - self.info = cross_info - minfo = environment.MachineInfo(cross_info['system'], - cross_info['cpu_family'], - cross_info['cpu'], - cross_info['endian']) - ObjectHolder.__init__(self, minfo) + ObjectHolder.__init__(self, machine_info) self.methods.update({'system': self.system_method, 'cpu': self.cpu_method, 'cpu_family': self.cpu_family_method, @@ -986,8 +936,26 @@ class CompilerHolder(InterpreterObject): 'first_supported_link_argument': self.first_supported_link_argument_method, 'unittest_args': self.unittest_args_method, 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method, + 'get_argument_syntax': self.get_argument_syntax_method, }) + def _dep_msg(self, deps, endl): + msg_single = 'with dependency {}' + msg_many = 'with dependencies {}' + if not deps: + return endl + if endl is None: + endl = '' + tpl = msg_many if len(deps) > 1 else 
msg_single + names = [] + for d in deps: + if isinstance(d, dependencies.ExternalLibrary): + name = '-l' + d.name + else: + name = d.name + names.append(name) + return tpl.format(', '.join(names)) + endl + @noPosargs @permittedKwargs({}) def version_method(self, args, kwargs): @@ -998,7 +966,7 @@ class CompilerHolder(InterpreterObject): def cmd_array_method(self, args, kwargs): return self.compiler.exelist - def determine_args(self, kwargs): + def determine_args(self, kwargs, mode='link'): nobuiltins = kwargs.get('no_builtin_args', False) if not isinstance(nobuiltins, bool): raise InterpreterException('Type of no_builtin_args not a boolean.') @@ -1014,11 +982,12 @@ class CompilerHolder(InterpreterObject): if not nobuiltins: opts = self.environment.coredata.compiler_options args += self.compiler.get_option_compile_args(opts) - args += self.compiler.get_option_link_args(opts) + if mode == 'link': + args += self.compiler.get_option_link_args(opts) args += mesonlib.stringlistify(kwargs.get('args', [])) return args - def determine_dependencies(self, kwargs): + def determine_dependencies(self, kwargs, endl=':'): deps = kwargs.get('dependencies', None) if deps is not None: deps = listify(deps) @@ -1032,7 +1001,7 @@ class CompilerHolder(InterpreterObject): raise InterpreterException('Dependencies must be external dependencies') final_deps.append(d) deps = final_deps - return deps + return deps, self._dep_msg(deps, endl) @permittedKwargs({ 'prefix', @@ -1048,9 +1017,11 @@ class CompilerHolder(InterpreterObject): if not isinstance(prefix, str): raise InterpreterException('Prefix argument of sizeof must be a string.') extra_args = mesonlib.stringlistify(kwargs.get('args', [])) - deps = self.determine_dependencies(kwargs) - result = self.compiler.alignment(typename, prefix, self.environment, extra_args, deps) - mlog.log('Checking for alignment of', mlog.bold(typename, True), ':', result) + deps, msg = self.determine_dependencies(kwargs) + result = 
self.compiler.alignment(typename, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) + mlog.log('Checking for alignment of', mlog.bold(typename, True), msg, result) return result @permittedKwargs({ @@ -1072,9 +1043,10 @@ class CompilerHolder(InterpreterObject): testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - result = self.compiler.run(code, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs, endl=None) + result = self.compiler.run(code, self.environment, extra_args=extra_args, + dependencies=deps) if len(testname) > 0: if not result.compiled: h = mlog.red('DID NOT COMPILE') @@ -1082,7 +1054,7 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO (%d)' % result.returncode) - mlog.log('Checking if', mlog.bold(testname, True), 'runs:', h) + mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h) return TryRunResultHolder(result) @noPosargs @@ -1127,16 +1099,18 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_member must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) had = self.compiler.has_members(typename, [membername], prefix, - self.environment, extra_args, deps) + self.environment, + extra_args=extra_args, + dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') mlog.log('Checking whether type', mlog.bold(typename, True), - 'has member', mlog.bold(membername, True), ':', hadtxt) + 'has member', mlog.bold(membername, True), msg, 
hadtxt) return had @permittedKwargs({ @@ -1155,17 +1129,19 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_members must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) had = self.compiler.has_members(typename, membernames, prefix, - self.environment, extra_args, deps) + self.environment, + extra_args=extra_args, + dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') members = mlog.bold(', '.join(['"{}"'.format(m) for m in membernames])) mlog.log('Checking whether type', mlog.bold(typename, True), - 'has members', members, ':', hadtxt) + 'has members', members, msg, hadtxt) return had @permittedKwargs({ @@ -1184,13 +1160,15 @@ class CompilerHolder(InterpreterObject): if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_function must be a string.') extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - had = self.compiler.has_function(funcname, prefix, self.environment, extra_args, deps) + deps, msg = self.determine_dependencies(kwargs) + had = self.compiler.has_function(funcname, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - mlog.log('Checking for function', mlog.bold(funcname, True), ':', hadtxt) + mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt) return had @permittedKwargs({ @@ -1208,14 +1186,15 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_type must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - had = 
self.compiler.has_type(typename, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + had = self.compiler.has_type(typename, prefix, self.environment, + extra_args=extra_args, dependencies=deps) if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - mlog.log('Checking for type', mlog.bold(typename, True), ':', hadtxt) + mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt) return had @FeatureNew('compiler.compute_int', '0.40.0') @@ -1246,10 +1225,12 @@ class CompilerHolder(InterpreterObject): raise InterpreterException('High argument of compute_int must be an int.') if guess is not None and not isinstance(guess, int): raise InterpreterException('Guess argument of compute_int must be an int.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - res = self.compiler.compute_int(expression, low, high, guess, prefix, self.environment, extra_args, deps) - mlog.log('Computing int of "%s": %d' % (expression, res)) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + res = self.compiler.compute_int(expression, low, high, guess, prefix, + self.environment, extra_args=extra_args, + dependencies=deps) + mlog.log('Computing int of', mlog.bold(expression, True), msg, res) return res @permittedKwargs({ @@ -1267,10 +1248,11 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of sizeof must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - esize = self.compiler.sizeof(element, prefix, self.environment, extra_args, deps) - mlog.log('Checking for size of "%s": %d' % (element, esize)) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + esize 
= self.compiler.sizeof(element, prefix, self.environment, + extra_args=extra_args, dependencies=deps) + mlog.log('Checking for size of', mlog.bold(element, True), msg, esize) return esize @FeatureNew('compiler.get_define', '0.40.0') @@ -1289,10 +1271,12 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of get_define() must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - value = self.compiler.get_define(element, prefix, self.environment, extra_args, deps) - mlog.log('Fetching value of define "%s": %s' % (element, value)) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + value = self.compiler.get_define(element, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) + mlog.log('Fetching value of define', mlog.bold(element, True), msg, value) return value @permittedKwargs({ @@ -1314,15 +1298,17 @@ class CompilerHolder(InterpreterObject): testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - result = self.compiler.compiles(code, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs, endl=None) + result = self.compiler.compiles(code, self.environment, + extra_args=extra_args, + dependencies=deps) if len(testname) > 0: if result: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if', mlog.bold(testname, True), 'compiles:', h) + mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h) return result @permittedKwargs({ @@ -1344,15 +1330,17 @@ class CompilerHolder(InterpreterObject): testname = kwargs.get('name', '') if not isinstance(testname, 
str): raise InterpreterException('Testname argument must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - result = self.compiler.links(code, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs, endl=None) + result = self.compiler.links(code, self.environment, + extra_args=extra_args, + dependencies=deps) if len(testname) > 0: if result: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if', mlog.bold(testname, True), 'links:', h) + mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h) return result @FeatureNew('compiler.check_header', '0.47.0') @@ -1371,14 +1359,16 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - haz = self.compiler.check_header(hname, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + haz = self.compiler.check_header(hname, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) if haz: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Check usable header "%s":' % hname, h) + mlog.log('Check usable header', mlog.bold(hname, True), msg, h) return haz @permittedKwargs({ @@ -1396,14 +1386,15 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - haz = self.compiler.has_header(hname, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, 
msg = self.determine_dependencies(kwargs) + haz = self.compiler.has_header(hname, prefix, self.environment, + extra_args=extra_args, dependencies=deps) if haz: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Has header "%s":' % hname, h) + mlog.log('Has header', mlog.bold(hname, True), msg, h) return haz @permittedKwargs({ @@ -1422,16 +1413,20 @@ class CompilerHolder(InterpreterObject): prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header_symbol must be a string.') - extra_args = self.determine_args(kwargs) - deps = self.determine_dependencies(kwargs) - haz = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment, extra_args, deps) + extra_args = functools.partial(self.determine_args, kwargs) + deps, msg = self.determine_dependencies(kwargs) + haz = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment, + extra_args=extra_args, + dependencies=deps) if haz: h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Header <{0}> has symbol "{1}":'.format(hname, symbol), h) + mlog.log('Header <{0}> has symbol'.format(hname), mlog.bold(symbol, True), msg, h) return haz + @FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs({ 'required', 'dirs', @@ -1564,6 +1559,12 @@ class CompilerHolder(InterpreterObject): args = mesonlib.stringlistify(args) return [a for a in args if self.has_func_attribute_method(a, kwargs)] + @FeatureNew('compiler.get_argument_syntax_method', '0.49.0') + @noPosargs + @noKwargs + def get_argument_syntax_method(self, args, kwargs): + return self.compiler.get_argument_syntax() + ModuleState = namedtuple('ModuleState', [ 'build_to_src', 'subproject', 'subdir', 'current_lineno', 'environment', @@ -1591,8 +1592,8 @@ class ModuleHolder(InterpreterObject, ObjectHolder): # because the Build object contains dicts and lists. 
num_targets = len(self.interpreter.build.targets) state = ModuleState( - build_to_src=os.path.relpath(self.interpreter.environment.get_source_dir(), - self.interpreter.environment.get_build_dir()), + build_to_src=mesonlib.relpath(self.interpreter.environment.get_source_dir(), + self.interpreter.environment.get_build_dir()), subproject=self.interpreter.subproject, subdir=self.interpreter.subdir, current_lineno=self.interpreter.current_lineno, @@ -1684,12 +1685,15 @@ class MesonMain(InterpreterObject): @permittedKwargs({}) def add_dist_script_method(self, args, kwargs): - if len(args) != 1: - raise InterpreterException('add_dist_script takes exactly one argument') + if len(args) < 1: + raise InterpreterException('add_dist_script takes one or more arguments') + if len(args) > 1: + FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject) check_stringlist(args, 'add_dist_script argument must be a string') if self.interpreter.subproject != '': raise InterpreterException('add_dist_script may not be used in a subproject.') - self.build.dist_scripts.append(os.path.join(self.interpreter.subdir, args[0])) + script = self._find_source_script(args[0], args[1:]) + self.build.dist_scripts.append(script) @noPosargs @permittedKwargs({}) @@ -1932,20 +1936,23 @@ class Interpreter(InterpreterBase): self.build_def_files = [os.path.join(self.subdir, environment.build_filename)] if not mock: self.parse_project() - self.builtin['build_machine'] = BuildMachine(self.coredata.compilers) - if not self.build.environment.is_cross_build(): - self.builtin['host_machine'] = self.builtin['build_machine'] - self.builtin['target_machine'] = self.builtin['build_machine'] - else: - cross_info = self.build.environment.cross_info - if cross_info.has_host(): - self.builtin['host_machine'] = CrossMachineInfo(cross_info.config['host_machine']) - else: - self.builtin['host_machine'] = self.builtin['build_machine'] - if cross_info.has_target(): - 
self.builtin['target_machine'] = CrossMachineInfo(cross_info.config['target_machine']) - else: - self.builtin['target_machine'] = self.builtin['host_machine'] + + # Initialize machine descriptions. We can do a better job now because we + # have the compilers needed to gain more knowledge, so wipe out old + # inferrence and start over. + self.build.environment.machines.miss_defaulting() + self.build.environment.machines.detect_build(self.coredata.compilers) + self.build.environment.machines.default_missing() + assert self.build.environment.machines.build.cpu is not None + assert self.build.environment.machines.host.cpu is not None + assert self.build.environment.machines.target.cpu is not None + + self.builtin['build_machine'] = \ + MachineHolder(self.build.environment.machines.build) + self.builtin['host_machine'] = \ + MachineHolder(self.build.environment.machines.host) + self.builtin['target_machine'] = \ + MachineHolder(self.build.environment.machines.target) def get_non_matching_default_options(self): env = self.environment @@ -2227,14 +2234,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Program or command {!r} not found ' 'or not executable'.format(cmd)) cmd = prog - try: - cmd_path = os.path.relpath(cmd.get_path(), start=srcdir) - except ValueError: - # On Windows a relative path can't be evaluated for - # paths on two different drives (i.e. c:\foo and f:\bar). - # The only thing left to is is to use the original absolute - # path. 
- cmd_path = cmd.get_path() + cmd_path = mesonlib.relpath(cmd.get_path(), start=srcdir) if not cmd_path.startswith('..') and cmd_path not in self.build_def_files: self.build_def_files.append(cmd_path) expanded_args = [] @@ -2251,7 +2251,7 @@ external dependencies (including libraries) must go to "dependencies".''') if not os.path.isabs(a): a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a) if os.path.isfile(a): - a = os.path.relpath(a, start=srcdir) + a = mesonlib.relpath(a, start=srcdir) if not a.startswith('..'): if a not in self.build_def_files: self.build_def_files.append(a) @@ -2310,22 +2310,24 @@ external dependencies (including libraries) must go to "dependencies".''') return subproject subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) - r = wrap.Resolver(subproject_dir_abs, self.coredata.wrap_mode) + r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode')) try: resolved = r.resolve(dirname) - except RuntimeError as e: - # if the reason subproject execution failed was because - # the directory doesn't exist, try to give some helpful - # advice if it's a nested subproject that needs - # promotion... 
- self.print_nested_info(dirname) - - if required: - msg = 'Subproject directory {!r} does not exist and cannot be downloaded:\n{}' - raise InterpreterException(msg.format(os.path.join(self.subproject_dir, dirname), e)) - - mlog.log('\nSubproject ', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)\n') - return self.disabled_subproject(dirname) + except wrap.WrapException as e: + subprojdir = os.path.join(self.subproject_dir, r.directory) + if not required: + mlog.log('\nSubproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)\n') + return self.disabled_subproject(dirname) + + if isinstance(e, wrap.WrapNotFoundException): + # if the reason subproject execution failed was because + # the directory doesn't exist, try to give some helpful + # advice if it's a nested subproject that needs + # promotion... + self.print_nested_info(dirname) + + msg = 'Failed to initialize {!r}:\n{}' + raise InterpreterException(msg.format(subprojdir, e)) subdir = os.path.join(self.subproject_dir, resolved) os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True) @@ -2334,7 +2336,8 @@ external dependencies (including libraries) must go to "dependencies".''') with mlog.nested(): try: mlog.log('\nExecuting subproject', mlog.bold(dirname), '\n') - subi = Interpreter(self.build, self.backend, dirname, subdir, self.subproject_dir, + new_build = self.build.copy() + subi = Interpreter(new_build, self.backend, dirname, subdir, self.subproject_dir, self.modules, default_options) subi.subprojects = self.subprojects @@ -2360,6 +2363,7 @@ external dependencies (including libraries) must go to "dependencies".''') self.subprojects.update(subi.subprojects) self.subprojects[dirname] = SubprojectHolder(subi, self.subproject_dir, dirname) self.build_def_files += subi.build_def_files + self.build.merge(subi.build) return self.subprojects[dirname] def get_option_internal(self, optname): @@ -2417,9 +2421,18 @@ external dependencies 
(including libraries) must go to "dependencies".''') @noKwargs def func_configuration_data(self, node, args, kwargs): - if args: - raise InterpreterException('configuration_data takes no arguments') - return ConfigurationDataHolder(self.subproject) + if len(args) > 1: + raise InterpreterException('configuration_data takes only one optional positional arguments') + elif len(args) == 1: + initial_values = args[0] + if not isinstance(initial_values, dict): + raise InterpreterException('configuration_data first argument must be a dictionary') + else: + initial_values = {} + cdata = ConfigurationDataHolder(self.subproject) + for k, v in initial_values.items(): + cdata.set_method([k, v], {}) + return cdata def set_options(self, default_options): # Set default options as if they were passed to the command line. @@ -2735,8 +2748,7 @@ external dependencies (including libraries) must go to "dependencies".''') self.coredata. base_options[optname] = oobj self.emit_base_options_warnings(enabled_opts) - def program_from_cross_file(self, prognames, silent=False): - cross_info = self.environment.cross_info + def _program_from_file(self, prognames, bins, silent): for p in prognames: if hasattr(p, 'held_object'): p = p.held_object @@ -2744,11 +2756,19 @@ external dependencies (including libraries) must go to "dependencies".''') continue # Always points to a local (i.e. self generated) file. 
if not isinstance(p, str): raise InterpreterException('Executable name must be a string') - prog = ExternalProgram.from_cross_info(cross_info, p) + prog = ExternalProgram.from_bin_list(bins, p) if prog.found(): return ExternalProgramHolder(prog) return None + def program_from_cross_file(self, prognames, silent=False): + bins = self.environment.cross_info.config['binaries'] + return self._program_from_file(prognames, bins, silent) + + def program_from_config_file(self, prognames, silent=False): + bins = self.environment.config_info.binaries + return self._program_from_file(prognames, bins, silent) + def program_from_system(self, args, silent=False): # Search for scripts relative to current subdir. # Do not cache found programs because find_program('foobar') @@ -2803,10 +2823,14 @@ external dependencies (including libraries) must go to "dependencies".''') def find_program_impl(self, args, native=False, required=True, silent=True): if not isinstance(args, list): args = [args] + progobj = self.program_from_overrides(args, silent=silent) - if progobj is None and self.build.environment.is_cross_build(): - if not native: + if progobj is None: + if self.build.environment.is_cross_build() and not native: progobj = self.program_from_cross_file(args, silent=silent) + else: + progobj = self.program_from_config_file(args, silent=silent) + if progobj is None: progobj = self.program_from_system(args, silent=silent) if required and (progobj is None or not progobj.found()): @@ -2817,6 +2841,8 @@ external dependencies (including libraries) must go to "dependencies".''') self.store_name_lookups(args) return progobj + @FeatureNewKwargs('find_program', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs(permitted_kwargs['find_program']) def func_find_program(self, node, args, kwargs): if not args: @@ -2882,35 +2908,33 @@ external dependencies (including libraries) must go to "dependencies".''') return True def get_subproject_dep(self, name, dirname, varname, required): + dep 
= DependencyHolder(NotFoundDependency(self.environment), self.subproject) try: subproject = self.subprojects[dirname] - if not subproject.found(): - if not required: - return DependencyHolder(NotFoundDependency(self.environment), self.subproject) + if subproject.found(): + dep = self.subprojects[dirname].get_variable_method([varname], {}) + except InvalidArguments as e: + pass - raise DependencyException('Subproject %s was not found.' % (name)) + if not isinstance(dep, DependencyHolder): + raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' + 'not a dependency object.'.format(varname, dirname)) - dep = self.subprojects[dirname].get_variable_method([varname], {}) - except InvalidArguments as e: + if not dep.found(): if required: - raise DependencyException('Could not find dependency {} in subproject {}; {}' - ''.format(varname, dirname, str(e))) + raise DependencyException('Could not find dependency {} in subproject {}' + ''.format(varname, dirname)) # If the dependency is not required, don't raise an exception subproj_path = os.path.join(self.subproject_dir, dirname) mlog.log('Dependency', mlog.bold(name), 'from subproject', mlog.bold(subproj_path), 'found:', mlog.red('NO')) - return None - if not isinstance(dep, DependencyHolder): - raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' - 'not a dependency object.'.format(varname, dirname)) + return dep def _find_cached_fallback_dep(self, name, dirname, varname, wanted, required): if dirname not in self.subprojects: return False dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep: - return False if not dep.found(): return dep @@ -2945,8 +2969,10 @@ external dependencies (including libraries) must go to "dependencies".''') elif name == 'openmp': FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject) + @FeatureNewKwargs('dependency', '0.49.0', ['disabler']) @FeatureNewKwargs('dependency', '0.40.0', ['method']) @FeatureNewKwargs('dependency', '0.38.0', 
['default_options']) + @disablerIfNotFound @permittedKwargs(permitted_kwargs['dependency']) def func_dependency(self, node, args, kwargs): self.validate_arguments(args, 1, [str]) @@ -2987,7 +3013,7 @@ external dependencies (including libraries) must go to "dependencies".''') dep = NotFoundDependency(self.environment) # Unless a fallback exists and is forced ... - if self.coredata.wrap_mode == WrapMode.forcefallback and 'fallback' in kwargs: + if self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback and 'fallback' in kwargs: pass # ... search for it outside the project elif name != '': @@ -3024,26 +3050,21 @@ external dependencies (including libraries) must go to "dependencies".''') return Disabler() def print_nested_info(self, dependency_name): - message_templ = '''\nDependency %s not found but it is available in a sub-subproject. -To use it in the current project, promote it by going in the project source -root and issuing %s. - -''' + message = ['Dependency', mlog.bold(dependency_name), 'not found but it is available in a sub-subproject.\n' + + 'To use it in the current project, promote it by going in the project source\n' + 'root and issuing'] sprojs = mesonlib.detect_subprojects('subprojects', self.source_root) if dependency_name not in sprojs: return found = sprojs[dependency_name] if len(found) > 1: - suffix = 'one of the following commands' + message.append('one of the following commands:') else: - suffix = 'the following command' - message = message_templ % (dependency_name, suffix) - cmds = [] - command_templ = 'meson wrap promote ' + message.append('the following command:') + command_templ = '\nmeson wrap promote {}' for l in found: - cmds.append(command_templ + l[len(self.source_root) + 1:]) - final_message = message + '\n'.join(cmds) - print(final_message) + message.append(mlog.bold(command_templ.format(l[len(self.source_root) + 1:]))) + mlog.warning(*message) def get_subproject_infos(self, kwargs): fbinfo = kwargs['fallback'] @@ 
-3054,12 +3075,12 @@ root and issuing %s. def dependency_fallback(self, name, kwargs): display_name = name if name else '(anonymous)' - if self.coredata.wrap_mode in (WrapMode.nofallback, WrapMode.nodownload): + if self.coredata.get_builtin_option('wrap_mode') in (WrapMode.nofallback, WrapMode.nodownload): mlog.log('Not looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback' 'dependencies is disabled.') return None - elif self.coredata.wrap_mode == WrapMode.forcefallback: + elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback: mlog.log('Looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback dependencies is forced.') else: @@ -3091,8 +3112,8 @@ root and issuing %s. return None required = kwargs.get('required', True) dep = self.get_subproject_dep(name, dirname, varname, required) - if not dep: - return None + if not dep.found(): + return dep subproj_path = os.path.join(self.subproject_dir, dirname) # Check if the version of the declared dependency matches what we want if 'version' in kwargs: @@ -3584,9 +3605,13 @@ This will become a hard error in the future.''' % kwargs['input']) raise InterpreterException('@INPUT@ used as command argument, but no input file specified.') # Validate output output = kwargs['output'] - ofile_rpath = os.path.join(self.subdir, output) if not isinstance(output, str): raise InterpreterException('Output file name must be a string') + if ifile_abs: + values = mesonlib.get_filenames_templates_dict([ifile_abs], None) + outputs = mesonlib.substitute_values([output], values) + output = outputs[0] + ofile_rpath = os.path.join(self.subdir, output) if ofile_rpath in self.configure_file_outputs: mesonbuildfile = os.path.join(self.subdir, 'meson.build') current_call = "{}:{}".format(mesonbuildfile, self.current_lineno) @@ -3594,10 +3619,6 @@ This will become a hard error in the future.''' % kwargs['input']) 
mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call) else: self.configure_file_outputs[ofile_rpath] = self.current_lineno - if ifile_abs: - values = mesonlib.get_filenames_templates_dict([ifile_abs], None) - outputs = mesonlib.substitute_values([output], values) - output = outputs[0] if os.path.dirname(output) != '': raise InterpreterException('Output file name must not contain a subdirectory.') (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output)) @@ -3605,7 +3626,12 @@ This will become a hard error in the future.''' % kwargs['input']) # Perform the appropriate action if 'configuration' in kwargs: conf = kwargs['configuration'] - if not isinstance(conf, ConfigurationDataHolder): + if isinstance(conf, dict): + cdata = ConfigurationDataHolder(self.subproject) + for k, v in conf.items(): + cdata.set_method([k, v], {}) + conf = cdata + elif not isinstance(conf, ConfigurationDataHolder): raise InterpreterException('Argument "configuration" is not of type configuration_data') mlog.log('Configuring', mlog.bold(output), 'using configuration') if inputfile is not None: @@ -3748,6 +3774,14 @@ different subdirectory. timeout_multiplier = kwargs.get('timeout_multiplier', 1) if not isinstance(timeout_multiplier, int): raise InterpreterException('Timeout multiplier must be a number.') + is_default = kwargs.get('is_default', False) + if not isinstance(is_default, bool): + raise InterpreterException('is_default option must be a boolean') + if is_default: + if self.build.test_setup_default_name is not None: + raise InterpreterException('\'%s\' is already set as default. 
' + 'is_default can be set to true only once' % self.build.test_setup_default_name) + self.build.test_setup_default_name = setup_name env = self.unpack_env_kwarg(kwargs) self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper=exe_wrapper, gdb=gdb, @@ -3834,7 +3868,7 @@ different subdirectory. @stringArgs @noKwargs def func_join_paths(self, node, args, kwargs): - return os.path.join(*args).replace('\\', '/') + return self.join_path_strings(args) def run(self): super().run() @@ -3855,7 +3889,8 @@ different subdirectory. return if 'b_sanitize' not in self.coredata.base_options: return - if self.coredata.base_options['b_lundef'].value: + if (self.coredata.base_options['b_lundef'].value and + self.coredata.base_options['b_sanitize'].value != 'none'): mlog.warning('''Trying to use {} sanitizer on Clang with b_lundef. This will probably not work. Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_sanitize'].value)) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 1c74eeb..66e9dd6 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -21,6 +21,14 @@ from . import environment, dependencies import os, copy, re, types from functools import wraps +class ObjectHolder: + def __init__(self, obj, subproject=None): + self.held_object = obj + self.subproject = subproject + + def __repr__(self): + return '<Holder: {!r}>'.format(self.held_object) + # Decorators for method calls. 
def check_stringlist(a, msg='Arguments must be strings.'): @@ -137,6 +145,17 @@ def noArgsFlattening(f): setattr(f, 'no-args-flattening', True) return f +def disablerIfNotFound(f): + @wraps(f) + def wrapped(*wrapped_args, **wrapped_kwargs): + kwargs = _get_callee_args(wrapped_args)[3] + disabler = kwargs.pop('disabler', False) + ret = f(*wrapped_args, **wrapped_kwargs) + if disabler and not ret.held_object.found(): + return Disabler() + return ret + return wrapped + class permittedKwargs: def __init__(self, permitted): @@ -292,6 +311,12 @@ class InvalidArguments(InterpreterException): class SubdirDoneRequest(BaseException): pass +class ContinueRequest(BaseException): + pass + +class BreakRequest(BaseException): + pass + class InterpreterObject: def __init__(self): self.methods = {} @@ -357,6 +382,9 @@ class InterpreterBase: me.file = environment.build_filename raise me + def join_path_strings(self, args): + return os.path.join(*args).replace('\\', '/') + def parse_project(self): """ Parses project() and initializes languages, compilers etc. 
Do this @@ -445,6 +473,10 @@ class InterpreterBase: return self.evaluate_indexing(cur) elif isinstance(cur, mparser.TernaryNode): return self.evaluate_ternary(cur) + elif isinstance(cur, mparser.ContinueNode): + raise ContinueRequest() + elif isinstance(cur, mparser.BreakNode): + raise BreakRequest() elif self.is_elementary_type(cur): return cur else: @@ -487,6 +519,13 @@ class InterpreterBase: return False return True + def evaluate_in(self, val1, val2): + if not isinstance(val1, (str, int, float, ObjectHolder)): + raise InvalidArguments('lvalue of "in" operator must be a string, integer, float, or object') + if not isinstance(val2, (list, dict)): + raise InvalidArguments('rvalue of "in" operator must be an array or a dict') + return val1 in val2 + def evaluate_comparison(self, node): val1 = self.evaluate_statement(node.left) if is_disabler(val1): @@ -494,6 +533,10 @@ class InterpreterBase: val2 = self.evaluate_statement(node.right) if is_disabler(val2): return val2 + if node.ctype == 'in': + return self.evaluate_in(val1, val2) + elif node.ctype == 'notin': + return not self.evaluate_in(val1, val2) valid = self.validate_comparison_types(val1, val2) # Ordering comparisons of different types isn't allowed since PR #1810 # (0.41.0). 
Since PR #2884 we also warn about equality comparisons of @@ -588,9 +631,13 @@ The result of this is undefined and will become a hard error in a future Meson r raise InvalidCode('Multiplication works only with integers.') return l * r elif cur.operation == 'div': - if not isinstance(l, int) or not isinstance(r, int): - raise InvalidCode('Division works only with integers.') - return l // r + if isinstance(l, str) and isinstance(r, str): + return self.join_path_strings((l, r)) + if isinstance(l, int) and isinstance(r, int): + if r == 0: + raise InvalidCode('Division by zero.') + return l // r + raise InvalidCode('Division works only with strings or integers.') elif cur.operation == 'mod': if not isinstance(l, int) or not isinstance(r, int): raise InvalidCode('Modulo works only with integers.') @@ -622,7 +669,12 @@ The result of this is undefined and will become a hard error in a future Meson r return items for item in items: self.set_variable(varname, item) - self.evaluate_codeblock(node.block) + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + continue + except BreakRequest: + break elif isinstance(items, dict): if len(node.varnames) != 2: raise InvalidArguments('Foreach on dict unpacks key and value') @@ -631,7 +683,12 @@ The result of this is undefined and will become a hard error in a future Meson r for key, value in items.items(): self.set_variable(node.varnames[0].value, key) self.set_variable(node.varnames[1].value, value) - self.evaluate_codeblock(node.block) + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + continue + except BreakRequest: + break else: raise InvalidArguments('Items of foreach loop must be an array or a dict') diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 30ca5d8..5432514 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -139,11 +139,10 @@ class ArmarLinker(ArLinker): return False class DLinker(StaticLinker): - def __init__(self, exelist, is_64, is_msvc): + def 
__init__(self, exelist, arch): self.exelist = exelist self.id = exelist[0] - self.is_64 = is_64 - self.is_msvc = is_msvc + self.arch = arch def can_linker_accept_rsp(self): return mesonlib.is_windows() @@ -165,9 +164,9 @@ class DLinker(StaticLinker): def get_linker_always_args(self): if is_windows(): - if self.is_64: + if self.arch == 'x86_64': return ['-m64'] - elif self.is_msvc and self.id == 'dmd': + elif self.arch == 'x86_mscoff' and self.id == 'dmd': return ['-m32mscoff'] return ['-m32'] return [] @@ -193,3 +192,54 @@ class DLinker(StaticLinker): def get_link_debugfile_args(self, targetfile): return [] + +class CcrxLinker(StaticLinker): + + def __init__(self, exelist): + self.exelist = exelist + self.id = 'rlink' + pc, stdo = Popen_safe(self.exelist + ['-h'])[0:2] + self.std_args = [] + + def can_linker_accept_rsp(self): + return False + + def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): + return [] + + def get_exelist(self): + return self.exelist[:] + + def get_std_link_args(self): + return self.std_args + + def get_output_args(self, target): + return ['-output=%s' % target] + + def get_buildtype_linker_args(self, buildtype): + return [] + + def get_linker_always_args(self): + return ['-nologo', '-form=library'] + + def get_coverage_link_args(self): + return [] + + def get_always_args(self): + return [] + + def thread_link_flags(self, env): + return [] + + def openmp_flags(self): + return [] + + def get_option_link_args(self, options): + return [] + + @classmethod + def unix_args_to_native(cls, args): + return args[:] + + def get_link_debugfile_args(self, targetfile): + return [] diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 2fd69b0..28589da 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -13,17 +13,13 @@ # limitations under the License. import os -import argparse from . 
import (coredata, mesonlib, build) -def buildparser(): - parser = argparse.ArgumentParser(prog='meson configure') +def add_arguments(parser): coredata.register_builtin_arguments(parser) - parser.add_argument('builddir', nargs='?', default='.') parser.add_argument('--clearcache', action='store_true', default=False, help='Clear cached state (e.g. found dependencies)') - return parser class ConfException(mesonlib.MesonException): @@ -119,21 +115,21 @@ class Conf: print(' Source dir', self.build.environment.source_dir) print(' Build dir ', self.build.environment.build_dir) - dir_option_names = ['prefix', - 'libdir', - 'libexecdir', - 'bindir', - 'sbindir', - 'includedir', + dir_option_names = ['bindir', 'datadir', - 'mandir', + 'includedir', 'infodir', + 'libdir', + 'libexecdir', 'localedir', - 'sysconfdir', 'localstatedir', - 'sharedstatedir'] - test_option_names = ['stdsplit', - 'errorlogs'] + 'mandir', + 'prefix', + 'sbindir', + 'sharedstatedir', + 'sysconfdir'] + test_option_names = ['errorlogs', + 'stdsplit'] core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names] dir_options = {k: o for k, o in self.coredata.builtins.items() if k in dir_option_names} @@ -149,9 +145,7 @@ class Conf: self.print_options('Testing options', test_options) -def run(args): - args = mesonlib.expand_arguments(args) - options = buildparser().parse_args(args) +def run(options): coredata.parse_cmd_line_options(options) builddir = os.path.abspath(os.path.realpath(options.builddir)) try: @@ -159,6 +153,7 @@ def run(args): save = False if len(options.cmd_line_options) > 0: c.set_options(options.cmd_line_options) + coredata.update_cmd_line_file(builddir, options) save = True elif options.clearcache: c.clear_cache() diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index 8a2dc0c..59d4f81 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -20,6 +20,9 @@ import stat import time import platform, subprocess, operator, os, 
shutil, re import collections +from enum import Enum +from functools import lru_cache + from mesonbuild import mlog have_fcntl = False @@ -48,6 +51,23 @@ else: python_command = [sys.executable] meson_command = None +def set_meson_command(mainfile): + global python_command + global meson_command + # On UNIX-like systems `meson` is a Python script + # On Windows `meson` and `meson.exe` are wrapper exes + if not mainfile.endswith('.py'): + meson_command = [mainfile] + elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'): + # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain + meson_command = python_command + ['-m', 'mesonbuild.mesonmain'] + else: + # Either run uninstalled, or full path to meson-script.py + meson_command = python_command + [mainfile] + # We print this value for unit tests. + if 'MESON_COMMAND_TESTS' in os.environ: + mlog.log('meson_command is {!r}'.format(meson_command)) + def is_ascii_string(astring): try: if isinstance(astring, str): @@ -204,6 +224,7 @@ class File: return ret.format(self.relative_name()) @staticmethod + @lru_cache(maxsize=None) def from_source_file(source_root, subdir, fname): if not os.path.isfile(os.path.join(source_root, subdir, fname)): raise MesonException('File %s does not exist.' 
% fname) @@ -217,12 +238,14 @@ class File: def from_absolute_file(fname): return File(False, '', fname) + @lru_cache(maxsize=None) def rel_to_builddir(self, build_to_src): if self.is_built: return self.relative_name() else: return os.path.join(build_to_src, self.subdir, self.fname) + @lru_cache(maxsize=None) def absolute_path(self, srcdir, builddir): absdir = srcdir if self.is_built: @@ -241,6 +264,7 @@ class File: def __hash__(self): return hash((self.fname, self.subdir, self.is_built)) + @lru_cache(maxsize=None) def relative_name(self): return os.path.join(self.subdir, self.fname) @@ -260,6 +284,53 @@ def classify_unity_sources(compilers, sources): compsrclist[comp].append(src) return compsrclist +class OrderedEnum(Enum): + """ + An Enum which additionally offers homogeneous ordered comparison. + """ + def __ge__(self, other): + if self.__class__ is other.__class__: + return self.value >= other.value + return NotImplemented + + def __gt__(self, other): + if self.__class__ is other.__class__: + return self.value > other.value + return NotImplemented + + def __le__(self, other): + if self.__class__ is other.__class__: + return self.value <= other.value + return NotImplemented + + def __lt__(self, other): + if self.__class__ is other.__class__: + return self.value < other.value + return NotImplemented + +MachineChoice = OrderedEnum('MachineChoice', ['BUILD', 'HOST', 'TARGET']) + +class PerMachine: + def __init__(self, build, host, target): + self.build = build + self.host = host + self.target = target + + def __getitem__(self, machine: MachineChoice): + return { + MachineChoice.BUILD: self.build, + MachineChoice.HOST: self.host, + MachineChoice.TARGET: self.target + }[machine] + + def __setitem__(self, machine: MachineChoice, val): + key = { + MachineChoice.BUILD: 'build', + MachineChoice.HOST: 'host', + MachineChoice.TARGET: 'target' + }[machine] + setattr(self, key, val) + def is_osx(): return platform.system().lower() == 'darwin' @@ -292,77 +363,93 @@ def 
is_dragonflybsd(): def is_freebsd(): return platform.system().lower() == 'freebsd' +def _get_machine_is_cross(env, is_cross): + """ + This is not morally correct, but works for now. For cross builds the build + and host machines differ. `is_cross == true` means the host machine, while + `is_cross == false` means the build machine. Both are used in practice, + even though the documentation refers to the host machine implying we should + hard-code it. For non-cross builds `is_cross == false` is passed but the + host and build machines are identical so it doesn't matter. + + Users for `for_*` should instead specify up front which machine they want + and query that like: + + env.machines[MachineChoice.HOST].is_haiku() + + """ + for_machine = MachineChoice.HOST if is_cross else MachineChoice.BUILD + return env.machines[for_machine] + def for_windows(is_cross, env): """ Host machine is windows? + Deprecated: Please use `env.machines[for_machine].is_windows()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_windows() - return env.cross_info.get_host_system() == 'windows' + return _get_machine_is_cross(env, is_cross).is_windows() def for_cygwin(is_cross, env): """ Host machine is cygwin? + Deprecated: Please use `env.machines[for_machine].is_cygwin()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_cygwin() - return env.cross_info.get_host_system() == 'cygwin' + return _get_machine_is_cross(env, is_cross).is_cygwin() def for_linux(is_cross, env): """ Host machine is linux? + Deprecated: Please use `env.machines[for_machine].is_linux()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_linux() - return env.cross_info.get_host_system() == 'linux' + return _get_machine_is_cross(env, is_cross).is_linux() def for_darwin(is_cross, env): """ Host machine is Darwin (iOS/OS X)? 
+ Deprecated: Please use `env.machines[for_machine].is_darwin()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_osx() - return env.cross_info.get_host_system() in ('darwin', 'ios') + return _get_machine_is_cross(env, is_cross).is_darwin() def for_android(is_cross, env): """ Host machine is Android? + Deprecated: Please use `env.machines[for_machine].is_android()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_android() - return env.cross_info.get_host_system() == 'android' + return _get_machine_is_cross(env, is_cross).is_android() def for_haiku(is_cross, env): """ Host machine is Haiku? + Deprecated: Please use `env.machines[for_machine].is_haiku()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_haiku() - return env.cross_info.get_host_system() == 'haiku' + return _get_machine_is_cross(env, is_cross).is_haiku() def for_openbsd(is_cross, env): """ Host machine is OpenBSD? + Deprecated: Please use `env.machines[for_machine].is_openbsd()`. + Note: 'host' is the machine on which compiled binaries will run """ - if not is_cross: - return is_openbsd() - elif env.cross_info.has_host(): - return env.cross_info.config['host_machine']['system'] == 'openbsd' - return False + return _get_machine_is_cross(env, is_cross).is_openbsd() def exe_exists(arglist): try: @@ -520,6 +607,7 @@ def version_compare_condition_with_min(condition, minimum): # # Map versions in the constraint of the form '0.46' to '0.46.0', to embed # this knowledge of the meson versioning scheme. + condition = condition.strip() if re.match('^\d+.\d+$', condition): condition += '.0' @@ -1061,7 +1149,7 @@ def substring_is_in_list(substr, strlist): return True return False -class OrderedSet(collections.MutableSet): +class OrderedSet(collections.abc.MutableSet): """A set that preserves the order in which items are added, by first insertion. 
""" @@ -1125,3 +1213,12 @@ class BuildDirLock: elif have_msvcrt: msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1) self.lockfile.close() + +def relpath(path, start): + # On Windows a relative path can't be evaluated for paths on two different + # drives (i.e. c:\foo and f:\bar). The only thing left to do is to use the + # original absolute path. + try: + return os.path.relpath(path, start) + except ValueError: + return path diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index cd925e5..ebe2c8e 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -12,261 +12,139 @@ # See the License for the specific language governing permissions and # limitations under the License. -import time -import sys, stat, traceback, argparse -import datetime +import sys import os.path -import platform -import cProfile as profile +import importlib +import traceback +import argparse -from . import environment, interpreter, mesonlib -from . import build -from . import mlog, coredata +from . import mesonlib +from . import mlog +from . 
import mconf, minit, minstall, mintro, msetup, mtest, rewriter from .mesonlib import MesonException from .environment import detect_msys2_arch -from .wrap import WrapMode - -default_warning = '1' - -def create_parser(): - p = argparse.ArgumentParser(prog='meson') - coredata.register_builtin_arguments(p) - p.add_argument('--cross-file', default=None, - help='File describing cross compilation environment.') - p.add_argument('-v', '--version', action='version', - version=coredata.version) - # See the mesonlib.WrapMode enum for documentation - p.add_argument('--wrap-mode', default=None, - type=wrapmodetype, choices=WrapMode, - help='Special wrap mode to use') - p.add_argument('--profile-self', action='store_true', dest='profile', - help=argparse.SUPPRESS) - p.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings', - help='Make all Meson warnings fatal') - p.add_argument('--reconfigure', action='store_true', - help='Set options and reconfigure the project. Useful when new ' + - 'options have been added to the project and the default value ' + - 'is not working.') - p.add_argument('builddir', nargs='?', default=None) - p.add_argument('sourcedir', nargs='?', default=None) - return p - -def wrapmodetype(string): - try: - return getattr(WrapMode, string) - except AttributeError: - msg = ', '.join([t.name.lower() for t in WrapMode]) - msg = 'invalid argument {!r}, use one of {}'.format(string, msg) - raise argparse.ArgumentTypeError(msg) - -class MesonApp: - - def __init__(self, options): - (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir, - options.sourcedir, - options.reconfigure) - self.options = options - - def has_build_file(self, dirname): - fname = os.path.join(dirname, environment.build_filename) - return os.path.exists(fname) - - def validate_core_dirs(self, dir1, dir2): - if dir1 is None: - if dir2 is None: - if not os.path.exists('meson.build') and os.path.exists('../meson.build'): - dir2 = '..' 
- else: - raise MesonException('Must specify at least one directory name.') - dir1 = os.getcwd() - if dir2 is None: - dir2 = os.getcwd() - ndir1 = os.path.abspath(os.path.realpath(dir1)) - ndir2 = os.path.abspath(os.path.realpath(dir2)) - if not os.path.exists(ndir1): - os.makedirs(ndir1) - if not os.path.exists(ndir2): - os.makedirs(ndir2) - if not stat.S_ISDIR(os.stat(ndir1).st_mode): - raise MesonException('%s is not a directory' % dir1) - if not stat.S_ISDIR(os.stat(ndir2).st_mode): - raise MesonException('%s is not a directory' % dir2) - if os.path.samefile(dir1, dir2): - raise MesonException('Source and build directories must not be the same. Create a pristine build directory.') - if self.has_build_file(ndir1): - if self.has_build_file(ndir2): - raise MesonException('Both directories contain a build file %s.' % environment.build_filename) - return ndir1, ndir2 - if self.has_build_file(ndir2): - return ndir2, ndir1 - raise MesonException('Neither directory contains a build file %s.' % environment.build_filename) - - def validate_dirs(self, dir1, dir2, reconfigure): - (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2) - priv_dir = os.path.join(build_dir, 'meson-private/coredata.dat') - if os.path.exists(priv_dir): - if not reconfigure: - print('Directory already configured.\n' - '\nJust run your build command (e.g. 
ninja) and Meson will regenerate as necessary.\n' - 'If ninja fails, run "ninja reconfigure" or "meson --reconfigure"\n' - 'to force Meson to regenerate.\n' - '\nIf build failures persist, manually wipe your build directory to clear any\n' - 'stored system data.\n' - '\nTo change option values, run "meson configure" instead.') - sys.exit(1) +from .wrap import wraptool + + +class CommandLineParser: + def __init__(self): + self.commands = {} + self.hidden_commands = [] + self.parser = argparse.ArgumentParser(prog='meson') + self.subparsers = self.parser.add_subparsers(title='Commands', + description='If no command is specified it defaults to setup command.') + self.add_command('setup', msetup.add_arguments, msetup.run, + help='Configure the project') + self.add_command('configure', mconf.add_arguments, mconf.run, + help='Change project options',) + self.add_command('install', minstall.add_arguments, minstall.run, + help='Install the project') + self.add_command('introspect', mintro.add_arguments, mintro.run, + help='Introspect project') + self.add_command('init', minit.add_arguments, minit.run, + help='Create a new project') + self.add_command('test', mtest.add_arguments, mtest.run, + help='Run tests') + self.add_command('wrap', wraptool.add_arguments, wraptool.run, + help='Wrap tools') + self.add_command('help', self.add_help_arguments, self.run_help_command, + help='Print help of a subcommand') + + # Hidden commands + self.add_command('rewrite', rewriter.add_arguments, rewriter.run, + help=argparse.SUPPRESS) + self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command, + help=argparse.SUPPRESS) + + def add_command(self, name, add_arguments_func, run_func, help): + # FIXME: Cannot have hidden subparser: + # https://bugs.python.org/issue22848 + if help == argparse.SUPPRESS: + p = argparse.ArgumentParser(prog='meson ' + name) + self.hidden_commands.append(name) else: - if reconfigure: - print('Directory does not contain a valid build 
tree:\n{}'.format(build_dir)) - sys.exit(1) - return src_dir, build_dir - - def check_pkgconfig_envvar(self, env): - curvar = os.environ.get('PKG_CONFIG_PATH', '') - if curvar != env.coredata.pkgconf_envvar: - mlog.warning('PKG_CONFIG_PATH has changed between invocations from "%s" to "%s".' % - (env.coredata.pkgconf_envvar, curvar)) - env.coredata.pkgconf_envvar = curvar - - def generate(self): - env = environment.Environment(self.source_dir, self.build_dir, self.options) - mlog.initialize(env.get_log_dir(), self.options.fatal_warnings) - if self.options.profile: - mlog.set_timestamp_start(time.monotonic()) - with mesonlib.BuildDirLock(self.build_dir): - self._generate(env) - - def _generate(self, env): - mlog.debug('Build started at', datetime.datetime.now().isoformat()) - mlog.debug('Main binary:', sys.executable) - mlog.debug('Python system:', platform.system()) - mlog.log(mlog.bold('The Meson build system')) - self.check_pkgconfig_envvar(env) - mlog.log('Version:', coredata.version) - mlog.log('Source dir:', mlog.bold(self.source_dir)) - mlog.log('Build dir:', mlog.bold(self.build_dir)) - if env.is_cross_build(): - mlog.log('Build type:', mlog.bold('cross build')) + p = self.subparsers.add_parser(name, help=help) + add_arguments_func(p) + p.set_defaults(run_func=run_func) + self.commands[name] = p + + def add_runpython_arguments(self, parser): + parser.add_argument('script_file') + parser.add_argument('script_args', nargs=argparse.REMAINDER) + + def run_runpython_command(self, options): + import runpy + sys.argv[1:] = options.script_args + runpy.run_path(options.script_file, run_name='__main__') + return 0 + + def add_help_arguments(self, parser): + parser.add_argument('command', nargs='?') + + def run_help_command(self, options): + if options.command: + self.commands[options.command].print_help() else: - mlog.log('Build type:', mlog.bold('native build')) - b = build.Build(env) - - intr = interpreter.Interpreter(b) - if env.is_cross_build(): - mlog.log('Host 
machine cpu family:', mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {}))) - mlog.log('Host machine cpu:', mlog.bold(intr.builtin['host_machine'].cpu_method([], {}))) - mlog.log('Target machine cpu family:', mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {}))) - mlog.log('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {}))) - mlog.log('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {}))) - mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {}))) - if self.options.profile: - fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log') - profile.runctx('intr.run()', globals(), locals(), filename=fname) + self.parser.print_help() + return 0 + + def run(self, args): + # If first arg is not a known command, assume user wants to run the setup + # command. + known_commands = list(self.commands.keys()) + ['-h', '--help'] + if len(args) == 0 or args[0] not in known_commands: + args = ['setup'] + args + + # Hidden commands have their own parser instead of using the global one + if args[0] in self.hidden_commands: + parser = self.commands[args[0]] + args = args[1:] else: - intr.run() - # Print all default option values that don't match the current value - for def_opt_name, def_opt_value, cur_opt_value in intr.get_non_matching_default_options(): - mlog.log('Option', mlog.bold(def_opt_name), 'is:', - mlog.bold(str(cur_opt_value)), - '[default: {}]'.format(str(def_opt_value))) + parser = self.parser + + args = mesonlib.expand_arguments(args) + options = parser.parse_args(args) + try: - dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat') - # We would like to write coredata as late as possible since we use the existence of - # this file to check if we generated the build file successfully. Since coredata - # includes settings, the build files must depend on it and appear newer. 
However, due - # to various kernel caches, we cannot guarantee that any time in Python is exactly in - # sync with the time that gets applied to any files. Thus, we dump this file as late as - # possible, but before build files, and if any error occurs, delete it. - cdf = env.dump_coredata() - if self.options.profile: - fname = 'profile-{}-backend.log'.format(intr.backend.name) - fname = os.path.join(self.build_dir, 'meson-private', fname) - profile.runctx('intr.backend.generate(intr)', globals(), locals(), filename=fname) - else: - intr.backend.generate(intr) - build.save(b, dumpfile) - # Post-conf scripts must be run after writing coredata or else introspection fails. - intr.backend.run_postconf_scripts() - except: - if 'cdf' in locals(): - old_cdf = cdf + '.prev' - if os.path.exists(old_cdf): - os.replace(old_cdf, cdf) - else: - os.unlink(cdf) - raise + return options.run_func(options) + except MesonException as e: + mlog.exception(e) + logfile = mlog.shutdown() + if logfile is not None: + mlog.log("\nA full log can be found at", mlog.bold(logfile)) + if os.environ.get('MESON_FORCE_BACKTRACE'): + raise + return 1 + except Exception as e: + if os.environ.get('MESON_FORCE_BACKTRACE'): + raise + traceback.print_exc() + return 2 + finally: + mlog.shutdown() -def run_script_command(args): - cmdname = args[0] - cmdargs = args[1:] - if cmdname == 'exe': - import mesonbuild.scripts.meson_exe as abc - cmdfunc = abc.run - elif cmdname == 'cleantrees': - import mesonbuild.scripts.cleantrees as abc - cmdfunc = abc.run - elif cmdname == 'commandrunner': - import mesonbuild.scripts.commandrunner as abc - cmdfunc = abc.run - elif cmdname == 'delsuffix': - import mesonbuild.scripts.delwithsuffix as abc - cmdfunc = abc.run - elif cmdname == 'dirchanger': - import mesonbuild.scripts.dirchanger as abc - cmdfunc = abc.run - elif cmdname == 'gtkdoc': - import mesonbuild.scripts.gtkdochelper as abc - cmdfunc = abc.run - elif cmdname == 'msgfmthelper': - import 
mesonbuild.scripts.msgfmthelper as abc - cmdfunc = abc.run - elif cmdname == 'hotdoc': - import mesonbuild.scripts.hotdochelper as abc - cmdfunc = abc.run - elif cmdname == 'regencheck': - import mesonbuild.scripts.regen_checker as abc - cmdfunc = abc.run - elif cmdname == 'symbolextractor': - import mesonbuild.scripts.symbolextractor as abc - cmdfunc = abc.run - elif cmdname == 'scanbuild': - import mesonbuild.scripts.scanbuild as abc - cmdfunc = abc.run - elif cmdname == 'vcstagger': - import mesonbuild.scripts.vcstagger as abc - cmdfunc = abc.run - elif cmdname == 'gettext': - import mesonbuild.scripts.gettext as abc - cmdfunc = abc.run - elif cmdname == 'yelphelper': - import mesonbuild.scripts.yelphelper as abc - cmdfunc = abc.run - elif cmdname == 'uninstall': - import mesonbuild.scripts.uninstall as abc - cmdfunc = abc.run - elif cmdname == 'dist': - import mesonbuild.scripts.dist as abc - cmdfunc = abc.run - elif cmdname == 'coverage': - import mesonbuild.scripts.coverage as abc - cmdfunc = abc.run - else: - raise MesonException('Unknown internal command {}.'.format(cmdname)) - return cmdfunc(cmdargs) +def run_script_command(script_name, script_args): + # Map script name to module name for those that doesn't match + script_map = {'exe': 'meson_exe', + 'install': 'meson_install', + 'delsuffix': 'delwithsuffix', + 'gtkdoc': 'gtkdochelper', + 'hotdoc': 'hotdochelper', + 'regencheck': 'regen_checker'} + module_name = script_map.get(script_name, script_name) -def set_meson_command(mainfile): - # On UNIX-like systems `meson` is a Python script - # On Windows `meson` and `meson.exe` are wrapper exes - if not mainfile.endswith('.py'): - mesonlib.meson_command = [mainfile] - elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'): - # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain - mesonlib.meson_command = mesonlib.python_command + ['-m', 'mesonbuild.mesonmain'] - else: - # Either run 
uninstalled, or full path to meson-script.py - mesonlib.meson_command = mesonlib.python_command + [mainfile] - # We print this value for unit tests. - if 'MESON_COMMAND_TESTS' in os.environ: - mlog.log('meson_command is {!r}'.format(mesonlib.meson_command)) + try: + module = importlib.import_module('mesonbuild.scripts.' + module_name) + except ModuleNotFoundError as e: + mlog.exception(e) + return 1 + + try: + return module.run(script_args) + except MesonException as e: + mlog.error('Error in {} helper script:'.format(script_name)) + mlog.exception(e) + return 1 def run(original_args, mainfile): if sys.version_info < (3, 5): @@ -274,6 +152,7 @@ def run(original_args, mainfile): print('You have python %s.' % sys.version) print('Please update your environment') return 1 + # https://github.com/mesonbuild/meson/issues/3653 if sys.platform.lower() == 'msys': mlog.error('This python3 seems to be msys/python on MSYS2 Windows, which is known to have path semantics incompatible with Meson') @@ -283,104 +162,23 @@ def run(original_args, mainfile): else: mlog.error('Please download and use Python as detailed at: https://mesonbuild.com/Getting-meson.html') return 2 + # Set the meson command that will be used to run scripts and so on - set_meson_command(mainfile) + mesonlib.set_meson_command(mainfile) + args = original_args[:] - if len(args) > 0: - # First check if we want to run a subcommand. - cmd_name = args[0] - remaining_args = args[1:] - # "help" is a special case: Since printing of the help may be - # delegated to a subcommand, we edit cmd_name before executing - # the rest of the logic here. - if cmd_name == 'help': - remaining_args += ['--help'] - args = remaining_args - cmd_name = args[0] - if cmd_name == 'test': - from . import mtest - return mtest.run(remaining_args) - elif cmd_name == 'setup': - args = remaining_args - # FALLTHROUGH like it's 1972. - elif cmd_name == 'install': - from . 
import minstall - return minstall.run(remaining_args) - elif cmd_name == 'introspect': - from . import mintro - return mintro.run(remaining_args) - elif cmd_name == 'rewrite': - from . import rewriter - return rewriter.run(remaining_args) - elif cmd_name == 'configure': - try: - from . import mconf - return mconf.run(remaining_args) - except MesonException as e: - mlog.exception(e) - sys.exit(1) - elif cmd_name == 'wrap': - from .wrap import wraptool - return wraptool.run(remaining_args) - elif cmd_name == 'init': - from . import minit - return minit.run(remaining_args) - elif cmd_name == 'runpython': - import runpy - script_file = remaining_args[0] - sys.argv[1:] = remaining_args[1:] - runpy.run_path(script_file, run_name='__main__') - sys.exit(0) - # No special command? Do the basic setup/reconf. + # Special handling of internal commands called from backends, they don't + # need to go through argparse. if len(args) >= 2 and args[0] == '--internal': if args[1] == 'regenerate': # Rewrite "meson --internal regenerate" command line to # "meson --reconfigure" args = ['--reconfigure'] + args[2:] else: - script = args[1] - try: - sys.exit(run_script_command(args[1:])) - except MesonException as e: - mlog.error('\nError in {} helper script:'.format(script)) - mlog.exception(e) - sys.exit(1) - - parser = create_parser() - - args = mesonlib.expand_arguments(args) - options = parser.parse_args(args) - coredata.parse_cmd_line_options(options) - try: - app = MesonApp(options) - except Exception as e: - # Log directory does not exist, so just print - # to stdout. 
- print('Error during basic setup:\n') - print(e) - return 1 - try: - app.generate() - except Exception as e: - if isinstance(e, MesonException): - mlog.exception(e) - # Path to log file - mlog.shutdown() - logfile = os.path.join(app.build_dir, environment.Environment.log_dir, mlog.log_fname) - mlog.log("\nA full log can be found at", mlog.bold(logfile)) - if os.environ.get('MESON_FORCE_BACKTRACE'): - raise - return 1 - else: - if os.environ.get('MESON_FORCE_BACKTRACE'): - raise - traceback.print_exc() - return 2 - finally: - mlog.shutdown() + return run_script_command(args[1], args[2:]) - return 0 + return CommandLineParser().run(args) def main(): # Always resolve the command path so Ninja can find it for regen, tests, etc. diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py index a66361f..394fe40 100644 --- a/mesonbuild/minit.py +++ b/mesonbuild/minit.py @@ -14,7 +14,7 @@ """Code that creates simple startup projects.""" -import os, sys, argparse, re, shutil, subprocess +import os, sys, re, shutil, subprocess from glob import glob from mesonbuild import mesonlib from mesonbuild.environment import detect_ninja @@ -425,8 +425,7 @@ def create_meson_build(options): open('meson.build', 'w').write(content) print('Generated meson.build file:\n\n' + content) -def run(args): - parser = argparse.ArgumentParser(prog='meson') +def add_arguments(parser): parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", help="source files. default: all recognized files in current directory") parser.add_argument("-n", "--name", help="project name. 
default: name of current directory") @@ -441,7 +440,8 @@ def run(args): parser.add_argument('--type', default='executable', choices=['executable', 'library']) parser.add_argument('--version', default='0.1') - options = parser.parse_args(args) + +def run(options): if len(glob('*')) == 0: autodetect_options(options, sample=True) if not options.language: diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index 1d72179..8ac6aab 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys, pickle, os, shutil, subprocess, gzip, errno +import sys, pickle, os, shutil, subprocess, errno import shlex -import argparse from glob import glob from .scripts import depfixer from .scripts import destdir_join @@ -33,15 +32,13 @@ build definitions so that it will not break when the change happens.''' selinux_updates = [] -def buildparser(): - parser = argparse.ArgumentParser(prog='meson install') +def add_arguments(parser): parser.add_argument('-C', default='.', dest='wd', help='directory to cd into before running') parser.add_argument('--no-rebuild', default=False, action='store_true', help='Do not rebuild before installing.') parser.add_argument('--only-changed', default=False, action='store_true', help='Only overwrite files that are older than the copied file.') - return parser class DirMaker: def __init__(self, lf): @@ -241,7 +238,7 @@ class Installer: 'a file'.format(to_file)) if self.should_preserve_existing_file(from_file, to_file): append_to_log(self.lf, '# Preserving old file %s\n' % to_file) - print('Preserving existing file %s.' 
% to_file) + print('Preserving existing file %s' % to_file) return False os.remove(to_file) print('Installing %s to %s' % (from_file, outdir)) @@ -319,8 +316,6 @@ class Installer: abs_dst = os.path.join(dst_dir, filepart) if os.path.isdir(abs_dst): print('Tried to copy file %s but a directory of that name already exists.' % abs_dst) - if os.path.exists(abs_dst): - os.remove(abs_dst) parent_dir = os.path.dirname(abs_dst) if not os.path.isdir(parent_dir): os.mkdir(parent_dir) @@ -382,17 +377,7 @@ class Installer: outdir = os.path.dirname(outfilename) d.dirmaker.makedirs(outdir, exist_ok=True) install_mode = m[2] - if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'): - with open(outfilename, 'wb') as of: - with open(full_source_filename, 'rb') as sf: - # Set mtime and filename for reproducibility. - with gzip.GzipFile(fileobj=of, mode='wb', filename='', mtime=0) as gz: - gz.write(sf.read()) - shutil.copystat(full_source_filename, outfilename) - print('Installing %s to %s' % (full_source_filename, outdir)) - append_to_log(self.lf, outfilename) - else: - self.do_copyfile(full_source_filename, outfilename) + self.do_copyfile(full_source_filename, outfilename) set_mode(outfilename, install_mode, d.install_umask) def install_headers(self, d): @@ -501,9 +486,7 @@ class Installer: else: raise -def run(args): - parser = buildparser() - opts = parser.parse_args(args) +def run(opts): datafilename = 'meson-private/install.dat' private_dir = os.path.dirname(datafilename) log_dir = os.path.join(private_dir, '../meson-logs') @@ -520,6 +503,3 @@ def run(args): append_to_log(lf, '# Does not contain files installed by custom scripts.') installer.do_install(datafilename) return 0 - -if __name__ == '__main__': - sys.exit(run(sys.argv[1:])) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 188459a..48ec20f 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -22,13 +22,14 @@ project files and don't need this info.""" import json from . 
import build, mtest, coredata as cdata from . import mesonlib +from . import astinterpreter +from . import mparser +from .interpreterbase import InvalidArguments from .backend import ninjabackend -import argparse import sys, os import pathlib -def buildparser(): - parser = argparse.ArgumentParser(prog='meson introspect') +def add_arguments(parser): parser.add_argument('--targets', action='store_true', dest='list_targets', default=False, help='List top level targets.') parser.add_argument('--installed', action='store_true', dest='list_installed', default=False, @@ -48,7 +49,6 @@ def buildparser(): parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False, help='Information about projects.') parser.add_argument('builddir', nargs='?', default='.', help='The build directory') - return parser def determine_installed_path(target, installdata): install_target = None @@ -127,18 +127,43 @@ def list_target_files(target_name, coredata, builddata): def list_buildoptions(coredata, builddata): optlist = [] - add_keys(optlist, coredata.user_options) - add_keys(optlist, coredata.compiler_options) - add_keys(optlist, coredata.base_options) - add_keys(optlist, coredata.builtins) + + dir_option_names = ['bindir', + 'datadir', + 'includedir', + 'infodir', + 'libdir', + 'libexecdir', + 'localedir', + 'localstatedir', + 'mandir', + 'prefix', + 'sbindir', + 'sharedstatedir', + 'sysconfdir'] + test_option_names = ['errorlogs', + 'stdsplit'] + core_option_names = [k for k in coredata.builtins if k not in dir_option_names + test_option_names] + + dir_options = {k: o for k, o in coredata.builtins.items() if k in dir_option_names} + test_options = {k: o for k, o in coredata.builtins.items() if k in test_option_names} + core_options = {k: o for k, o in coredata.builtins.items() if k in core_option_names} + + add_keys(optlist, core_options, 'core') + add_keys(optlist, coredata.backend_options, 'backend') + add_keys(optlist, coredata.base_options, 'base') + 
add_keys(optlist, coredata.compiler_options, 'compiler') + add_keys(optlist, dir_options, 'directory') + add_keys(optlist, coredata.user_options, 'user') + add_keys(optlist, test_options, 'test') print(json.dumps(optlist)) -def add_keys(optlist, options): +def add_keys(optlist, options, section): keys = list(options.keys()) keys.sort() for key in keys: opt = options[key] - optdict = {'name': key, 'value': opt.value} + optdict = {'name': key, 'value': opt.value, 'section': section} if isinstance(opt, cdata.UserStringOption): typestr = 'string' elif isinstance(opt, cdata.UserBooleanOption): @@ -156,14 +181,18 @@ def add_keys(optlist, options): optdict['description'] = opt.description optlist.append(optdict) -def list_buildsystem_files(builddata): - src_dir = builddata.environment.get_source_dir() +def find_buildsystem_files_list(src_dir): # I feel dirty about this. But only slightly. filelist = [] for root, _, files in os.walk(src_dir): for f in files: if f == 'meson.build' or f == 'meson_options.txt': filelist.append(os.path.relpath(os.path.join(root, f), src_dir)) + return filelist + +def list_buildsystem_files(builddata): + src_dir = builddata.environment.get_source_dir() + filelist = find_buildsystem_files_list(src_dir) print(json.dumps(filelist)) def list_deps(coredata): @@ -197,20 +226,81 @@ def list_tests(testdata): print(json.dumps(result)) def list_projinfo(builddata): - result = {'name': builddata.project_name, 'version': builddata.project_version} + result = {'version': builddata.project_version, + 'descriptive_name': builddata.project_name} subprojects = [] for k, v in builddata.subprojects.items(): c = {'name': k, - 'version': v} + 'version': v, + 'descriptive_name': builddata.projects.get(k)} subprojects.append(c) result['subprojects'] = subprojects print(json.dumps(result)) -def run(args): +class ProjectInfoInterperter(astinterpreter.AstInterpreter): + def __init__(self, source_root, subdir): + super().__init__(source_root, subdir) + 
self.funcs.update({'project': self.func_project}) + self.project_name = None + self.project_version = None + + def func_project(self, node, args, kwargs): + if len(args) < 1: + raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.') + self.project_name = args[0] + self.project_version = kwargs.get('version', 'undefined') + if isinstance(self.project_version, mparser.ElementaryNode): + self.project_version = self.project_version.value + + def set_variable(self, varname, variable): + pass + + def analyze(self): + self.load_root_meson_file() + self.sanity_check_ast() + self.parse_project() + self.run() + +def list_projinfo_from_source(sourcedir): + files = find_buildsystem_files_list(sourcedir) + + result = {'buildsystem_files': []} + subprojects = {} + + for f in files: + f = f.replace('\\', '/') + if f == 'meson.build': + interpreter = ProjectInfoInterperter(sourcedir, '') + interpreter.analyze() + version = None + if interpreter.project_version is str: + version = interpreter.project_version + result.update({'version': version, 'descriptive_name': interpreter.project_name}) + result['buildsystem_files'].append(f) + elif f.startswith('subprojects/'): + subproject_id = f.split('/')[1] + subproject = subprojects.setdefault(subproject_id, {'buildsystem_files': []}) + subproject['buildsystem_files'].append(f) + if f.count('/') == 2 and f.endswith('meson.build'): + interpreter = ProjectInfoInterperter(os.path.join(sourcedir, 'subprojects', subproject_id), '') + interpreter.analyze() + subproject.update({'name': subproject_id, 'version': interpreter.project_version, 'descriptive_name': interpreter.project_name}) + else: + result['buildsystem_files'].append(f) + + subprojects = [obj for name, obj in subprojects.items()] + result['subprojects'] = subprojects + print(json.dumps(result)) + +def run(options): datadir = 'meson-private' - options = buildparser().parse_args(args) if options.builddir is not None: datadir = 
os.path.join(options.builddir, datadir) + if options.builddir.endswith('/meson.build') or options.builddir.endswith('\\meson.build') or options.builddir == 'meson.build': + if options.projectinfo: + sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11] + list_projinfo_from_source(sourcedir) + return 0 if not os.path.isdir(datadir): print('Current directory is not a build dir. Please specify it or ' 'change the working directory to it.') diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index 095b8fd..890cb46 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -104,6 +104,8 @@ def process_markup(args, keep): if log_timestamp_start is not None: arr = ['[{:.3f}]'.format(time.monotonic() - log_timestamp_start)] for arg in args: + if arg is None: + continue if isinstance(arg, str): arr.append(arg) elif isinstance(arg, AnsiDecorator): diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 3d39950..bf49770 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -17,6 +17,7 @@ functionality such as gobject-introspection, gresources and gtk-doc''' import os import copy +import shlex import subprocess from .. 
import build @@ -153,10 +154,10 @@ class GnomeModule(ExtensionModule): # Make source dirs relative to build dir now source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs] - # Always include current directory, but after paths set by user - source_dirs.append(os.path.join(state.build_to_src, state.subdir)) # Ensure build directories of generated deps are included source_dirs += subdirs + # Always include current directory, but after paths set by user + source_dirs.append(os.path.join(state.build_to_src, state.subdir)) for source_dir in OrderedSet(source_dirs): cmd += ['--sourcedir', source_dir] @@ -326,18 +327,20 @@ class GnomeModule(ExtensionModule): for dep in deps: if isinstance(dep, InternalDependency): + cflags.update(dep.get_compile_args()) cflags.update(get_include_args(dep.include_directories)) for lib in dep.libraries: if hasattr(lib, 'held_object'): lib = lib.held_object - internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath)) - libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath, - use_gir_args, True) - cflags.update(libdepflags[0]) - internal_ldflags.update(libdepflags[1]) - external_ldflags.update(libdepflags[2]) - external_ldflags_nodedup += libdepflags[3] - gi_includes.update(libdepflags[4]) + if isinstance(lib, build.SharedLibrary): + internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath)) + libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath, + use_gir_args, True) + cflags.update(libdepflags[0]) + internal_ldflags.update(libdepflags[1]) + external_ldflags.update(libdepflags[2]) + external_ldflags_nodedup += libdepflags[3] + gi_includes.update(libdepflags[4]) extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath, use_gir_args, True) cflags.update(extdepflags[0]) @@ -405,11 +408,21 @@ class GnomeModule(ExtensionModule): else: return cflags, 
internal_ldflags, external_ldflags, external_ldflags_nodedup, gi_includes - def _unwrap_gir_target(self, girtarget): + def _unwrap_gir_target(self, girtarget, state): while hasattr(girtarget, 'held_object'): girtarget = girtarget.held_object - if not isinstance(girtarget, (build.Executable, build.SharedLibrary)): - raise MesonException('Gir target must be an executable or shared library') + + if not isinstance(girtarget, (build.Executable, build.SharedLibrary, + build.StaticLibrary)): + raise MesonException('Gir target must be an executable or library') + + STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1" + if isinstance(girtarget, (build.StaticLibrary)) and \ + not mesonlib.version_compare( + self._get_gir_dep(state)[0].get_version(), + STATIC_BUILD_REQUIRED_VERSION): + raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION) + return girtarget def _get_gir_dep(self, state): @@ -517,7 +530,12 @@ class GnomeModule(ExtensionModule): ret = [] for lang in langs: - for link_arg in state.environment.coredata.get_external_link_args(lang): + if state.environment.is_cross_build(): + link_args = state.environment.cross_info.config["properties"].get(lang + '_link_args', "") + else: + link_args = state.environment.coredata.get_external_link_args(lang) + + for link_arg in link_args: if link_arg.startswith('-L'): ret.append(link_arg) @@ -529,8 +547,13 @@ class GnomeModule(ExtensionModule): for girtarget in girtargets: if isinstance(girtarget, build.Executable): ret += ['--program', girtarget] - elif isinstance(girtarget, build.SharedLibrary): - libname = girtarget.get_basename() + else: + # Because of https://gitlab.gnome.org/GNOME/gobject-introspection/merge_requests/72 + # we can't use the full path until this is merged. 
+ if isinstance(girtarget, build.SharedLibrary): + libname = girtarget.get_basename() + else: + libname = os.path.join("@PRIVATE_OUTDIR_ABS_%s@" % girtarget.get_id(), girtarget.get_filename()) # Needed for the following binutils bug: # https://github.com/mesonbuild/meson/issues/1911 # However, g-ir-scanner does not understand -Wl,-rpath @@ -690,7 +713,10 @@ class GnomeModule(ExtensionModule): def _get_external_args_for_langs(self, state, langs): ret = [] for lang in langs: - ret += state.environment.coredata.get_external_args(lang) + if state.environment.is_cross_build(): + ret += state.environment.cross_info.config["properties"].get(lang + '_args', "") + else: + ret += state.environment.coredata.get_external_args(lang) return ret @staticmethod @@ -721,7 +747,7 @@ class GnomeModule(ExtensionModule): giscanner = self.interpreter.find_program_impl('g-ir-scanner') gicompiler = self.interpreter.find_program_impl('g-ir-compiler') - girtargets = [self._unwrap_gir_target(arg) for arg in args] + girtargets = [self._unwrap_gir_target(arg, state) for arg in args] if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]): raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable') @@ -881,12 +907,15 @@ This will become a hard error in the future.''') rv = [inscript, pottarget, potarget] return ModuleReturnValue(None, rv) + @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['c_args']) + @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['module_version']) @FeatureNewKwargs('gnome.gtkdoc', '0.37.0', ['namespace', 'mode']) @permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install', 'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile', 'fixxref_args', 'html_args', 'html_assets', 'content_files', 'mkdb_args', 'ignore_headers', 'include_directories', - 'namespace', 'mode', 'expand_content_files'}) + 'namespace', 'mode', 'expand_content_files', 'module_version', + 'c_args'}) def 
gtkdoc(self, state, args, kwargs): if len(args) != 1: raise MesonException('Gtkdoc must have one positional argument.') @@ -901,11 +930,14 @@ This will become a hard error in the future.''') main_xml = kwargs.get('main_xml', '') if not isinstance(main_xml, str): raise MesonException('Main xml keyword argument must be a string.') + moduleversion = kwargs.get('module_version', '') + if not isinstance(moduleversion, str): + raise MesonException('Module version keyword argument must be a string.') if main_xml != '': if main_file != '': raise MesonException('You can only specify main_xml or main_sgml, not both.') main_file = main_xml - targetname = modulename + '-doc' + targetname = modulename + ('-' + moduleversion if moduleversion else '') + '-doc' command = state.environment.get_build_command() namespace = kwargs.get('namespace', '') @@ -936,6 +968,7 @@ This will become a hard error in the future.''') '--headerdirs=' + '@@'.join(header_dirs), '--mainfile=' + main_file, '--modulename=' + modulename, + '--moduleversion=' + moduleversion, '--mode=' + mode] if namespace: args.append('--namespace=' + namespace) @@ -988,7 +1021,9 @@ This will become a hard error in the future.''') def _get_build_args(self, kwargs, state, depends): args = [] deps = extract_as_list(kwargs, 'dependencies', unholder=True) - cflags, internal_ldflags, external_ldflags, gi_includes = \ + cflags = OrderedSet() + cflags.update(mesonlib.stringlistify(kwargs.pop('c_args', []))) + deps_cflags, internal_ldflags, external_ldflags, gi_includes = \ self._get_dependencies_flags(deps, state, depends, include_rpath=True) inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories') for incd in inc_dirs: @@ -996,25 +1031,32 @@ This will become a hard error in the future.''') raise MesonException( 'Gir include dirs should be include_directories().') + cflags.update(deps_cflags) cflags.update(get_include_args(inc_dirs)) ldflags = OrderedSet() ldflags.update(internal_ldflags) 
ldflags.update(external_ldflags) if state.environment.is_cross_build(): + cflags.update(state.environment.cross_info.config["properties"].get('c_args', "")) + ldflags.update(state.environment.cross_info.config["properties"].get('c_link_args', "")) compiler = state.environment.coredata.cross_compilers.get('c') else: cflags.update(state.environment.coredata.get_external_args('c')) ldflags.update(state.environment.coredata.get_external_link_args('c')) compiler = state.environment.coredata.compilers.get('c') + compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)]) + cflags.update(compiler_flags[0]) + ldflags.update(compiler_flags[1]) + ldflags.update(compiler_flags[2]) if compiler: - args += ['--cc=%s' % ' '.join(compiler.get_exelist())] - args += ['--ld=%s' % ' '.join(compiler.get_linker_exelist())] + args += ['--cc=%s' % ' '.join([shlex.quote(x) for x in compiler.get_exelist()])] + args += ['--ld=%s' % ' '.join([shlex.quote(x) for x in compiler.get_linker_exelist()])] if cflags: - args += ['--cflags=%s' % ' '.join(cflags)] + args += ['--cflags=%s' % ' '.join([shlex.quote(x) for x in cflags])] if ldflags: - args += ['--ldflags=%s' % ' '.join(ldflags)] + args += ['--ldflags=%s' % ' '.join([shlex.quote(x) for x in ldflags])] return args diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py index e621938..c07391e 100644 --- a/mesonbuild/modules/hotdoc.py +++ b/mesonbuild/modules/hotdoc.py @@ -38,12 +38,13 @@ MIN_HOTDOC_VERSION = '0.8.100' class HotdocTargetBuilder: - def __init__(self, name, state, hotdoc, kwargs): + def __init__(self, name, state, hotdoc, interpreter, kwargs): self.hotdoc = hotdoc self.build_by_default = kwargs.pop('build_by_default', False) self.kwargs = kwargs self.name = name self.state = state + self.interpreter = interpreter self.include_paths = OrderedDict() self.builddir = state.environment.get_build_dir() @@ -93,7 +94,7 @@ class HotdocTargetBuilder: self.check_extra_arg_type(arg, v) return - valid_types = 
(str, bool, mesonlib.File, build.IncludeDirs) + valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.BuildTarget) if not isinstance(value, valid_types): raise InvalidArguments('Argument "%s=%s" should be of type: %s.' % ( arg, value, [t.__name__ for t in valid_types])) @@ -211,6 +212,9 @@ class HotdocTargetBuilder: cmd.append(os.path.join(self.builddir, arg.get_curdir(), inc_dir)) continue + elif isinstance(arg, build.CustomTarget) or isinstance(arg, build.BuildTarget): + self._dependencies.append(arg) + arg = self.interpreter.backend.get_target_filename_abs(arg) cmd.append(arg) @@ -375,7 +379,7 @@ class HotDocModule(ExtensionModule): @noKwargs def has_extensions(self, state, args, kwargs): - res = self.hotdoc.run_hotdoc(['--has-extension'] + args) == 0 + res = self.hotdoc.run_hotdoc(['--has-extension=%s' % extension for extension in args]) == 0 return ModuleReturnValue(res, [res]) def generate_doc(self, state, args, kwargs): @@ -384,7 +388,7 @@ class HotDocModule(ExtensionModule): ' required for the project name.') project_name = args[0] - builder = HotdocTargetBuilder(project_name, state, self.hotdoc, kwargs) + builder = HotdocTargetBuilder(project_name, state, self.hotdoc, self.interpreter, kwargs) target, install_script = builder.make_targets() targets = [HotdocTargetHolder(target, self.interpreter)] if install_script: diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py index 40f73f7..aeab813 100644 --- a/mesonbuild/modules/i18n.py +++ b/mesonbuild/modules/i18n.py @@ -82,17 +82,23 @@ class I18nModule(ExtensionModule): kwargs['command'] = command inputfile = kwargs['input'] - if isinstance(inputfile, str): - inputfile = mesonlib.File.from_source_file(state.environment.source_dir, - state.subdir, inputfile) - output = kwargs['output'] - ifile_abs = inputfile.absolute_path(state.environment.source_dir, - state.environment.build_dir) - values = mesonlib.get_filenames_templates_dict([ifile_abs], None) - outputs = 
mesonlib.substitute_values([output], values) - output = outputs[0] - - ct = build.CustomTarget(output + '_merge', state.subdir, state.subproject, kwargs) + if hasattr(inputfile, 'held_object'): + ct = build.CustomTarget(kwargs['output'] + '_merge', state.subdir, state.subproject, kwargs) + else: + if isinstance(inputfile, list): + # We only use this input file to create a name of the custom target. + # Thus we can ignore the other entries. + inputfile = inputfile[0] + if isinstance(inputfile, str): + inputfile = mesonlib.File.from_source_file(state.environment.source_dir, + state.subdir, inputfile) + output = kwargs['output'] + ifile_abs = inputfile.absolute_path(state.environment.source_dir, + state.environment.build_dir) + values = mesonlib.get_filenames_templates_dict([ifile_abs], None) + outputs = mesonlib.substitute_values([output], values) + output = outputs[0] + ct = build.CustomTarget(output + '_' + state.subdir.replace('/', '@').replace('\\', '@') + '_merge', state.subdir, state.subproject, kwargs) return ModuleReturnValue(ct, [ct]) @FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset']) diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 8684864..52285c2 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -276,10 +276,16 @@ class PkgConfigModule(ExtensionModule): install_dir = l.get_custom_install_dir()[0] if install_dir is False: continue - if isinstance(install_dir, str): - Lflag = '-L${prefix}/%s ' % self._escape(self._make_relative(prefix, install_dir)) - else: # install_dir is True - Lflag = '-L${libdir}' + if 'cs' in l.compilers: + if isinstance(install_dir, str): + Lflag = '-r${prefix}/%s/%s ' % (self._escape(self._make_relative(prefix, install_dir)), l.filename) + else: # install_dir is True + Lflag = '-r${libdir}/%s' % l.filename + else: + if isinstance(install_dir, str): + Lflag = '-L${prefix}/%s ' % self._escape(self._make_relative(prefix, install_dir)) + else: # install_dir is 
True + Lflag = '-L${libdir}' if Lflag not in Lflags: Lflags.append(Lflag) yield Lflag @@ -288,7 +294,8 @@ class PkgConfigModule(ExtensionModule): # find the library if l.name_suffix_set: mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile)) - yield '-l%s' % lname + if 'cs' not in l.compilers: + yield '-l%s' % lname if len(deps.pub_libs) > 0: ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs)))) diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index d9ec562..3b2bf07 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -23,9 +23,9 @@ from mesonbuild.modules import ModuleReturnValue from ..interpreterbase import ( noPosargs, noKwargs, permittedKwargs, InterpreterObject, InvalidArguments, - FeatureNew + FeatureNew, FeatureNewKwargs, disablerIfNotFound ) -from ..interpreter import ExternalProgramHolder +from ..interpreter import ExternalProgramHolder, extract_required_kwarg from ..interpreterbase import flatten from ..build import known_shmod_kwargs from .. 
import mlog @@ -260,52 +260,37 @@ class PythonDependency(ExternalDependency): return super().get_pkgconfig_variable(variable_name, kwargs) -VARIABLES_COMMAND = ''' +INTROSPECT_COMMAND = ''' import sysconfig import json - -print (json.dumps (sysconfig.get_config_vars())) -''' - - -PATHS_COMMAND = ''' -import sysconfig -import json - -print (json.dumps(sysconfig.get_paths())) -''' - - -INSTALL_PATHS_COMMAND = ''' -import sysconfig -import json - -print (json.dumps(sysconfig.get_paths(scheme='posix_prefix', vars={'base': '', 'platbase': '', 'installed_base': ''}))) -''' - - -IS_PYPY_COMMAND = ''' import sys -import json -print (json.dumps('__pypy__' in sys.builtin_module_names)) -''' +install_paths = sysconfig.get_paths(scheme='posix_prefix', vars={'base': '', 'platbase': '', 'installed_base': ''}) +print (json.dumps ({ + 'variables': sysconfig.get_config_vars(), + 'paths': sysconfig.get_paths(), + 'install_paths': install_paths, + 'version': sysconfig.get_python_version(), + 'platform': sysconfig.get_platform(), + 'is_pypy': '__pypy__' in sys.builtin_module_names, +})) +''' class PythonInstallation(ExternalProgramHolder, InterpreterObject): - def __init__(self, interpreter, python): + def __init__(self, interpreter, python, info): InterpreterObject.__init__(self) ExternalProgramHolder.__init__(self, python) self.interpreter = interpreter prefix = self.interpreter.environment.coredata.get_builtin_option('prefix') - self.variables = json.loads(run_command(python, VARIABLES_COMMAND)) - self.paths = json.loads(run_command(python, PATHS_COMMAND)) - install_paths = json.loads(run_command(python, INSTALL_PATHS_COMMAND)) + self.variables = info['variables'] + self.paths = info['paths'] + install_paths = info['install_paths'] self.platlib_install_path = os.path.join(prefix, install_paths['platlib'][1:]) self.purelib_install_path = os.path.join(prefix, install_paths['purelib'][1:]) - self.version = run_command(python, "import sysconfig; print (sysconfig.get_python_version())") 
- self.platform = run_command(python, "import sysconfig; print (sysconfig.get_platform())") - self.is_pypy = json.loads(run_command(python, IS_PYPY_COMMAND)) + self.version = info['version'] + self.platform = info['platform'] + self.is_pypy = info['is_pypy'] @permittedKwargs(mod_kwargs) def extension_module(self, interpreter, state, args, kwargs): @@ -475,16 +460,29 @@ class PythonModule(ExtensionModule): else: return None + def _check_version(self, name_or_path, version): + if name_or_path == 'python2': + return mesonlib.version_compare(version, '< 3.0') + elif name_or_path == 'python3': + return mesonlib.version_compare(version, '>= 3.0') + return True + + @FeatureNewKwargs('python.find_installation', '0.49.0', ['disabler']) + @disablerIfNotFound @permittedKwargs(['required']) def find_installation(self, interpreter, state, args, kwargs): - required = kwargs.get('required', True) - if not isinstance(required, bool): - raise InvalidArguments('"required" argument must be a boolean.') + feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0') + disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, feature_check) + if disabled: + mlog.log('find_installation skipped: feature', mlog.bold(feature), 'disabled') + return ExternalProgramHolder(NonExistingExternalProgram()) if len(args) > 1: raise InvalidArguments('find_installation takes zero or one positional argument.') - if args: + if 'python' in state.environment.config_info.binaries: + name_or_path = state.environment.config_info.binaries['python'] + elif args: name_or_path = args[0] if not isinstance(name_or_path, str): raise InvalidArguments('find_installation argument must be a string.') @@ -509,12 +507,6 @@ class PythonModule(ExtensionModule): # it if not python.found() and name_or_path in ['python2', 'python3']: python = ExternalProgram('python', silent = True) - if python.found(): - version = run_command(python, "import sysconfig; print 
(sysconfig.get_python_version())") - if not version or \ - name_or_path == 'python2' and mesonlib.version_compare(version, '>= 3.0') or \ - name_or_path == 'python3' and not mesonlib.version_compare(version, '>= 3.0'): - python = NonExistingExternalProgram() if not python.found(): if required: @@ -522,13 +514,17 @@ class PythonModule(ExtensionModule): res = ExternalProgramHolder(NonExistingExternalProgram()) else: # Sanity check, we expect to have something that at least quacks in tune - version = run_command(python, "import sysconfig; print (sysconfig.get_python_version())") - if not version: + try: + info = json.loads(run_command(python, INTROSPECT_COMMAND)) + except json.JSONDecodeError: + info = None + + if isinstance(info, dict) and 'version' in info and self._check_version(name_or_path, info['version']): + res = PythonInstallation(interpreter, python, info) + else: res = ExternalProgramHolder(NonExistingExternalProgram()) if required: raise mesonlib.MesonException('{} is not a valid python'.format(python)) - else: - res = PythonInstallation(interpreter, python) return res diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py index 3cfc689..f664632 100644 --- a/mesonbuild/modules/python3.py +++ b/mesonbuild/modules/python3.py @@ -17,11 +17,12 @@ from .. import mesonlib, dependencies from . 
import ExtensionModule from mesonbuild.modules import ModuleReturnValue -from ..interpreterbase import noKwargs, permittedKwargs +from ..interpreterbase import noKwargs, permittedKwargs, FeatureDeprecated from ..build import known_shmod_kwargs class Python3Module(ExtensionModule): + @FeatureDeprecated('python3 module', '0.48.0') def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.snippets.add('extension_module') @@ -47,7 +48,10 @@ class Python3Module(ExtensionModule): @noKwargs def find_python(self, state, args, kwargs): - py3 = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True) + options = [state.environment.config_info.binaries.get('python3')] + if not options[0]: # because this would be [None] + options = ['python3', mesonlib.python_command] + py3 = dependencies.ExternalProgram(*options, silent=True) return ModuleReturnValue(py3, [py3]) @noKwargs diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py index a8e916a..28fb98c 100644 --- a/mesonbuild/modules/qt.py +++ b/mesonbuild/modules/qt.py @@ -18,7 +18,7 @@ from .. import build from ..mesonlib import MesonException, Popen_safe, extract_as_list, File from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency import xml.etree.ElementTree as ET -from . import ModuleReturnValue, get_include_args +from . 
import ModuleReturnValue, get_include_args, ExtensionModule from ..interpreterbase import permittedKwargs, FeatureNewKwargs _QT_DEPS_LUT = { @@ -27,10 +27,11 @@ _QT_DEPS_LUT = { } -class QtBaseModule: +class QtBaseModule(ExtensionModule): tools_detected = False - def __init__(self, qt_version=5): + def __init__(self, interpreter, qt_version=5): + ExtensionModule.__init__(self, interpreter) self.qt_version = qt_version def _detect_tools(self, env, method): @@ -43,7 +44,7 @@ class QtBaseModule: kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method} qt = _QT_DEPS_LUT[self.qt_version](env, kwargs) # Get all tools and then make sure that they are the right version - self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect() + self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter) # Moc, uic and rcc write their version strings to stderr. # Moc and rcc return a non-zero result when doing so. # What kind of an idiot thought that was a good idea? 
@@ -116,11 +117,13 @@ class QtBaseModule: except Exception: return [] + @FeatureNewKwargs('qt.preprocess', '0.49.0', ['uic_extra_arguments']) @FeatureNewKwargs('qt.preprocess', '0.44.0', ['moc_extra_arguments']) - @permittedKwargs({'moc_headers', 'moc_sources', 'moc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'}) + @FeatureNewKwargs('qt.preprocess', '0.49.0', ['rcc_extra_arguments']) + @permittedKwargs({'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'}) def preprocess(self, state, args, kwargs): - rcc_files, ui_files, moc_headers, moc_sources, moc_extra_arguments, sources, include_directories, dependencies \ - = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'moc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True) + rcc_files, ui_files, moc_headers, moc_sources, uic_extra_arguments, moc_extra_arguments, rcc_extra_arguments, sources, include_directories, dependencies \ + = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True) sources += args[1:] method = kwargs.get('method', 'auto') self._detect_tools(state.environment, method) @@ -139,7 +142,7 @@ class QtBaseModule: name = args[0] rcc_kwargs = {'input': rcc_files, 'output': name + '.cpp', - 'command': [self.rcc, '-name', name, '-o', '@OUTPUT@', '@INPUT@'], + 'command': [self.rcc, '-name', name, '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) @@ -153,15 +156,16 @@ class QtBaseModule: name = 'qt' + str(self.qt_version) + '-' + basename.replace('.', '_') rcc_kwargs = {'input': rcc_file, 'output': name 
+ '.cpp', - 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', '@INPUT@'], + 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) if len(ui_files) > 0: if not self.uic.found(): - raise MesonException(err_msg.format('UIC', 'uic-qt' + self.qt_version)) + raise MesonException(err_msg.format('UIC', 'uic-qt{}'.format(self.qt_version), self.qt_version)) + arguments = uic_extra_arguments + ['-o', '@OUTPUT@', '@INPUT@'] ui_kwargs = {'output': 'ui_@BASENAME@.h', - 'arguments': ['-o', '@OUTPUT@', '@INPUT@']} + 'arguments': arguments} ui_gen = build.Generator([self.uic], ui_kwargs) ui_output = ui_gen.process_files('Qt{} ui'.format(self.qt_version), ui_files, state) sources.append(ui_output) diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py index 29992d5..112e3e4 100644 --- a/mesonbuild/modules/qt4.py +++ b/mesonbuild/modules/qt4.py @@ -14,14 +14,13 @@ from .. import mlog from .qt import QtBaseModule -from . import ExtensionModule -class Qt4Module(ExtensionModule, QtBaseModule): +class Qt4Module(QtBaseModule): def __init__(self, interpreter): - QtBaseModule.__init__(self, qt_version=4) - ExtensionModule.__init__(self, interpreter) + QtBaseModule.__init__(self, interpreter, qt_version=4) + def initialize(*args, **kwargs): mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:', diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py index 19623ac..96a7964 100644 --- a/mesonbuild/modules/qt5.py +++ b/mesonbuild/modules/qt5.py @@ -14,14 +14,13 @@ from .. import mlog from .qt import QtBaseModule -from . 
import ExtensionModule -class Qt5Module(ExtensionModule, QtBaseModule): +class Qt5Module(QtBaseModule): def __init__(self, interpreter): - QtBaseModule.__init__(self, qt_version=5) - ExtensionModule.__init__(self, interpreter) + QtBaseModule.__init__(self, interpreter, qt_version=5) + def initialize(*args, **kwargs): mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:', diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index 59e845c..d185d89 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import enum import os +import re from .. import mlog from .. import mesonlib, build @@ -24,6 +26,10 @@ from ..interpreter import CustomTargetHolder from ..interpreterbase import permittedKwargs, FeatureNewKwargs from ..dependencies import ExternalProgram +class ResourceCompilerType(enum.Enum): + windres = 1 + rc = 2 + class WindowsModule(ExtensionModule): def detect_compiler(self, compilers): @@ -32,31 +38,19 @@ class WindowsModule(ExtensionModule): return compilers[l] raise MesonException('Resource compilation requires a C or C++ compiler.') - @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends']) - @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'}) - def compile_resources(self, state, args, kwargs): - comp = self.detect_compiler(state.compilers) + def _find_resource_compiler(self, state): + # FIXME: Does not handle `native: true` executables, see + # See https://github.com/mesonbuild/meson/issues/1531 - extra_args = mesonlib.stringlistify(kwargs.get('args', [])) - wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True) - wrc_depends = extract_as_list(kwargs, 'depends', pop = True) - for d in wrc_depends: - if isinstance(d, CustomTargetHolder): - extra_args += 
get_include_args([d.outdir_include()]) - inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True) - for incd in inc_dirs: - if not isinstance(incd.held_object, (str, build.IncludeDirs)): - raise MesonException('Resource include dirs should be include_directories().') - extra_args += get_include_args(inc_dirs) + if hasattr(self, '_rescomp'): + return self._rescomp rescomp = None - # FIXME: Does not handle `native: true` executables, see - # https://github.com/mesonbuild/meson/issues/1531 if state.environment.is_cross_build(): # If cross compiling see if windres has been specified in the # cross file before trying to find it another way. - cross_info = state.environment.cross_info - rescomp = ExternalProgram.from_cross_info(cross_info, 'windres') + bins = state.environment.cross_info.config['binaries'] + rescomp = ExternalProgram.from_bin_list(bins, 'windres') if not rescomp or not rescomp.found(): if 'WINDRES' in os.environ: @@ -65,7 +59,15 @@ class WindowsModule(ExtensionModule): rescomp = ExternalProgram('windres', command=os.environ.get('WINDRES'), silent=True) if not rescomp or not rescomp.found(): - if comp.id == 'msvc': + # Take windres from the config file after the environment, which is + # in keeping with the expectations on unix-like OSes that + # environment variables trump config files. 
+ bins = state.environment.config_info.binaries + rescomp = ExternalProgram.from_bin_list(bins, 'windres') + + if not rescomp or not rescomp.found(): + comp = self.detect_compiler(state.compilers) + if comp.id == 'msvc' or comp.id == 'clang-cl': rescomp = ExternalProgram('rc', silent=True) else: rescomp = ExternalProgram('windres', silent=True) @@ -73,7 +75,38 @@ class WindowsModule(ExtensionModule): if not rescomp.found(): raise MesonException('Could not find Windows resource compiler') - if 'rc' in rescomp.get_path(): + for (arg, match, type) in [ + ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc), + ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres), + ]: + p, o, e = mesonlib.Popen_safe(rescomp.get_command() + [arg]) + m = re.search(match, o, re.MULTILINE) + if m: + mlog.log('Windows resource compiler: %s' % m.group()) + self._rescomp = (rescomp, type) + break + else: + raise MesonException('Could not determine type of Windows resource compiler') + + return self._rescomp + + @FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends']) + @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'}) + def compile_resources(self, state, args, kwargs): + extra_args = mesonlib.stringlistify(kwargs.get('args', [])) + wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True) + wrc_depends = extract_as_list(kwargs, 'depends', pop = True) + for d in wrc_depends: + if isinstance(d, CustomTargetHolder): + extra_args += get_include_args([d.outdir_include()]) + inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True) + for incd in inc_dirs: + if not isinstance(incd.held_object, (str, build.IncludeDirs)): + raise MesonException('Resource include dirs should be include_directories().') + extra_args += get_include_args(inc_dirs) + + rescomp, rescomp_type = self._find_resource_compiler(state) + if rescomp_type == ResourceCompilerType.rc: # RC is used to generate .res files, a 
special binary resource # format, which can be passed directly to LINK (apparently LINK uses # CVTRES internally to convert this to a COFF object) @@ -129,7 +162,7 @@ class WindowsModule(ExtensionModule): } # instruct binutils windres to generate a preprocessor depfile - if 'windres' in rescomp.get_path(): + if rescomp_type == ResourceCompilerType.windres: res_kwargs['depfile'] = res_kwargs['output'] + '.d' res_kwargs['command'] += ['--preprocessor-arg=-MD', '--preprocessor-arg=-MQ@OUTPUT@', '--preprocessor-arg=-MF@DEPFILE@'] diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index 9af6dac..be5c807 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -90,8 +90,9 @@ class Lexer: def __init__(self, code): self.code = code self.keywords = {'true', 'false', 'if', 'else', 'elif', - 'endif', 'and', 'or', 'not', 'foreach', 'endforeach'} - self.future_keywords = {'continue', 'break', 'in', 'return'} + 'endif', 'and', 'or', 'not', 'foreach', 'endforeach', + 'in', 'continue', 'break'} + self.future_keywords = {'return'} self.token_specification = [ # Need to be sorted longest to shortest. ('ignore', re.compile(r'[ \t]')), @@ -242,6 +243,12 @@ class StringNode(ElementaryNode): def __str__(self): return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno) +class ContinueNode(ElementaryNode): + pass + +class BreakNode(ElementaryNode): + pass + class ArrayNode: def __init__(self, args): self.subdir = args.subdir @@ -436,7 +443,9 @@ comparison_map = {'equal': '==', 'lt': '<', 'le': '<=', 'gt': '>', - 'ge': '>=' + 'ge': '>=', + 'in': 'in', + 'notin': 'not in', } # Recursive descent parser for Meson's definition language. 
@@ -543,6 +552,8 @@ class Parser: for nodename, operator_type in comparison_map.items(): if self.accept(nodename): return ComparisonNode(operator_type, left, self.e5()) + if self.accept('not') and self.accept('in'): + return ComparisonNode('notin', left, self.e5()) return left def e5(self): @@ -754,6 +765,10 @@ class Parser: block = self.foreachblock() self.block_expect('endforeach', block_start) return block + if self.accept('continue'): + return ContinueNode(self.current) + if self.accept('break'): + return BreakNode(self.current) return self.statement() def codeblock(self): diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py new file mode 100644 index 0000000..f9a5e1c --- /dev/null +++ b/mesonbuild/msetup.py @@ -0,0 +1,218 @@ +# Copyright 2016-2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import sys, stat +import datetime +import os.path +import platform +import cProfile as profile +import argparse + +from . import environment, interpreter, mesonlib +from . import build +from . 
def add_arguments(parser):
    """Register all 'meson setup' command line arguments on *parser*.

    Builtin option arguments (e.g. --prefix, --buildtype) are added first
    via coredata; the remaining arguments are setup-specific.
    """
    coredata.register_builtin_arguments(parser)
    parser.add_argument('--cross-file', default=None,
                        help='File describing cross compilation environment.')
    parser.add_argument('--native-file',
                        default=[],
                        action='append',
                        help='File containing overrides for native compilation environment.')
    parser.add_argument('-v', '--version', action='version',
                        version=coredata.version)
    # Hidden developer option: profile Meson's own execution.
    parser.add_argument('--profile-self', action='store_true', dest='profile',
                        help=argparse.SUPPRESS)
    parser.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings',
                        help='Make all Meson warnings fatal')
    parser.add_argument('--reconfigure', action='store_true',
                        help='Set options and reconfigure the project. Useful when new ' +
                             'options have been added to the project and the default value ' +
                             'is not working.')
    parser.add_argument('--wipe', action='store_true',
                        help='Wipe build directory and reconfigure using previous command line options. ' +
                             # Typo fix: was "Userful".
                             'Useful when build directory got corrupted, or when rebuilding with a ' +
                             'newer version of meson.')
    parser.add_argument('builddir', nargs='?', default=None)
    parser.add_argument('sourcedir', nargs='?', default=None)
+ filename = coredata.get_cmd_line_file(self.build_dir) + with open(filename, 'r') as f: + content = f.read() + + coredata.read_cmd_line_file(self.build_dir, options) + + try: + mesonlib.windows_proof_rmtree(self.build_dir) + finally: + # Restore the file + path = os.path.dirname(filename) + os.makedirs(path, exist_ok=True) + with open(filename, 'w') as f: + f.write(content) + + self.options = options + + def has_build_file(self, dirname): + fname = os.path.join(dirname, environment.build_filename) + return os.path.exists(fname) + + def validate_core_dirs(self, dir1, dir2): + if dir1 is None: + if dir2 is None: + if not os.path.exists('meson.build') and os.path.exists('../meson.build'): + dir2 = '..' + else: + raise MesonException('Must specify at least one directory name.') + dir1 = os.getcwd() + if dir2 is None: + dir2 = os.getcwd() + ndir1 = os.path.abspath(os.path.realpath(dir1)) + ndir2 = os.path.abspath(os.path.realpath(dir2)) + if not os.path.exists(ndir1): + os.makedirs(ndir1) + if not os.path.exists(ndir2): + os.makedirs(ndir2) + if not stat.S_ISDIR(os.stat(ndir1).st_mode): + raise MesonException('%s is not a directory' % dir1) + if not stat.S_ISDIR(os.stat(ndir2).st_mode): + raise MesonException('%s is not a directory' % dir2) + if os.path.samefile(dir1, dir2): + raise MesonException('Source and build directories must not be the same. Create a pristine build directory.') + if self.has_build_file(ndir1): + if self.has_build_file(ndir2): + raise MesonException('Both directories contain a build file %s.' % environment.build_filename) + return ndir1, ndir2 + if self.has_build_file(ndir2): + return ndir2, ndir1 + raise MesonException('Neither directory contains a build file %s.' 
% environment.build_filename) + + def validate_dirs(self, dir1, dir2, reconfigure, wipe): + (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2) + priv_dir = os.path.join(build_dir, 'meson-private/coredata.dat') + if os.path.exists(priv_dir): + if not reconfigure and not wipe: + print('Directory already configured.\n' + '\nJust run your build command (e.g. ninja) and Meson will regenerate as necessary.\n' + 'If ninja fails, run "ninja reconfigure" or "meson --reconfigure"\n' + 'to force Meson to regenerate.\n' + '\nIf build failures persist, run "meson setup --wipe" to rebuild from scratch\n' + 'using the same options as passed when configuring the build.' + '\nTo change option values, run "meson configure" instead.') + sys.exit(0) + else: + has_cmd_line_file = os.path.exists(coredata.get_cmd_line_file(build_dir)) + if (wipe and not has_cmd_line_file) or (not wipe and reconfigure): + print('Directory does not contain a valid build tree:\n{}'.format(build_dir)) + sys.exit(1) + return src_dir, build_dir + + def check_pkgconfig_envvar(self, env): + curvar = os.environ.get('PKG_CONFIG_PATH', '') + if curvar != env.coredata.pkgconf_envvar: + mlog.warning('PKG_CONFIG_PATH has changed between invocations from "%s" to "%s".' 
% + (env.coredata.pkgconf_envvar, curvar)) + env.coredata.pkgconf_envvar = curvar + + def generate(self): + env = environment.Environment(self.source_dir, self.build_dir, self.options) + mlog.initialize(env.get_log_dir(), self.options.fatal_warnings) + if self.options.profile: + mlog.set_timestamp_start(time.monotonic()) + with mesonlib.BuildDirLock(self.build_dir): + self._generate(env) + + def _generate(self, env): + mlog.debug('Build started at', datetime.datetime.now().isoformat()) + mlog.debug('Main binary:', sys.executable) + mlog.debug('Python system:', platform.system()) + mlog.log(mlog.bold('The Meson build system')) + self.check_pkgconfig_envvar(env) + mlog.log('Version:', coredata.version) + mlog.log('Source dir:', mlog.bold(self.source_dir)) + mlog.log('Build dir:', mlog.bold(self.build_dir)) + if env.is_cross_build(): + mlog.log('Build type:', mlog.bold('cross build')) + else: + mlog.log('Build type:', mlog.bold('native build')) + b = build.Build(env) + + intr = interpreter.Interpreter(b) + if env.is_cross_build(): + mlog.log('Host machine cpu family:', mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {}))) + mlog.log('Host machine cpu:', mlog.bold(intr.builtin['host_machine'].cpu_method([], {}))) + mlog.log('Target machine cpu family:', mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {}))) + mlog.log('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {}))) + mlog.log('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {}))) + mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {}))) + if self.options.profile: + fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log') + profile.runctx('intr.run()', globals(), locals(), filename=fname) + else: + intr.run() + # Print all default option values that don't match the current value + for def_opt_name, def_opt_value, cur_opt_value in 
intr.get_non_matching_default_options(): + mlog.log('Option', mlog.bold(def_opt_name), 'is:', + mlog.bold(str(cur_opt_value)), + '[default: {}]'.format(str(def_opt_value))) + try: + dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat') + # We would like to write coredata as late as possible since we use the existence of + # this file to check if we generated the build file successfully. Since coredata + # includes settings, the build files must depend on it and appear newer. However, due + # to various kernel caches, we cannot guarantee that any time in Python is exactly in + # sync with the time that gets applied to any files. Thus, we dump this file as late as + # possible, but before build files, and if any error occurs, delete it. + cdf = env.dump_coredata() + if self.options.profile: + fname = 'profile-{}-backend.log'.format(intr.backend.name) + fname = os.path.join(self.build_dir, 'meson-private', fname) + profile.runctx('intr.backend.generate(intr)', globals(), locals(), filename=fname) + else: + intr.backend.generate(intr) + build.save(b, dumpfile) + # Post-conf scripts must be run after writing coredata or else introspection fails. 
def run(options):
    """Entry point for 'meson setup'.

    Folds command-line option values into *options*, configures the
    project, and returns 0 on success (errors raise exceptions).
    """
    coredata.parse_cmd_line_options(options)
    MesonApp(options).generate()
    return 0
else stdout # Let gdb handle ^C instead of us if self.options.gdb: @@ -326,7 +327,7 @@ class SingleTestRunner: else: timeout = self.test.timeout try: - (stdo, stde) = p.communicate(timeout=timeout) + p.communicate(timeout=timeout) except subprocess.TimeoutExpired: if self.options.verbose: print('%s time out (After %d seconds)' % (self.test.name, timeout)) @@ -339,6 +340,8 @@ class SingleTestRunner: # Let us accept ^C again signal.signal(signal.SIGINT, previous_sigint_handler) + additional_error = None + if kill_test or timed_out: # Python does not provide multiplatform support for # killing a process and all its children so we need @@ -355,33 +358,43 @@ class SingleTestRunner: # already died) so carry on. pass try: - (stdo, stde) = p.communicate(timeout=1) + p.communicate(timeout=1) except subprocess.TimeoutExpired: # An earlier kill attempt has not worked for whatever reason. # Try to kill it one last time with a direct call. # If the process has spawned children, they will remain around. p.kill() try: - (stdo, stde) = p.communicate(timeout=1) + p.communicate(timeout=1) except subprocess.TimeoutExpired: - stdo = b'Test process could not be killed.' - stde = b'' + additional_error = b'Test process could not be killed.' except ValueError: - stdo = b'Could not read output. Maybe the process has redirected its stdout/stderr?' - stde = b'' + additional_error = b'Could not read output. Maybe the process has redirected its stdout/stderr?' 
endtime = time.time() duration = endtime - starttime - stdo = decode(stdo) - if stde: - stde = decode(stde) + if additional_error is None: + if stdout is None: # if stdout is None stderr should be as well + stdo = '' + stde = '' + else: + stdout.seek(0) + stdo = decode(stdout.read()) + if stderr != stdout: + stderr.seek(0) + stde = decode(stderr.read()) + else: + stde = "" + else: + stdo = "" + stde = additional_error if timed_out: res = TestResult.TIMEOUT elif p.returncode == GNU_SKIP_RETURNCODE: res = TestResult.SKIP - elif self.test.should_fail == bool(p.returncode): - res = TestResult.OK + elif self.test.should_fail: + res = TestResult.EXPECTEDFAIL if bool(p.returncode) else TestResult.UNEXPECTEDPASS else: - res = TestResult.FAIL + res = TestResult.FAIL if bool(p.returncode) else TestResult.OK return TestRun(res, p.returncode, self.test.should_fail, duration, stdo, stde, cmd, self.test.env) @@ -390,6 +403,8 @@ class TestHarness: self.options = options self.collected_logs = [] self.fail_count = 0 + self.expectedfail_count = 0 + self.unexpectedpass_count = 0 self.success_count = 0 self.skip_count = 0 self.timeout_count = 0 @@ -435,6 +450,8 @@ class TestHarness: def get_test_runner(self, test): options = deepcopy(self.options) + if not options.setup: + options.setup = self.build_data.test_setup_default_name if options.setup: env = self.merge_suite_options(options, test) else: @@ -454,6 +471,10 @@ class TestHarness: self.success_count += 1 elif result.res is TestResult.FAIL: self.fail_count += 1 + elif result.res is TestResult.EXPECTEDFAIL: + self.expectedfail_count += 1 + elif result.res is TestResult.UNEXPECTEDPASS: + self.unexpectedpass_count += 1 else: sys.exit('Unknown test result encountered: {}'.format(result.res)) @@ -469,9 +490,10 @@ class TestHarness: result_str = '%s %s %s%s%s%5.2f s %s' % \ (num, name, padding1, result.res.value, padding2, result.duration, status) - if not self.options.quiet or result.res is not TestResult.OK: - if result.res is not 
TestResult.OK and mlog.colorize_console: - if result.res in (TestResult.FAIL, TestResult.TIMEOUT): + ok_statuses = (TestResult.OK, TestResult.EXPECTEDFAIL) + if not self.options.quiet or result.res not in ok_statuses: + if result.res not in ok_statuses and mlog.colorize_console: + if result.res in (TestResult.FAIL, TestResult.TIMEOUT, TestResult.UNEXPECTEDPASS): decorator = mlog.red elif result.res is TestResult.SKIP: decorator = mlog.yellow @@ -492,11 +514,14 @@ class TestHarness: def print_summary(self): msg = ''' -OK: %4d -FAIL: %4d -SKIP: %4d -TIMEOUT: %4d -''' % (self.success_count, self.fail_count, self.skip_count, self.timeout_count) +Ok: %4d +Expected Fail: %4d +Fail: %4d +Unexpected Pass: %4d +Skipped: %4d +Timeout: %4d +''' % (self.success_count, self.expectedfail_count, self.fail_count, + self.unexpectedpass_count, self.skip_count, self.timeout_count) print(msg) if self.logfile: self.logfile.write(msg) @@ -737,9 +762,7 @@ def rebuild_all(wd): return True -def run(args): - options = buildparser().parse_args(args) - +def run(options): if options.benchmark: options.num_processes = 1 @@ -784,3 +807,9 @@ def run(args): else: print(e) return 1 + +def run_with_args(args): + parser = argparse.ArgumentParser(prog='meson test') + add_arguments(parser) + options = parser.parse_args(args) + return run(options) diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 1127288..37ed7ef 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -27,11 +27,8 @@ import mesonbuild.astinterpreter from mesonbuild.mesonlib import MesonException from mesonbuild import mlog import sys, traceback -import argparse - -def buildparser(): - parser = argparse.ArgumentParser(prog='meson rewrite') +def add_arguments(parser): parser.add_argument('--sourcedir', default='.', help='Path to source directory.') parser.add_argument('--target', default=None, @@ -39,14 +36,12 @@ def buildparser(): parser.add_argument('--filename', default=None, help='Name of source file to 
add or remove to target.') parser.add_argument('commands', nargs='+') - return parser -def run(args): - options = buildparser().parse_args(args) +def run(options): if options.target is None or options.filename is None: sys.exit("Must specify both target and filename.") print('This tool is highly experimental, use with care.') - rewriter = mesonbuild.astinterpreter.AstInterpreter(options.sourcedir, '') + rewriter = mesonbuild.astinterpreter.RewriterInterpreter(options.sourcedir, '') try: if options.commands[0] == 'add': rewriter.add_source(options.target, options.filename) diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py index 916c84f..0509eff 100644 --- a/mesonbuild/scripts/coverage.py +++ b/mesonbuild/scripts/coverage.py @@ -77,16 +77,19 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir): subprocess.check_call([lcov_exe, '-a', initial_tracefile, '-a', run_tracefile, + '--rc', 'lcov_branch_coverage=1', '-o', raw_tracefile]) # Remove all directories outside the source_root from the covinfo subprocess.check_call([lcov_exe, '--extract', raw_tracefile, os.path.join(source_root, '*'), + '--rc', 'lcov_branch_coverage=1', '--output-file', covinfo]) # Remove all directories inside subproject dir subprocess.check_call([lcov_exe, '--remove', covinfo, os.path.join(subproject_root, '*'), + '--rc', 'lcov_branch_coverage=1', '--output-file', covinfo]) subprocess.check_call([genhtml_exe, '--prefix', build_root, diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index d3d3028..7294186 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -16,6 +16,8 @@ import sys, struct import shutil, subprocess +from ..mesonlib import OrderedSet + SHT_STRTAB = 3 DT_NEEDED = 1 DT_RPATH = 15 @@ -374,7 +376,26 @@ def fix_darwin(fname, new_rpath, final_path, install_name_mappings): try: args = [] if rpaths: - for rp in rpaths: + # TODO: fix this properly, not totally clear how + # + # 
removing rpaths from binaries on macOS has tons of + # weird edge cases. For instance, if the user provided + # a '-Wl,-rpath' argument in LDFLAGS that happens to + # coincide with an rpath generated from a dependency, + # this would cause installation failures, as meson would + # generate install_name_tool calls with two identical + # '-delete_rpath' arguments, which install_name_tool + # fails on. Because meson itself ensures that it never + # adds duplicate rpaths, duplicate rpaths necessarily + # come from user variables. The idea of using OrderedSet + # is to remove *at most one* duplicate RPATH entry. This + # is not optimal, as it only respects the user's choice + # partially: if they provided a non-duplicate '-Wl,-rpath' + # argument, it gets removed, if they provided a duplicate + # one, it remains in the final binary. A potentially optimal + # solution would split all user '-Wl,-rpath' arguments from + # LDFLAGS, and later add them back with '-add_rpath'. + for rp in OrderedSet(rpaths): args += ['-delete_rpath', rp] subprocess.check_call(['install_name_tool', fname] + args, stdout=subprocess.DEVNULL, @@ -392,7 +413,7 @@ def fix_darwin(fname, new_rpath, final_path, install_name_mappings): subprocess.check_call(['install_name_tool', fname] + args, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) - except Exception as e: + except Exception: raise sys.exit(0) @@ -411,6 +432,9 @@ def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True) # Static libraries never have rpaths if fname.endswith('.a'): return + # DLLs never have rpaths + if fname.endswith('.dll'): + return try: if fname.endswith('.jar'): fix_jar(fname) diff --git a/mesonbuild/scripts/dist.py b/mesonbuild/scripts/dist.py index 6fa10ff..f49492c 100644 --- a/mesonbuild/scripts/dist.py +++ b/mesonbuild/scripts/dist.py @@ -24,8 +24,8 @@ import tarfile, zipfile import tempfile from glob import glob from mesonbuild.environment import detect_ninja -from mesonbuild.dependencies 
import ExternalProgram from mesonbuild.mesonlib import windows_proof_rmtree +from mesonbuild import mlog def create_hash(fname): hashname = fname + '.sha256sum' @@ -80,18 +80,27 @@ def run_dist_scripts(dist_root, dist_scripts): env = os.environ.copy() env['MESON_DIST_ROOT'] = dist_root for d in dist_scripts: - print('Processing dist script %s.' % d) - ddir, dname = os.path.split(d) - ep = ExternalProgram(dname, - search_dir=os.path.join(dist_root, ddir), - silent=True) - if not ep.found(): - sys.exit('Script %s could not be found in dist directory.' % d) - pc = subprocess.run(ep.command, env=env) - if pc.returncode != 0: - sys.exit('Dist script errored out.') + script = d['exe'] + args = d['args'] + name = ' '.join(script + args) + print('Running custom dist script {!r}'.format(name)) + try: + rc = subprocess.call(script + args, env=env) + if rc != 0: + sys.exit('Dist script errored out') + except OSError: + print('Failed to run dist script {!r}'.format(name)) + sys.exit(1) + + +def git_have_dirty_index(src_root): + '''Check whether there are uncommitted changes in git''' + ret = subprocess.call(['git', '-C', src_root, 'diff-index', '--quiet', 'HEAD']) + return ret == 1 def create_dist_git(dist_name, src_root, bld_root, dist_sub, dist_scripts): + if git_have_dirty_index(src_root): + mlog.warning('Repository has uncommitted changes that will not be included in the dist tarball') distdir = os.path.join(dist_sub, dist_name) if os.path.exists(distdir): shutil.rmtree(distdir) @@ -111,14 +120,21 @@ def create_dist_git(dist_name, src_root, bld_root, dist_sub, dist_scripts): return (xzname, ) +def hg_have_dirty_index(src_root): + '''Check whether there are uncommitted changes in hg''' + out = subprocess.check_output(['hg', '-R', src_root, 'summary']) + return b'commit: (clean)' not in out + def create_dist_hg(dist_name, src_root, bld_root, dist_sub, dist_scripts): - os.makedirs(dist_sub, exist_ok=True) + if hg_have_dirty_index(src_root): + mlog.warning('Repository has 
uncommitted changes that will not be included in the dist tarball') + os.makedirs(dist_sub, exist_ok=True) tarname = os.path.join(dist_sub, dist_name + '.tar') xzname = tarname + '.xz' subprocess.check_call(['hg', 'archive', '-R', src_root, '-S', '-t', 'tar', tarname]) if len(dist_scripts) > 0: - print('WARNING: dist scripts not supported in Mercurial projects.') + mlog.warning('dist scripts are not supported in Mercurial projects') with lzma.open(xzname, 'wb') as xf, open(tarname, 'rb') as tf: shutil.copyfileobj(tf, xf) os.unlink(tarname) @@ -129,7 +145,7 @@ def create_dist_hg(dist_name, src_root, bld_root, dist_sub, dist_scripts): def check_dist(packagename, meson_command): - print('Testing distribution package %s.' % packagename) + print('Testing distribution package %s' % packagename) unpackdir = tempfile.mkdtemp() builddir = tempfile.mkdtemp() installdir = tempfile.mkdtemp() @@ -142,21 +158,21 @@ def check_dist(packagename, meson_command): print('Running Meson on distribution package failed') return 1 if subprocess.call([ninja_bin], cwd=builddir) != 0: - print('Compiling the distribution package failed.') + print('Compiling the distribution package failed') return 1 if subprocess.call([ninja_bin, 'test'], cwd=builddir) != 0: - print('Running unit tests on the distribution package failed.') + print('Running unit tests on the distribution package failed') return 1 myenv = os.environ.copy() myenv['DESTDIR'] = installdir if subprocess.call([ninja_bin, 'install'], cwd=builddir, env=myenv) != 0: - print('Installing the distribution package failed.') + print('Installing the distribution package failed') return 1 finally: shutil.rmtree(unpackdir) shutil.rmtree(builddir) shutil.rmtree(installdir) - print('Distribution package %s tested.' 
% packagename) + print('Distribution package %s tested' % packagename) return 0 def run(args): @@ -177,7 +193,7 @@ def run(args): elif os.path.isdir(os.path.join(src_root, '.hg')): names = create_dist_hg(dist_name, src_root, bld_root, dist_sub, build.dist_scripts) else: - print('Dist currently only works with Git or Mercurial repos.') + print('Dist currently only works with Git or Mercurial repos') return 1 if names is None: return 1 diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py index bf3d9f6..01ced5b 100644 --- a/mesonbuild/scripts/gtkdochelper.py +++ b/mesonbuild/scripts/gtkdochelper.py @@ -28,6 +28,7 @@ parser.add_argument('--subdir', dest='subdir') parser.add_argument('--headerdirs', dest='headerdirs') parser.add_argument('--mainfile', dest='mainfile') parser.add_argument('--modulename', dest='modulename') +parser.add_argument('--moduleversion', dest='moduleversion') parser.add_argument('--htmlargs', dest='htmlargs', default='') parser.add_argument('--scanargs', dest='scanargs', default='') parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='') @@ -65,7 +66,7 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None): # This preserves the order of messages. 
p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2] if p.returncode != 0: - err_msg = ["{!r} failed with status {:d}".format(cmd[0], p.returncode)] + err_msg = ["{!r} failed with status {:d}".format(cmd, p.returncode)] if out: err_msg.append(out) raise MesonException('\n'.join(err_msg)) @@ -73,7 +74,7 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None): print(out) def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, - main_file, module, + main_file, module, module_version, html_args, scan_args, fixxref_args, mkdb_args, gobject_typesfile, scanobjs_args, run, ld, cc, ldflags, cflags, html_assets, content_files, ignore_headers, namespace, @@ -191,7 +192,7 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, else: mkhtml_cmd.append('%s-docs.xml' % module) # html gen must be run in the HTML dir - gtkdoc_run_check(mkhtml_cmd, os.path.join(abs_out, 'html')) + gtkdoc_run_check(mkhtml_cmd, htmldir) # Fix cross-references in HTML files fixref_cmd = ['gtkdoc-fixxref', @@ -199,6 +200,10 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, '--module-dir=html'] + fixxref_args gtkdoc_run_check(fixref_cmd, abs_out) + if module_version: + shutil.move(os.path.join(htmldir, '{}.devhelp2'.format(module)), + os.path.join(htmldir, '{}-{}.devhelp2'.format(module, module_version))) + def install_gtkdoc(build_root, doc_subdir, install_prefix, datadir, module): source = os.path.join(build_root, doc_subdir, 'html') final_destination = os.path.join(install_prefix, datadir, module) @@ -234,6 +239,7 @@ def run(args): options.headerdirs.split('@@'), options.mainfile, options.modulename, + options.moduleversion, htmlargs, scanargs, fixxrefargs, @@ -255,7 +261,12 @@ def run(args): if 'MESON_INSTALL_PREFIX' in os.environ: destdir = os.environ.get('DESTDIR', '') install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX']) - install_dir = options.install_dir if options.install_dir else options.modulename + if 
options.install_dir: + install_dir = options.install_dir + else: + install_dir = options.modulename + if options.moduleversion: + install_dir += '-' + options.moduleversion if os.path.isabs(install_dir): install_dir = destdir_join(destdir, install_dir) install_gtkdoc(options.builddir, diff --git a/mesonbuild/scripts/meson_exe.py b/mesonbuild/scripts/meson_exe.py index 84abfc3..23c7334 100644 --- a/mesonbuild/scripts/meson_exe.py +++ b/mesonbuild/scripts/meson_exe.py @@ -81,6 +81,8 @@ def run_exe(exe): if exe.capture and p.returncode == 0: with open(exe.capture, 'wb') as output: output.write(stdout) + else: + sys.stdout.buffer.write(stdout) if stderr: sys.stderr.buffer.write(stderr) return p.returncode diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py index f381552..1c86bf1 100644 --- a/mesonbuild/scripts/scanbuild.py +++ b/mesonbuild/scripts/scanbuild.py @@ -13,16 +13,17 @@ # limitations under the License. import os +import shlex import subprocess import shutil import tempfile from ..environment import detect_ninja from ..mesonlib import Popen_safe -def scanbuild(exename, srcdir, blddir, privdir, logdir, args): +def scanbuild(exelist, srcdir, blddir, privdir, logdir, args): with tempfile.TemporaryDirectory(dir=privdir) as scandir: - meson_cmd = [exename] + args - build_cmd = [exename, '-o', logdir, detect_ninja(), '-C', scandir] + meson_cmd = exelist + args + build_cmd = exelist + ['-o', logdir, detect_ninja(), '-C', scandir] rc = subprocess.call(meson_cmd + [srcdir, scandir]) if rc != 0: return rc @@ -58,8 +59,14 @@ def run(args): toolname = tool break - exename = os.environ.get('SCANBUILD', toolname) - if not shutil.which(exename): - print('Scan-build not installed.') + if 'SCANBUILD' in os.environ: + exelist = shlex.split(os.environ['SCANBUILD']) + else: + exelist = [toolname] + + try: + Popen_safe(exelist + ['--help']) + except OSError: + print('Could not execute scan-build "%s"' % ' '.join(exelist)) return 1 - return 
scanbuild(exename, srcdir, blddir, privdir, logdir, meson_cmd) + return scanbuild(exelist, srcdir, blddir, privdir, logdir, meson_cmd) diff --git a/mesonbuild/wrap/__init__.py b/mesonbuild/wrap/__init__.py index b792dfa..6be2c44 100644 --- a/mesonbuild/wrap/__init__.py +++ b/mesonbuild/wrap/__init__.py @@ -33,6 +33,15 @@ from enum import Enum # Note that these options do not affect subprojects that # are git submodules since those are only usable in git # repositories, and you almost always want to download them. + +# This did _not_ work when inside the WrapMode class. +# I don't know why. If you can fix this, patches welcome. +string_to_value = {'default': 1, + 'nofallback': 2, + 'nodownload': 3, + 'forcefallback': 4, + } + class WrapMode(Enum): default = 1 nofallback = 2 @@ -41,3 +50,8 @@ class WrapMode(Enum): def __str__(self): return self.name + + @staticmethod + def from_string(mode_name): + g = string_to_value[mode_name] + return WrapMode(g) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 42808e3..7cad904 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -17,9 +17,9 @@ import contextlib import urllib.request, os, hashlib, shutil, tempfile, stat import subprocess import sys -from pathlib import Path +import configparser from . 
import WrapMode -from ..mesonlib import Popen_safe +from ..mesonlib import MesonException try: import ssl @@ -41,8 +41,11 @@ def build_ssl_context(): return ctx def quiet_git(cmd, workingdir): - pc = subprocess.Popen(['git', '-C', workingdir] + cmd, stdin=subprocess.DEVNULL, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + try: + pc = subprocess.Popen(['git', '-C', workingdir] + cmd, stdin=subprocess.DEVNULL, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except FileNotFoundError as e: + return False, str(e) out, err = pc.communicate() if pc.returncode != 0: return False, err @@ -67,202 +70,181 @@ def open_wrapdburl(urlstring): urlstring = 'http' + urlstring[5:] return urllib.request.urlopen(urlstring, timeout=req_timeout) +class WrapException(MesonException): + pass + +class WrapNotFoundException(WrapException): + pass class PackageDefinition: def __init__(self, fname): - self.values = {} - with open(fname) as ifile: - first = ifile.readline().strip() - - if first == '[wrap-file]': - self.type = 'file' - elif first == '[wrap-git]': - self.type = 'git' - elif first == '[wrap-hg]': - self.type = 'hg' - elif first == '[wrap-svn]': - self.type = 'svn' - else: - raise RuntimeError('Invalid format of package file') - for line in ifile: - line = line.strip() - if line == '': - continue - (k, v) = line.split('=', 1) - k = k.strip() - v = v.strip() - self.values[k] = v + self.basename = os.path.basename(fname) + try: + self.config = configparser.ConfigParser(interpolation=None) + self.config.read(fname) + except: + raise WrapException('Failed to parse {}'.format(self.basename)) + if len(self.config.sections()) < 1: + raise WrapException('Missing sections in {}'.format(self.basename)) + self.wrap_section = self.config.sections()[0] + if not self.wrap_section.startswith('wrap-'): + m = '{!r} is not a valid first section in {}' + raise WrapException(m.format(self.wrap_section, self.basename)) + self.type = self.wrap_section[5:] + self.values = 
dict(self.config[self.wrap_section]) def get(self, key): - return self.values[key] + try: + return self.values[key] + except KeyError: + m = 'Missing key {!r} in {}' + raise WrapException(m.format(key, self.basename)) def has_patch(self): return 'patch_url' in self.values class Resolver: - def __init__(self, subdir_root, wrap_mode=WrapMode(1)): + def __init__(self, subdir_root, wrap_mode=WrapMode.default): self.wrap_mode = wrap_mode self.subdir_root = subdir_root self.cachedir = os.path.join(self.subdir_root, 'packagecache') def resolve(self, packagename): - # Check if the directory is already resolved - dirname = Path(os.path.join(self.subdir_root, packagename)) - subprojdir = os.path.join(*dirname.parts[-2:]) - if dirname.is_dir(): - if (dirname / 'meson.build').is_file(): - # The directory is there and has meson.build? Great, use it. - return packagename - # Is the dir not empty and also not a git submodule dir that is - # not checkout properly? Can't do anything, exception! - elif next(dirname.iterdir(), None) and not (dirname / '.git').is_file(): - m = '{!r} is not empty and has no meson.build files' - raise RuntimeError(m.format(subprojdir)) - elif dirname.exists(): - m = '{!r} already exists and is not a dir; cannot use as subproject' - raise RuntimeError(m.format(subprojdir)) - - dirname = str(dirname) + self.packagename = packagename + self.directory = packagename + # We always have to load the wrap file, if it exists, because it could + # override the default directory name. + self.wrap = self.load_wrap() + if self.wrap and 'directory' in self.wrap.values: + self.directory = self.wrap.get('directory') + if os.path.dirname(self.directory): + raise WrapException('Directory key must be a name and not a path') + self.dirname = os.path.join(self.subdir_root, self.directory) + meson_file = os.path.join(self.dirname, 'meson.build') + + # The directory is there and has meson.build? Great, use it. 
+ if os.path.exists(meson_file): + return self.directory + # Check if the subproject is a git submodule - if self.resolve_git_submodule(dirname): - return packagename + self.resolve_git_submodule() + + if os.path.exists(self.dirname): + if not os.path.isdir(self.dirname): + raise WrapException('Path already exists but is not a directory') + else: + # A wrap file is required to download + if not self.wrap: + m = 'Subproject directory not found and {}.wrap file not found' + raise WrapNotFoundException(m.format(self.packagename)) + + if self.wrap.type == 'file': + self.get_file() + else: + self.check_can_download() + if self.wrap.type == 'git': + self.get_git() + elif self.wrap.type == "hg": + self.get_hg() + elif self.wrap.type == "svn": + self.get_svn() + else: + raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type)) + + # A meson.build file is required in the directory + if not os.path.exists(meson_file): + raise WrapException('Subproject exists but has no meson.build file') + return self.directory + + def load_wrap(self): + fname = os.path.join(self.subdir_root, self.packagename + '.wrap') + if os.path.isfile(fname): + return PackageDefinition(fname) + return None + + def check_can_download(self): # Don't download subproject data based on wrap file if requested. # Git submodules are ok (see above)! if self.wrap_mode is WrapMode.nodownload: m = 'Automatic wrap-based subproject downloading is disabled' - raise RuntimeError(m) - - # Check if there's a .wrap file for this subproject - fname = os.path.join(self.subdir_root, packagename + '.wrap') - if not os.path.isfile(fname): - # No wrap file with this name? Give up. 
- m = 'No {}.wrap found for {!r}' - raise RuntimeError(m.format(packagename, subprojdir)) - p = PackageDefinition(fname) - if p.type == 'file': - if not os.path.isdir(self.cachedir): - os.mkdir(self.cachedir) - self.download(p, packagename) - self.extract_package(p) - elif p.type == 'git': - self.get_git(p) - elif p.type == "hg": - self.get_hg(p) - elif p.type == "svn": - self.get_svn(p) - else: - raise AssertionError('Unreachable code.') - return p.get('directory') + raise WrapException(m) - def resolve_git_submodule(self, dirname): + def resolve_git_submodule(self): # Are we in a git repository? ret, out = quiet_git(['rev-parse'], self.subdir_root) if not ret: return False # Is `dirname` a submodule? - ret, out = quiet_git(['submodule', 'status', dirname], self.subdir_root) + ret, out = quiet_git(['submodule', 'status', self.dirname], self.subdir_root) if not ret: return False # Submodule has not been added, add it if out.startswith(b'+'): - mlog.warning('git submodule {} might be out of date'.format(dirname)) + mlog.warning('git submodule might be out of date') return True elif out.startswith(b'U'): - raise RuntimeError('submodule {} has merge conflicts'.format(dirname)) + raise WrapException('git submodule has merge conflicts') # Submodule exists, but is deinitialized or wasn't initialized elif out.startswith(b'-'): - if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', dirname]) == 0: + if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', self.dirname]) == 0: return True - raise RuntimeError('Failed to git submodule init {!r}'.format(dirname)) + raise WrapException('git submodule failed to init') # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout. 
elif out.startswith(b' '): - subprocess.call(['git', 'checkout', '.'], cwd=dirname) + subprocess.call(['git', 'checkout', '.'], cwd=self.dirname) # Even if checkout failed, try building it anyway and let the user # handle any problems manually. return True + elif out == b'': + # It is not a submodule, just a folder that exists in the main repository. + return False m = 'Unknown git submodule output: {!r}' - raise RuntimeError(m.format(out)) + raise WrapException(m.format(out)) - def get_git(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - try: - subprocess.check_call(['git', 'rev-parse'], cwd=checkoutdir) - except subprocess.CalledProcessError: - raise RuntimeError('%s is not empty but is not a valid ' - 'git repository, we can not work with it' - ' as a subproject directory.' % ( - checkoutdir)) - - if revno.lower() == 'head': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. 
- subprocess.call(['git', 'pull'], cwd=checkoutdir) - else: - if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir) - subprocess.check_call(['git', 'checkout', revno], - cwd=checkoutdir) - else: - subprocess.check_call(['git', 'clone', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - if revno.lower() != 'head': - if subprocess.call(['git', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['git', 'fetch', p.get('url'), revno], cwd=checkoutdir) - subprocess.check_call(['git', 'checkout', revno], - cwd=checkoutdir) - push_url = p.values.get('push-url') - if push_url: - subprocess.check_call(['git', 'remote', 'set-url', - '--push', 'origin', push_url], - cwd=checkoutdir) - - def get_hg(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - if revno.lower() == 'tip': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. 
- subprocess.call(['hg', 'pull'], cwd=checkoutdir) - else: - if subprocess.call(['hg', 'checkout', revno], cwd=checkoutdir) != 0: - subprocess.check_call(['hg', 'pull'], cwd=checkoutdir) - subprocess.check_call(['hg', 'checkout', revno], - cwd=checkoutdir) - else: - subprocess.check_call(['hg', 'clone', p.get('url'), - p.get('directory')], cwd=self.subdir_root) - if revno.lower() != 'tip': - subprocess.check_call(['hg', 'checkout', revno], - cwd=checkoutdir) - - def get_svn(self, p): - checkoutdir = os.path.join(self.subdir_root, p.get('directory')) - revno = p.get('revision') - is_there = os.path.isdir(checkoutdir) - if is_there: - p, out = Popen_safe(['svn', 'info', '--show-item', 'revision', checkoutdir]) - current_revno = out - if current_revno == revno: - return - - if revno.lower() == 'head': - # Failure to do pull is not a fatal error, - # because otherwise you can't develop without - # a working net connection. - subprocess.call(['svn', 'update'], cwd=checkoutdir) - else: - subprocess.check_call(['svn', 'update', '-r', revno], cwd=checkoutdir) + def get_file(self): + path = self.get_file_internal('source') + extract_dir = self.subdir_root + # Some upstreams ship packages that do not have a leading directory. + # Create one for them. 
+ if 'lead_directory_missing' in self.wrap.values: + os.mkdir(self.dirname) + extract_dir = self.dirname + shutil.unpack_archive(path, extract_dir) + if self.wrap.has_patch(): + self.apply_patch() + + def get_git(self): + revno = self.wrap.get('revision') + if self.wrap.values.get('clone-recursive', '').lower() == 'true': + subprocess.check_call(['git', 'clone', '--recursive', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) else: - subprocess.check_call(['svn', 'checkout', '-r', revno, p.get('url'), - p.get('directory')], cwd=self.subdir_root) + subprocess.check_call(['git', 'clone', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) + if revno.lower() != 'head': + if subprocess.call(['git', 'checkout', revno], cwd=self.dirname) != 0: + subprocess.check_call(['git', 'fetch', self.wrap.get('url'), revno], cwd=self.dirname) + subprocess.check_call(['git', 'checkout', revno], + cwd=self.dirname) + push_url = self.wrap.values.get('push-url') + if push_url: + subprocess.check_call(['git', 'remote', 'set-url', + '--push', 'origin', push_url], + cwd=self.dirname) + + def get_hg(self): + revno = self.wrap.get('revision') + subprocess.check_call(['hg', 'clone', self.wrap.get('url'), + self.directory], cwd=self.subdir_root) + if revno.lower() != 'tip': + subprocess.check_call(['hg', 'checkout', revno], + cwd=self.dirname) + + def get_svn(self): + revno = self.wrap.get('revision') + subprocess.check_call(['svn', 'checkout', '-r', revno, self.wrap.get('url'), + self.directory], cwd=self.subdir_root) def get_data(self, url): blocksize = 10 * 1024 @@ -308,41 +290,48 @@ class Resolver: hashvalue = h.hexdigest() return hashvalue, tmpfile.name - def get_hash(self, data): + def check_hash(self, what, path): + expected = self.wrap.get(what + '_hash') h = hashlib.sha256() - h.update(data) - hashvalue = h.hexdigest() - return hashvalue - - def download(self, p, packagename): - ofname = os.path.join(self.cachedir, p.get('source_filename')) - if 
os.path.exists(ofname): - mlog.log('Using', mlog.bold(packagename), 'from cache.') - else: - srcurl = p.get('source_url') - mlog.log('Downloading', mlog.bold(packagename), 'from', mlog.bold(srcurl)) - dhash, tmpfile = self.get_data(srcurl) - expected = p.get('source_hash') - if dhash != expected: - os.remove(tmpfile) - raise RuntimeError('Incorrect hash for source %s:\n %s expected\n %s actual.' % (packagename, expected, dhash)) - os.rename(tmpfile, ofname) - if p.has_patch(): - patch_filename = p.get('patch_filename') - filename = os.path.join(self.cachedir, patch_filename) - if os.path.exists(filename): - mlog.log('Using', mlog.bold(patch_filename), 'from cache.') - else: - purl = p.get('patch_url') - mlog.log('Downloading patch from', mlog.bold(purl)) - phash, tmpfile = self.get_data(purl) - expected = p.get('patch_hash') - if phash != expected: - os.remove(tmpfile) - raise RuntimeError('Incorrect hash for patch %s:\n %s expected\n %s actual' % (packagename, expected, phash)) - os.rename(tmpfile, filename) - else: - mlog.log('Package does not require patch.') + with open(path, 'rb') as f: + h.update(f.read()) + dhash = h.hexdigest() + if dhash != expected: + raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' % (what, expected, dhash)) + + def download(self, what, ofname): + self.check_can_download() + srcurl = self.wrap.get(what + '_url') + mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl)) + dhash, tmpfile = self.get_data(srcurl) + expected = self.wrap.get(what + '_hash') + if dhash != expected: + os.remove(tmpfile) + raise WrapException('Incorrect hash for %s:\n %s expected\n %s actual.' 
% (what, expected, dhash)) + os.rename(tmpfile, ofname) + + def get_file_internal(self, what): + filename = self.wrap.get(what + '_filename') + cache_path = os.path.join(self.cachedir, filename) + + if os.path.exists(cache_path): + self.check_hash(what, cache_path) + mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.') + return cache_path + + if not os.path.isdir(self.cachedir): + os.mkdir(self.cachedir) + self.download(what, cache_path) + return cache_path + + def apply_patch(self): + path = self.get_file_internal('patch') + try: + shutil.unpack_archive(path, self.subdir_root) + except Exception: + with tempfile.TemporaryDirectory() as workdir: + shutil.unpack_archive(path, workdir) + self.copy_tree(workdir, self.subdir_root) def copy_tree(self, root_src_dir, root_dst_dir): """ @@ -362,36 +351,3 @@ class Resolver: os.chmod(dst_file, stat.S_IWUSR) os.remove(dst_file) shutil.copy2(src_file, dst_dir) - - def extract_package(self, package): - if sys.version_info < (3, 5): - try: - import lzma # noqa: F401 - del lzma - except ImportError: - pass - else: - try: - shutil.register_unpack_format('xztar', ['.tar.xz', '.txz'], shutil._unpack_tarfile, [], "xz'ed tar-file") - except shutil.RegistryError: - pass - target_dir = os.path.join(self.subdir_root, package.get('directory')) - if os.path.isdir(target_dir): - return - extract_dir = self.subdir_root - # Some upstreams ship packages that do not have a leading directory. - # Create one for them. 
- try: - package.get('lead_directory_missing') - os.mkdir(target_dir) - extract_dir = target_dir - except KeyError: - pass - shutil.unpack_archive(os.path.join(self.cachedir, package.get('source_filename')), extract_dir) - if package.has_patch(): - try: - shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), self.subdir_root) - except Exception: - with tempfile.TemporaryDirectory() as workdir: - shutil.unpack_archive(os.path.join(self.cachedir, package.get('patch_filename')), workdir) - self.copy_tree(workdir, self.subdir_root) diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py index 364452d..bb64b5b 100644 --- a/mesonbuild/wrap/wraptool.py +++ b/mesonbuild/wrap/wraptool.py @@ -16,7 +16,6 @@ import json import sys, os import configparser import shutil -import argparse from glob import glob @@ -208,9 +207,6 @@ def status(options): else: print('', name, 'not up to date. Have %s %d, but %s %d is available.' % (current_branch, current_revision, latest_branch, latest_revision)) -def run(args): - parser = argparse.ArgumentParser(prog='wraptool') - add_arguments(parser) - options = parser.parse_args(args) +def run(options): options.wrap_func(options) return 0 diff --git a/msi/License.rtf b/msi/License.rtf index 9b58df9..b3945ba 100644 --- a/msi/License.rtf +++ b/msi/License.rtf @@ -1,73 +1,73 @@ -{\rtf1\ansi\ansicpg1252\deff0{\fonttbl{\f0\fswiss\fprq2\fcharset0 Arial;}}
-{\colortbl ;\red0\green0\blue255;}
-{\*\generator Msftedit 5.41.21.2510;}\viewkind4\uc1\pard\qc\lang1033\b\f0\fs18 Apache License\par
-Version 2.0, January 2004\par
-{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/"}}{\fldrslt{\ul\cf1 http://www.apache.org/licenses/}}}\f0\fs18\par
-\b0\par
-\pard TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\par
-\par
-\pard\fi-180\li180 1. Definitions.\par
-\par
-\pard\li180 "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.\par
-\par
-"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.\par
-\par
-"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.\par
-\par
-"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.\par
-\par
-"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.\par
-\par
-"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.\par
-\par
-"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).\par
-\par
-"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.\par
-\par
-"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."\par
-\par
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.\par
-\pard\par
-\pard\fi-180\li180 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.\par
-\par
-3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.\par
-\par
-4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:\par
-\pard\par
-\pard\fi-270\li450 (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and\par
-\par
-(b) You must cause any modified files to carry prominent notices stating that You changed the files; and\par
-\par
-(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and\par
-\par
-(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.\par
-\pard\par
-\pard\li180 You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.\par
-\pard\par
-\pard\fi-180\li180 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.\par
-\par
-6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.\par
-\par
-7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.\par
-\par
-8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.\par
-\par
-9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.\par
-\pard\par
-END OF TERMS AND CONDITIONS\par
-\par
-APPENDIX: How to apply the Apache License to your work.\par
-\par
-To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.\par
-\par
-\pard\li180 Copyright [yyyy] [name of copyright owner]\par
-\par
-Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at\par
-\par
-\pard\li360{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt{\ul\cf1 http://www.apache.org/licenses/LICENSE-2.0}}}\f0\fs18\par
-\pard\li180\par
-Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.\par
-\pard\par
-\par
-}
+{\rtf1\ansi\ansicpg1252\deff0{\fonttbl{\f0\fswiss\fprq2\fcharset0 Arial;}} +{\colortbl ;\red0\green0\blue255;} +{\*\generator Msftedit 5.41.21.2510;}\viewkind4\uc1\pard\qc\lang1033\b\f0\fs18 Apache License\par +Version 2.0, January 2004\par +{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/"}}{\fldrslt{\ul\cf1 http://www.apache.org/licenses/}}}\f0\fs18\par +\b0\par +\pard TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\par +\par +\pard\fi-180\li180 1. Definitions.\par +\par +\pard\li180 "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.\par +\par +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.\par +\par +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.\par +\par +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.\par +\par +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.\par +\par +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.\par +\par +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is 
provided in the Appendix below).\par +\par +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.\par +\par +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."\par +\par +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.\par +\pard\par +\pard\fi-180\li180 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.\par +\par +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.\par +\par +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:\par +\pard\par +\pard\fi-270\li450 (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and\par +\par +(b) You must cause any modified files to carry prominent notices stating that You changed the files; and\par +\par +(c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and\par +\par +(d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.\par +\pard\par +\pard\li180 You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.\par +\pard\par +\pard\fi-180\li180 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.\par +\par +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.\par +\par +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.\par +\par +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.\par +\par +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.\par +\pard\par +END OF TERMS AND CONDITIONS\par +\par +APPENDIX: How to apply the Apache License to your work.\par +\par +To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. 
We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.\par +\par +\pard\li180 Copyright [yyyy] [name of copyright owner]\par +\par +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at\par +\par +\pard\li360{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt{\ul\cf1 http://www.apache.org/licenses/LICENSE-2.0}}}\f0\fs18\par +\pard\li180\par +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.\par +\pard\par +\par +} diff --git a/run_cross_test.py b/run_cross_test.py index 7191402..b2ef6be 100755 --- a/run_cross_test.py +++ b/run_cross_test.py @@ -25,26 +25,34 @@ Eventually migrate to something fancier.''' import sys import os from pathlib import Path +import argparse from run_project_tests import gather_tests, run_tests, StopException, setup_commands from run_project_tests import failing_logs -def runtests(cross_file): +def runtests(cross_file, failfast): commontests = [('common', gather_tests(Path('test cases', 'common')), False)] try: - (passing_tests, failing_tests, skipped_tests) = run_tests(commontests, 'meson-cross-test-run', ['--cross', cross_file]) + (passing_tests, failing_tests, skipped_tests) = \ + run_tests(commontests, 'meson-cross-test-run', failfast, ['--cross', cross_file]) except StopException: pass print('\nTotal passed cross tests:', passing_tests) print('Total failed cross tests:', failing_tests) print('Total skipped cross tests:', skipped_tests) - if failing_tests > 0 and ('TRAVIS' in os.environ or 'APPVEYOR' in 
os.environ): + if failing_tests > 0 and ('CI' in os.environ): print('\nMesonlogs of failing tests\n') - for l in failing_logs: - print(l, '\n') - sys.exit(failing_tests) + for log in failing_logs: + print(log, '\n') + return failing_tests + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--failfast', action='store_true') + parser.add_argument('cross_file') + options = parser.parse_args() + setup_commands('ninja') + return runtests(options.cross_file, options.failfast) if __name__ == '__main__': - setup_commands('ninja') - cross_file = sys.argv[1] - runtests(cross_file) + sys.exit(main()) diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index fd33856..e7eab72 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys import os import tempfile import unittest @@ -23,11 +24,6 @@ from pathlib import Path from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows -# Find the meson.py adjacent to us -meson_py = Path(__file__).resolve().parent / 'meson.py' -if not meson_py.is_file(): - raise RuntimeError("meson.py not found: test must only run from git") - def get_pypath(): import sysconfig pypath = sysconfig.get_path('purelib', vars={'base': ''}) @@ -67,15 +63,14 @@ class CommandTests(unittest.TestCase): def _run(self, command, workdir=None): ''' - Run a command while printing the stdout and stderr to stdout, - and also return a copy of it + Run a command while printing the stdout, and also return a copy of it ''' # If this call hangs CI will just abort. It is very hard to distinguish # between CI issue and test bug in that case. Set timeout and fail loud # instead. 
p = subprocess.run(command, stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, env=os.environ.copy(), - universal_newlines=True, cwd=workdir, timeout=60 * 5) + env=os.environ.copy(), universal_newlines=True, + cwd=workdir, timeout=60 * 5) print(p.stdout) if p.returncode != 0: raise subprocess.CalledProcessError(p.returncode, command) @@ -128,7 +123,9 @@ class CommandTests(unittest.TestCase): pylibdir = prefix / get_pypath() bindir = prefix / get_pybindir() pylibdir.mkdir(parents=True) - os.environ['PYTHONPATH'] = str(pylibdir) + # XXX: join with empty name so it always ends with os.sep otherwise + # distutils complains that prefix isn't contained in PYTHONPATH + os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '') os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH'] self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)]) # Check that all the files were installed correctly @@ -176,8 +173,7 @@ class CommandTests(unittest.TestCase): builddir = str(self.tmpdir / 'build4') (bindir / 'meson').rename(bindir / 'meson.real') wrapper = (bindir / 'meson') - with open(str(wrapper), 'w') as f: - f.write('#!/bin/sh\n\nmeson.real "$@"') + wrapper.open('w').write('#!/bin/sh\n\nmeson.real "$@"') wrapper.chmod(0o755) meson_setup = [str(wrapper), 'setup'] meson_command = meson_setup + self.meson_args @@ -195,5 +191,6 @@ class CommandTests(unittest.TestCase): zipapp.create_archive(source=source, target=target, interpreter=python_command[0], main=None) self._run([target.as_posix(), '--help']) + if __name__ == '__main__': - unittest.main(buffer=True) + sys.exit(unittest.main(buffer=True)) diff --git a/run_project_tests.py b/run_project_tests.py index a373aa0..0d64f47 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -36,11 +36,12 @@ import argparse import xml.etree.ElementTree as ET import time import multiprocessing -from concurrent.futures import ProcessPoolExecutor +from concurrent.futures import ProcessPoolExecutor, 
CancelledError import re from run_tests import get_fake_options, run_configure, get_meson_script from run_tests import get_backend_commands, get_backend_args_for_dir, Backend from run_tests import ensure_backend_detects_changes +from run_tests import guess_backend class BuildStep(Enum): @@ -81,7 +82,7 @@ class AutoDeletedDir: failing_logs = [] print_debug = 'MESON_PRINT_TEST_OUTPUT' in os.environ -under_ci = not {'TRAVIS', 'APPVEYOR'}.isdisjoint(os.environ) +under_ci = 'CI' in os.environ do_debug = under_ci or print_debug no_meson_log_msg = 'No meson-log.txt found.' @@ -101,26 +102,7 @@ signal.signal(signal.SIGTERM, stop_handler) def setup_commands(optbackend): global do_debug, backend, backend_flags global compile_commands, clean_commands, test_commands, install_commands, uninstall_commands - backend = optbackend - msbuild_exe = shutil.which('msbuild') - # Auto-detect backend if unspecified - if backend is None: - if msbuild_exe is not None: - backend = 'vs' # Meson will auto-detect VS version to use - else: - backend = 'ninja' - # Set backend arguments for Meson - if backend.startswith('vs'): - backend_flags = ['--backend=' + backend] - backend = Backend.vs - elif backend == 'xcode': - backend_flags = ['--backend=xcode'] - backend = Backend.xcode - elif backend == 'ninja': - backend_flags = ['--backend=ninja'] - backend = Backend.ninja - else: - raise RuntimeError('Unknown backend: {!r}'.format(backend)) + backend, backend_flags = guess_backend(optbackend, shutil.which('msbuild')) compile_commands, clean_commands, test_commands, install_commands, \ uninstall_commands = get_backend_commands(backend, do_debug) @@ -136,10 +118,25 @@ def get_relative_files_list_from_dir(fromdir): return paths def platform_fix_name(fname, compiler, env): + # canonicalize compiler + if compiler == 'clang-cl': + canonical_compiler = 'msvc' + else: + canonical_compiler = compiler + if '?lib' in fname: - if mesonlib.for_cygwin(env.is_cross_build(), env): + if 
mesonlib.for_windows(env.is_cross_build(), env) and canonical_compiler == 'msvc': + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/\1.', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) + elif mesonlib.for_windows(env.is_cross_build(), env): + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/lib\1.', fname) + fname = re.sub(r'\?lib(.*)\.dll$', r'lib\1.dll', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) + elif mesonlib.for_cygwin(env.is_cross_build(), env): fname = re.sub(r'lib/\?lib(.*)\.so$', r'bin/cyg\1.dll', fname) + fname = re.sub(r'lib/\?lib(.*)\.', r'bin/cyg\1.', fname) fname = re.sub(r'\?lib(.*)\.dll$', r'cyg\1.dll', fname) + fname = re.sub(r'/\?lib/', r'/bin/', fname) else: fname = re.sub(r'\?lib', 'lib', fname) @@ -150,12 +147,47 @@ def platform_fix_name(fname, compiler, env): if fname.startswith('?msvc:'): fname = fname[6:] - if compiler != 'cl': + if canonical_compiler != 'msvc': return None if fname.startswith('?gcc:'): fname = fname[5:] - if compiler == 'cl': + if canonical_compiler == 'msvc': + return None + + if fname.startswith('?cygwin:'): + fname = fname[8:] + if not mesonlib.for_cygwin(env.is_cross_build(), env): + return None + + if fname.endswith('?so'): + if mesonlib.for_windows(env.is_cross_build(), env) and canonical_compiler == 'msvc': + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/(?:lib|)([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif mesonlib.for_windows(env.is_cross_build(), env): + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif mesonlib.for_cygwin(env.is_cross_build(), env): + fname = re.sub(r'lib/([^/]*)\?so$', r'bin/\1.dll', fname) + fname = re.sub(r'/lib([^/]*?)\?so$', r'/cyg\1.dll', fname) + fname = re.sub(r'/([^/]*?)\?so$', r'/\1.dll', fname) + return fname + elif mesonlib.for_darwin(env.is_cross_build(), env): + return fname[:-3] + '.dylib' + else: + return fname[:-3] + '.so' + + if 
fname.endswith('?implib') or fname.endswith('?implibempty'): + if mesonlib.for_windows(env.is_cross_build(), env) and canonical_compiler == 'msvc': + # only MSVC doesn't generate empty implibs + if fname.endswith('?implibempty') and compiler == 'msvc': + return None + return re.sub(r'/(?:lib|)([^/]*?)\?implib(?:empty|)$', r'/\1.lib', fname) + elif mesonlib.for_windows(env.is_cross_build(), env) or mesonlib.for_cygwin(env.is_cross_build(), env): + return re.sub(r'\?implib(?:empty|)$', r'.dll.a', fname) + else: return None return fname @@ -183,17 +215,11 @@ def validate_install(srcdir, installdir, compiler, env): expected[fname] = True for (fname, found) in expected.items(): if not found: - # Ignore missing PDB files if we aren't using cl - if fname.endswith('.pdb') and compiler != 'cl': - continue ret_msg += 'Expected file {0} missing.\n'.format(fname) # Check if there are any unexpected files found = get_relative_files_list_from_dir(installdir) for fname in found: - # Windows-specific tests check for the existence of installed PDB - # files, but common tests do not, for obvious reasons. Ignore any - # extra PDB files found. 
- if fname not in expected and not fname.endswith('.pdb') and compiler == 'cl': + if fname not in expected: ret_msg += 'Extra file {0} found.\n'.format(fname) return ret_msg @@ -248,12 +274,12 @@ def run_test_inprocess(testdir): os.chdir(testdir) test_log_fname = Path('meson-logs', 'testlog.txt') try: - returncode_test = mtest.run(['--no-rebuild']) + returncode_test = mtest.run_with_args(['--no-rebuild']) if test_log_fname.exists(): test_log = test_log_fname.open(errors='ignore').read() else: test_log = '' - returncode_benchmark = mtest.run(['--no-rebuild', '--benchmark', '--logbase', 'benchmarklog']) + returncode_benchmark = mtest.run_with_args(['--no-rebuild', '--benchmark', '--logbase', 'benchmarklog']) finally: sys.stdout = old_stdout sys.stderr = old_stderr @@ -394,6 +420,7 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen def gather_tests(testdir: Path): tests = [t.name for t in testdir.glob('*')] + tests = [t for t in tests if not t.startswith('.')] # Filter non-tests files (dot files, etc) testlist = [(int(t.split()[0]), t) for t in tests] testlist.sort() tests = [testdir / t[1] for t in testlist] @@ -523,14 +550,14 @@ def detect_tests_to_run(): gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests] return gathered_tests -def run_tests(all_tests, log_name_base, extra_args): +def run_tests(all_tests, log_name_base, failfast, extra_args): global logfile txtname = log_name_base + '.txt' with open(txtname, 'w', encoding='utf-8', errors='ignore') as lf: logfile = lf - return _run_tests(all_tests, log_name_base, extra_args) + return _run_tests(all_tests, log_name_base, failfast, extra_args) -def _run_tests(all_tests, log_name_base, extra_args): +def _run_tests(all_tests, log_name_base, failfast, extra_args): global stop, executor, futures, system_compiler xmlname = log_name_base + '.xml' junit_root = ET.Element('testsuites') @@ -578,7 +605,10 @@ def _run_tests(all_tests, 
log_name_base, extra_args): futures.append((testname, t, result)) for (testname, t, result) in futures: sys.stdout.flush() - result = result.result() + try: + result = result.result() + except CancelledError: + continue if (result is None) or (('MESON_SKIP_TEST' in result.stdo) and (skippable(name, t.as_posix()))): print(yellow('Skipping:'), t.as_posix()) current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, @@ -599,6 +629,10 @@ def _run_tests(all_tests, log_name_base, extra_args): else: failing_logs.append(result.stdo) failing_logs.append(result.stde) + if failfast: + print("Cancelling the rest of the tests") + for (_, _, res) in futures: + res.cancel() else: print('Succeeded test%s: %s' % (without_install, t.as_posix())) passing_tests += 1 @@ -616,6 +650,10 @@ def _run_tests(all_tests, log_name_base, extra_args): stdoel.text = result.stdo stdeel = ET.SubElement(current_test, 'system-err') stdeel.text = result.stde + + if failfast and failing_tests > 0: + break + print("\nTotal configuration time: %.2fs" % conf_time) print("Total build time: %.2fs" % build_time) print("Total test time: %.2fs" % test_time) @@ -694,14 +732,14 @@ def check_meson_commands_work(): def detect_system_compiler(): global system_compiler - if shutil.which('cl'): - system_compiler = 'cl' - elif shutil.which('cc'): - system_compiler = 'cc' - elif shutil.which('gcc'): - system_compiler = 'gcc' - else: - raise RuntimeError("Could not find C compiler.") + + with AutoDeletedDir(tempfile.mkdtemp(prefix='b ', dir='.')) as build_dir: + env = environment.Environment(None, build_dir, get_fake_options('/')) + try: + comp = env.detect_c_compiler(env.is_cross_build()) + except: + raise RuntimeError("Could not find C compiler.") + system_compiler = comp.get_id() if __name__ == '__main__': parser = argparse.ArgumentParser(description="Run the test suite of Meson.") @@ -709,6 +747,8 @@ if __name__ == '__main__': help='arguments that are passed directly to Meson (remember to have -- 
before these).') parser.add_argument('--backend', default=None, dest='backend', choices=backendlist) + parser.add_argument('--failfast', action='store_true', + help='Stop running if test case fails') options = parser.parse_args() setup_commands(options.backend) @@ -720,7 +760,7 @@ if __name__ == '__main__': check_meson_commands_work() try: all_tests = detect_tests_to_run() - (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.extra_args) + (passing_tests, failing_tests, skipped_tests) = run_tests(all_tests, 'meson-test-run', options.failfast, options.extra_args) except StopException: pass print('\nTotal passed tests:', green(str(passing_tests))) diff --git a/run_tests.py b/run_tests.py index a5fd7a5..ebee602 100755 --- a/run_tests.py +++ b/run_tests.py @@ -21,17 +21,40 @@ import shutil import subprocess import tempfile import platform +import argparse from io import StringIO from enum import Enum from glob import glob from pathlib import Path - import mesonbuild from mesonbuild import mesonlib from mesonbuild import mesonmain from mesonbuild import mtest from mesonbuild import mlog from mesonbuild.environment import Environment, detect_ninja +from mesonbuild.coredata import backendlist + +def guess_backend(backend, msbuild_exe): + # Auto-detect backend if unspecified + backend_flags = [] + if backend is None: + if msbuild_exe is not None: + backend = 'vs' # Meson will auto-detect VS version to use + else: + backend = 'ninja' + # Set backend arguments for Meson + if backend.startswith('vs'): + backend_flags = ['--backend=' + backend] + backend = Backend.vs + elif backend == 'xcode': + backend_flags = ['--backend=xcode'] + backend = Backend.xcode + elif backend == 'ninja': + backend_flags = ['--backend=ninja'] + backend = Backend.ninja + else: + raise RuntimeError('Unknown backend: {!r}'.format(backend)) + return (backend, backend_flags) # Fake classes and objects for mocking @@ -50,6 +73,7 @@ def get_fake_options(prefix): 
opts.wrap_mode = None opts.prefix = prefix opts.cmd_line_options = {} + opts.native_file = [] return opts def get_fake_env(sdir, bdir, prefix): @@ -106,9 +130,9 @@ def find_vcxproj_with_target(builddir, target): import re, fnmatch t, ext = os.path.splitext(target) if ext: - p = '<TargetName>{}</TargetName>\s*<TargetExt>\{}</TargetExt>'.format(t, ext) + p = r'<TargetName>{}</TargetName>\s*<TargetExt>\{}</TargetExt>'.format(t, ext) else: - p = '<TargetName>{}</TargetName>'.format(t) + p = r'<TargetName>{}</TargetName>'.format(t) for root, dirs, files in os.walk(builddir): for f in fnmatch.filter(files, '*.vcxproj'): f = os.path.join(builddir, f) @@ -143,7 +167,9 @@ def get_backend_commands(backend, debug=False): test_cmd = cmd + ['RUN_TESTS.vcxproj'] elif backend is Backend.xcode: cmd = ['xcodebuild'] - clean_cmd = cmd + ['-alltargets', 'clean'] + # In Xcode9 new build system's clean command fails when using a custom build directory. + # Maybe use it when CI uses Xcode10 we can remove '-UseNewBuildSystem=FALSE' + clean_cmd = cmd + ['-alltargets', 'clean', '-UseNewBuildSystem=FALSE'] test_cmd = cmd + ['-target', 'RUN_TESTS'] elif backend is Backend.ninja: # We need at least 1.6 because of -w dupbuild=err @@ -179,7 +205,7 @@ def run_mtest_inprocess(commandlist): old_stderr = sys.stderr sys.stderr = mystderr = StringIO() try: - returncode = mtest.run(commandlist) + returncode = mtest.run_with_args(commandlist) finally: sys.stdout = old_stdout sys.stderr = old_stderr @@ -216,34 +242,27 @@ def print_system_info(): print('System:', platform.system()) print('') -if __name__ == '__main__': +def main(): print_system_info() + parser = argparse.ArgumentParser() + parser.add_argument('--cov', action='store_true') + parser.add_argument('--backend', default=None, dest='backend', + choices=backendlist) + parser.add_argument('--cross', default=False, dest='cross', action='store_true') + parser.add_argument('--failfast', action='store_true') + (options, _) = parser.parse_known_args() 
# Enable coverage early... - enable_coverage = '--cov' in sys.argv + enable_coverage = options.cov if enable_coverage: os.makedirs('.coverage', exist_ok=True) sys.argv.remove('--cov') import coverage coverage.process_startup() returncode = 0 - # Iterate over list in reverse order to find the last --backend arg - backend = Backend.ninja - cross = False - # FIXME: PLEASE convert to argparse - for arg in reversed(sys.argv[1:]): - if arg.startswith('--backend'): - if arg.startswith('--backend=vs'): - backend = Backend.vs - elif arg == '--backend=xcode': - backend = Backend.xcode - if arg.startswith('--cross'): - cross = True - if arg == '--cross=mingw': - cross = 'mingw' - elif arg == '--cross=arm': - cross = 'arm' + cross = options.cross + backend, _ = guess_backend(options.backend, shutil.which('msbuild')) # Running on a developer machine? Be nice! - if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'TRAVIS' not in os.environ: + if not mesonlib.is_windows() and not mesonlib.is_haiku() and 'CI' not in os.environ: os.nice(20) # Appveyor sets the `platform` environment variable which completely messes # up building with the vs2010 and vs2015 backends. @@ -265,26 +284,50 @@ if __name__ == '__main__': # Can't pass arguments to unit tests, so set the backend to use in the environment env = os.environ.copy() env['MESON_UNIT_TEST_BACKEND'] = backend.name - with tempfile.TemporaryDirectory() as td: + with tempfile.TemporaryDirectory() as temp_dir: # Enable coverage on all subsequent processes. 
if enable_coverage: - with open(os.path.join(td, 'usercustomize.py'), 'w') as f: - f.write('import coverage\n' - 'coverage.process_startup()\n') + Path(temp_dir, 'usercustomize.py').open('w').write( + 'import coverage\n' + 'coverage.process_startup()\n') env['COVERAGE_PROCESS_START'] = '.coveragerc' - env['PYTHONPATH'] = os.pathsep.join([td] + env.get('PYTHONPATH', [])) + if 'PYTHONPATH' in env: + env['PYTHONPATH'] = os.pathsep.join([temp_dir, env.get('PYTHONPATH')]) + else: + env['PYTHONPATH'] = temp_dir if not cross: - returncode += subprocess.call(mesonlib.python_command + ['run_meson_command_tests.py', '-v'], env=env) - returncode += subprocess.call(mesonlib.python_command + ['run_unittests.py', '-v'], env=env) - returncode += subprocess.call(mesonlib.python_command + ['run_project_tests.py'] + sys.argv[1:], env=env) + cmd = mesonlib.python_command + ['run_meson_command_tests.py', '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + cmd = mesonlib.python_command + ['run_unittests.py', '-v'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + cmd = mesonlib.python_command + ['run_project_tests.py'] + sys.argv[1:] + returncode += subprocess.call(cmd, env=env) else: cross_test_args = mesonlib.python_command + ['run_cross_test.py'] - if cross is True or cross == 'arm': - print(mlog.bold('Running armhf cross tests.').get_text(mlog.colorize_console)) - print() - returncode += subprocess.call(cross_test_args + ['cross/ubuntu-armhf.txt'], env=env) - if cross is True or cross == 'mingw': - print(mlog.bold('Running mingw-w64 64-bit cross tests.').get_text(mlog.colorize_console)) - print() - returncode += subprocess.call(cross_test_args + ['cross/linux-mingw-w64-64bit.txt'], env=env) - sys.exit(returncode) + print(mlog.bold('Running armhf cross 
tests.').get_text(mlog.colorize_console)) + print() + cmd = cross_test_args + ['cross/ubuntu-armhf.txt'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + if options.failfast and returncode != 0: + return returncode + print(mlog.bold('Running mingw-w64 64-bit cross tests.') + .get_text(mlog.colorize_console)) + print() + cmd = cross_test_args + ['cross/linux-mingw-w64-64bit.txt'] + if options.failfast: + cmd += ['--failfast'] + returncode += subprocess.call(cmd, env=env) + return returncode + +if __name__ == '__main__': + sys.exit(main()) diff --git a/run_unittests.py b/run_unittests.py index 58ab3e1..ae2c8c7 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -16,13 +16,17 @@ import stat import shlex import subprocess -import re, json +import re +import json import tempfile import textwrap import os import shutil +import sys import unittest import platform +import pickle +import functools from itertools import chain from unittest import mock from configparser import ConfigParser @@ -37,13 +41,14 @@ import mesonbuild.coredata import mesonbuild.modules.gnome from mesonbuild.interpreter import Interpreter, ObjectHolder from mesonbuild.mesonlib import ( - is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, + is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, windows_proof_rmtree, python_command, version_compare, BuildDirLock, Version ) from mesonbuild.environment import detect_ninja from mesonbuild.mesonlib import MesonException, EnvironmentException from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram +from mesonbuild.build import Target import mesonbuild.modules.pkgconfig from run_tests import exe_suffix, get_fake_env, get_meson_script @@ -81,7 +86,7 @@ def is_tarball(): return False def is_ci(): - if 'TRAVIS' in os.environ or 'APPVEYOR' in os.environ: + if 'CI' in os.environ: return True return False @@ -98,19 +103,66 @@ def _git_init(project_dir): def 
skipIfNoPkgconfig(f): ''' - Skip this test if no pkg-config is found, unless we're on Travis or - Appveyor CI. This allows users to run our test suite without having + Skip this test if no pkg-config is found, unless we're on CI. + This allows users to run our test suite without having pkg-config installed on, f.ex., macOS, while ensuring that our CI does not silently skip the test because of misconfiguration. Note: Yes, we provide pkg-config even while running Windows CI ''' + @functools.wraps(f) def wrapped(*args, **kwargs): if not is_ci() and shutil.which('pkg-config') is None: raise unittest.SkipTest('pkg-config not found') return f(*args, **kwargs) return wrapped +def skip_if_not_language(lang): + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + try: + env = get_fake_env('', '', '') + f = getattr(env, 'detect_{}_compiler'.format(lang)) + if lang in ['cs', 'vala', 'java', 'swift']: + f() + else: + f(False) + except EnvironmentException: + raise unittest.SkipTest('No {} compiler found.'.format(lang)) + return func(*args, **kwargs) + return wrapped + return wrapper + +def skip_if_env_value(value): + def wrapper(func): + @functools.wraps(func) + def wrapped(*args, **kwargs): + if value in os.environ: + raise unittest.SkipTest( + 'Environment variable "{}" set, skipping.'.format(value)) + return func(*args, **kwargs) + return wrapped + return wrapper + +def skip_if_not_base_option(feature): + """Skip tests if The compiler does not support a given base option. + + for example, ICC doesn't currently support b_sanitize. + """ + def actual(f): + @functools.wraps(f) + def wrapped(*args, **kwargs): + env = get_fake_env('', '', '') + cc = env.detect_c_compiler(False) + if feature not in cc.base_options: + raise unittest.SkipTest( + '{} not available with {}'.format(feature, cc.id)) + return f(*args, **kwargs) + return wrapped + return actual + + class PatchModule: ''' Fancy monkey-patching! Whee! 
Can't use mock.patch because it only @@ -265,7 +317,7 @@ class InternalTests(unittest.TestCase): def test_compiler_args_class_gnuld(self): cargsfunc = mesonbuild.compilers.CompilerArgs ## Test --start/end-group - gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', 0, False) + gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', mesonbuild.compilers.CompilerType.GCC_STANDARD, False) ## Test that 'direct' append and extend works l = cargsfunc(gcc, ['-Lfoodir', '-lfoo']) self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group']) @@ -582,7 +634,7 @@ class InternalTests(unittest.TestCase): 'static': unix_static}, 'linux': {'shared': ('lib{}.so', '{}.so'), 'static': unix_static}, - 'darwin': {'shared': ('lib{}.dylib', '{}.dylib'), + 'darwin': {'shared': ('lib{}.dylib', 'lib{}.so', '{}.dylib', '{}.so'), 'static': unix_static}, 'cygwin': {'shared': ('cyg{}.dll', 'cyg{}.dll.a', 'lib{}.dll', 'lib{}.dll.a', '{}.dll', '{}.dll.a'), @@ -599,7 +651,7 @@ class InternalTests(unittest.TestCase): elif is_cygwin(): self._test_all_naming(cc, env, patterns, 'cygwin') elif is_windows(): - if cc.get_id() == 'msvc': + if cc.get_argument_syntax() == 'msvc': self._test_all_naming(cc, env, patterns, 'windows-msvc') else: self._test_all_naming(cc, env, patterns, 'windows-mingw') @@ -622,10 +674,6 @@ class InternalTests(unittest.TestCase): with PatchModule(mesonbuild.compilers.c.for_windows, 'mesonbuild.compilers.c.for_windows', true): self._test_all_naming(cc, env, patterns, 'windows-mingw') - cc.id = 'msvc' - with PatchModule(mesonbuild.compilers.c.for_windows, - 'mesonbuild.compilers.c.for_windows', true): - self._test_all_naming(cc, env, patterns, 'windows-msvc') def test_pkgconfig_parse_libs(self): ''' @@ -677,7 +725,7 @@ class InternalTests(unittest.TestCase): bar_dep = PkgConfigDependency('bar', env, kwargs) self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()]) internal_dep = PkgConfigDependency('internal', env, kwargs) - if 
compiler.get_id() == 'msvc': + if compiler.get_argument_syntax() == 'msvc': self.assertEqual(internal_dep.get_link_args(), []) else: link_args = internal_dep.get_link_args() @@ -922,11 +970,11 @@ class BasePlatformTests(unittest.TestCase): # Misc stuff self.orig_env = os.environ.copy() if self.backend is Backend.ninja: - self.no_rebuild_stdout = 'ninja: no work to do.' + self.no_rebuild_stdout = ['ninja: no work to do.', 'samu: nothing to do'] else: # VS doesn't have a stable output when no changes are done # XCode backend is untested with unit tests, help welcome! - self.no_rebuild_stdout = 'UNKNOWN BACKEND {!r}'.format(self.backend.name) + self.no_rebuild_stdout = ['UNKNOWN BACKEND {!r}'.format(self.backend.name)] self.builddirs = [] self.new_builddir() @@ -1077,8 +1125,11 @@ class BasePlatformTests(unittest.TestCase): def get_compdb(self): if self.backend is not Backend.ninja: raise unittest.SkipTest('Compiler db not available with {} backend'.format(self.backend.name)) - with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile: - contents = json.load(ifile) + try: + with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile: + contents = json.load(ifile) + except FileNotFoundError: + raise unittest.SkipTest('Compiler db not found') # If Ninja is using .rsp files, generate them, read their contents, and # replace it as the command for all compile commands in the parsed json. 
if len(contents) > 0 and contents[0]['command'].endswith('.rsp'): @@ -1114,6 +1165,13 @@ class BasePlatformTests(unittest.TestCase): universal_newlines=True) return json.loads(out) + def introspect_directory(self, directory, args): + if isinstance(args, str): + args = [args] + out = subprocess.check_output(self.mintro_command + args + [directory], + universal_newlines=True) + return json.loads(out) + def assertPathEqual(self, path1, path2): ''' Handles a lot of platform-specific quirks related to paths such as @@ -1132,7 +1190,7 @@ class BasePlatformTests(unittest.TestCase): def assertBuildIsNoop(self): ret = self.build() if self.backend is Backend.ninja: - self.assertEqual(ret.split('\n')[-2], self.no_rebuild_stdout) + self.assertIn(ret.split('\n')[-2], self.no_rebuild_stdout) elif self.backend is Backend.vs: # Ensure that some target said that no rebuild was done self.assertIn('CustomBuild:\n All outputs are up-to-date.', ret) @@ -1462,6 +1520,38 @@ class AllPlatformTests(BasePlatformTests): self.assertRaises(subprocess.CalledProcessError, self._run, self.mtest_command + ['--setup=main:onlyinbar']) + def test_testsetup_default(self): + testdir = os.path.join(self.unit_test_dir, '47 testsetup default') + self.init(testdir) + self.build() + + # Run tests without --setup will cause the default setup to be used + self.run_tests() + with open(os.path.join(self.logdir, 'testlog.txt')) as f: + default_log = f.read() + + # Run tests with explicitly using the same setup that is set as default + self._run(self.mtest_command + ['--setup=mydefault']) + with open(os.path.join(self.logdir, 'testlog-mydefault.txt')) as f: + mydefault_log = f.read() + + # Run tests with another setup + self._run(self.mtest_command + ['--setup=other']) + with open(os.path.join(self.logdir, 'testlog-other.txt')) as f: + other_log = f.read() + + self.assertTrue('ENV_A is 1' in default_log) + self.assertTrue('ENV_B is 2' in default_log) + self.assertTrue('ENV_C is 2' in default_log) + + 
self.assertTrue('ENV_A is 1' in mydefault_log) + self.assertTrue('ENV_B is 2' in mydefault_log) + self.assertTrue('ENV_C is 2' in mydefault_log) + + self.assertTrue('ENV_A is 1' in other_log) + self.assertTrue('ENV_B is 3' in other_log) + self.assertTrue('ENV_C is 2' in other_log) + def assertFailedTestCount(self, failure_count, command): try: self._run(command) @@ -1551,7 +1641,8 @@ class AllPlatformTests(BasePlatformTests): incs = [a for a in shlex.split(execmd) if a.startswith("-I")] self.assertEqual(len(incs), 9) # target private dir - self.assertPathEqual(incs[0], "-Isub4/sub4@@someexe@exe") + someexe_id = Target.construct_id_from_path("sub4", "someexe", "@exe") + self.assertPathEqual(incs[0], "-I" + os.path.join("sub4", someexe_id)) # target build subdir self.assertPathEqual(incs[1], "-Isub4") # target source subdir @@ -1600,6 +1691,7 @@ class AllPlatformTests(BasePlatformTests): clang = mesonbuild.compilers.ClangCompiler intel = mesonbuild.compilers.IntelCompiler msvc = mesonbuild.compilers.VisualStudioCCompiler + clangcl = mesonbuild.compilers.ClangClCCompiler ar = mesonbuild.linkers.ArLinker lib = mesonbuild.linkers.VisualStudioLinker langs = [('c', 'CC'), ('cpp', 'CXX')] @@ -1621,6 +1713,9 @@ class AllPlatformTests(BasePlatformTests): if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')): self.assertIsInstance(ecc, gnu) self.assertIsInstance(elinker, ar) + elif 'clang-cl' in ebase: + self.assertIsInstance(ecc, clangcl) + self.assertIsInstance(elinker, lib) elif 'clang' in ebase: self.assertIsInstance(ecc, clang) self.assertIsInstance(elinker, ar) @@ -1642,36 +1737,36 @@ class AllPlatformTests(BasePlatformTests): if isinstance(cc, gnu): self.assertIsInstance(linker, ar) if is_osx(): - self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_OSX) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.GCC_OSX) elif is_windows(): - self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_MINGW) + self.assertEqual(cc.compiler_type, 
mesonbuild.compilers.CompilerType.GCC_MINGW) elif is_cygwin(): - self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_CYGWIN) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.GCC_CYGWIN) else: - self.assertEqual(cc.gcc_type, mesonbuild.compilers.GCC_STANDARD) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.GCC_STANDARD) if isinstance(cc, clang): self.assertIsInstance(linker, ar) if is_osx(): - self.assertEqual(cc.clang_type, mesonbuild.compilers.CLANG_OSX) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.CLANG_OSX) elif is_windows(): # Not implemented yet - self.assertEqual(cc.clang_type, mesonbuild.compilers.CLANG_WIN) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.CLANG_MINGW) else: - self.assertEqual(cc.clang_type, mesonbuild.compilers.CLANG_STANDARD) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.CLANG_STANDARD) if isinstance(cc, intel): self.assertIsInstance(linker, ar) if is_osx(): - self.assertEqual(cc.icc_type, mesonbuild.compilers.ICC_OSX) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.ICC_OSX) elif is_windows(): - self.assertEqual(cc.icc_type, mesonbuild.compilers.ICC_WIN) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.ICC_WIN) else: - self.assertEqual(cc.icc_type, mesonbuild.compilers.ICC_STANDARD) + self.assertEqual(cc.compiler_type, mesonbuild.compilers.CompilerType.ICC_STANDARD) if isinstance(cc, msvc): self.assertTrue(is_windows()) self.assertIsInstance(linker, lib) self.assertEqual(cc.id, 'msvc') self.assertTrue(hasattr(cc, 'is_64')) - # If we're in the appveyor CI, we know what the compiler will be + # If we're on Windows CI, we know what the compiler will be if 'arch' in os.environ: if os.environ['arch'] == 'x64': self.assertTrue(cc.is_64) @@ -1694,6 +1789,8 @@ class AllPlatformTests(BasePlatformTests): wrapperlinker_s += shlex.quote(w) + ' ' os.environ['AR'] = wrapperlinker_s wlinker 
= env.detect_static_linker(wcc) + # Pop it so we don't use it for the next detection + evalue = os.environ.pop('AR') # Must be the same type since it's a wrapper around the same exelist self.assertIs(type(cc), type(wcc)) self.assertIs(type(linker), type(wlinker)) @@ -1992,7 +2089,7 @@ int main(int argc, char **argv) { def pbcompile(self, compiler, source, objectfile, extra_args=[]): cmd = compiler.get_exelist() - if compiler.id == 'msvc': + if compiler.get_argument_syntax() == 'msvc': cmd += ['/nologo', '/Fo' + objectfile, '/c', source] + extra_args else: cmd += ['-c', source, '-o', objectfile] + extra_args @@ -2014,7 +2111,7 @@ int main(int argc, char **argv) { def build_static_lib(self, compiler, linker, source, objectfile, outfile, extra_args=None): if extra_args is None: extra_args = [] - if compiler.id == 'msvc': + if compiler.get_argument_syntax() == 'msvc': link_cmd = ['lib', '/NOLOGO', '/OUT:' + outfile, objectfile] else: link_cmd = ['ar', 'csr', outfile, objectfile] @@ -2047,9 +2144,10 @@ int main(int argc, char **argv) { def build_shared_lib(self, compiler, source, objectfile, outfile, impfile, extra_args=None): if extra_args is None: extra_args = [] - if compiler.id == 'msvc': - link_cmd = ['link', '/NOLOGO', '/DLL', '/DEBUG', - '/IMPLIB:' + impfile, '/OUT:' + outfile, objectfile] + if compiler.get_argument_syntax() == 'msvc': + link_cmd = compiler.get_linker_exelist() + [ + '/NOLOGO', '/DLL', '/DEBUG', '/IMPLIB:' + impfile, + '/OUT:' + outfile, objectfile] else: extra_args += ['-fPIC'] link_cmd = compiler.get_exelist() + ['-shared', '-o', outfile, objectfile] @@ -2067,7 +2165,7 @@ int main(int argc, char **argv) { source = os.path.join(tdir, 'alexandria.c') objectfile = os.path.join(tdir, 'alexandria.' + object_suffix) impfile = os.path.join(tdir, 'alexandria.lib') - if cc.id == 'msvc': + if cc.get_argument_syntax() == 'msvc': shlibfile = os.path.join(tdir, 'alexandria.' + shared_suffix) elif is_cygwin(): shlibfile = os.path.join(tdir, 'cygalexandria.' 
+ shared_suffix) @@ -2105,7 +2203,7 @@ int main(int argc, char **argv) { objectfile = os.path.join(testdir, 'foo.' + objext) stlibfile = os.path.join(testdir, 'libfoo.a') impfile = os.path.join(testdir, 'foo.lib') - if cc.id == 'msvc': + if cc.get_argument_syntax() == 'msvc': shlibfile = os.path.join(testdir, 'foo.' + shext) elif is_cygwin(): shlibfile = os.path.join(testdir, 'cygfoo.' + shext) @@ -2162,6 +2260,7 @@ int main(int argc, char **argv) { expected = { 'name': 'list', 'description': 'list', + 'section': 'user', 'type': 'array', 'value': ['foo', 'bar'], } @@ -2186,6 +2285,7 @@ int main(int argc, char **argv) { expected = { 'name': 'list', 'description': 'list', + 'section': 'user', 'type': 'array', 'value': ['foo', 'bar'], } @@ -2210,6 +2310,7 @@ int main(int argc, char **argv) { expected = { 'name': 'list', 'description': 'list', + 'section': 'user', 'type': 'array', 'value': [], } @@ -2479,7 +2580,7 @@ recommended as it is not supported on some platforms''') testdirlib = os.path.join(testdirbase, 'lib') extra_args = None env = get_fake_env(testdirlib, self.builddir, self.prefix) - if env.detect_c_compiler(False).get_id() != 'msvc': + if env.detect_c_compiler(False).get_id() not in ['msvc', 'clang-cl']: # static libraries are not linkable with -l with msvc because meson installs them # as .a files which unix_args_to_native will not know as it expects libraries to use # .lib as extension. For a DLL the import library is installed as .lib. 
Thus for msvc @@ -2714,6 +2815,8 @@ recommended as it is not supported on some platforms''') self.assertRegex(out, "WARNING:.*\"double_output.txt\".*overwrites") self.assertRegex(out, "WARNING:.*\"subdir.double_output2.txt\".*overwrites") self.assertNotRegex(out, "WARNING:.*no_write_conflict.txt.*overwrites") + self.assertNotRegex(out, "WARNING:.*@BASENAME@.*overwrites") + self.assertRegex(out, "WARNING:.*\"sameafterbasename\".*overwrites") # No warnings about empty configuration data objects passed to files with substitutions self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy1.txt.in") self.assertNotRegex(out, "WARNING:.*empty configuration_data.*nosubst-nocopy2.txt.in") @@ -2776,6 +2879,106 @@ recommended as it is not supported on some platforms''') self.assertEqual(opts['debug'], True) self.assertEqual(opts['optimization'], '0') + @skipIfNoPkgconfig + @unittest.skipIf(is_windows(), 'Help needed with fixing this test on windows') + def test_native_dep_pkgconfig(self): + testdir = os.path.join(self.unit_test_dir, + '46 native dep pkgconfig var') + with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile: + crossfile.write(textwrap.dedent( + '''[binaries] + pkgconfig = r'{0}' + + [properties] + + [host_machine] + system = 'linux' + cpu_family = 'arm' + cpu = 'armv7' + endian = 'little' + '''.format(os.path.join(testdir, 'cross_pkgconfig.py')))) + crossfile.flush() + self.meson_cross_file = crossfile.name + + os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(testdir, + 'native_pkgconfig') + self.init(testdir, extra_args=['-Dstart_native=false']) + self.wipe() + self.init(testdir, extra_args=['-Dstart_native=true']) + + def test_reconfigure(self): + testdir = os.path.join(self.unit_test_dir, '46 reconfigure') + self.init(testdir, extra_args=['-Dopt1=val1']) + self.setconf('-Dopt2=val2') + + # Set an older version to force a reconfigure from scratch + filename = os.path.join(self.privatedir, 'coredata.dat') + with open(filename, 
'rb') as f: + obj = pickle.load(f) + obj.version = '0.47.0' + with open(filename, 'wb') as f: + pickle.dump(obj, f) + + out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3']) + self.assertRegex(out, 'WARNING:.*Regenerating configuration from scratch') + self.assertRegex(out, 'opt1 val1') + self.assertRegex(out, 'opt2 val2') + self.assertRegex(out, 'opt3 val3') + self.assertRegex(out, 'opt4 default4') + self.build() + self.run_tests() + + # Create a file in builddir and verify wipe command removes it + filename = os.path.join(self.builddir, 'something') + open(filename, 'w').close() + self.assertTrue(os.path.exists(filename)) + out = self.init(testdir, extra_args=['--wipe', '-Dopt4=val4']) + self.assertFalse(os.path.exists(filename)) + self.assertRegex(out, 'opt1 val1') + self.assertRegex(out, 'opt2 val2') + self.assertRegex(out, 'opt3 val3') + self.assertRegex(out, 'opt4 val4') + self.build() + self.run_tests() + + def test_target_construct_id_from_path(self): + # This id is stable but not guessable. + # The test is supposed to prevent unintentional + # changes of target ID generation. 
+ target_id = Target.construct_id_from_path('some/obscure/subdir', + 'target-id', '@suffix') + self.assertEqual('5e002d3@@target-id@suffix', target_id) + target_id = Target.construct_id_from_path('subproject/foo/subdir/bar', + 'target2-id', '@other') + self.assertEqual('81d46d1@@target2-id@other', target_id) + + def test_introspect_projectinfo_without_configured_build(self): + testfile = os.path.join(self.common_test_dir, '36 run program', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), set(['meson.build'])) + self.assertEqual(res['version'], None) + self.assertEqual(res['descriptive_name'], 'run command') + self.assertEqual(res['subprojects'], []) + + testfile = os.path.join(self.common_test_dir, '44 options', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build'])) + self.assertEqual(res['version'], None) + self.assertEqual(res['descriptive_name'], 'options') + self.assertEqual(res['subprojects'], []) + + testfile = os.path.join(self.common_test_dir, '47 subproject options', 'meson.build') + res = self.introspect_directory(testfile, '--projectinfo') + self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build'])) + self.assertEqual(res['version'], None) + self.assertEqual(res['descriptive_name'], 'suboptions') + self.assertEqual(len(res['subprojects']), 1) + subproject_files = set(f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files']) + self.assertEqual(subproject_files, set(['subprojects/subproject/meson_options.txt', 'subprojects/subproject/meson.build'])) + self.assertEqual(res['subprojects'][0]['name'], 'subproject') + self.assertEqual(res['subprojects'][0]['version'], 'undefined') + self.assertEqual(res['subprojects'][0]['descriptive_name'], 'subproject') + class FailureTests(BasePlatformTests): ''' @@ -2785,7 +2988,7 @@ 
class FailureTests(BasePlatformTests): function can fail, and creating failing tests for all of them is tedious and slows down testing. ''' - dnf = "[Dd]ependency.*not found" + dnf = "[Dd]ependency.*not found(:.*)?" nopkg = '[Pp]kg-config not found' def setUp(self): @@ -2888,15 +3091,15 @@ class FailureTests(BasePlatformTests): def test_wx_notfound_dependency(self): # Want to test failure, so skip if available - if shutil.which('wx-config-3.0') or shutil.which('wx-config'): - raise unittest.SkipTest('wx-config or wx-config-3.0 found') + if shutil.which('wx-config-3.0') or shutil.which('wx-config') or shutil.which('wx-config-gtk3'): + raise unittest.SkipTest('wx-config, wx-config-3.0 or wx-config-gtk3 found') self.assertMesonRaises("dependency('wxwidgets')", self.dnf) self.assertMesonOutputs("dependency('wxwidgets', required : false)", "Dependency .*WxWidgets.* found: .*NO.*") def test_wx_dependency(self): - if not shutil.which('wx-config-3.0') and not shutil.which('wx-config'): - raise unittest.SkipTest('Neither wx-config nor wx-config-3.0 found') + if not shutil.which('wx-config-3.0') and not shutil.which('wx-config') and not shutil.which('wx-config-gtk3'): + raise unittest.SkipTest('Neither wx-config, wx-config-3.0 nor wx-config-gtk3 found') self.assertMesonRaises("dependency('wxwidgets', modules : 1)", "module argument is not a string") @@ -3005,7 +3208,7 @@ class FailureTests(BasePlatformTests): # Same as above, except the meson version is now appropriate self.assertMesonDoesNotOutput("dict = {}", ".*WARNING.*Project targetting.*but.*", - meson_version='>= 0.47.0') + meson_version='>= 0.47') def test_using_too_recent_feature_dependency(self): self.assertMesonOutputs("dependency('pcap', required: false)", @@ -3086,7 +3289,7 @@ class WindowsTests(BasePlatformTests): testdir = os.path.join(self.platform_test_dir, '1 basic') env = get_fake_env(testdir, self.builddir, self.prefix) cc = env.detect_c_compiler(False) - if cc.id != 'msvc': + if cc.get_argument_syntax() 
!= 'msvc': raise unittest.SkipTest('Not using MSVC') # To force people to update this test, and also test self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt'}) @@ -3098,7 +3301,7 @@ class WindowsTests(BasePlatformTests): # resource compiler depfile generation is not yet implemented for msvc env = get_fake_env(testdir, self.builddir, self.prefix) - depfile_works = env.detect_c_compiler(False).get_id() != 'msvc' + depfile_works = env.detect_c_compiler(False).get_id() not in ['msvc', 'clang-cl'] self.init(testdir) self.build() @@ -3125,6 +3328,24 @@ class WindowsTests(BasePlatformTests): self.utime(os.path.join(testdir, 'res', 'resource.h')) self.assertRebuiltTarget('prog_1') + def test_msvc_cpp17(self): + testdir = os.path.join(self.unit_test_dir, '45 vscpp17') + + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(False) + if cc.get_argument_syntax() != 'msvc': + raise unittest.SkipTest('Test only applies to MSVC-like compilers') + + try: + self.init(testdir) + except subprocess.CalledProcessError: + # According to Python docs, output is only stored when + # using check_output. We don't use it, so we can't check + # that the output is correct (i.e. that it failed due + # to the right reason). 
+ return + self.build() + class DarwinTests(BasePlatformTests): ''' Tests that should run on macOS @@ -3143,6 +3364,10 @@ class DarwinTests(BasePlatformTests): testdir = os.path.join(self.common_test_dir, '4 shared') # Try with bitcode enabled out = self.init(testdir, extra_args='-Db_bitcode=true') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(False) + if cc.id != 'clang': + raise unittest.SkipTest('Not using Clang on OSX') # Warning was printed self.assertRegex(out, 'WARNING:.*b_bitcode') # Compiler options were added @@ -3203,6 +3428,18 @@ class DarwinTests(BasePlatformTests): self.assertEqual(self._get_darwin_versions(targets['intstringver']), ('1111.0.0', '2.5.0')) self.assertEqual(self._get_darwin_versions(targets['stringlistvers']), ('2.6.0', '2.6.1')) + def test_duplicate_rpath(self): + testdir = os.path.join(self.unit_test_dir, '10 build_rpath') + # We purposely pass a duplicate rpath to Meson, in order + # to ascertain that Meson does not call install_name_tool + # with duplicate -delete_rpath arguments, which would + # lead to erroring out on installation + os.environ["LDFLAGS"] = "-Wl,-rpath,/foo/bar" + self.init(testdir) + self.build() + self.install() + del os.environ["LDFLAGS"] + class LinuxlikeTests(BasePlatformTests): ''' @@ -3298,17 +3535,17 @@ class LinuxlikeTests(BasePlatformTests): self.assertEqual(sorted(out), sorted(['libfoo >= 1.0'])) out = self._run(cmd + ['--cflags-only-other']).strip().split() - self.assertEqual(sorted(out), sorted(['-pthread', '-DCUSTOM'])) + self.check_pkg_flags_are_same(out, ['-pthread', '-DCUSTOM']) out = self._run(cmd + ['--libs-only-l', '--libs-only-other']).strip().split() - self.assertEqual(sorted(out), sorted(['-pthread', '-lcustom', - '-llibmain', '-llibexposed'])) + self.check_pkg_flags_are_same(out, ['-pthread', '-lcustom', + '-llibmain', '-llibexposed']) out = self._run(cmd + ['--libs-only-l', '--libs-only-other', '--static']).strip().split() - 
self.assertEqual(sorted(out), sorted(['-pthread', '-lcustom', - '-llibmain', '-llibexposed', - '-llibinternal', '-lcustom2', - '-lfoo'])) + self.check_pkg_flags_are_same(out, ['-pthread', '-lcustom', + '-llibmain', '-llibexposed', + '-llibinternal', '-lcustom2', + '-lfoo']) cmd = ['pkg-config', 'requires-test'] out = self._run(cmd + ['--print-requires']).strip().split('\n') @@ -3318,6 +3555,11 @@ class LinuxlikeTests(BasePlatformTests): out = self._run(cmd + ['--print-requires-private']).strip().split('\n') self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello'])) + def check_pkg_flags_are_same(self, output, expected): + if is_osx() or is_haiku(): + expected = [x for x in expected if x != '-pthread'] + self.assertEqual(sorted(output), sorted(expected)) + def test_pkg_unfound(self): testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig') self.init(testdir) @@ -3363,24 +3605,25 @@ class LinuxlikeTests(BasePlatformTests): self.assertIn(" -Werror ", c_command) @skipIfNoPkgconfig - def test_qt5dependency_pkgconfig_detection(self): + def test_qtdependency_pkgconfig_detection(self): ''' Test that qt4 and qt5 detection with pkgconfig works. 
''' # Verify Qt4 or Qt5 can be found with pkg-config qt4 = subprocess.call(['pkg-config', '--exists', 'QtCore']) qt5 = subprocess.call(['pkg-config', '--exists', 'Qt5Core']) - if qt4 != 0 or qt5 != 0: - raise unittest.SkipTest('Qt not found with pkg-config') testdir = os.path.join(self.framework_test_dir, '4 qt') self.init(testdir, ['-Dmethod=pkg-config']) # Confirm that the dependency was found with pkg-config mesonlog = self.get_meson_log() - self.assertRegex('\n'.join(mesonlog), - r'Dependency qt4 \(modules: Core\) found: YES .*, `pkg-config`\n') - self.assertRegex('\n'.join(mesonlog), - r'Dependency qt5 \(modules: Core\) found: YES .*, `pkg-config`\n') - + if qt4 == 0: + self.assertRegex('\n'.join(mesonlog), + r'Dependency qt4 \(modules: Core\) found: YES 4.* \(pkg-config\)\n') + if qt5 == 0: + self.assertRegex('\n'.join(mesonlog), + r'Dependency qt5 \(modules: Core\) found: YES 5.* \(pkg-config\)\n') + + @skip_if_not_base_option('b_sanitize') def test_generate_gir_with_address_sanitizer(self): if is_cygwin(): raise unittest.SkipTest('asan not available on Cygwin') @@ -3489,18 +3732,22 @@ class LinuxlikeTests(BasePlatformTests): for v in compiler.get_options()[lang_std].choices: if (compiler.get_id() == 'clang' and '17' in v and (version_compare(compiler.version, '<5.0.0') or - (compiler.clang_type == mesonbuild.compilers.CLANG_OSX and version_compare(compiler.version, '<9.1')))): + (compiler.compiler_type == mesonbuild.compilers.CompilerType.CLANG_OSX and version_compare(compiler.version, '<9.1')))): continue if (compiler.get_id() == 'clang' and '2a' in v and (version_compare(compiler.version, '<6.0.0') or - (compiler.clang_type == mesonbuild.compilers.CLANG_OSX and version_compare(compiler.version, '<9.1')))): + (compiler.compiler_type == mesonbuild.compilers.CompilerType.CLANG_OSX and version_compare(compiler.version, '<9.1')))): continue if (compiler.get_id() == 'gcc' and '2a' in v and version_compare(compiler.version, '<8.0.0')): continue std_opt = 
'{}={}'.format(lang_std, v) self.init(testdir, ['-D' + std_opt]) cmd = self.get_compdb()[0]['command'] - if v != 'none': + # c++03 and gnu++03 are not understood by ICC, don't try to look for them + skiplist = frozenset([ + ('intel', 'c++03'), + ('intel', 'gnu++03')]) + if v != 'none' and not (compiler.get_id(), v) in skiplist: cmd_std = " -std={} ".format(v) self.assertIn(cmd_std, cmd) try: @@ -3545,8 +3792,10 @@ class LinuxlikeTests(BasePlatformTests): def test_unity_subproj(self): testdir = os.path.join(self.common_test_dir, '46 subproject') self.init(testdir, extra_args='--unity=subprojects') - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/subprojects@sublib@@simpletest@exe/simpletest-unity.c')) - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib/subprojects@sublib@@sublib@sha/sublib-unity.c')) + simpletest_id = Target.construct_id_from_path('subprojects/sublib', 'simpletest', '@exe') + self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', simpletest_id, 'simpletest-unity.c')) + sublib_id = Target.construct_id_from_path('subprojects/sublib', 'sublib', '@sha') + self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', sublib_id, 'sublib-unity.c')) self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c')) self.build() @@ -3624,7 +3873,7 @@ class LinuxlikeTests(BasePlatformTests): ('share', 'drwxr-x---'), ('share/man', 'drwxr-x---'), ('share/man/man1', 'drwxr-x---'), - ('share/man/man1/foo.1.gz', '-r--r--r-T'), + ('share/man/man1/foo.1', '-r--r--r-T'), ('share/sub1', 'drwxr-x---'), ('share/sub1/second.dat', '-rwxr-x--t'), ('subdir', 'drwxr-x---'), @@ -3697,7 +3946,7 @@ class LinuxlikeTests(BasePlatformTests): 'include/sample.h', 'share/datafile.cat', 'share/file.dat', - 'share/man/man1/prog.1.gz', + 'share/man/man1/prog.1', 'share/subdir/datafile.dog', ]: f = os.path.join(self.installdir, 'usr', *datafile.split('/')) @@ -3711,23 +3960,26 @@ class 
LinuxlikeTests(BasePlatformTests): testdir = os.path.join(self.unit_test_dir, '6 std override') self.init(testdir) compdb = self.get_compdb() + # Don't try to use -std=c++03 as a check for the + # presence of a compiler flag, as ICC does not + # support it. for i in compdb: - if 'prog03' in i['file']: - c03_comp = i['command'] + if 'prog98' in i['file']: + c98_comp = i['command'] if 'prog11' in i['file']: c11_comp = i['command'] if 'progp' in i['file']: plain_comp = i['command'] self.assertNotEqual(len(plain_comp), 0) - self.assertIn('-std=c++03', c03_comp) - self.assertNotIn('-std=c++11', c03_comp) + self.assertIn('-std=c++98', c98_comp) + self.assertNotIn('-std=c++11', c98_comp) self.assertIn('-std=c++11', c11_comp) - self.assertNotIn('-std=c++03', c11_comp) - self.assertNotIn('-std=c++03', plain_comp) + self.assertNotIn('-std=c++98', c11_comp) + self.assertNotIn('-std=c++98', plain_comp) self.assertNotIn('-std=c++11', plain_comp) # Now werror self.assertIn('-Werror', plain_comp) - self.assertNotIn('-Werror', c03_comp) + self.assertNotIn('-Werror', c98_comp) def test_run_installed(self): if is_cygwin() or is_osx(): @@ -3751,7 +4003,7 @@ class LinuxlikeTests(BasePlatformTests): # when all tests are run (but works when only this test is run), # but doing this explicitly works. 
env = os.environ.copy() - env['LD_LIBRARY_PATH'] = installed_libdir + env['LD_LIBRARY_PATH'] = ':'.join([installed_libdir, env.get('LD_LIBRARY_PATH', '')]) self.assertEqual(subprocess.call(installed_exe, env=env), 0) # Ensure that introspect --installed works installed = self.introspect('--installed') @@ -3815,7 +4067,7 @@ class LinuxlikeTests(BasePlatformTests): break self.assertIsInstance(docbook_target, dict) ifile = self.introspect(['--target-files', 'generated-gdbus-docbook@cus'])[0] - self.assertEqual(t['filename'], 'gdbus/generated-gdbus-doc-' + ifile) + self.assertEqual(t['filename'], 'gdbus/generated-gdbus-doc-' + os.path.basename(ifile)) def test_build_rpath(self): if is_cygwin(): @@ -3836,6 +4088,7 @@ class LinuxlikeTests(BasePlatformTests): install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx')) self.assertEqual(install_rpath, 'baz') + @skip_if_not_base_option('b_sanitize') def test_pch_with_address_sanitizer(self): if is_cygwin(): raise unittest.SkipTest('asan not available on Cygwin') @@ -3951,7 +4204,7 @@ endian = 'little' self.init(testdir2) self.build() myenv = os.environ.copy() - myenv['LD_LIBRARY_PATH'] = lib_dir + myenv['LD_LIBRARY_PATH'] = ':'.join([lib_dir, myenv.get('LD_LIBRARY_PATH', '')]) if is_cygwin(): bin_dir = os.path.join(tempdirname, 'bin') myenv['PATH'] = bin_dir + os.pathsep + myenv['PATH'] @@ -3994,6 +4247,17 @@ endian = 'little' deps.append(b'-lintl') self.assertEqual(set(deps), set(stdo.split())) + @skipIfNoPkgconfig + @skip_if_not_language('cs') + def test_pkgconfig_csharp_library(self): + testdir = os.path.join(self.unit_test_dir, '48 pkgconfig csharp library') + self.init(testdir) + myenv = os.environ.copy() + myenv['PKG_CONFIG_PATH'] = self.privatedir + stdo = subprocess.check_output(['pkg-config', '--libs', 'libsomething'], env=myenv) + + self.assertEqual("-r/usr/lib/libsomething.dll", str(stdo.decode('ascii')).strip()) + def test_deterministic_dep_order(self): ''' Test that the dependencies are always 
listed in a deterministic order. @@ -4328,6 +4592,273 @@ class RewriterTests(unittest.TestCase): self.assertEqual(s2, self.read_contents('sub2/meson.build')) +class NativeFileTests(BasePlatformTests): + + def setUp(self): + super().setUp() + self.testcase = os.path.join(self.unit_test_dir, '46 native file binary') + self.current_config = 0 + self.current_wrapper = 0 + + def helper_create_native_file(self, values): + """Create a config file as a temporary file. + + values should be a nested dictionary structure of {section: {key: + value}} + """ + filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config)) + self.current_config += 1 + with open(filename, 'wt') as f: + for section, entries in values.items(): + f.write('[{}]\n'.format(section)) + for k, v in entries.items(): + f.write("{}='{}'\n".format(k, v)) + return filename + + def helper_create_binary_wrapper(self, binary, **kwargs): + """Creates a wrapper around a binary that overrides specific values.""" + filename = os.path.join(self.builddir, 'binary_wrapper{}.py'.format(self.current_wrapper)) + self.current_wrapper += 1 + if is_haiku(): + chbang = '#!/bin/env python3' + else: + chbang = '#!/usr/bin/env python3' + + with open(filename, 'wt') as f: + f.write(textwrap.dedent('''\ + {} + import argparse + import subprocess + import sys + + def main(): + parser = argparse.ArgumentParser() + '''.format(chbang))) + for name in kwargs: + f.write(' parser.add_argument("-{0}", "--{0}", action="store_true")\n'.format(name)) + f.write(' args, extra_args = parser.parse_known_args()\n') + for name, value in kwargs.items(): + f.write(' if args.{}:\n'.format(name)) + f.write(' print("{}", file=sys.{})\n'.format(value, kwargs.get('outfile', 'stdout'))) + f.write(' sys.exit(0)\n') + f.write(textwrap.dedent(''' + ret = subprocess.run( + ["{}"] + extra_args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding='utf-8') + print(ret.stdout) + print(ret.stderr, file=sys.stderr) + 
sys.exit(ret.returncode) + + if __name__ == '__main__': + main() + '''.format(binary))) + + if not is_windows(): + os.chmod(filename, 0o755) + return filename + + # On windows we need yet another level of indirection, as cmd cannot + # invoke python files itself, so instead we generate a .bat file, which + # invokes our python wrapper + batfile = os.path.join(self.builddir, 'binary_wrapper{}.bat'.format(self.current_wrapper)) + with open(batfile, 'wt') as f: + f.write('py -3 {} %*'.format(filename)) + return batfile + + def helper_for_compiler(self, lang, cb): + """Helper for generating tests for overriding compilers for languages + with more than one implementation, such as C, C++, ObjC, ObjC++, and D. + """ + env = get_fake_env('', '', '') + getter = getattr(env, 'detect_{}_compiler'.format(lang)) + if lang not in ['cs']: + getter = functools.partial(getter, False) + cc = getter() + binary, newid = cb(cc) + env.config_info.binaries = {lang: binary} + compiler = getter() + self.assertEqual(compiler.id, newid) + + def test_multiple_native_files_override(self): + wrapper = self.helper_create_binary_wrapper('bash', version='foo') + config = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + wrapper = self.helper_create_binary_wrapper('bash', version='12345') + config2 = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + self.init(self.testcase, extra_args=[ + '--native-file', config, '--native-file', config2, + '-Dcase=find_program']) + + def test_multiple_native_files(self): + wrapper = self.helper_create_binary_wrapper('bash', version='12345') + config = self.helper_create_native_file({'binaries': {'bash': wrapper}}) + wrapper = self.helper_create_binary_wrapper('python') + config2 = self.helper_create_native_file({'binaries': {'python': wrapper}}) + self.init(self.testcase, extra_args=[ + '--native-file', config, '--native-file', config2, + '-Dcase=find_program']) + + def _simple_test(self, case, binary): + wrapper = 
self.helper_create_binary_wrapper(binary, version='12345') + config = self.helper_create_native_file({'binaries': {binary: wrapper}}) + self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)]) + + def test_find_program(self): + self._simple_test('find_program', 'bash') + + def test_config_tool_dep(self): + # Do the skip at this level to avoid screwing up the cache + if not shutil.which('llvm-config'): + raise unittest.SkipTest('No llvm-installed, cannot test') + self._simple_test('config_dep', 'llvm-config') + + def test_python3_module(self): + self._simple_test('python3', 'python3') + + def test_python_module(self): + if is_windows(): + # Bat adds extra crap to stdout, so the version check logic in the + # python module breaks. This is fine on other OSes because they + # don't need the extra indirection. + raise unittest.SkipTest('bat indirection breaks internal sanity checks.') + self._simple_test('python', 'python') + + @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard') + @skip_if_env_value('CC') + def test_c_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang', 'clang' + if not shutil.which('gcc'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'gcc', 'gcc' + self.helper_for_compiler('c', cb) + + @unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard') + @skip_if_env_value('CXX') + def test_cpp_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang++', 'clang' + if not shutil.which('g++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'g++', 'gcc' + self.helper_for_compiler('cpp', cb) + + @skip_if_not_language('objc') + @skip_if_env_value('OBJC') + def test_objc_compiler(self): + def 
cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang', 'clang' + if not shutil.which('gcc'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'gcc', 'gcc' + self.helper_for_compiler('objc', cb) + + @skip_if_not_language('objcpp') + @skip_if_env_value('OBJCXX') + def test_objcpp_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if not shutil.which('clang++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'clang++', 'clang' + if not shutil.which('g++'): + raise unittest.SkipTest('Only one compiler found, cannot test.') + return 'g++', 'gcc' + self.helper_for_compiler('objcpp', cb) + + @skip_if_not_language('d') + @skip_if_env_value('DC') + def test_d_compiler(self): + def cb(comp): + if comp.id == 'dmd': + if shutil.which('ldc'): + return 'ldc', 'ldc' + elif shutil.which('gdc'): + return 'gdc', 'gdc' + else: + raise unittest.SkipTest('No alternative dlang compiler found.') + return 'dmd', 'dmd' + self.helper_for_compiler('d', cb) + + @skip_if_not_language('cs') + @skip_if_env_value('CSC') + def test_cs_compiler(self): + def cb(comp): + if comp.id == 'csc': + if not shutil.which('mcs'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'mcs', 'mcs' + if not shutil.which('csc'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'csc', 'csc' + self.helper_for_compiler('cs', cb) + + @skip_if_not_language('fortran') + @skip_if_env_value('FC') + def test_fortran_compiler(self): + def cb(comp): + if comp.id == 'gcc': + if shutil.which('ifort'): + return 'ifort', 'intel' + # XXX: there are several other fortran compilers meson + # supports, but I don't have any of them to test with + raise unittest.SkipTest('No alternate Fortran implementation.') + if not shutil.which('gfortran'): + raise unittest.SkipTest('No alternate C# implementation.') + return 'gfortran', 'gcc' + 
self.helper_for_compiler('fortran', cb) + + def _single_implementation_compiler(self, lang, binary, version_str, version): + """Helper for languages with a single (supported) implementation. + + Builds a wrapper around the compiler to override the version. + """ + wrapper = self.helper_create_binary_wrapper(binary, version=version_str) + env = get_fake_env('', '', '') + getter = getattr(env, 'detect_{}_compiler'.format(lang)) + if lang in ['rust']: + getter = functools.partial(getter, False) + env.config_info.binaries = {lang: wrapper} + compiler = getter() + self.assertEqual(compiler.version, version) + + @skip_if_not_language('vala') + @skip_if_env_value('VALAC') + def test_vala_compiler(self): + self._single_implementation_compiler( + 'vala', 'valac', 'Vala 1.2345', '1.2345') + + @skip_if_not_language('rust') + @skip_if_env_value('RUSTC') + def test_rust_compiler(self): + self._single_implementation_compiler( + 'rust', 'rustc', 'rustc 1.2345', '1.2345') + + @skip_if_not_language('java') + def test_java_compiler(self): + self._single_implementation_compiler( + 'java', 'javac', 'javac 9.99.77', '9.99.77') + + @skip_if_not_language('swift') + def test_swift_compiler(self): + wrapper = self.helper_create_binary_wrapper( + 'swiftc', version='Swift 1.2345', outfile='stderr') + env = get_fake_env('', '', '') + env.config_info.binaries = {'swift': wrapper} + compiler = env.detect_swift_compiler() + self.assertEqual(compiler.version, '1.2345') + + def unset_envs(): # For unit tests we must fully control all command lines # so that there are no unexpected changes coming from the @@ -4343,9 +4874,10 @@ def should_run_cross_arm_tests(): def should_run_cross_mingw_tests(): return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin()) -if __name__ == '__main__': +def main(): unset_envs() - cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests', 'PythonTests'] + cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests', 
+ 'PythonTests', 'NativeFileTests'] if not is_windows(): cases += ['LinuxlikeTests'] if should_run_cross_arm_tests(): @@ -4357,4 +4889,7 @@ if __name__ == '__main__': if is_osx(): cases += ['DarwinTests'] - unittest.main(defaultTest=cases, buffer=True) + return unittest.main(defaultTest=cases, buffer=True) + +if __name__ == '__main__': + sys.exit(main()) @@ -16,13 +16,12 @@ import sys -from mesonbuild.coredata import version - if sys.version_info < (3, 5, 0): print('Tried to install with an unsupported version of Python. ' 'Meson requires Python 3.5.0 or greater') sys.exit(1) +from mesonbuild.coredata import version from setuptools import setup # On windows, will create Scripts/meson.exe and Scripts/meson-script.py @@ -35,6 +34,7 @@ packages = ['mesonbuild', 'mesonbuild.modules', 'mesonbuild.scripts', 'mesonbuild.wrap'] +package_data = {'mesonbuild.dependencies': ['data/CMakeLists.txt']} data_files = [] if sys.platform != 'win32': # Only useful on UNIX-like systems @@ -51,6 +51,7 @@ if __name__ == '__main__': license=' Apache License, Version 2.0', python_requires='>=3.5', packages=packages, + package_data=package_data, entry_points=entries, data_files=data_files, classifiers=['Development Status :: 5 - Production/Stable', @@ -54,9 +54,13 @@ def main(): help='Branch push is targeted to') parser.add_argument('--is-pull-env', required=True, help='Variable set if it is a PR') + parser.add_argument('--base-branch-origin', action='store_true', + help='Base branch reference is only in origin remote') args = parser.parse_args() check_pr(args.is_pull_env) base = get_base_branch(args.base_branch_env) + if args.base_branch_origin: + base = 'origin/' + base if all(is_documentation(f) for f in get_git_files(base)): print("Don't run CI for documentation-only changes, add '[skip ci]' to commit title.") print('See http://mesonbuild.com/Contributing.html#skipping-integration-tests') diff --git a/test cases/common/10 man install/installed_files.txt b/test cases/common/10 man 
install/installed_files.txt index c13baa4..5aad8ea 100644 --- a/test cases/common/10 man install/installed_files.txt +++ b/test cases/common/10 man install/installed_files.txt @@ -1,5 +1,5 @@ -usr/share/man/man1/foo.1.gz -usr/share/man/man2/bar.2.gz -usr/share/man/man1/vanishing.1.gz -usr/share/man/man2/vanishing.2.gz -usr/share/man/man1/baz.1.gz +usr/share/man/man1/foo.1 +usr/share/man/man2/bar.2 +usr/share/man/man1/vanishing.1 +usr/share/man/man2/vanishing.2 +usr/share/man/man1/baz.1 diff --git a/test cases/common/100 manygen/subdir/manygen.py b/test cases/common/100 manygen/subdir/manygen.py index 7ffd435..0fbc2ec 100755 --- a/test cases/common/100 manygen/subdir/manygen.py +++ b/test cases/common/100 manygen/subdir/manygen.py @@ -6,38 +6,30 @@ from __future__ import print_function # file and a header file. import sys, os -import shutil, subprocess +import subprocess with open(sys.argv[1]) as f: funcname = f.readline().strip() outdir = sys.argv[2] buildtype_args = sys.argv[3] +compiler_type = sys.argv[4] +compiler = sys.argv[5:] if not os.path.isdir(outdir): print('Outdir does not exist.') sys.exit(1) -# Emulate the environment.detect_c_compiler() logic -compiler = os.environ.get('CC', None) -if not compiler: - compiler = shutil.which('cl') or \ - shutil.which('gcc') or \ - shutil.which('clang') or \ - shutil.which('cc') - -compbase = os.path.basename(compiler) -if 'cl' in compbase and 'clang' not in compbase: +if compiler_type == 'msvc': libsuffix = '.lib' is_vs = True - compiler = 'cl' - linker = 'lib' + if any(['clang-cl' in c for c in compiler]): + linker = 'llvm-lib' + else: + linker = 'lib' else: libsuffix = '.a' is_vs = False linker = 'ar' - if compiler is None: - print('No known compilers found.') - sys.exit(1) objsuffix = '.o' @@ -70,9 +62,9 @@ with open(tmpc, 'w') as f: ''' % funcname) if is_vs: - subprocess.check_call([compiler, '/nologo', '/c', buildtype_args, '/Fo' + outo, tmpc]) + subprocess.check_call(compiler + ['/nologo', '/c', buildtype_args, 
'/Fo' + outo, tmpc]) else: - subprocess.check_call([compiler, '-c', '-o', outo, tmpc]) + subprocess.check_call(compiler + ['-c', '-o', outo, tmpc]) with open(tmpc, 'w') as f: f.write('''int %s_in_lib() { @@ -81,10 +73,10 @@ with open(tmpc, 'w') as f: ''' % funcname) if is_vs: - subprocess.check_call([compiler, '/nologo', '/c', '/Fo' + tmpo, tmpc]) + subprocess.check_call(compiler + ['/nologo', '/c', '/Fo' + tmpo, tmpc]) subprocess.check_call([linker, '/NOLOGO', '/OUT:' + outa, tmpo]) else: - subprocess.check_call([compiler, '-c', '-o', tmpo, tmpc]) + subprocess.check_call(compiler + ['-c', '-o', tmpo, tmpc]) subprocess.check_call([linker, 'csr', outa, tmpo]) os.unlink(tmpo) diff --git a/test cases/common/100 manygen/subdir/meson.build b/test cases/common/100 manygen/subdir/meson.build index 73b4ff7..56f60e6 100644 --- a/test cases/common/100 manygen/subdir/meson.build +++ b/test cases/common/100 manygen/subdir/meson.build @@ -3,7 +3,8 @@ py3_bin = import('python3').find_python() buildtype = get_option('buildtype') buildtype_args = '-Dfooxxx' # a useless compiler argument -if meson.get_compiler('c').get_id() == 'msvc' +cc = meson.get_compiler('c') +if cc.get_argument_syntax() == 'msvc' # We need our manually generated code to use the same CRT as the executable. # Taken from compilers.py since build files do not have access to this. 
if buildtype == 'debug' @@ -21,5 +22,5 @@ endif generated = custom_target('manygen', output : outfiles, input : ['funcinfo.def'], - command : [py3_bin, gen[0], '@INPUT@', '@OUTDIR@', buildtype_args], + command : [py3_bin, gen[0], '@INPUT@', '@OUTDIR@', buildtype_args, cc.get_argument_syntax(), cc.cmd_array()], ) diff --git a/test cases/common/112 spaces backslash/meson.build b/test cases/common/112 spaces backslash/meson.build index bf614e8..d590494 100644 --- a/test cases/common/112 spaces backslash/meson.build +++ b/test cases/common/112 spaces backslash/meson.build @@ -7,7 +7,7 @@ project('comparer', 'c') include_dir = meson.current_source_dir() + '/include' default_c_args = ['-I' + include_dir] -if meson.get_compiler('c').get_id() == 'msvc' +if meson.get_compiler('c').get_argument_syntax() == 'msvc' default_c_args += ['/Faasm output\\'] # Hack to create the 'asm output' directory in the builddir subdir('asm output') diff --git a/test cases/common/116 pathjoin/meson.build b/test cases/common/116 pathjoin/meson.build index 751ca68..d3957dd 100644 --- a/test cases/common/116 pathjoin/meson.build +++ b/test cases/common/116 pathjoin/meson.build @@ -1,17 +1,24 @@ project('pathjoin', 'c') # Test string-args form since that is the canonical way -assert(join_paths('foo') == 'foo', 'Single argument join is broken') -assert(join_paths('foo', 'bar') == 'foo/bar', 'Path joining is broken') +assert(join_paths('foo') == 'foo', 'Single argument join is broken') +assert(join_paths('foo', 'bar') == 'foo/bar', 'Path joining is broken') assert(join_paths('foo', 'bar', 'baz') == 'foo/bar/baz', 'Path joining is broken') -assert(join_paths('/foo', 'bar') == '/foo/bar', 'Path joining is broken') -assert(join_paths('foo', '/bar') == '/bar', 'Absolute path joining is broken') -assert(join_paths('/foo', '/bar') == '/bar', 'Absolute path joining is broken') +assert(join_paths('/foo', 'bar') == '/foo/bar', 'Path joining is broken') +assert(join_paths('foo', '/bar') == '/bar', 'Absolute 
path joining is broken') +assert(join_paths('/foo', '/bar') == '/bar', 'Absolute path joining is broken') # Test array form since people are using that too -assert(join_paths(['foo']) == 'foo', 'Single argument join is broken') -assert(join_paths(['foo', 'bar']) == 'foo/bar', 'Path joining is broken') +assert(join_paths(['foo']) == 'foo', 'Single argument join is broken') +assert(join_paths(['foo', 'bar']) == 'foo/bar', 'Path joining is broken') assert(join_paths(['foo', 'bar', 'baz']) == 'foo/bar/baz', 'Path joining is broken') -assert(join_paths(['/foo', 'bar']) == '/foo/bar', 'Path joining is broken') -assert(join_paths(['foo', '/bar']) == '/bar', 'Absolute path joining is broken') -assert(join_paths(['/foo', '/bar']) == '/bar', 'Absolute path joining is broken') +assert(join_paths(['/foo', 'bar']) == '/foo/bar', 'Path joining is broken') +assert(join_paths(['foo', '/bar']) == '/bar', 'Absolute path joining is broken') +assert(join_paths(['/foo', '/bar']) == '/bar', 'Absolute path joining is broken') + +# Division operator should do the same as join_paths +assert('foo' / 'bar' == 'foo/bar', 'Path division is broken') +assert('foo' /'bar' /'baz' == 'foo/bar/baz', 'Path division is broken') +assert('/foo' / 'bar' == '/foo/bar', 'Path division is broken') +assert('foo' / '/bar' == '/bar', 'Absolute path division is broken') +assert('/foo' / '/bar' == '/bar', 'Absolute path division is broken') diff --git a/test cases/common/122 shared module/installed_files.txt b/test cases/common/122 shared module/installed_files.txt index a351490..d46527c 100644 --- a/test cases/common/122 shared module/installed_files.txt +++ b/test cases/common/122 shared module/installed_files.txt @@ -1 +1,3 @@ -usr/lib/libnosyms.so +usr/lib/modules/libnosyms?so +usr/lib/modules/libnosyms?implibempty +?msvc:usr/lib/modules/nosyms.pdb diff --git a/test cases/common/122 shared module/meson.build b/test cases/common/122 shared module/meson.build index 9f9ad63..3d52300 100644 --- a/test 
cases/common/122 shared module/meson.build +++ b/test cases/common/122 shared module/meson.build @@ -13,8 +13,6 @@ e = executable('prog', 'prog.c', test('import test', e, args : m) # Shared module that does not export any symbols -shared_module('nosyms', 'nosyms.c', install : true, - # Because we don't have cross-platform library support in - # installed_files.txt - name_suffix : 'so', - name_prefix : 'lib') +shared_module('nosyms', 'nosyms.c', + install : true, + install_dir : join_paths(get_option('libdir'), 'modules')) diff --git a/test cases/common/123 llvm ir and assembly/meson.build b/test cases/common/123 llvm ir and assembly/meson.build index 51321fb..a67c6c6 100644 --- a/test cases/common/123 llvm ir and assembly/meson.build +++ b/test cases/common/123 llvm ir and assembly/meson.build @@ -28,15 +28,18 @@ foreach lang : ['c', 'cpp'] # MSVC cannot directly compile assembly files, so we pass it through the # cl.exe pre-processor first and then assemble it with the ml.exe assembler. # Then we can link it into the executable. 
- if cc_id == 'msvc' - cl = find_program('cl') + if cc.get_argument_syntax() == 'msvc' + cl = cc.cmd_array() if cpu == 'x86' - ml = find_program('ml') + ml = find_program('ml', required: false) elif cpu == 'x86_64' - ml = find_program('ml64') + ml = find_program('ml64', required: false) else error('Unsupported cpu family: "' + cpu + '"') endif + if not ml.found() + error('MESON_SKIP_TEST: ML (masm) not found') + endif # Preprocess file (ml doesn't support pre-processing) preproc_name = lang + square_base + '.i' square_preproc = custom_target(lang + square_impl + 'preproc', diff --git a/test cases/common/124 cpp and asm/meson.build b/test cases/common/124 cpp and asm/meson.build index 9160775..f097084 100644 --- a/test cases/common/124 cpp and asm/meson.build +++ b/test cases/common/124 cpp and asm/meson.build @@ -15,7 +15,7 @@ endif sources = ['trivial.cc'] # If the compiler cannot compile assembly, don't use it -if meson.get_compiler('cpp').get_id() != 'msvc' +if not ['msvc', 'clang-cl'].contains(meson.get_compiler('cpp').get_id()) sources += ['retval-' + cpu + '.S'] cpp_args = ['-DUSE_ASM'] message('Using ASM') diff --git a/test cases/common/126 object only target/installed_files.txt b/test cases/common/126 object only target/installed_files.txt index c7dab9f..5e796b0 100644 --- a/test cases/common/126 object only target/installed_files.txt +++ b/test cases/common/126 object only target/installed_files.txt @@ -1 +1,2 @@ usr/bin/prog?exe +?msvc:usr/bin/prog.pdb diff --git a/test cases/common/127 no buildincdir/meson.build b/test cases/common/127 no buildincdir/meson.build index ac69e8e..53f1a7f 100644 --- a/test cases/common/127 no buildincdir/meson.build +++ b/test cases/common/127 no buildincdir/meson.build @@ -1,5 +1,5 @@ project('nobuilddir', 'c', - default_options : 'werror=true') + default_options : ['werror=true', 'buildtype=plain']) cc = meson.get_compiler('c') diff --git a/test cases/common/13 pch/meson.build b/test cases/common/13 pch/meson.build index 
05b4037..d39527b 100644 --- a/test cases/common/13 pch/meson.build +++ b/test cases/common/13 pch/meson.build @@ -2,4 +2,9 @@ project('pch test', 'c', 'cpp') subdir('c') subdir('cpp') -subdir('mixed') + +if meson.backend() == 'xcode' + warning('Xcode backend only supports one precompiled header per target. Skipping "mixed" which has various precompiled headers.') +else + subdir('mixed') +endif diff --git a/test cases/common/13 pch/mixed/meson.build b/test cases/common/13 pch/mixed/meson.build index 7f6033d..f0c3eca 100644 --- a/test cases/common/13 pch/mixed/meson.build +++ b/test cases/common/13 pch/mixed/meson.build @@ -5,8 +5,9 @@ exe = executable( cpp_pch : ['pch/main_pch.cc', 'pch/main.h'], ) +# test pch when only a header is given (not supported by msvc) cc = meson.get_compiler('c') -if cc.get_id() != 'msvc' +if not ['msvc', 'clang-cl'].contains(cc.get_id()) exe2 = executable( 'prog2', files('main.cc', 'func.c'), diff --git a/test cases/common/132 generated assembly/meson.build b/test cases/common/132 generated assembly/meson.build index 6a8744b..5fb7429 100644 --- a/test cases/common/132 generated assembly/meson.build +++ b/test cases/common/132 generated assembly/meson.build @@ -2,8 +2,8 @@ project('generated assembly', 'c') cc = meson.get_compiler('c') -if cc.get_id() == 'msvc' - error('MESON_SKIP_TEST: assembly files cannot be compiled directly by MSVC') +if ['msvc', 'clang-cl'].contains(cc.get_id()) + error('MESON_SKIP_TEST: assembly files cannot be compiled directly by the compiler') endif cpu = host_machine.cpu_family() diff --git a/test cases/common/137 get define/meson.build b/test cases/common/137 get define/meson.build index b20c554..109f628 100644 --- a/test cases/common/137 get define/meson.build +++ b/test cases/common/137 get define/meson.build @@ -32,6 +32,9 @@ foreach lang : ['c', 'cpp'] elif host_system == 'netbsd' d = cc.get_define('__NetBSD__') assert(d == '1', '__NetBSD__ value is @0@ instead of 1'.format(d)) + elif host_system == 'gnu' + 
d = cc.get_define('__GNU__') + assert(d == '1', '__GNU__ value is @0@ instead of 1'.format(d)) else error('Please report a bug and help us improve support for this platform') endif diff --git a/test cases/common/138 c cpp and asm/meson.build b/test cases/common/138 c cpp and asm/meson.build index 2c3610e..ca820e2 100644 --- a/test cases/common/138 c cpp and asm/meson.build +++ b/test cases/common/138 c cpp and asm/meson.build @@ -9,7 +9,7 @@ if not supported_cpus.contains(cpu) error('MESON_SKIP_TEST unsupported cpu:' + cpu) endif -if meson.get_compiler('c').get_id() == 'msvc' +if meson.get_compiler('c').get_argument_syntax() == 'msvc' error('MESON_SKIP_TEST MSVC can\'t compile assembly') endif diff --git a/test cases/common/14 configure file/differentafterbasename1.in b/test cases/common/14 configure file/differentafterbasename1.in new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/differentafterbasename1.in diff --git a/test cases/common/14 configure file/differentafterbasename2.in b/test cases/common/14 configure file/differentafterbasename2.in new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/differentafterbasename2.in diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build index d7beeb1..53b06f3 100644 --- a/test cases/common/14 configure file/meson.build +++ b/test cases/common/14 configure file/meson.build @@ -12,20 +12,20 @@ assert(conf.get('var', 'default') == 'mystring', 'Get function is not working.') assert(conf.get('notthere', 'default') == 'default', 'Default value getting is not working.') cfile = configure_file(input : 'config.h.in', -output : 'config.h', -configuration : conf) + output : 'config.h', + configuration : conf) e = executable('inctest', 'prog.c', # Note that you should NOT do this. 
Don't add generated headers here # This tests that we do the right thing even if people add in conf files # to their sources. -cfile) + cfile) test('inctest', e) # Test if we can also pass files() as input configure_file(input : files('config.h.in'), - output : 'config2.h', - configuration : conf) + output : 'config2.h', + configuration : conf) # Now generate a header file with an external script. genprog = import('python3').find_python() @@ -93,8 +93,7 @@ dump = configuration_data() dump.set('ZERO', 0) config_templates = files(['config4a.h.in', 'config4b.h.in']) foreach config_template : config_templates - configure_file(input : config_template, output : '@BASENAME@', - configuration : dump) + configure_file(input : config_template, output : '@BASENAME@', configuration : dump) endforeach test('Substituted', executable('prog4', 'prog4.c')) @@ -123,8 +122,7 @@ conf5.set('var2', 'error') configure_file( input : 'config5.h.in', output : '@BASENAME@', - configuration : conf5 -) + configuration : conf5) test('test5', executable('prog5', 'prog5.c')) # Test escaping @@ -134,8 +132,7 @@ conf6.set('var2', 'bar') configure_file( input : 'config6.h.in', output : '@BASENAME@', - configuration : conf6 -) + configuration : conf6) test('test6', executable('prog6', 'prog6.c')) # test empty install dir string @@ -152,8 +149,7 @@ configure_file( input : 'config7.h.in', output : '@BASENAME@', format : 'cmake', - configuration : conf7 -) + configuration : conf7) test('test7', executable('prog7', 'prog7.c')) # Test copying of an empty configuration data object @@ -182,24 +178,21 @@ configure_file( input : 'config8.h.in', output : '@BASENAME@', encoding : 'koi8-r', - configuration : conf8 -) + configuration : conf8) # Test that passing an empty configuration_data() object to a file with # #mesondefine substitutions does not print the warning. 
configure_file( input: 'nosubst-nocopy1.txt.in', output: 'nosubst-nocopy1.txt', - configuration : configuration_data() -) + configuration : configuration_data()) # test that passing an empty configuration_data() object to a file with # @foo@ substitutions does not print the warning. configure_file( input: 'nosubst-nocopy2.txt.in', output: 'nosubst-nocopy2.txt', - configuration : configuration_data() -) + configuration : configuration_data()) # test that passing a configured file object to test() works, and that passing # an empty configuration_data() object to a file that leads to no substitutions @@ -207,25 +200,45 @@ configure_file( test_file = configure_file( input: 'test.py.in', output: 'test.py', - configuration: configuration_data() -) + configuration: configuration_data()) # Test that overwriting an existing file creates a warning. configure_file( input: 'test.py.in', output: 'double_output.txt', - configuration: conf -) + configuration: conf) configure_file( input: 'test.py.in', output: 'double_output.txt', - configuration: conf -) + configuration: conf) # Test that the same file name in a different subdir will not create a warning configure_file( input: 'test.py.in', output: 'no_write_conflict.txt', + configuration: conf) + +# Test that @BASENAME@ is substituted before checking and does not create a warning. +configure_file( + input: 'differentafterbasename1.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'differentafterbasename2.in', + output: '@BASENAME@', + configuration: conf +) + +# Test that @BASENAME@ is substituted before checking and does create a warning on conflict. 
+configure_file( + input: 'sameafterbasename.in', + output: '@BASENAME@', + configuration: conf +) +configure_file( + input: 'sameafterbasename.in2', + output: '@BASENAME@', configuration: conf ) @@ -233,3 +246,27 @@ test('configure-file', test_file) cdata = configuration_data() cdata.set('invalid_value', ['array']) + +# Dictionaries + +cdata = configuration_data({ + 'A_STRING' : '"foo"', + 'A_INT' : 42, + 'A_DEFINED' : true, + 'A_UNDEFINED' : false, +}) + +configure_file(output : 'config9a.h', + configuration : cdata, +) + +configure_file(output : 'config9b.h', + configuration : { + 'B_STRING' : '"foo"', + 'B_INT' : 42, + 'B_DEFINED' : true, + 'B_UNDEFINED' : false, + } +) + +test('test9', executable('prog9', 'prog9.c')) diff --git a/test cases/common/14 configure file/prog9.c b/test cases/common/14 configure file/prog9.c new file mode 100644 index 0000000..28c7354 --- /dev/null +++ b/test cases/common/14 configure file/prog9.c @@ -0,0 +1,18 @@ +#include <string.h> +#include <config9a.h> +#include <config9b.h> + +#if defined(A_UNDEFINED) || defined(B_UNDEFINED) +#error "Should not be defined" +#endif + +#if !defined(A_DEFINED) || !defined(B_DEFINED) +#error "Should be defined" +#endif + +int main(int argc, char **argv) { + return strcmp(A_STRING, "foo") + || strcmp(B_STRING, "foo") + || A_INT != 42 + || B_INT != 42; +} diff --git a/test cases/common/14 configure file/sameafterbasename.in b/test cases/common/14 configure file/sameafterbasename.in new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/sameafterbasename.in diff --git a/test cases/common/14 configure file/sameafterbasename.in2 b/test cases/common/14 configure file/sameafterbasename.in2 new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/common/14 configure file/sameafterbasename.in2 diff --git a/test cases/common/143 C and CPP link/meson.build b/test cases/common/143 C and CPP link/meson.build index 55c1b87..79d6f67 100644 --- a/test 
cases/common/143 C and CPP link/meson.build +++ b/test cases/common/143 C and CPP link/meson.build @@ -25,9 +25,16 @@ libc = static_library('cfoo', ['foo.c', 'foo.h']) # ourselves at configure time and then 'find' it with cxx.find_library(). cxx = meson.get_compiler('cpp') -if cxx.get_id() == 'msvc' +if cxx.get_argument_syntax() == 'msvc' + if cxx.get_id() == 'msvc' + static_linker = find_program('lib') + elif cxx.get_id() == 'clang-cl' + static_linker = find_program('llvm-lib') + else + error('unable to determine static linker to use with this compiler') + endif compile_cmd = ['/c', '@INPUT@', '/Fo@OUTPUT@'] - stlib_cmd = ['lib', '/OUT:@OUTPUT@', '@INPUT@'] + stlib_cmd = [static_linker, '/OUT:@OUTPUT@', '@INPUT@'] else compile_cmd = ['-c', '-fPIC', '@INPUT@', '-o', '@OUTPUT@'] stlib_cmd = ['ar', 'csr', '@OUTPUT@', '@INPUT@'] diff --git a/test cases/common/152 simd/simd_sse2.c b/test cases/common/152 simd/simd_sse2.c index 0274533..271022e 100644 --- a/test cases/common/152 simd/simd_sse2.c +++ b/test cases/common/152 simd/simd_sse2.c @@ -21,7 +21,7 @@ int sse2_available() { #endif void increment_sse2(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simd_sse3.c b/test cases/common/152 simd/simd_sse3.c index e97d102..89c2f8b 100644 --- a/test cases/common/152 simd/simd_sse3.c +++ b/test cases/common/152 simd/simd_sse3.c @@ -22,7 +22,7 @@ int sse3_available() { #endif void increment_sse3(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simd_sse41.c b/test cases/common/152 simd/simd_sse41.c index 0308c7e..859fb43 100644 --- a/test cases/common/152 simd/simd_sse41.c +++ b/test cases/common/152 simd/simd_sse41.c @@ -24,7 +24,7 
@@ int sse41_available() { #endif void increment_sse41(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simd_sse42.c b/test cases/common/152 simd/simd_sse42.c index 137ffc4..edd6e5b 100644 --- a/test cases/common/152 simd/simd_sse42.c +++ b/test cases/common/152 simd/simd_sse42.c @@ -27,7 +27,7 @@ int sse42_available() { #endif void increment_sse42(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simd_ssse3.c b/test cases/common/152 simd/simd_ssse3.c index ab4dff4..0156f77 100644 --- a/test cases/common/152 simd/simd_ssse3.c +++ b/test cases/common/152 simd/simd_ssse3.c @@ -30,7 +30,7 @@ int ssse3_available() { #endif void increment_ssse3(float arr[4]) { - double darr[4]; + ALIGN_16 double darr[4]; __m128d val1 = _mm_set_pd(arr[0], arr[1]); __m128d val2 = _mm_set_pd(arr[2], arr[3]); __m128d one = _mm_set_pd(1.0, 1.0); diff --git a/test cases/common/152 simd/simdchecker.c b/test cases/common/152 simd/simdchecker.c index 222fbf3..cd6fe4f 100644 --- a/test cases/common/152 simd/simdchecker.c +++ b/test cases/common/152 simd/simdchecker.c @@ -1,93 +1,143 @@ #include<simdfuncs.h> #include<stdio.h> +#include<string.h> -/* - * A function that checks at runtime which simd accelerations are - * available and calls the best one. Falls - * back to plain C implementation if SIMD is not available. 
- */ +typedef void (*simd_func)(float*); + +int check_simd_implementation(float *four, + const float *four_initial, + const char *simd_type, + const float *expected, + simd_func fptr, + const int blocksize) { + int rv = 0; + memcpy(four, four_initial, blocksize*sizeof(float)); + printf("Using %s.\n", simd_type); + fptr(four); + for(int i=0; i<blocksize; i++) { + if(four[i] != expected[i]) { + printf("Increment function failed, got %f expected %f.\n", four[i], expected[i]); + rv = 1; + } + } + return rv; +} int main(int argc, char **argv) { - float four[4] = {2.0, 3.0, 4.0, 5.0}; + static const float four_initial[4] = {2.0, 3.0, 4.0, 5.0}; + ALIGN_16 float four[4]; const float expected[4] = {3.0, 4.0, 5.0, 6.0}; - void (*fptr)(float[4]) = NULL; - const char *type; - int i; + int r=0; + const int blocksize = 4; -/* Add here. The first matched one is used so put "better" instruction - * sets at the top. +/* + * Test all implementations that the current CPU supports. */ #if HAVE_NEON - if(fptr == NULL && neon_available()) { - fptr = increment_neon; - type = "NEON"; + if(neon_available()) { + r += check_simd_implementation(four, + four_initial, + "NEON", + expected, + increment_neon, + blocksize); } #endif #if HAVE_AVX2 - if(fptr == NULL && avx2_available()) { - fptr = increment_avx2; - type = "AVX2"; + if(avx2_available()) { + r += check_simd_implementation(four, + four_initial, + "AVX2", + expected, + increment_avx2, + blocksize); } #endif #if HAVE_AVX - if(fptr == NULL && avx_available()) { - fptr = increment_avx; - type = "AVX"; + if(avx_available()) { + r += check_simd_implementation(four, + four_initial, + "AVC", + expected, + increment_avx, + blocksize); } #endif #if HAVE_SSE42 - if(fptr == NULL && sse42_available()) { - fptr = increment_sse42; - type = "SSE42"; + if(sse42_available()) { + r += check_simd_implementation(four, + four_initial, + "SSR42", + expected, + increment_sse42, + blocksize); } #endif #if HAVE_SSE41 - if(fptr == NULL && sse41_available()) { - 
fptr = increment_sse41; - type = "SSE41"; + if(sse41_available()) { + r += check_simd_implementation(four, + four_initial, + "SSE41", + expected, + increment_sse41, + blocksize); } #endif #if HAVE_SSSE3 - if(fptr == NULL && ssse3_available()) { - fptr = increment_ssse3; - type = "SSSE3"; + if(ssse3_available()) { + r += check_simd_implementation(four, + four_initial, + "SSSE3", + expected, + increment_ssse3, + blocksize); } #endif #if HAVE_SSE3 - if(fptr == NULL && sse3_available()) { - fptr = increment_sse3; - type = "SSE3"; + if(sse3_available()) { + r += check_simd_implementation(four, + four_initial, + "SSE3", + expected, + increment_sse3, + blocksize); } #endif #if HAVE_SSE2 - if(fptr == NULL && sse2_available()) { - fptr = increment_sse2; - type = "SSE2"; + if(sse2_available()) { + r += check_simd_implementation(four, + four_initial, + "SSE2", + expected, + increment_sse2, + blocksize); } #endif #if HAVE_SSE - if(fptr == NULL && sse_available()) { - fptr = increment_sse; - type = "SSE"; + if(sse_available()) { + r += check_simd_implementation(four, + four_initial, + "SSE", + expected, + increment_sse, + blocksize); } #endif #if HAVE_MMX - if(fptr == NULL && mmx_available()) { - fptr = increment_mmx; - type = "MMX"; + if(mmx_available()) { + r += check_simd_implementation(four, + four_initial, + "MMX", + expected, + increment_mmx, + blocksize); } #endif - if(fptr == NULL) { - fptr = increment_fallback; - type = "fallback"; - } - printf("Using %s.\n", type); - fptr(four); - for(i=0; i<4; i++) { - if(four[i] != expected[i]) { - printf("Increment function failed, got %f expected %f.\n", four[i], expected[i]); - return 1; - } - } - return 0; + r += check_simd_implementation(four, + four_initial, + "fallback", + expected, + increment_fallback, + blocksize); + return r; } diff --git a/test cases/common/152 simd/simdfuncs.h b/test cases/common/152 simd/simdfuncs.h index dfb0560..c5e1658 100644 --- a/test cases/common/152 simd/simdfuncs.h +++ b/test cases/common/152 
simd/simdfuncs.h @@ -2,6 +2,14 @@ #include<simdconfig.h> +#ifdef _MSC_VER +#define ALIGN_16 __declspec(align(16)) +#else +#include<stdalign.h> +#define ALIGN_16 alignas(16) +#endif + + /* Yes, I do know that arr[4] decays into a pointer * as a function argument. Don't do this in real code * but for this test it is ok. diff --git a/test cases/common/158 wrap file should not failed/meson.build b/test cases/common/158 wrap file should not failed/meson.build index 9cf4e9a..f4ec2a8 100644 --- a/test cases/common/158 wrap file should not failed/meson.build +++ b/test cases/common/158 wrap file should not failed/meson.build @@ -1,6 +1,9 @@ -project('mainproj', 'c') +project('mainproj', 'c', + default_options : ['wrap_mode=nodownload'], +) subproject('zlib') +subproject('foo') executable('grabprog', files('src/subprojects/prog.c')) executable('grabprog2', files('src/subprojects/foo/prog2.c')) diff --git a/test cases/common/158 wrap file should not failed/subprojects/foo.wrap b/test cases/common/158 wrap file should not failed/subprojects/foo.wrap new file mode 100644 index 0000000..90d6d40 --- /dev/null +++ b/test cases/common/158 wrap file should not failed/subprojects/foo.wrap @@ -0,0 +1,11 @@ +[wrap-file] +directory = foo-1.0 + +source_url = http://something.invalid +source_filename = foo-1.0.tar.xz +source_hash = ae5fc03185654f76b459db16ca25809703f8821aeb39a433902244bb479c4b79 +lead_directory_missing = true + +patch_url = https://something.invalid/patch +patch_filename = foo-1.0-patch.tar.xz +patch_hash = 8f2e286a4b190228d4e0c25ddc91195449cfb5e5c52006355838964b244037da diff --git a/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz Binary files differnew file mode 100644 index 0000000..26d2927 --- /dev/null +++ b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz diff --git a/test 
cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz Binary files differnew file mode 100644 index 0000000..2647ef9 --- /dev/null +++ b/test cases/common/158 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz diff --git a/test cases/common/164 disabler/meson.build b/test cases/common/164 disabler/meson.build index 1956cd3..a1763d2 100644 --- a/test cases/common/164 disabler/meson.build +++ b/test cases/common/164 disabler/meson.build @@ -31,4 +31,12 @@ endif assert(number == 2, 'If found handled incorrectly, value should be 2 but is @0@'.format(number)) +dep = dependency('notfounddep', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) +cc = meson.get_compiler('c') +dep = cc.find_library('notfounddep', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) + +dep = find_program('donotfindme', required : false, disabler : true) +app = executable('myapp', 'notfound.c', dependencies : [dep]) diff --git a/test cases/common/17 comparison/meson.build b/test cases/common/17 comparison/meson.build index fb641ed..bba0168 100644 --- a/test cases/common/17 comparison/meson.build +++ b/test cases/common/17 comparison/meson.build @@ -137,3 +137,18 @@ assert(2 != 'st', 'not equal') assert(not ([] == 'st'), 'not equal') assert(not ([] == 1), 'not equal') assert(not (2 == 'st'), 'not equal') + +# "in" and "not in" operators + +assert(1 in [1, 2], '''1 should be in [1, 2]''') +assert(3 not in [1, 2], '''3 shouldn't be in [1, 2]''') +assert(not (3 in [1, 2]), '''3 shouldn't be in [1, 2]''') + +assert('b' in ['a', 'b'], ''''b' should be in ['a', 'b']''') +assert('c' not in ['a', 'b'], ''''c' shouldn't be in ['a', 'b']''') + +assert(exe1 in [exe1, exe2], ''''exe1 should be in [exe1, exe2]''') +assert(exe3 not in [exe1, exe2], ''''exe3 shouldn't be in [exe1, 
exe2]''') + +assert('a' in {'a': 'b'}, '''1 should be in {'a': 'b'}''') +assert('b' not in {'a': 'b'}, '''1 should be in {'a': 'b'}''') diff --git a/test cases/common/186 has link arg/meson.build b/test cases/common/186 has link arg/meson.build index e166101..10f2218 100644 --- a/test cases/common/186 has link arg/meson.build +++ b/test cases/common/186 has link arg/meson.build @@ -3,7 +3,7 @@ project('has link arg', 'c', 'cpp') cc = meson.get_compiler('c') cpp = meson.get_compiler('cpp') -if cc.get_id() == 'msvc' +if cc.get_argument_syntax() == 'msvc' is_arg = '/OPT:REF' useless = '/DEBUG' isnt_arg = '/iambroken' diff --git a/test cases/common/190 openmp/meson.build b/test cases/common/190 openmp/meson.build index eb270ab..018bf24 100644 --- a/test cases/common/190 openmp/meson.build +++ b/test cases/common/190 openmp/meson.build @@ -10,6 +10,9 @@ endif if cc.get_id() == 'msvc' and cc.version().version_compare('<17') error('MESON_SKIP_TEST msvc is too old to support OpenMP.') endif +if cc.get_id() == 'clang-cl' + error('MESON_SKIP_TEST clang-cl does not support OpenMP.') +endif if host_machine.system() == 'darwin' error('MESON_SKIP_TEST macOS does not support OpenMP.') endif diff --git a/test cases/common/196 install_mode/installed_files.txt b/test cases/common/196 install_mode/installed_files.txt index 724d954..4bd2211 100644 --- a/test cases/common/196 install_mode/installed_files.txt +++ b/test cases/common/196 install_mode/installed_files.txt @@ -1,9 +1,10 @@ usr/bin/runscript.sh usr/bin/trivialprog?exe +?msvc:usr/bin/trivialprog.pdb usr/include/config.h usr/include/rootdir.h usr/libtest/libstat.a -usr/share/man/man1/foo.1.gz +usr/share/man/man1/foo.1 usr/share/sub1/second.dat usr/share/sub2/stub usr/subdir/data.dat diff --git a/test cases/common/204 function attributes/meson.build b/test cases/common/204 function attributes/meson.build index bc049d7..1e93803 100644 --- a/test cases/common/204 function attributes/meson.build +++ b/test cases/common/204 
function attributes/meson.build @@ -19,7 +19,7 @@ project('gcc func attributes', ['c', 'cpp']) c = meson.get_compiler('c') cpp = meson.get_compiler('cpp') -expected_result = c.get_id() != 'msvc' +expected_result = not ['msvc', 'clang-cl'].contains(c.get_id()) # Q: Why is ifunc not in this list or any of the below lists? # A: It's too damn hard to figure out if you actually support it, since it @@ -50,16 +50,20 @@ attributes = [ 'used', 'warn_unused_result', 'weak', - 'weakref', ] +if c.get_id() != 'intel' + # not supported by icc as of 19.0.0 + attributes += 'weakref' +endif + # These are unsupported on darwin with apple clang 9.1.0 if host_machine.system() != 'darwin' attributes += 'alias' attributes += 'visibility' endif -if c.get_id() == 'gcc' +if ['gcc', 'intel'].contains(c.get_id()) # not supported by clang as of 5.0.0 (at least up to 6.0.1) attributes += 'artificial' attributes += 'error' @@ -69,7 +73,7 @@ if c.get_id() == 'gcc' attributes += 'optimize' attributes += 'warning' - if c.version().version_compare('>= 7.0.0') + if c.get_id() == 'gcc' and c.version().version_compare('>= 7.0.0') attributes += 'fallthrough' endif endif @@ -91,7 +95,7 @@ foreach a : ['dllexport', 'dllimport'] endforeach message('checking get_supported_function_attributes') -if c.get_id() != 'msvc' +if not ['msvc', 'clang-cl'].contains(c.get_id()) multi_expected = attributes else multi_expected = [] diff --git a/test cases/common/205 broken subproject/meson.build b/test cases/common/205 broken subproject/meson.build new file mode 100644 index 0000000..e3a6cae --- /dev/null +++ b/test cases/common/205 broken subproject/meson.build @@ -0,0 +1,2 @@ +project('test broken subproject') +subproject('broken', required : false) diff --git a/test cases/common/205 broken subproject/subprojects/broken/broken.c b/test cases/common/205 broken subproject/subprojects/broken/broken.c new file mode 100644 index 0000000..a9fc4b1 --- /dev/null +++ b/test cases/common/205 broken 
subproject/subprojects/broken/broken.c @@ -0,0 +1 @@ +#error This must not compile diff --git a/test cases/common/205 broken subproject/subprojects/broken/meson.build b/test cases/common/205 broken subproject/subprojects/broken/meson.build new file mode 100644 index 0000000..2d64fde --- /dev/null +++ b/test cases/common/205 broken subproject/subprojects/broken/meson.build @@ -0,0 +1,4 @@ +project('broken', 'c') + +executable('app', 'broken.c') +assert(false, 'This subproject must fail') diff --git a/test cases/common/206 argument syntax/meson.build b/test cases/common/206 argument syntax/meson.build new file mode 100644 index 0000000..216da45 --- /dev/null +++ b/test cases/common/206 argument syntax/meson.build @@ -0,0 +1,25 @@ +project( + 'argument syntax', + ['c'], +) + +cc = meson.get_compiler('c') + +if ['gcc', 'lcc', 'clang'].contains(cc.get_id()) + expected = 'gcc' +elif ['msvc', 'clang-cl'].contains(cc.get_id()) + expected = 'msvc' +elif cc.get_id() == 'intel' + if host_machine.system() == 'windows' + expected = 'msvc' + else + expected = 'gcc' + endif +else + # It's possible that other compilers end up here that shouldn't + expected = 'other' +endif + +assert(cc.get_argument_syntax() == expected, + 'Wrong output for compiler @0@. 
expected @1@ but got @2@'.format( + cc.get_id(), expected, cc.get_argument_syntax())) diff --git a/test cases/common/207 install name_prefix name_suffix/installed_files.txt b/test cases/common/207 install name_prefix name_suffix/installed_files.txt new file mode 100644 index 0000000..240a8be --- /dev/null +++ b/test cases/common/207 install name_prefix name_suffix/installed_files.txt @@ -0,0 +1,15 @@ +?msvc:usr/bin/baz.pdb +?msvc:usr/bin/bowcorge.pdb +?msvc:usr/bin/foo.pdb +?msvc:usr/lib/baz.pdb +?msvc:usr/lib/bowcorge.pdb +?msvc:usr/lib/foo.pdb +usr/?lib/bowcorge.stern +usr/lib/?libbaz.cheese +usr/lib/bar.a +usr/lib/bowcorge?implib +usr/lib/bowgrault.stern +usr/lib/foo?implib +usr/lib/foo?so +usr/lib/libbaz?implib +usr/lib/libqux.cheese diff --git a/test cases/common/207 install name_prefix name_suffix/libfile.c b/test cases/common/207 install name_prefix name_suffix/libfile.c new file mode 100644 index 0000000..44f7667 --- /dev/null +++ b/test cases/common/207 install name_prefix name_suffix/libfile.c @@ -0,0 +1,14 @@ +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC func() { + return 0; +} diff --git a/test cases/common/207 install name_prefix name_suffix/meson.build b/test cases/common/207 install name_prefix name_suffix/meson.build new file mode 100644 index 0000000..4539999 --- /dev/null +++ b/test cases/common/207 install name_prefix name_suffix/meson.build @@ -0,0 +1,10 @@ +project('library with name_prefix name_suffix test', 'c') + +shared_library('foo', 'libfile.c', name_prefix: '', install : true) +static_library('bar', 'libfile.c', name_prefix: '', install : true) + +shared_library('baz', 'libfile.c', name_suffix: 'cheese', install : true) +static_library('qux', 'libfile.c', name_suffix: 'cheese', 
install : true) + +shared_library('corge', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true) +static_library('grault', 'libfile.c', name_prefix: 'bow', name_suffix: 'stern', install : true) diff --git a/test cases/common/25 library versions/installed_files.txt b/test cases/common/25 library versions/installed_files.txt index e10d1dd..938e063 100644 --- a/test cases/common/25 library versions/installed_files.txt +++ b/test cases/common/25 library versions/installed_files.txt @@ -1 +1,3 @@ usr/lib/prefixsomelib.suffix +usr/lib/prefixsomelib?implib +?msvc:usr/lib/prefixsomelib.pdb diff --git a/test cases/common/25 library versions/lib.c b/test cases/common/25 library versions/lib.c index 67b6f4d..10019dc 100644 --- a/test cases/common/25 library versions/lib.c +++ b/test cases/common/25 library versions/lib.c @@ -1,3 +1,14 @@ -int myFunc() { +#if defined _WIN32 || defined __CYGWIN__ + #define DLL_PUBLIC __declspec(dllexport) +#else + #if defined __GNUC__ + #define DLL_PUBLIC __attribute__ ((visibility("default"))) + #else + #pragma message ("Compiler does not support symbol visibility.") + #define DLL_PUBLIC + #endif +#endif + +int DLL_PUBLIC myFunc() { return 55; } diff --git a/test cases/common/43 library chain/installed_files.txt b/test cases/common/43 library chain/installed_files.txt index c7dab9f..5e796b0 100644 --- a/test cases/common/43 library chain/installed_files.txt +++ b/test cases/common/43 library chain/installed_files.txt @@ -1 +1,2 @@ usr/bin/prog?exe +?msvc:usr/bin/prog.pdb diff --git a/test cases/common/44 options/meson.build b/test cases/common/44 options/meson.build index f177aa4..c6cf9c8 100644 --- a/test cases/common/44 options/meson.build +++ b/test cases/common/44 options/meson.build @@ -29,3 +29,5 @@ endif if get_option('integer_opt') != 3 error('Incorrect value in integer option.') endif + +assert(get_option('wrap_mode') == 'default', 'Wrap mode option is broken.') diff --git a/test cases/common/46 
subproject/installed_files.txt b/test cases/common/46 subproject/installed_files.txt index dc09fb7..dba3202 100644 --- a/test cases/common/46 subproject/installed_files.txt +++ b/test cases/common/46 subproject/installed_files.txt @@ -1,2 +1,3 @@ usr/bin/user?exe +?msvc:usr/bin/user.pdb usr/share/sublib/sublib.depmf diff --git a/test cases/common/48 pkgconfig-gen/installed_files.txt b/test cases/common/48 pkgconfig-gen/installed_files.txt index 3c44d28..94de704 100644 --- a/test cases/common/48 pkgconfig-gen/installed_files.txt +++ b/test cases/common/48 pkgconfig-gen/installed_files.txt @@ -1,3 +1,4 @@ usr/include/simple.h usr/lib/pkgconfig/simple.pc usr/lib/pkgconfig/libfoo.pc +usr/lib/pkgconfig/libhello.pc diff --git a/test cases/common/49 custom install dirs/installed_files.txt b/test cases/common/49 custom install dirs/installed_files.txt index 0cc533a..4e17c2d 100644 --- a/test cases/common/49 custom install dirs/installed_files.txt +++ b/test cases/common/49 custom install dirs/installed_files.txt @@ -1,9 +1,11 @@ usr/dib/dab/dub/prog?exe +?msvc:usr/dib/dab/dub/prog.pdb usr/dib/dab/dub2/prog2?exe +?msvc:usr/dib/dab/dub2/prog2.pdb usr/some/dir/sample.h usr/some/dir2/sample.h -usr/woman/prog.1.gz -usr/woman2/prog.1.gz +usr/woman/prog.1 +usr/woman2/prog.1 usr/meow/datafile.cat usr/meow2/datafile.cat usr/woof/subdir/datafile.dog diff --git a/test cases/common/57 install script/installed_files.txt b/test cases/common/57 install script/installed_files.txt index 94c1fed..28f9ed0 100644 --- a/test cases/common/57 install script/installed_files.txt +++ b/test cases/common/57 install script/installed_files.txt @@ -1,4 +1,5 @@ usr/bin/prog?exe +?msvc:usr/bin/prog.pdb usr/diiba/daaba/file.dat usr/this/should/also-work.dat usr/this/does/something-different.dat.in diff --git a/test cases/common/6 linkshared/installed_files.txt b/test cases/common/6 linkshared/installed_files.txt index c7dab9f..5e796b0 100644 --- a/test cases/common/6 linkshared/installed_files.txt +++ 
b/test cases/common/6 linkshared/installed_files.txt @@ -1 +1,2 @@ usr/bin/prog?exe +?msvc:usr/bin/prog.pdb diff --git a/test cases/common/64 foreach/installed_files.txt b/test cases/common/64 foreach/installed_files.txt index 2930ff0..3376925 100644 --- a/test cases/common/64 foreach/installed_files.txt +++ b/test cases/common/64 foreach/installed_files.txt @@ -1,3 +1,6 @@ usr/bin/prog1?exe +?msvc:usr/bin/prog1.pdb usr/bin/prog2?exe +?msvc:usr/bin/prog2.pdb usr/bin/prog3?exe +?msvc:usr/bin/prog3.pdb diff --git a/test cases/common/64 foreach/meson.build b/test cases/common/64 foreach/meson.build index e633de8..7084e80 100644 --- a/test cases/common/64 foreach/meson.build +++ b/test cases/common/64 foreach/meson.build @@ -18,3 +18,16 @@ foreach i : tests # we definitely don't want that. tests = ['test4', 'prog4', 'prog4.c'] endforeach + +items = ['a', 'continue', 'b', 'break', 'c'] +result = [] +foreach i : items + if i == 'continue' + continue + elif i == 'break' + break + endif + result += i +endforeach + +assert(result == ['a', 'b'], 'Continue or break in foreach failed') diff --git a/test cases/common/8 install/installed_files.txt b/test cases/common/8 install/installed_files.txt index cbbdc03..d3122a7 100644 --- a/test cases/common/8 install/installed_files.txt +++ b/test cases/common/8 install/installed_files.txt @@ -1,2 +1,3 @@ usr/bin/prog?exe +?msvc:usr/bin/prog.pdb usr/libtest/libstat.a diff --git a/test cases/common/91 default options/meson.build b/test cases/common/91 default options/meson.build index 9f45df0..c4c72ef 100644 --- a/test cases/common/91 default options/meson.build +++ b/test cases/common/91 default options/meson.build @@ -6,11 +6,9 @@ project('default options', 'cpp', 'c', default_options : [ 'warning_level=3', ]) -cpp_id = meson.get_compiler('cpp').get_id() - assert(get_option('buildtype') == 'debugoptimized', 'Build type default value wrong.') -if cpp_id == 'msvc' +if meson.get_compiler('cpp').get_argument_syntax() == 'msvc' cpp_eh = 
get_option('cpp_eh') assert(cpp_eh == 'none', 'MSVC eh value is "' + cpp_eh + '" instead of "none"') else @@ -33,4 +31,3 @@ assert(w_level == '3', 'warning level "' + w_level + '" instead of "3"') # assert(not cc.compiles('int foobar;'), 'Default arg not used in test.') # assert(cc.compiles('int foobar;', no_builtin_args : true), 'No_builtin did not disable builtins.') # endif - diff --git a/test cases/csharp/1 basic/installed_files.txt b/test cases/csharp/1 basic/installed_files.txt index f64c68c..5022d28 100644 --- a/test cases/csharp/1 basic/installed_files.txt +++ b/test cases/csharp/1 basic/installed_files.txt @@ -1 +1,2 @@ usr/bin/prog.exe +?msvc:usr/bin/prog.pdb diff --git a/test cases/csharp/2 library/installed_files.txt b/test cases/csharp/2 library/installed_files.txt index 4ebea55..73e77a2 100644 --- a/test cases/csharp/2 library/installed_files.txt +++ b/test cases/csharp/2 library/installed_files.txt @@ -1,2 +1,5 @@ usr/bin/prog.exe -usr/lib/helper.dll +?msvc:usr/bin/prog.pdb +?msvc:usr/bin/helper.dll +?msvc:usr/bin/helper.pdb +?gcc:usr/lib/helper.dll diff --git a/test cases/csharp/4 external dep/installed_files.txt b/test cases/csharp/4 external dep/installed_files.txt new file mode 100644 index 0000000..f64c68c --- /dev/null +++ b/test cases/csharp/4 external dep/installed_files.txt @@ -0,0 +1 @@ +usr/bin/prog.exe diff --git a/test cases/d/3 shared library/installed_files.txt b/test cases/d/3 shared library/installed_files.txt index 6658947..4e2c591 100644 --- a/test cases/d/3 shared library/installed_files.txt +++ b/test cases/d/3 shared library/installed_files.txt @@ -2,3 +2,4 @@ usr/bin/app_d?exe ?msvc:usr/bin/stuff.dll ?msvc:usr/lib/stuff.lib ?gcc:usr/lib/libstuff.so +usr/lib/pkgconfig/test.pc diff --git a/test cases/d/3 shared library/meson.build b/test cases/d/3 shared library/meson.build index 64f972b..b37b700 100644 --- a/test cases/d/3 shared library/meson.build +++ b/test cases/d/3 shared library/meson.build @@ -11,13 +11,11 @@ ldyn = 
shared_library('stuff', 'libstuff.d', install : true) ed = executable('app_d', 'app.d', link_with : ldyn, install : true) test('linktest_dyn', ed) -if host_machine.system() != 'windows' - # test D attributes for pkg-config - pkgc = import('pkgconfig') - pkgc.generate(name: 'test', - libraries: ldyn, - subdirs: 'd/stuff', - description: 'A test of D attributes to pkgconfig.generate.', - d_module_versions: ['Use_Static'] - ) -endif +# test D attributes for pkg-config +pkgc = import('pkgconfig') +pkgc.generate(name: 'test', + libraries: ldyn, + subdirs: 'd/stuff', + description: 'A test of D attributes to pkgconfig.generate.', + d_module_versions: ['Use_Static'] +) diff --git a/test cases/d/5 mixed/installed_files.txt b/test cases/d/5 mixed/installed_files.txt index 5f3f4e2..5950753 100644 --- a/test cases/d/5 mixed/installed_files.txt +++ b/test cases/d/5 mixed/installed_files.txt @@ -3,4 +3,6 @@ usr/bin/appdc_s?exe usr/lib/libstuff.a ?gcc:usr/lib/libstuff.so ?msvc:usr/bin/stuff.dll +?msvc:usr/bin/stuff.pdb ?msvc:usr/lib/stuff.lib +?msvc:usr/lib/stuff.pdb diff --git a/test cases/d/9 features/app.d b/test cases/d/9 features/app.d index 6b43bf0..05c56ca 100644 --- a/test cases/d/9 features/app.d +++ b/test cases/d/9 features/app.d @@ -41,6 +41,30 @@ void main (string[] args) exit (1); } } + + version (With_VersionInteger) + version(3) exit(0); + + version (With_Debug) + debug exit(0); + + version (With_DebugInteger) + debug(3) exit(0); + + version (With_DebugIdentifier) + debug(DebugIdentifier) exit(0); + + version (With_DebugAll) { + int dbg = 0; + debug dbg++; + debug(2) dbg++; + debug(3) dbg++; + debug(4) dbg++; + debug(DebugIdentifier) dbg++; + + if (dbg == 5) + exit(0); + } // we fail here exit (1); diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build index 694e488..06f0341 100644 --- a/test cases/d/9 features/meson.build +++ b/test cases/d/9 features/meson.build @@ -1,4 +1,4 @@ -project('D Features', 'd') +project('D Features', 
'd', default_options : ['debug=false']) # ONLY FOR BACKWARDS COMPATIBILITY. # DO NOT DO THIS IN NEW CODE! @@ -44,3 +44,63 @@ e_test = executable('dapp_test', d_unittest: true ) test('dapp_test', e_test) + +# test version level +e_version_int = executable('dapp_version_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_VersionInteger', 3], +) +test('dapp_version_int_t', e_version_int, args: ['debug']) + +# test version level failure +e_version_int_fail = executable('dapp_version_int_fail', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_VersionInteger', 2], +) +test('dapp_version_int_t_fail', e_version_int_fail, args: ['debug'], should_fail: true) + +# test debug conditions: disabled +e_no_debug = executable('dapp_no_debug', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_Debug'], +) +test('dapp_no_debug_t_fail', e_no_debug, args: ['debug'], should_fail: true) + +# test debug conditions: enabled +e_debug = executable('dapp_debug', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_Debug'], + d_debug: 1, +) +test('dapp_debug_t', e_debug, args: ['debug']) + +# test debug conditions: integer +e_debug_int = executable('dapp_debug_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugInteger'], + d_debug: 3, +) +test('dapp_debug_int_t', e_debug_int, args: ['debug']) + +# test debug conditions: identifier +e_debug_ident = executable('dapp_debug_ident', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugIdentifier'], + d_debug: 'DebugIdentifier', +) +test('dapp_debug_ident_t', e_debug_ident, args: ['debug']) + +# test with all debug conditions at once, and with redundant values +e_debug_all = executable('dapp_debug_all', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugAll'], + d_debug: ['4', 'DebugIdentifier', 2, 'DebugIdentifierUnused'], +) +test('dapp_debug_all_t', e_debug_all, args: ['debug']) diff --git a/test 
cases/failing/88 dub compiler/meson.build b/test cases/failing/88 dub compiler/meson.build index f5bc494..2f0b801 100644 --- a/test cases/failing/88 dub compiler/meson.build +++ b/test cases/failing/88 dub compiler/meson.build @@ -1,3 +1,9 @@ project('dub', 'd', meson_version: '0.48.0') +if meson.get_compiler('d').get_id() == 'dmd' + if host_machine.system() == 'windows' or host_machine.system() == 'cygwin' + error('MESON_SKIP_TEST Windows test environment lacks multiple D compilers.') + endif +endif + dependency('dubtestproject:test2', method: 'dub') # Compiler mismatch diff --git a/test cases/failing/89 link_with custom target/demo.c b/test cases/failing/89 link_with custom target/demo.c new file mode 100644 index 0000000..b6feaca --- /dev/null +++ b/test cases/failing/89 link_with custom target/demo.c @@ -0,0 +1,5 @@ +int func_in_foo(); + +int main(int argc, char **argv) { + return func_in_foo(); +} diff --git a/test cases/failing/89 link_with custom target/foo.c b/test cases/failing/89 link_with custom target/foo.c new file mode 100644 index 0000000..2c71422 --- /dev/null +++ b/test cases/failing/89 link_with custom target/foo.c @@ -0,0 +1,3 @@ +int func_in_foo() { + return 0; +} diff --git a/test cases/failing/89 link_with custom target/lib_generator.py b/test cases/failing/89 link_with custom target/lib_generator.py new file mode 100755 index 0000000..98ed5a8 --- /dev/null +++ b/test cases/failing/89 link_with custom target/lib_generator.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 + +# Mimic a binary that generates a static library + +import os +import subprocess +import sys + +if __name__ == '__main__': + if len(sys.argv) != 4: + print(sys.argv[0], 'compiler input_file output_file') + sys.exit(1) + compiler = sys.argv[1] + ifile = sys.argv[2] + ofile = sys.argv[3] + tmp = ifile + '.o' + if compiler.endswith('cl'): + subprocess.check_call([compiler, '/nologo', '/MDd', '/Fo' + tmp, '/c', ifile]) + subprocess.check_call(['lib', '/nologo', '/OUT:' + ofile, tmp]) 
+ else: + subprocess.check_call([compiler, '-c', ifile, '-o', tmp]) + subprocess.check_call(['ar', 'csr', ofile, tmp]) + +os.unlink(tmp) diff --git a/test cases/failing/89 link_with custom target/meson.build b/test cases/failing/89 link_with custom target/meson.build new file mode 100644 index 0000000..6977ca1 --- /dev/null +++ b/test cases/failing/89 link_with custom target/meson.build @@ -0,0 +1,23 @@ +project('link_with custom target', ['c']) + +# +# libraries created by a custom_target currently can be used in sources: (see +# common/100 manygen/ for an example of that), but not in link_with: +# + +lib_generator = find_program('lib_generator.py') + +cc = meson.get_compiler('c').cmd_array().get(-1) + +libfoo_target = custom_target( + 'libfoo', + input: ['foo.c'], + output: ['libfoo.a'], + command: [lib_generator, cc, '@INPUT@', '@OUTPUT@'] +) + +libfoo = declare_dependency( + link_with: libfoo_target, +) + +executable('demo', ['demo.c'], dependencies: [libfoo]) diff --git a/test cases/failing/90 subproj not-found dep/meson.build b/test cases/failing/90 subproj not-found dep/meson.build new file mode 100644 index 0000000..2b17df1 --- /dev/null +++ b/test cases/failing/90 subproj not-found dep/meson.build @@ -0,0 +1,2 @@ +project('dep-test') +missing = dependency('', fallback: ['somesubproj', 'notfound_dep'], required: true) diff --git a/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build b/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build new file mode 100644 index 0000000..5f451f4 --- /dev/null +++ b/test cases/failing/90 subproj not-found dep/subprojects/somesubproj/meson.build @@ -0,0 +1,3 @@ +project('dep', 'c') + +notfound_dep = dependency('', required : false) diff --git a/test cases/fortran/9 cpp/meson.build b/test cases/fortran/9 cpp/meson.build index 49497c0..93037aa 100644 --- a/test cases/fortran/9 cpp/meson.build +++ b/test cases/fortran/9 cpp/meson.build @@ -6,5 +6,16 @@ if cpp.get_id() == 
'clang' error('MESON_SKIP_TEST Clang C++ does not find -lgfortran for some reason.') endif -e = executable('cppfort', 'main.cpp', 'fortran.f') +fc = meson.get_compiler('fortran') +link_with = [] +if fc.get_id() == 'intel' + link_with += fc.find_library('ifport') +endif + +e = executable( + 'cppfort', + ['main.cpp', 'fortran.f'], + dependencies : [link_with], +) + test('C++ FORTRAN', e) diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build index d1e1da4..1d29455 100644 --- a/test cases/frameworks/1 boost/meson.build +++ b/test cases/frameworks/1 boost/meson.build @@ -10,6 +10,12 @@ if not dep.found() error('MESON_SKIP_TEST boost not found.') endif +compiler = meson.get_compiler('cpp') +if compiler.has_argument('-permissive') + # boost 1.64, the version we test against, doesn't work with -permissive + add_project_arguments('-permissive', language: 'cpp') +endif + # We want to have multiple separate configurations of Boost # within one project. The need to be independent of each other. # Use one without a library dependency and one with it. 
diff --git a/test cases/frameworks/10 gtk-doc/doc/meson.build b/test cases/frameworks/10 gtk-doc/doc/meson.build index 059d405..019be94 100644 --- a/test cases/frameworks/10 gtk-doc/doc/meson.build +++ b/test cases/frameworks/10 gtk-doc/doc/meson.build @@ -16,3 +16,18 @@ gnome.gtkdoc('foobar2', content_files : [docbook, version_xml], install : true, install_dir : 'foobar2') + +gnome.gtkdoc('foobar', + module_version : '3.0', + src_dir : inc, + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + install : true) + +gnome.gtkdoc('foobar2', + module_version : '3.0', + src_dir : inc, + main_sgml : 'foobar-docs.sgml', + content_files : [docbook, version_xml], + install : true, + install_dir : 'foobar3') diff --git a/test cases/frameworks/10 gtk-doc/installed_files.txt b/test cases/frameworks/10 gtk-doc/installed_files.txt index 2bfb3f5..952a724 100644 --- a/test cases/frameworks/10 gtk-doc/installed_files.txt +++ b/test cases/frameworks/10 gtk-doc/installed_files.txt @@ -27,3 +27,31 @@ usr/share/gtk-doc/html/foobar2/right-insensitive.png usr/share/gtk-doc/html/foobar2/style.css usr/share/gtk-doc/html/foobar2/up.png usr/share/gtk-doc/html/foobar2/up-insensitive.png +usr/share/gtk-doc/html/foobar-3.0/BAR.html +usr/share/gtk-doc/html/foobar-3.0/foobar-3.0.devhelp2 +usr/share/gtk-doc/html/foobar-3.0/foobar.html +usr/share/gtk-doc/html/foobar-3.0/foobar-foo.html +usr/share/gtk-doc/html/foobar-3.0/foobar-foo-version.html +usr/share/gtk-doc/html/foobar-3.0/home.png +usr/share/gtk-doc/html/foobar-3.0/index.html +usr/share/gtk-doc/html/foobar-3.0/left.png +usr/share/gtk-doc/html/foobar-3.0/left-insensitive.png +usr/share/gtk-doc/html/foobar-3.0/right.png +usr/share/gtk-doc/html/foobar-3.0/right-insensitive.png +usr/share/gtk-doc/html/foobar-3.0/style.css +usr/share/gtk-doc/html/foobar-3.0/up.png +usr/share/gtk-doc/html/foobar-3.0/up-insensitive.png +usr/share/gtk-doc/html/foobar3/BAR.html +usr/share/gtk-doc/html/foobar3/foobar2-3.0.devhelp2 
+usr/share/gtk-doc/html/foobar3/foobar.html +usr/share/gtk-doc/html/foobar3/foobar2-foo.html +usr/share/gtk-doc/html/foobar3/foobar2-foo-version.html +usr/share/gtk-doc/html/foobar3/home.png +usr/share/gtk-doc/html/foobar3/index.html +usr/share/gtk-doc/html/foobar3/left.png +usr/share/gtk-doc/html/foobar3/left-insensitive.png +usr/share/gtk-doc/html/foobar3/right.png +usr/share/gtk-doc/html/foobar3/right-insensitive.png +usr/share/gtk-doc/html/foobar3/style.css +usr/share/gtk-doc/html/foobar3/up.png +usr/share/gtk-doc/html/foobar3/up-insensitive.png diff --git a/test cases/frameworks/11 gir subproject/installed_files.txt b/test cases/frameworks/11 gir subproject/installed_files.txt index 87d49a1..6f11f54 100644 --- a/test cases/frameworks/11 gir subproject/installed_files.txt +++ b/test cases/frameworks/11 gir subproject/installed_files.txt @@ -3,4 +3,6 @@ usr/lib/girepository-1.0/MesonSub-1.0.typelib usr/share/gir-1.0/Meson-1.0.gir usr/share/gir-1.0/MesonSub-1.0.gir usr/lib/?libgirsubproject.so +?cygwin:usr/lib/libgirlib.dll.a usr/lib/?libgirlib.so +?cygwin:usr/lib/libgirsubproject.dll.a diff --git a/test cases/frameworks/12 multiple gir/installed_files.txt b/test cases/frameworks/12 multiple gir/installed_files.txt index a5d16bc..3f9a8f2 100644 --- a/test cases/frameworks/12 multiple gir/installed_files.txt +++ b/test cases/frameworks/12 multiple gir/installed_files.txt @@ -1,6 +1,8 @@ usr/lib/girepository-1.0/Meson-1.0.typelib usr/lib/girepository-1.0/MesonSub-1.0.typelib usr/lib/?libgirlib.so +?cygwin:usr/lib/libgirlib.dll.a usr/lib/?libgirsubproject.so +?cygwin:usr/lib/libgirsubproject.dll.a usr/share/gir-1.0/Meson-1.0.gir usr/share/gir-1.0/MesonSub-1.0.gir diff --git a/test cases/frameworks/15 llvm/meson.build b/test cases/frameworks/15 llvm/meson.build index e05fddd..b43bb87 100644 --- a/test cases/frameworks/15 llvm/meson.build +++ b/test cases/frameworks/15 llvm/meson.build @@ -2,18 +2,29 @@ project('llvmtest', ['c', 'cpp'], default_options : 
['c_std=c99']) d = dependency('llvm', required : false) if not d.found() - error('MESON_SKIP_TEST llvm not found.') + d = dependency('llvm', required : false, static : true) + if not d.found() + error('MESON_SKIP_TEST llvm not found.') + else + static = true + endif +else + static = false endif -d = dependency('llvm', modules : 'not-found', required : false) +d = dependency('llvm', modules : 'not-found', required : false, static : static) assert(d.found() == false, 'not-found llvm module found') -d = dependency('llvm', version : '<0.1', required : false) +d = dependency('llvm', version : '<0.1', required : false, static : static) assert(d.found() == false, 'ancient llvm module found') -d = dependency('llvm', optional_modules : 'not-found', required : false) +d = dependency('llvm', optional_modules : 'not-found', required : false, static : static) assert(d.found() == true, 'optional module stopped llvm from being found.') +# Check we can apply a version constraint +d = dependency('llvm', version : ['< 500', '>=@0@'.format(d.version())], required: false, static : static) +assert(d.found() == true, 'Cannot set version constraints') + dep_tinfo = dependency('tinfo', required : false) if not dep_tinfo.found() cpp = meson.get_compiler('cpp') @@ -35,12 +46,10 @@ foreach static : [true, false] 'sum.c', dependencies : [ llvm_dep, dep_tinfo, - dependency('zlib'), + # zlib will be statically linked on windows + dependency('zlib', required : host_machine.system() != 'windows'), meson.get_compiler('c').find_library('dl', required : false), ] ) endif endforeach - -# Check we can apply a version constraint -dependency('llvm', version: '>=@0@'.format(d.version())) diff --git a/test cases/frameworks/17 mpi/meson.build b/test cases/frameworks/17 mpi/meson.build index 1085d40..2102b81 100644 --- a/test cases/frameworks/17 mpi/meson.build +++ b/test cases/frameworks/17 mpi/meson.build @@ -1,4 +1,4 @@ -project('mpi', 'c', 'cpp') +project('mpi', 'c', 'cpp', default_options: 
['b_asneeded=false']) cc = meson.get_compiler('c') @@ -17,7 +17,7 @@ exec = executable('exec', test('MPI C', exec) if build_machine.system() != 'windows' - # C++ MPI not supported by MS-MPI used on AppVeyor. + # C++ MPI not supported by MS-MPI mpicpp = dependency('mpi', language : 'cpp') execpp = executable('execpp', 'main.cpp', @@ -44,4 +44,6 @@ if uburesult.returncode() != 0 and add_languages('fortran', required : false) endif # Check we can apply a version constraint -dependency('mpi', version: '>=@0@'.format(mpic.version())) +if mpic.version() != 'unknown' + dependency('mpi', version: '>=@0@'.format(mpic.version())) +endif diff --git a/test cases/frameworks/21 libwmf/meson.build b/test cases/frameworks/21 libwmf/meson.build index ab0ebf6..6952bf7 100644 --- a/test cases/frameworks/21 libwmf/meson.build +++ b/test cases/frameworks/21 libwmf/meson.build @@ -9,7 +9,11 @@ libwmf_dep = dependency('libwmf', version : '>= 0.2.8') libwmf_ver = libwmf_dep.version() assert(libwmf_ver.split('.').length() > 1, 'libwmf version is "@0@"'.format(libwmf_ver)) message('libwmf version is "@0@"'.format(libwmf_ver)) -e = executable('libwmf_prog', 'libwmf_prog.c', dependencies : libwmf_dep) +# Workaround for Debian bug 912563 where libwmf-devel returns cflags +# that do not not have Freetype include paths but their headers +# use them unconditionally. 
+ft_dep = dependency('freetype2') +e = executable('libwmf_prog', 'libwmf_prog.c', dependencies : [libwmf_dep, ft_dep]) test('libwmftest', e) diff --git a/test cases/frameworks/23 hotdoc/installed_files.txt b/test cases/frameworks/23 hotdoc/installed_files.txt new file mode 100644 index 0000000..6804dbf --- /dev/null +++ b/test cases/frameworks/23 hotdoc/installed_files.txt @@ -0,0 +1,304 @@ +usr/share/doc/foobar/html/foo.html +usr/share/doc/foobar/html/c-index.html +usr/share/doc/foobar/html/index.html +usr/share/doc/foobar/html/dumped.trie +usr/share/doc/foobar/html/assets/css/prism.css +usr/share/doc/foobar/html/assets/css/bootstrap-toc.min.css +usr/share/doc/foobar/html/assets/css/frontend.css +usr/share/doc/foobar/html/assets/css/dumped.trie +usr/share/doc/foobar/html/assets/css/jquery.mCustomScrollbar.min.css +usr/share/doc/foobar/html/assets/css/custom_bootstrap.css +usr/share/doc/foobar/html/assets/templates/navbar_links.html +usr/share/doc/foobar/html/assets/templates/scripts.html +usr/share/doc/foobar/html/assets/templates/stylesheets.html +usr/share/doc/foobar/html/assets/templates/multi_return_value.html +usr/share/doc/foobar/html/assets/templates/parameters.html +usr/share/doc/foobar/html/assets/templates/base_page.html +usr/share/doc/foobar/html/assets/templates/footer.html +usr/share/doc/foobar/html/assets/templates/extra_head.html +usr/share/doc/foobar/html/assets/templates/parameter_detail.html +usr/share/doc/foobar/html/assets/templates/navbar_center.html +usr/share/doc/foobar/html/assets/templates/enum_member.html +usr/share/doc/foobar/html/assets/templates/member_list.html +usr/share/doc/foobar/html/assets/templates/return_item.html +usr/share/doc/foobar/html/assets/templates/subpages.html +usr/share/doc/foobar/html/assets/templates/dumped.trie +usr/share/doc/foobar/html/assets/templates/page_content.html +usr/share/doc/foobar/html/assets/templates/navbar.html +usr/share/doc/foobar/html/assets/templates/site_navigation.html 
+usr/share/doc/foobar/html/assets/templates/field_detail.html +usr/share/doc/foobar/html/assets/templates/brand-logo.html +usr/share/doc/foobar/html/assets/js/prism_autoloader_path_override.js +usr/share/doc/foobar/html/assets/js/jquery.js +usr/share/doc/foobar/html/assets/js/scrollspy.js +usr/share/doc/foobar/html/assets/js/isotope.pkgd.min.js +usr/share/doc/foobar/html/assets/js/utils.js +usr/share/doc/foobar/html/assets/js/typeahead.jquery.min.js +usr/share/doc/foobar/html/assets/js/language_switching.js +usr/share/doc/foobar/html/assets/js/tag_filtering.js +usr/share/doc/foobar/html/assets/js/prism-autoloader.js +usr/share/doc/foobar/html/assets/js/navbar_offset_scroller.js +usr/share/doc/foobar/html/assets/js/lines_around_headings.js +usr/share/doc/foobar/html/assets/js/trie_index.js +usr/share/doc/foobar/html/assets/js/search.js +usr/share/doc/foobar/html/assets/js/trie.js +usr/share/doc/foobar/html/assets/js/bootstrap.js +usr/share/doc/foobar/html/assets/js/navigation.js +usr/share/doc/foobar/html/assets/js/bootstrap-toc.min.js +usr/share/doc/foobar/html/assets/js/anchor.min.js +usr/share/doc/foobar/html/assets/js/prism-core.js +usr/share/doc/foobar/html/assets/js/sitemap.js +usr/share/doc/foobar/html/assets/js/dumped.trie +usr/share/doc/foobar/html/assets/js/mustache.min.js +usr/share/doc/foobar/html/assets/js/compare-versions.js +usr/share/doc/foobar/html/assets/js/jquery.touchSwipe.min.js +usr/share/doc/foobar/html/assets/js/jquery.mCustomScrollbar.concat.min.js +usr/share/doc/foobar/html/assets/js/search/members +usr/share/doc/foobar/html/assets/js/search/Hello +usr/share/doc/foobar/html/assets/js/search/hello +usr/share/doc/foobar/html/assets/js/search/type +usr/share/doc/foobar/html/assets/js/search/FooIndecision +usr/share/doc/foobar/html/assets/js/search/fooindecision +usr/share/doc/foobar/html/assets/js/search/Members +usr/share/doc/foobar/html/assets/js/search/dumped.trie +usr/share/doc/foobar/html/assets/js/search/indecision 
+usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/index.html-hello-world.fragment +usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/dumped.trie +usr/share/doc/foobar/html/assets/js/search/hotdoc_fragments/foo.html-FooIndecision.fragment +usr/share/doc/foobar/html/assets/prism_components/prism-inform7.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-pascal.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-bro.js +usr/share/doc/foobar/html/assets/prism_components/prism-nim.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-gherkin.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-stylus.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-ocaml.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-powershell.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-smalltalk.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-verilog.js +usr/share/doc/foobar/html/assets/prism_components/prism-puppet.js +usr/share/doc/foobar/html/assets/prism_components/prism-aspnet.js +usr/share/doc/foobar/html/assets/prism_components/prism-parigp.js +usr/share/doc/foobar/html/assets/prism_components/prism-objectivec.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-processing.js +usr/share/doc/foobar/html/assets/prism_components/prism-objectivec.js +usr/share/doc/foobar/html/assets/prism_components/prism-jsx.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nginx.js +usr/share/doc/foobar/html/assets/prism_components/prism-powershell.js +usr/share/doc/foobar/html/assets/prism_components/prism-php.js +usr/share/doc/foobar/html/assets/prism_components/prism-smarty.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-roboconf.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-batch.js +usr/share/doc/foobar/html/assets/prism_components/prism-vhdl.js +usr/share/doc/foobar/html/assets/prism_components/prism-protobuf.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-textile.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-crystal.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-scss.js +usr/share/doc/foobar/html/assets/prism_components/prism-bro.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-smarty.js +usr/share/doc/foobar/html/assets/prism_components/prism-bison.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-tcl.js +usr/share/doc/foobar/html/assets/prism_components/prism-pure.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-makefile.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-applescript.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-css-extras.js +usr/share/doc/foobar/html/assets/prism_components/prism-stylus.js +usr/share/doc/foobar/html/assets/prism_components/prism-q.js +usr/share/doc/foobar/html/assets/prism_components/prism-dart.js +usr/share/doc/foobar/html/assets/prism_components/prism-oz.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-haskell.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-clike.js +usr/share/doc/foobar/html/assets/prism_components/prism-kotlin.js +usr/share/doc/foobar/html/assets/prism_components/prism-http.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-bash.js +usr/share/doc/foobar/html/assets/prism_components/prism-apl.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-docker.js +usr/share/doc/foobar/html/assets/prism_components/prism-sass.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-basic.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nasm.js +usr/share/doc/foobar/html/assets/prism_components/prism-kotlin.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-abap.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-perl.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-rust.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-c.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-scala.js +usr/share/doc/foobar/html/assets/prism_components/prism-glsl.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-lua.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-coffeescript.js +usr/share/doc/foobar/html/assets/prism_components/prism-jade.js +usr/share/doc/foobar/html/assets/prism_components/prism-keyman.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-crystal.js +usr/share/doc/foobar/html/assets/prism_components/prism-rest.js +usr/share/doc/foobar/html/assets/prism_components/prism-json.js +usr/share/doc/foobar/html/assets/prism_components/prism-roboconf.js +usr/share/doc/foobar/html/assets/prism_components/prism-twig.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-dart.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-vim.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-handlebars.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-cpp.js +usr/share/doc/foobar/html/assets/prism_components/prism-fsharp.js +usr/share/doc/foobar/html/assets/prism_components/prism-sas.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-brainfuck.js +usr/share/doc/foobar/html/assets/prism_components/prism-haxe.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-julia.js +usr/share/doc/foobar/html/assets/prism_components/prism-jade.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-python.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nim.js +usr/share/doc/foobar/html/assets/prism_components/prism-typescript.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-csharp.js +usr/share/doc/foobar/html/assets/prism_components/prism-brainfuck.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-asciidoc.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-groovy.min.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-applescript.js +usr/share/doc/foobar/html/assets/prism_components/prism-elixir.js +usr/share/doc/foobar/html/assets/prism_components/prism-diff.js +usr/share/doc/foobar/html/assets/prism_components/prism-scheme.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-parser.js +usr/share/doc/foobar/html/assets/prism_components/prism-qore.js +usr/share/doc/foobar/html/assets/prism_components/prism-yaml.js +usr/share/doc/foobar/html/assets/prism_components/prism-j.js +usr/share/doc/foobar/html/assets/prism_components/prism-mel.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-css-extras.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-erlang.js +usr/share/doc/foobar/html/assets/prism_components/prism-icon.js +usr/share/doc/foobar/html/assets/prism_components/prism-actionscript.js +usr/share/doc/foobar/html/assets/prism_components/prism-cpp.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-makefile.js +usr/share/doc/foobar/html/assets/prism_components/prism-q.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nsis.js +usr/share/doc/foobar/html/assets/prism_components/prism-mizar.js +usr/share/doc/foobar/html/assets/prism_components/prism-wiki.js +usr/share/doc/foobar/html/assets/prism_components/prism-csharp.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-julia.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-coffeescript.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-sql.js +usr/share/doc/foobar/html/assets/prism_components/prism-php-extras.js +usr/share/doc/foobar/html/assets/prism_components/prism-basic.js +usr/share/doc/foobar/html/assets/prism_components/prism-swift.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-haxe.js +usr/share/doc/foobar/html/assets/prism_components/prism-apacheconf.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-javascript.min.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-markup.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-keyman.js +usr/share/doc/foobar/html/assets/prism_components/prism-sql.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-php-extras.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-scheme.js +usr/share/doc/foobar/html/assets/prism_components/prism-python.js +usr/share/doc/foobar/html/assets/prism_components/prism-autoit.js +usr/share/doc/foobar/html/assets/prism_components/prism-gherkin.js +usr/share/doc/foobar/html/assets/prism_components/prism-java.js +usr/share/doc/foobar/html/assets/prism_components/prism-parigp.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-autohotkey.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-ruby.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nginx.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-core.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-fortran.js +usr/share/doc/foobar/html/assets/prism_components/prism-nasm.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-ini.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-protobuf.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-jsx.js +usr/share/doc/foobar/html/assets/prism_components/prism-markdown.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nix.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nsis.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-oz.js +usr/share/doc/foobar/html/assets/prism_components/prism-less.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-abap.js +usr/share/doc/foobar/html/assets/prism_components/prism-puppet.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-nix.js +usr/share/doc/foobar/html/assets/prism_components/prism-pascal.js +usr/share/doc/foobar/html/assets/prism_components/prism-latex.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-verilog.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-aspnet.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-go.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-glsl.js +usr/share/doc/foobar/html/assets/prism_components/prism-inform7.js +usr/share/doc/foobar/html/assets/prism_components/prism-yaml.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-matlab.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-lua.js +usr/share/doc/foobar/html/assets/prism_components/prism-mizar.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-c.js +usr/share/doc/foobar/html/assets/prism_components/prism-fsharp.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-haml.js +usr/share/doc/foobar/html/assets/prism_components/prism-rust.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-icon.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-fortran.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-qore.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-batch.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-eiffel.js +usr/share/doc/foobar/html/assets/prism_components/prism-vim.js +usr/share/doc/foobar/html/assets/prism_components/prism-j.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-eiffel.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-elixir.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-erlang.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-matlab.js +usr/share/doc/foobar/html/assets/prism_components/prism-tcl.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-ruby.js +usr/share/doc/foobar/html/assets/prism_components/prism-d.js +usr/share/doc/foobar/html/assets/prism_components/prism-swift.js +usr/share/doc/foobar/html/assets/prism_components/prism-wiki.min.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-lolcode.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-latex.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-prolog.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-php.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-scss.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-vhdl.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-lolcode.js +usr/share/doc/foobar/html/assets/prism_components/prism-prolog.js +usr/share/doc/foobar/html/assets/prism_components/prism-apacheconf.js +usr/share/doc/foobar/html/assets/prism_components/prism-core.js +usr/share/doc/foobar/html/assets/prism_components/prism-diff.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-json.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-ini.js +usr/share/doc/foobar/html/assets/prism_components/dumped.trie +usr/share/doc/foobar/html/assets/prism_components/prism-r.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-markup.js +usr/share/doc/foobar/html/assets/prism_components/prism-apl.js +usr/share/doc/foobar/html/assets/prism_components/prism-markdown.js +usr/share/doc/foobar/html/assets/prism_components/prism-asciidoc.js +usr/share/doc/foobar/html/assets/prism_components/prism-ocaml.js +usr/share/doc/foobar/html/assets/prism_components/prism-javascript.js +usr/share/doc/foobar/html/assets/prism_components/prism-autohotkey.js +usr/share/doc/foobar/html/assets/prism_components/prism-less.js +usr/share/doc/foobar/html/assets/prism_components/prism-pure.js +usr/share/doc/foobar/html/assets/prism_components/prism-groovy.js +usr/share/doc/foobar/html/assets/prism_components/prism-bison.js +usr/share/doc/foobar/html/assets/prism_components/prism-sass.js +usr/share/doc/foobar/html/assets/prism_components/prism-css.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-haml.min.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-handlebars.js +usr/share/doc/foobar/html/assets/prism_components/prism-textile.js +usr/share/doc/foobar/html/assets/prism_components/prism-parser.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-docker.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-monkey.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-http.js +usr/share/doc/foobar/html/assets/prism_components/prism-git.js +usr/share/doc/foobar/html/assets/prism_components/prism-sas.js +usr/share/doc/foobar/html/assets/prism_components/prism-go.js +usr/share/doc/foobar/html/assets/prism_components/prism-mel.js +usr/share/doc/foobar/html/assets/prism_components/prism-rest.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-clike.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-d.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-haskell.js +usr/share/doc/foobar/html/assets/prism_components/prism-git.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-java.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-rip.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-perl.js +usr/share/doc/foobar/html/assets/prism_components/prism-typescript.js +usr/share/doc/foobar/html/assets/prism_components/prism-actionscript.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-autoit.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-rip.js +usr/share/doc/foobar/html/assets/prism_components/prism-twig.js +usr/share/doc/foobar/html/assets/prism_components/prism-monkey.js +usr/share/doc/foobar/html/assets/prism_components/prism-processing.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-scala.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-smalltalk.js +usr/share/doc/foobar/html/assets/prism_components/prism-bash.min.js +usr/share/doc/foobar/html/assets/prism_components/prism-r.js 
+usr/share/doc/foobar/html/assets/prism_components/prism-css.js +usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.woff +usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.woff2 +usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.svg +usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.ttf +usr/share/doc/foobar/html/assets/fonts/glyphicons-halflings-regular.eot +usr/share/doc/foobar/html/assets/fonts/dumped.trie +usr/share/doc/foobar/html/assets/images/home.svg +usr/share/doc/foobar/html/assets/images/dumped.trie diff --git a/test cases/frameworks/23 hotdoc/meson.build b/test cases/frameworks/23 hotdoc/meson.build index 191569d..dd3c92a 100644 --- a/test cases/frameworks/23 hotdoc/meson.build +++ b/test cases/frameworks/23 hotdoc/meson.build @@ -7,3 +7,9 @@ endif subdir('doc') +assert(hotdoc.has_extensions(['gi-extension']) == true, + 'GI extension should always be found.') + +assert(hotdoc.has_extensions(['gi-extension', 'no-way-you-exist-extension']) == false, + 'A hotdoc extension called "no-way-you-exist-extension" should never be found.') + diff --git a/test cases/frameworks/24 libgcrypt/libgcrypt_prog.c b/test cases/frameworks/24 libgcrypt/libgcrypt_prog.c new file mode 100644 index 0000000..f131359 --- /dev/null +++ b/test cases/frameworks/24 libgcrypt/libgcrypt_prog.c @@ -0,0 +1,8 @@ +#include <gcrypt.h> + +int +main() +{ + gcry_check_version(NULL); + return 0; +} diff --git a/test cases/frameworks/24 libgcrypt/meson.build b/test cases/frameworks/24 libgcrypt/meson.build new file mode 100644 index 0000000..5aadb13 --- /dev/null +++ b/test cases/frameworks/24 libgcrypt/meson.build @@ -0,0 +1,23 @@ +project('libgcrypt test', 'c') + +wm = find_program('libgcrypt-config', required : false) +if not wm.found() + error('MESON_SKIP_TEST: libgcrypt-config not installed') +endif + +libgcrypt_dep = dependency('libgcrypt', version : '>= 1.0') +libgcrypt_ver = libgcrypt_dep.version() 
+assert(libgcrypt_ver.split('.').length() > 1, 'libgcrypt version is "@0@"'.format(libgcrypt_ver)) +message('libgcrypt version is "@0@"'.format(libgcrypt_ver)) +e = executable('libgcrypt_prog', 'libgcrypt_prog.c', dependencies : libgcrypt_dep) + +test('libgcrypttest', e) + +# Test using the method keyword: + +dependency('libgcrypt', method : 'config-tool') +dependency('libgcrypt', method : 'pkg-config', required: false) + +# Check we can apply a version constraint +dependency('libgcrypt', version: '>=@0@'.format(libgcrypt_dep.version()), method: 'pkg-config', required: false) +dependency('libgcrypt', version: '>=@0@'.format(libgcrypt_dep.version()), method: 'config-tool') diff --git a/test cases/frameworks/4 qt/meson.build b/test cases/frameworks/4 qt/meson.build index 7ac945e..15fd822 100644 --- a/test cases/frameworks/4 qt/meson.build +++ b/test cases/frameworks/4 qt/meson.build @@ -58,6 +58,10 @@ foreach qt : ['qt4', 'qt5'] # Test that setting a unique name with a positional argument works qtmodule.preprocess(qt + 'teststuff', qresources : files(['stuff.qrc', 'stuff2.qrc']), method : get_option('method')) + # Test that passing extra arguments to rcc works + # qt4-rcc and qt5-rcc take different arguments, for example qt4: ['-compress', '3']; qt5: '--compress=3' + qtmodule.preprocess(qt + 'testrccarg', qresources : files(['stuff.qrc', 'stuff2.qrc']), rcc_extra_arguments : '--compress=3', method : get_option('method')) + qexe = executable(qt + 'app', sources : ['main.cpp', 'mainWindow.cpp', # Sources that don't need preprocessing. 
prep, prep_rcc], diff --git a/test cases/frameworks/6 gettext/data/data3/meson.build b/test cases/frameworks/6 gettext/data/data3/meson.build new file mode 100644 index 0000000..044b498 --- /dev/null +++ b/test cases/frameworks/6 gettext/data/data3/meson.build @@ -0,0 +1,9 @@ +# Target name will contain a path separator +i18n.merge_file( + input: 'test.desktop.in', + output: 'test4.desktop', + type: 'desktop', + po_dir: '../../po', + install: true, + install_dir: join_paths(get_option('datadir'), 'applications') +) diff --git a/test cases/frameworks/6 gettext/data/data3/test.desktop.in b/test cases/frameworks/6 gettext/data/data3/test.desktop.in new file mode 100644 index 0000000..33b9a9f --- /dev/null +++ b/test cases/frameworks/6 gettext/data/data3/test.desktop.in @@ -0,0 +1,6 @@ +[Desktop Entry] +Name=Test +GenericName=Application +Comment=Test Application +Type=Application + diff --git a/test cases/frameworks/6 gettext/data/meson.build b/test cases/frameworks/6 gettext/data/meson.build index a6b0a8b..d78c19e 100644 --- a/test cases/frameworks/6 gettext/data/meson.build +++ b/test cases/frameworks/6 gettext/data/meson.build @@ -10,7 +10,7 @@ i18n.merge_file( # Use filename substitution for another file i18n.merge_file( - input: 'test2.desktop.in', + input: files('test2.desktop.in'), output: '@BASENAME@', type: 'desktop', po_dir: '../po', @@ -26,3 +26,5 @@ i18n.merge_file( install: true, install_dir: join_paths(get_option('datadir'), 'applications') ) + +subdir('data3') diff --git a/test cases/frameworks/6 gettext/data2/meson.build b/test cases/frameworks/6 gettext/data2/meson.build new file mode 100644 index 0000000..d927ba3 --- /dev/null +++ b/test cases/frameworks/6 gettext/data2/meson.build @@ -0,0 +1,8 @@ +i18n.merge_file( + input: 'test.desktop.in', + output: 'test.desktop', + type: 'desktop', + po_dir: '../po', + install: true, + install_dir: join_paths(get_option('datadir'), 'applications') +) diff --git a/test cases/frameworks/6 
gettext/data2/test.desktop.in b/test cases/frameworks/6 gettext/data2/test.desktop.in new file mode 100644 index 0000000..33b9a9f --- /dev/null +++ b/test cases/frameworks/6 gettext/data2/test.desktop.in @@ -0,0 +1,6 @@ +[Desktop Entry] +Name=Test +GenericName=Application +Comment=Test Application +Type=Application + diff --git a/test cases/frameworks/6 gettext/generated/desktopgenerator.py b/test cases/frameworks/6 gettext/generated/desktopgenerator.py new file mode 100644 index 0000000..e49c2d6 --- /dev/null +++ b/test cases/frameworks/6 gettext/generated/desktopgenerator.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 + +import os, sys, shutil + +ifile = sys.argv[1] +ofile = sys.argv[2] + +try: + os.unlink(ofile) +except FileNotFoundError: + pass + +shutil.copy(ifile, ofile) diff --git a/test cases/frameworks/6 gettext/generated/meson.build b/test cases/frameworks/6 gettext/generated/meson.build new file mode 100644 index 0000000..5ed9205 --- /dev/null +++ b/test cases/frameworks/6 gettext/generated/meson.build @@ -0,0 +1,16 @@ +dgen = find_program('desktopgenerator.py') + +desktop_in_file = custom_target('something.desktop.in', + input : ['something.desktop.in.in'], + output : 'something.desktop.in', + command : [dgen, '@INPUT@', '@OUTPUT@'], +) + +i18n.merge_file( + input : desktop_in_file, + output : 'something.desktop', + type : 'desktop', + po_dir : '../po', + install: true, + install_dir: join_paths(get_option('datadir'), 'applications'), +) diff --git a/test cases/frameworks/6 gettext/generated/something.desktop.in.in b/test cases/frameworks/6 gettext/generated/something.desktop.in.in new file mode 100644 index 0000000..e2094fd --- /dev/null +++ b/test cases/frameworks/6 gettext/generated/something.desktop.in.in @@ -0,0 +1,15 @@ +[Desktop Entry] +Name=Something doer +Comment=Do something +# Translators: Search terms to find this application. Do NOT translate or localize the semicolons! The list MUST also end with a semicolon! 
+Keywords=zip;tar;extract;unpack; +TryExec=file-roller +Exec=file-roller %U +StartupNotify=true +Terminal=false +Type=Application +# Translators: Do NOT translate or transliterate this text (this is an icon file name)! +Icon=something +Categories=GTK;GNOME;Utility +X-GNOME-DocPath=file-roller/file-roller.xml +X-GNOME-UsesNotifications=true diff --git a/test cases/frameworks/6 gettext/installed_files.txt b/test cases/frameworks/6 gettext/installed_files.txt index 9298909..850711a 100644 --- a/test cases/frameworks/6 gettext/installed_files.txt +++ b/test cases/frameworks/6 gettext/installed_files.txt @@ -1,6 +1,8 @@ usr/bin/intlprog?exe usr/share/locale/de/LC_MESSAGES/intltest.mo usr/share/locale/fi/LC_MESSAGES/intltest.mo +usr/share/applications/something.desktop usr/share/applications/test.desktop usr/share/applications/test2.desktop usr/share/applications/test3.desktop +usr/share/applications/test4.desktop diff --git a/test cases/frameworks/6 gettext/meson.build b/test cases/frameworks/6 gettext/meson.build index e02234b..09ef982 100644 --- a/test cases/frameworks/6 gettext/meson.build +++ b/test cases/frameworks/6 gettext/meson.build @@ -14,3 +14,5 @@ i18n = import('i18n') subdir('po') subdir('src') subdir('data') +subdir('data2') +subdir('generated') diff --git a/test cases/frameworks/7 gnome/installed_files.txt b/test cases/frameworks/7 gnome/installed_files.txt index 7502888..9c1d496 100644 --- a/test cases/frameworks/7 gnome/installed_files.txt +++ b/test cases/frameworks/7 gnome/installed_files.txt @@ -4,8 +4,13 @@ usr/include/enums3.h usr/include/enums5.h usr/include/marshaller.h usr/lib/?libgir_lib.so +?cygwin:usr/lib/libgir_lib.dll.a +usr/lib/?libgir_lib2.so +?cygwin:usr/lib/libgir_lib2.dll.a usr/lib/?libdep1lib.so +?cygwin:usr/lib/libdep1lib.dll.a usr/lib/?libdep2lib.so +?cygwin:usr/lib/libdep2lib.dll.a usr/lib/girepository-1.0/Meson-1.0.typelib usr/lib/girepository-1.0/MesonDep1-1.0.typelib usr/lib/girepository-1.0/MesonDep2-1.0.typelib @@ -14,5 +19,6 
@@ usr/share/gir-1.0/MesonDep1-1.0.gir usr/share/gir-1.0/MesonDep2-1.0.gir usr/share/glib-2.0/schemas/com.github.meson.gschema.xml usr/share/simple-resources.gresource +usr/include/enums6.h usr/include/simple-resources.h usr/include/generated-gdbus.h diff --git a/test cases/frameworks/7 gnome/resources/res3.txt b/test cases/frameworks/7 gnome/resources/res3.txt new file mode 100644 index 0000000..aeed4a5 --- /dev/null +++ b/test cases/frameworks/7 gnome/resources/res3.txt @@ -0,0 +1 @@ +This file is from the wrong directory. diff --git a/test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in b/test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in new file mode 100644 index 0000000..8845985 --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/Config.java.in @@ -0,0 +1,5 @@ +package com.mesonbuild; + +public class Config { + public static final boolean FOOBAR = true; +} diff --git a/test cases/java/8 codegen custom target/com/mesonbuild/Simple.java b/test cases/java/8 codegen custom target/com/mesonbuild/Simple.java new file mode 100644 index 0000000..df3c53d --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/Simple.java @@ -0,0 +1,12 @@ +package com.mesonbuild; + +import com.mesonbuild.Config; + +class Simple { + public static void main(String [] args) { + if (Config.FOOBAR) { + TextPrinter t = new TextPrinter("Printing from Java."); + t.print(); + } + } +} diff --git a/test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java b/test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java new file mode 100644 index 0000000..dc2771c --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/TextPrinter.java @@ -0,0 +1,14 @@ +package com.mesonbuild; + +class TextPrinter { + + private String msg; + + TextPrinter(String s) { + msg = s; + } + + public void print() { + System.out.println(msg); + } +} diff --git a/test cases/java/8 codegen custom 
target/com/mesonbuild/meson.build b/test cases/java/8 codegen custom target/com/mesonbuild/meson.build new file mode 100644 index 0000000..0309941 --- /dev/null +++ b/test cases/java/8 codegen custom target/com/mesonbuild/meson.build @@ -0,0 +1,8 @@ +python = import('python').find_installation('python3') + +config_file = custom_target('confgen', + input : 'Config.java.in', + output : 'Config.java', + command : [python, '-c', + 'import shutil, sys, time; time.sleep(1); shutil.copy(sys.argv[1], sys.argv[2])', + '@INPUT@', '@OUTPUT@']) diff --git a/test cases/java/8 codegen custom target/meson.build b/test cases/java/8 codegen custom target/meson.build new file mode 100644 index 0000000..ab441a6 --- /dev/null +++ b/test cases/java/8 codegen custom target/meson.build @@ -0,0 +1,15 @@ +# If we generate code under the build directory then the backend needs to add +# the build directory to the -sourcepath passed to javac otherwise the compiler +# won't be able to handle the -implicit:class behaviour of automatically +# compiling dependency classes. + +project('codegenjava', 'java') + +subdir('com/mesonbuild') + +javaprog = jar('myprog', + config_file[0], + 'com/mesonbuild/Simple.java', + 'com/mesonbuild/TextPrinter.java', + main_class : 'com.mesonbuild.Simple') +test('subdirtest', javaprog) diff --git a/test cases/linuxlike/13 cmake dependency/incdir/myinc.h b/test cases/linuxlike/13 cmake dependency/incdir/myinc.h new file mode 100644 index 0000000..4b66a6c --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/incdir/myinc.h @@ -0,0 +1,3 @@ +#pragma once + +#include<zlib.h> diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build new file mode 100644 index 0000000..1cf667a --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/meson.build @@ -0,0 +1,52 @@ +project('external CMake dependency', 'c') + +# Zlib is probably on all dev machines. 
+ +dep = dependency('ZLIB', version : '>=1.2', method : 'cmake') +exe = executable('zlibprog', 'prog-checkver.c', + dependencies : dep, + c_args : '-DFOUND_ZLIB="' + dep.version() + '"') + +assert(dep.version().version_compare('>=1.2'), 'CMake version numbers exposed incorrectly.') + +# Check that CMake targets are extracted +dept = dependency('ZLIB', version : '>=1.2', method : 'cmake', modules : 'ZLIB::ZLIB') +exet = executable('zlibprog_target', 'prog-checkver.c', + dependencies : dep, + c_args : '-DFOUND_ZLIB="' + dep.version() + '"') + +# Check that the version exposed by zlib internally is the same as the one we +# retrieve from the pkg-config file. This assumes that the packager didn't mess +# up, but we can be reasonably sure of that. +test('zlibtest', exe) + +# Test that dependencies of dependencies work. +dep2 = declare_dependency(dependencies : dep) +exe2 = executable('zlibprog2', 'prog.c', dependencies : dep2) +test('zlibtest2', exe2) + +# Try to find a nonexistent library to ensure requires:false works. 
+ +depf1 = dependency('nvakuhrabnsdfasdf', required : false, method : 'cmake') +depf2 = dependency('ZLIB', required : false, method : 'cmake', modules : 'dfggh::hgfgag') + +assert(depf2.found() == false, 'Invalid CMake targets should fail') + +# Try to compile a test that takes a dep and an include_directories + +cc = meson.get_compiler('c') +zlibdep = cc.find_library('z') +code = '''#include<myinc.h> + +int main(int argc, char **argv) { + void * something = deflate; + if(something != 0) + return 0; + return 1; +} +''' + +inc = include_directories('incdir') + +r = cc.run(code, include_directories : inc, dependencies : zlibdep) +assert(r.returncode() == 0, 'Running manual zlib test failed.') diff --git a/test cases/linuxlike/13 cmake dependency/prog-checkver.c b/test cases/linuxlike/13 cmake dependency/prog-checkver.c new file mode 100644 index 0000000..16b7170 --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/prog-checkver.c @@ -0,0 +1,15 @@ +#include <zlib.h> +#include <stdio.h> +#include <string.h> + +int main(int argc, char **argv) { + void * something = deflate; + if(strcmp(ZLIB_VERSION, FOUND_ZLIB) != 0) { + printf("Meson found '%s' but zlib is '%s'\n", FOUND_ZLIB, ZLIB_VERSION); + return 2; + } + if(something != 0) + return 0; + printf("Couldn't find 'deflate'\n"); + return 1; +} diff --git a/test cases/linuxlike/13 cmake dependency/prog.c b/test cases/linuxlike/13 cmake dependency/prog.c new file mode 100644 index 0000000..cea986d --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/prog.c @@ -0,0 +1,8 @@ +#include<zlib.h> + +int main(int argc, char **argv) { + void * something = deflate; + if(something != 0) + return 0; + return 1; +} diff --git a/test cases/nasm/1 configure file/meson.build b/test cases/nasm/1 configure file/meson.build index e128325..85ecaf1 100644 --- a/test cases/nasm/1 configure file/meson.build +++ b/test cases/nasm/1 configure file/meson.build @@ -47,3 +47,9 @@ exe = executable('hello', 
asm_gen.process('hello.asm'), ) test('test-nasm-configure-file', exe) + +exe2 = executable('hello2', objects : exe.extract_all_objects(), + link_args: link_args, +) + +test('test-nasm-extract-all-objects', exe2) diff --git a/test cases/osx/4 framework/installed_files.txt b/test cases/osx/4 framework/installed_files.txt new file mode 100644 index 0000000..2c6bd93 --- /dev/null +++ b/test cases/osx/4 framework/installed_files.txt @@ -0,0 +1,2 @@ +usr/bin/prog +usr/lib/libstat.a diff --git a/test cases/unit/35 dist script/meson.build b/test cases/unit/35 dist script/meson.build index 3415ec4..fd672a9 100644 --- a/test cases/unit/35 dist script/meson.build +++ b/test cases/unit/35 dist script/meson.build @@ -4,4 +4,4 @@ project('dist script', 'c', exe = executable('comparer', 'prog.c') test('compare', exe) -meson.add_dist_script('replacer.py') +meson.add_dist_script('replacer.py', '"incorrect"', '"correct"') diff --git a/test cases/unit/35 dist script/replacer.py b/test cases/unit/35 dist script/replacer.py index adda365..96ccdcc 100755 --- a/test cases/unit/35 dist script/replacer.py +++ b/test cases/unit/35 dist script/replacer.py @@ -2,11 +2,15 @@ import os import pathlib +import sys + +if len(sys.argv) < 3: + sys.exit('usage: replacer.py <pattern> <replacement>') source_root = pathlib.Path(os.environ['MESON_DIST_ROOT']) modfile = source_root / 'prog.c' contents = modfile.read_text() -contents = contents.replace('"incorrect"', '"correct"') +contents = contents.replace(sys.argv[1], sys.argv[2]) modfile.write_text(contents) diff --git a/test cases/unit/39 python extmodule/meson.build b/test cases/unit/39 python extmodule/meson.build index 4798654..eb00a6a 100644 --- a/test cases/unit/39 python extmodule/meson.build +++ b/test cases/unit/39 python extmodule/meson.build @@ -21,3 +21,6 @@ if py.found() else error('MESON_SKIP_TEST: Python not found, skipping test.') endif + +py = py_mod.find_installation(get_option('python'), required : get_option('disabled_opt')) 
+assert(not py.found(), 'find_installation not working with disabled feature') diff --git a/test cases/unit/39 python extmodule/meson_options.txt b/test cases/unit/39 python extmodule/meson_options.txt index b8f645d..c85110d 100644 --- a/test cases/unit/39 python extmodule/meson_options.txt +++ b/test cases/unit/39 python extmodule/meson_options.txt @@ -1,3 +1,4 @@ option('python', type: 'string', description: 'Name of or path to the python executable' ) +option('disabled_opt', type: 'feature', value: 'disabled') diff --git a/test cases/unit/45 vscpp17/main.cpp b/test cases/unit/45 vscpp17/main.cpp new file mode 100644 index 0000000..36e4156 --- /dev/null +++ b/test cases/unit/45 vscpp17/main.cpp @@ -0,0 +1,7 @@ +[[nodiscard]] int foo() { + return 0; +} + +int main() { + return foo(); +} diff --git a/test cases/unit/45 vscpp17/meson.build b/test cases/unit/45 vscpp17/meson.build new file mode 100644 index 0000000..afe740b --- /dev/null +++ b/test cases/unit/45 vscpp17/meson.build @@ -0,0 +1,4 @@ +project('msvc_cpp17', 'cpp', default_options: ['cpp_std=c++17']) + +exe = executable('msvc_cpp17', 'main.cpp') +test('msvc_cpp17', exe) diff --git a/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py b/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py new file mode 100755 index 0000000..f0d89ee --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python3 + +import os +import sys +import subprocess + +environ = os.environ.copy() +environ['PKG_CONFIG_LIBDIR'] = os.path.join( + os.path.dirname(os.path.realpath(__file__)), 'cross_pkgconfig') + +sys.exit( + subprocess.run(['pkg-config'] + sys.argv[1:], env=environ).returncode) diff --git a/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc b/test cases/unit/46 native dep pkgconfig var/cross_pkgconfig/dep_tester.pc new file mode 100644 index 0000000..67d7afa --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig 
var/cross_pkgconfig/dep_tester.pc @@ -0,0 +1,5 @@ +dep_type=cross + +Name: dependency() test +Description: dependency() test +Version: 0 diff --git a/test cases/unit/46 native dep pkgconfig var/meson.build b/test cases/unit/46 native dep pkgconfig var/meson.build new file mode 100644 index 0000000..d95dbcd --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/meson.build @@ -0,0 +1,15 @@ +project('native dep pkgconfig test') + +if get_option('start_native') + dep_native = dependency('dep_tester', native: true, method: 'pkg-config') + dep_cross = dependency('dep_tester', native: false, method: 'pkg-config') +else + dep_cross = dependency('dep_tester', native: false, method: 'pkg-config') + dep_native = dependency('dep_tester', native: true, method: 'pkg-config') +endif + +dep_type = dep_native.get_pkgconfig_variable('dep_type') +assert(dep_type == 'native', 'Expected native') + +dep_type = dep_cross.get_pkgconfig_variable('dep_type') +assert(dep_type == 'cross', 'Expected cross') diff --git a/test cases/unit/46 native dep pkgconfig var/meson_options.txt b/test cases/unit/46 native dep pkgconfig var/meson_options.txt new file mode 100644 index 0000000..37006dd --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/meson_options.txt @@ -0,0 +1,6 @@ +option( + 'start_native', + type : 'boolean', + value : 'false', + description : 'Start by creating a dependency() with native : true', +) diff --git a/test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc b/test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc new file mode 100644 index 0000000..affaa97 --- /dev/null +++ b/test cases/unit/46 native dep pkgconfig var/native_pkgconfig/dep_tester.pc @@ -0,0 +1,5 @@ +dep_type=native + +Name: dependency() test +Description: dependency() test +Version: 0 diff --git a/test cases/unit/46 native file binary/meson.build b/test cases/unit/46 native file binary/meson.build new file mode 100644 index 0000000..4489ac1 --- 
/dev/null +++ b/test cases/unit/46 native file binary/meson.build @@ -0,0 +1,21 @@ +project('test project') + +case = get_option('case') + +if case == 'find_program' + prog = find_program('bash') + result = run_command(prog, ['--version']) + assert(result.stdout().strip().endswith('12345'), 'Didn\'t load bash from config file') +elif case == 'config_dep' + add_languages('cpp') + dep = dependency('llvm') + assert(dep.get_configtool_variable('version').endswith('12345'), 'Didn\'t load llvm from config file') +elif case == 'python3' + prog = import('python3').find_python() + result = run_command(prog, ['--version']) + assert(result.stdout().strip().endswith('12345'), 'Didn\'t load python3 from config file') +elif case == 'python' + prog = import('python').find_installation() + result = run_command(prog, ['--version']) + assert(result.stdout().strip().endswith('12345'), 'Didn\'t load python from config file') +endif diff --git a/test cases/unit/46 native file binary/meson_options.txt b/test cases/unit/46 native file binary/meson_options.txt new file mode 100644 index 0000000..651da0e --- /dev/null +++ b/test cases/unit/46 native file binary/meson_options.txt @@ -0,0 +1,5 @@ +option( + 'case', + type : 'combo', + choices : ['find_program', 'config_dep', 'python3', 'python'] +) diff --git a/test cases/unit/46 reconfigure/main.c b/test cases/unit/46 reconfigure/main.c new file mode 100644 index 0000000..25927f5 --- /dev/null +++ b/test cases/unit/46 reconfigure/main.c @@ -0,0 +1,4 @@ +int main(int argc, char *argv[]) +{ + return 0; +} diff --git a/test cases/unit/46 reconfigure/meson.build b/test cases/unit/46 reconfigure/meson.build new file mode 100644 index 0000000..6eaac5d --- /dev/null +++ b/test cases/unit/46 reconfigure/meson.build @@ -0,0 +1,9 @@ +project('test-reconfigure', 'c') + +message('opt1 ' + get_option('opt1')) +message('opt2 ' + get_option('opt2')) +message('opt3 ' + get_option('opt3')) +message('opt4 ' + get_option('opt4')) + +exe = executable('test1', 
'main.c') +test('test1', exe) diff --git a/test cases/unit/46 reconfigure/meson_options.txt b/test cases/unit/46 reconfigure/meson_options.txt new file mode 100644 index 0000000..728f7b7 --- /dev/null +++ b/test cases/unit/46 reconfigure/meson_options.txt @@ -0,0 +1,4 @@ +option('opt1', type : 'string', value : 'default1') +option('opt2', type : 'string', value : 'default2') +option('opt3', type : 'string', value : 'default3') +option('opt4', type : 'string', value : 'default4') diff --git a/test cases/unit/47 testsetup default/envcheck.py b/test cases/unit/47 testsetup default/envcheck.py new file mode 100644 index 0000000..6ba3093 --- /dev/null +++ b/test cases/unit/47 testsetup default/envcheck.py @@ -0,0 +1,11 @@ +#!/usr/bin/env python3 + +import os + +assert('ENV_A' in os.environ) +assert('ENV_B' in os.environ) +assert('ENV_C' in os.environ) + +print('ENV_A is', os.environ['ENV_A']) +print('ENV_B is', os.environ['ENV_B']) +print('ENV_C is', os.environ['ENV_C']) diff --git a/test cases/unit/47 testsetup default/meson.build b/test cases/unit/47 testsetup default/meson.build new file mode 100644 index 0000000..bdd35b8 --- /dev/null +++ b/test cases/unit/47 testsetup default/meson.build @@ -0,0 +1,23 @@ +project('testsetup default', 'c') + +envcheck = find_program('envcheck.py') + +# Defining ENV_A in test-env should overwrite ENV_A from test setup +env_1 = environment() +env_1.set('ENV_A', '1') +test('test-env', envcheck, env: env_1) + +# Defining default env which is used unless --setup is given or the +# env variable is defined in the test. +env_2 = environment() +env_2.set('ENV_A', '2') +env_2.set('ENV_B', '2') +env_2.set('ENV_C', '2') +add_test_setup('mydefault', env: env_2, is_default: true) + +# Defining a test setup that will update some of the env variables +# from the default test setup. 
+env_3 = env_2 +env_3.set('ENV_A', '3') +env_3.set('ENV_B', '3') +add_test_setup('other', env: env_3) diff --git a/test cases/unit/48 pkgconfig csharp library/meson.build b/test cases/unit/48 pkgconfig csharp library/meson.build new file mode 100644 index 0000000..148d40f --- /dev/null +++ b/test cases/unit/48 pkgconfig csharp library/meson.build @@ -0,0 +1,10 @@ +project('pkgformat', 'cs', + version : '1.0') + +pkgg = import('pkgconfig') + +l = library('libsomething', 'somelib.cs') + +pkgg.generate(l, + version: '1.0', + description: 'A library that does something') diff --git a/test cases/unit/48 pkgconfig csharp library/somelib.cs b/test cases/unit/48 pkgconfig csharp library/somelib.cs new file mode 100644 index 0000000..24d37ed --- /dev/null +++ b/test cases/unit/48 pkgconfig csharp library/somelib.cs @@ -0,0 +1,12 @@ +using System; + +namespace Abc +{ + public static class Something + { + public static bool Api1(this String str) + { + return str == "foo"; + } + } +} diff --git a/test cases/unit/6 std override/meson.build b/test cases/unit/6 std override/meson.build index ef2baac..0eac752 100644 --- a/test cases/unit/6 std override/meson.build +++ b/test cases/unit/6 std override/meson.build @@ -1,10 +1,10 @@ project('cpp std override', 'cpp', - default_options : ['cpp_std=c++03', + default_options : ['cpp_std=c++98', 'werror=true']) executable('plain', 'progp.cpp', override_options : 'cpp_std=none') -executable('v03', 'prog03.cpp', +executable('v98', 'prog98.cpp', override_options : 'werror=false') executable('v11', 'prog11.cpp', override_options : 'cpp_std=c++11') diff --git a/test cases/unit/6 std override/prog03.cpp b/test cases/unit/6 std override/prog98.cpp index d30abc9..67c326d 100644 --- a/test cases/unit/6 std override/prog03.cpp +++ b/test cases/unit/6 std override/prog98.cpp @@ -1,6 +1,6 @@ #include<iostream> int main(int argc, char **argv) { - std::cout << "I am a c++03 test program.\n"; + std::cout << "I am a c++98 test program.\n"; return 0; } 
diff --git a/test cases/vala/11 generated vapi/installed_files.txt b/test cases/vala/11 generated vapi/installed_files.txt index aeaf2da..ca41d65 100644 --- a/test cases/vala/11 generated vapi/installed_files.txt +++ b/test cases/vala/11 generated vapi/installed_files.txt @@ -1,6 +1,8 @@ -usr/bin/vapigen-test +usr/bin/vapigen-test?exe usr/lib/?libfoo.so +?cygwin:usr/lib/libfoo.dll.a usr/lib/?libbar.so +?cygwin:usr/lib/libbar.dll.a usr/share/vala/vapi/foo-1.0.vapi usr/share/vala/vapi/foo-1.0.deps usr/share/vala/vapi/bar-1.0.vapi diff --git a/test cases/vala/7 shared library/installed_files.txt b/test cases/vala/7 shared library/installed_files.txt index 012b107..83cbb63 100644 --- a/test cases/vala/7 shared library/installed_files.txt +++ b/test cases/vala/7 shared library/installed_files.txt @@ -1,5 +1,7 @@ usr/lib/?libinstalled_vala_lib.so +?cygwin:usr/lib/libinstalled_vala_lib.dll.a usr/lib/?libinstalled_vala_all.so +?cygwin:usr/lib/libinstalled_vala_all.dll.a usr/include/installed_vala_all.h usr/include/valah/installed_vala_all_nolib.h usr/include/installed_vala_onlyh.h diff --git a/test cases/vala/8 generated sources/installed_files.txt b/test cases/vala/8 generated sources/installed_files.txt index e1e9432..ae0f65f 100644 --- a/test cases/vala/8 generated sources/installed_files.txt +++ b/test cases/vala/8 generated sources/installed_files.txt @@ -1,2 +1,3 @@ -usr/bin/generatedtest -usr/bin/onlygentest +usr/bin/generatedtestparent?exe +usr/bin/generatedtest?exe +usr/bin/onlygentest?exe diff --git a/test cases/vala/9 gir/installed_files.txt b/test cases/vala/9 gir/installed_files.txt index 64bddee..890b47a 100644 --- a/test cases/vala/9 gir/installed_files.txt +++ b/test cases/vala/9 gir/installed_files.txt @@ -1,2 +1,3 @@ -usr/lib/?libfoo.so +?gcc:usr/lib/?libfoo.so +?cygwin:usr/lib/libfoo.dll.a usr/share/gir-1.0/Foo-1.0.gir diff --git a/test cases/windows/1 basic/installed_files.txt b/test cases/windows/1 basic/installed_files.txt index 8c8464a..5022d28 
100644 --- a/test cases/windows/1 basic/installed_files.txt +++ b/test cases/windows/1 basic/installed_files.txt @@ -1,2 +1,2 @@ usr/bin/prog.exe -usr/bin/prog.pdb +?msvc:usr/bin/prog.pdb diff --git a/test cases/windows/11 exe implib/installed_files.txt b/test cases/windows/11 exe implib/installed_files.txt index bd2abe9..b1e805c 100644 --- a/test cases/windows/11 exe implib/installed_files.txt +++ b/test cases/windows/11 exe implib/installed_files.txt @@ -1,7 +1,7 @@ usr/bin/prog.exe -usr/bin/prog.pdb +?msvc:usr/bin/prog.pdb usr/bin/prog2.exe -usr/bin/prog2.pdb +?msvc:usr/bin/prog2.pdb ?gcc:usr/lib/libprog.exe.a ?gcc:usr/lib/libburble.a ?msvc:usr/lib/prog.exe.lib diff --git a/test cases/windows/16 gui app/meson.build b/test cases/windows/16 gui app/meson.build index 2435218..224d708 100644 --- a/test cases/windows/16 gui app/meson.build +++ b/test cases/windows/16 gui app/meson.build @@ -17,6 +17,10 @@ console_prog = executable('console_prog', 'console_prog.c', gui_app: false) tester = find_program('gui_app_tester.py') -tool = find_program('objdump', 'dumpbin') -test('is_gui', tester, args: [tool.path(), gui_prog, '2']) -test('not_gui', tester, args: [tool.path(), console_prog, '3']) +tool = find_program('objdump', 'dumpbin', required: false) +# TODO: when 'llvm-objdump -f' emits the subsystem type, we could use that also + +if tool.found() + test('is_gui', tester, args: [tool.path(), gui_prog, '2']) + test('not_gui', tester, args: [tool.path(), console_prog, '3']) +endif diff --git a/test cases/windows/7 dll versioning/installed_files.txt b/test cases/windows/7 dll versioning/installed_files.txt index 20482bf..62b5c9a 100644 --- a/test cases/windows/7 dll versioning/installed_files.txt +++ b/test cases/windows/7 dll versioning/installed_files.txt @@ -4,14 +4,19 @@ ?msvc:usr/bin/noversion.dll ?msvc:usr/bin/noversion.pdb ?msvc:usr/lib/noversion.lib +?msvc:usr/lib/noversion.pdb ?msvc:usr/bin/onlyversion-1.dll +?msvc:usr/bin/onlyversion-1.pdb 
?msvc:usr/lib/onlyversion.lib ?msvc:usr/bin/onlysoversion-5.dll +?msvc:usr/bin/onlysoversion-5.pdb ?msvc:usr/lib/onlysoversion.lib ?msvc:usr/libexec/customdir.dll ?msvc:usr/libexec/customdir.lib -?msvc:usr/lib/module.dll -?msvc:usr/lib/module.lib +?msvc:usr/libexec/customdir.pdb +?msvc:usr/lib/modules/module.dll +?msvc:usr/lib/modules/module.lib +?msvc:usr/lib/modules/module.pdb ?gcc:usr/bin/?libsome-0.dll ?gcc:usr/lib/libsome.dll.a ?gcc:usr/bin/?libnoversion.dll @@ -22,5 +27,5 @@ ?gcc:usr/lib/libonlysoversion.dll.a ?gcc:usr/libexec/?libcustomdir.dll ?gcc:usr/libexec/libcustomdir.dll.a -?gcc:usr/lib/?libmodule.dll -?gcc:usr/lib/libmodule.dll.a +?gcc:usr/lib/modules/?libmodule.dll +?gcc:usr/lib/modules/libmodule.dll.a diff --git a/test cases/windows/7 dll versioning/meson.build b/test cases/windows/7 dll versioning/meson.build index 80acf88..983c2c4 100644 --- a/test cases/windows/7 dll versioning/meson.build +++ b/test cases/windows/7 dll versioning/meson.build @@ -49,4 +49,6 @@ shared_library('customdir', 'lib.c', install : true, install_dir : get_option('libexecdir')) -shared_module('module', 'lib.c', install : true) +shared_module('module', 'lib.c', + install : true, + install_dir: join_paths(get_option('libdir'), 'modules')) |