aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore1
-rw-r--r--azure-pipelines.yml12
-rw-r--r--ci/azure-steps.yml9
-rw-r--r--ci/install-dmd.ps12
-rw-r--r--ciimage/Dockerfile1
-rw-r--r--data/com.mesonbuild.install.policy1
-rw-r--r--data/shell-completions/bash/meson416
-rw-r--r--data/shell-completions/zsh/_meson4
-rwxr-xr-xdocs/genrelnotes.py63
-rw-r--r--docs/markdown/Builtin-options.md4
-rw-r--r--docs/markdown/CMake-module.md71
-rw-r--r--docs/markdown/Configuring-a-build-directory.md7
-rw-r--r--docs/markdown/Continuous-Integration.md2
-rw-r--r--docs/markdown/Creating-Linux-binaries.md2
-rw-r--r--docs/markdown/Creating-OSX-packages.md6
-rw-r--r--docs/markdown/Cross-compilation.md16
-rw-r--r--docs/markdown/Cuda-module.md191
-rw-r--r--docs/markdown/Dependencies.md48
-rw-r--r--docs/markdown/External-commands.md8
-rw-r--r--docs/markdown/Generating-sources.md2
-rw-r--r--docs/markdown/IDE-integration.md210
-rw-r--r--docs/markdown/Native-environments.md17
-rw-r--r--docs/markdown/Pkgconfig-module.md2
-rw-r--r--docs/markdown/Porting-from-autotools.md6
-rw-r--r--docs/markdown/Precompiled-headers.md7
-rw-r--r--docs/markdown/Python-module.md15
-rw-r--r--docs/markdown/Quick-guide.md2
-rw-r--r--docs/markdown/Reference-manual.md54
-rw-r--r--docs/markdown/Reference-tables.md48
-rw-r--r--docs/markdown/Release-notes-for-0.49.0.md2
-rw-r--r--docs/markdown/Release-notes-for-0.50.0.md335
-rw-r--r--docs/markdown/Rewriter.md236
-rw-r--r--docs/markdown/Running-Meson.md257
-rw-r--r--docs/markdown/Style-guide.md32
-rw-r--r--docs/markdown/Unit-tests.md10
-rw-r--r--docs/markdown/Users.md42
-rw-r--r--docs/markdown/Vala.md62
-rw-r--r--docs/markdown/howtox.md21
-rw-r--r--docs/markdown/i18n-module.md1
-rw-r--r--docs/markdown/legal.md15
-rw-r--r--docs/markdown/snippets/clangformat.md11
-rw-r--r--docs/markdown/snippets/crosslib.md7
-rw-r--r--docs/markdown/snippets/find_library_header.md21
-rw-r--r--docs/markdown/snippets/find_library_static.md6
-rw-r--r--docs/markdown/snippets/fortran-include.md12
-rw-r--r--docs/markdown/snippets/includestr.md16
-rw-r--r--docs/markdown/snippets/introspect_buildoptions_no_bd.md11
-rw-r--r--docs/markdown/snippets/notfound_message.md38
-rw-r--r--docs/sitemap.txt3
-rw-r--r--docs/theme/extra/images/favicon.pngbin9637 -> 3970 bytes
-rw-r--r--docs/theme/extra/images/meson_logo.pngbin8008 -> 3970 bytes
-rw-r--r--graphics/meson_logo.svg354
-rw-r--r--graphics/meson_logo_big.pngbin35224 -> 21889 bytes
-rw-r--r--man/meson.14
-rw-r--r--mesonbuild/ast/__init__.py33
-rw-r--r--mesonbuild/ast/interpreter.py235
-rw-r--r--mesonbuild/ast/introspection.py270
-rw-r--r--mesonbuild/ast/postprocess.py116
-rw-r--r--mesonbuild/ast/printer.py203
-rw-r--r--mesonbuild/ast/visitor.py140
-rw-r--r--mesonbuild/astinterpreter.py281
-rw-r--r--mesonbuild/backend/backends.py161
-rw-r--r--mesonbuild/backend/ninjabackend.py416
-rw-r--r--mesonbuild/backend/vs2010backend.py344
-rw-r--r--mesonbuild/backend/xcodebackend.py2
-rw-r--r--mesonbuild/build.py83
-rw-r--r--mesonbuild/compilers/__init__.py10
-rw-r--r--mesonbuild/compilers/c.py227
-rw-r--r--mesonbuild/compilers/compilers.py209
-rw-r--r--mesonbuild/compilers/cpp.py20
-rw-r--r--mesonbuild/compilers/cs.py10
-rw-r--r--mesonbuild/compilers/cuda.py242
-rw-r--r--mesonbuild/compilers/d.py53
-rw-r--r--mesonbuild/compilers/fortran.py80
-rw-r--r--mesonbuild/compilers/java.py10
-rw-r--r--mesonbuild/compilers/objc.py8
-rw-r--r--mesonbuild/compilers/objcpp.py11
-rw-r--r--mesonbuild/compilers/rust.py11
-rw-r--r--mesonbuild/compilers/swift.py10
-rw-r--r--mesonbuild/compilers/vala.py28
-rw-r--r--mesonbuild/coredata.py180
-rw-r--r--mesonbuild/dependencies/__init__.py7
-rw-r--r--mesonbuild/dependencies/base.py739
-rw-r--r--mesonbuild/dependencies/data/CMakeLists.txt27
-rw-r--r--mesonbuild/dependencies/data/CMakePathInfo.txt29
-rw-r--r--mesonbuild/dependencies/dev.py54
-rw-r--r--mesonbuild/dependencies/misc.py133
-rw-r--r--mesonbuild/dependencies/platform.py18
-rw-r--r--mesonbuild/dependencies/ui.py47
-rw-r--r--mesonbuild/envconfig.py431
-rw-r--r--mesonbuild/environment.py661
-rw-r--r--mesonbuild/interpreter.py519
-rw-r--r--mesonbuild/interpreterbase.py71
-rw-r--r--mesonbuild/mconf.py109
-rw-r--r--mesonbuild/mesonlib.py130
-rw-r--r--mesonbuild/mesonmain.py45
-rw-r--r--mesonbuild/minstall.py8
-rw-r--r--mesonbuild/mintro.py704
-rw-r--r--mesonbuild/mlog.py32
-rw-r--r--mesonbuild/modules/__init__.py4
-rw-r--r--mesonbuild/modules/cmake.py221
-rw-r--r--mesonbuild/modules/gnome.py191
-rw-r--r--mesonbuild/modules/hotdoc.py14
-rw-r--r--mesonbuild/modules/i18n.py17
-rw-r--r--mesonbuild/modules/pkgconfig.py51
-rw-r--r--mesonbuild/modules/python.py29
-rw-r--r--mesonbuild/modules/python3.py9
-rw-r--r--mesonbuild/modules/qt.py4
-rw-r--r--mesonbuild/modules/rpm.py104
-rw-r--r--mesonbuild/modules/unstable_cuda.py270
-rw-r--r--mesonbuild/modules/windows.py22
-rw-r--r--mesonbuild/mparser.py99
-rw-r--r--mesonbuild/msetup.py31
-rw-r--r--mesonbuild/msubprojects.py2
-rw-r--r--mesonbuild/mtest.py247
-rw-r--r--mesonbuild/munstable_coredata.py126
-rw-r--r--mesonbuild/optinterpreter.py4
-rw-r--r--mesonbuild/rewriter.py954
-rw-r--r--mesonbuild/scripts/dist.py3
-rw-r--r--mesonbuild/wrap/wrap.py17
-rwxr-xr-xrun_project_tests.py33
-rwxr-xr-xrun_tests.py20
-rwxr-xr-xrun_unittests.py1569
-rw-r--r--setup.py3
-rw-r--r--test cases/common/113 ternary/meson.build5
-rw-r--r--test cases/common/13 pch/c/meson.build2
-rw-r--r--test cases/common/13 pch/c/pch/prog.h5
-rw-r--r--test cases/common/13 pch/c/pch/prog_pch.c5
-rw-r--r--test cases/common/13 pch/cpp/meson.build2
-rw-r--r--test cases/common/13 pch/cpp/pch/prog_pch.cc5
-rw-r--r--test cases/common/13 pch/generated/gen_custom.py5
-rw-r--r--test cases/common/13 pch/generated/gen_generator.py7
-rw-r--r--test cases/common/13 pch/generated/generated_generator.in1
-rw-r--r--test cases/common/13 pch/generated/meson.build16
-rw-r--r--test cases/common/13 pch/generated/pch/prog.h2
-rw-r--r--test cases/common/13 pch/generated/prog.c6
-rw-r--r--test cases/common/13 pch/meson.build3
-rw-r--r--test cases/common/13 pch/mixed/meson.build15
-rw-r--r--test cases/common/13 pch/mixed/pch/func_pch.c1
-rw-r--r--test cases/common/13 pch/mixed/pch/main_pch.cc1
-rw-r--r--test cases/common/13 pch/userDefined/meson.build10
-rw-r--r--test cases/common/13 pch/userDefined/pch/pch.c5
-rw-r--r--test cases/common/13 pch/userDefined/pch/pch.h1
-rw-r--r--test cases/common/13 pch/userDefined/prog.c8
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h1
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/meson.build9
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/pch/prog.h1
-rw-r--r--test cases/common/13 pch/withIncludeDirectories/prog.c10
-rw-r--r--test cases/common/137 get define/meson.build11
-rw-r--r--test cases/common/14 configure file/meson.build13
-rw-r--r--test cases/common/170 dependency factory/meson.build2
-rw-r--r--test cases/common/19 includedir/src/meson.build2
-rw-r--r--test cases/common/190 openmp/main.f9017
-rw-r--r--test cases/common/190 openmp/meson.build18
-rw-r--r--test cases/common/209 custom target build by default/docgen.py12
-rw-r--r--test cases/common/209 custom target build by default/installed_files.txt (renamed from test cases/unit/51 introspect buildoptions/subprojects/evilFile.txt)0
-rw-r--r--test cases/common/209 custom target build by default/meson.build10
-rw-r--r--test cases/common/210 find_library and headers/foo.h (renamed from test cases/common/209 find_library and headers/foo.h)0
-rw-r--r--test cases/common/210 find_library and headers/meson.build (renamed from test cases/common/209 find_library and headers/meson.build)0
-rw-r--r--test cases/common/211 line continuation/meson.build17
-rw-r--r--test cases/common/212 cmake module/cmake_project/CMakeLists.txt4
-rw-r--r--test cases/common/212 cmake module/installed_files.txt2
-rw-r--r--test cases/common/212 cmake module/meson.build31
-rw-r--r--test cases/common/212 cmake module/projectConfig.cmake.in4
-rw-r--r--test cases/common/213 native file path override/installed_files.txt2
-rw-r--r--test cases/common/213 native file path override/main.cpp5
-rw-r--r--test cases/common/213 native file path override/meson.build7
-rw-r--r--test cases/common/213 native file path override/nativefile.ini2
-rw-r--r--test cases/common/214 tap tests/meson.build10
-rw-r--r--test cases/common/214 tap tests/tester.c10
-rw-r--r--test cases/common/215 warning level 0/main.cpp12
-rw-r--r--test cases/common/215 warning level 0/meson.build3
-rw-r--r--test cases/common/23 object extraction/meson.build3
-rw-r--r--test cases/common/36 run program/meson.build6
-rw-r--r--test cases/common/48 pkgconfig-gen/dependencies/meson.build4
-rwxr-xr-xtest cases/common/53 custom target/depfile/dep.py2
-rw-r--r--test cases/common/84 declare dep/entity/meson.build2
-rw-r--r--test cases/common/87 identical target name in subproject/meson.build1
-rw-r--r--test cases/common/87 identical target name in subproject/subprojects/foo/meson.build1
-rw-r--r--test cases/common/97 test workdir/meson.build2
-rwxr-xr-xtest cases/common/97 test workdir/subdir/checker.py5
-rw-r--r--test cases/common/97 test workdir/subdir/meson.build4
-rw-r--r--test cases/cuda/1 simple/meson.build5
-rw-r--r--test cases/cuda/1 simple/prog.cu30
-rw-r--r--test cases/cuda/2 split/lib.cu13
-rw-r--r--test cases/cuda/2 split/main.cpp7
-rw-r--r--test cases/cuda/2 split/meson.build7
-rw-r--r--test cases/cuda/2 split/static/lib.cu13
-rw-r--r--test cases/cuda/2 split/static/libsta.cu13
-rw-r--r--test cases/cuda/2 split/static/main_static.cpp7
-rw-r--r--test cases/cuda/2 split/static/meson.build4
-rw-r--r--test cases/cuda/3 cudamodule/meson.build16
-rw-r--r--test cases/cuda/3 cudamodule/prog.cu30
-rw-r--r--test cases/cuda/4 shared/main.cu20
-rw-r--r--test cases/cuda/4 shared/meson.build6
-rw-r--r--test cases/cuda/4 shared/shared/kernels.cu14
-rw-r--r--test cases/cuda/4 shared/shared/kernels.h86
-rw-r--r--test cases/cuda/4 shared/shared/meson.build5
-rw-r--r--test cases/cuda/5 threads/main.cu20
-rw-r--r--test cases/cuda/5 threads/meson.build7
-rw-r--r--test cases/cuda/5 threads/shared/kernels.cu14
-rw-r--r--test cases/cuda/5 threads/shared/kernels.h86
-rw-r--r--test cases/cuda/5 threads/shared/meson.build5
-rw-r--r--test cases/failing test/4 hard error/main.c3
-rw-r--r--test cases/failing test/4 hard error/meson.build4
-rw-r--r--test cases/failing test/5 tap tests/meson.build6
-rw-r--r--test cases/failing test/5 tap tests/tester.c10
-rw-r--r--test cases/failing/91 invalid configure file/input0
-rw-r--r--test cases/failing/91 invalid configure file/meson.build9
-rw-r--r--test cases/failing/92 kwarg dupe/meson.build (renamed from test cases/failing/91 kwarg dupe/meson.build)0
-rw-r--r--test cases/failing/92 kwarg dupe/prog.c (renamed from test cases/failing/91 kwarg dupe/prog.c)0
-rw-r--r--test cases/failing/93 missing pch file/meson.build (renamed from test cases/failing/92 missing pch file/meson.build)0
-rw-r--r--test cases/failing/93 missing pch file/prog.c (renamed from test cases/failing/92 missing pch file/prog.c)0
-rw-r--r--test cases/failing/94 pch source different folder/include/pch.h0
-rw-r--r--test cases/failing/94 pch source different folder/meson.build5
-rw-r--r--test cases/failing/94 pch source different folder/prog.c1
-rw-r--r--test cases/failing/94 pch source different folder/src/pch.c0
-rw-r--r--test cases/fortran/1 basic/meson.build5
-rw-r--r--test cases/fortran/10 find library/gzip.f9056
-rw-r--r--test cases/fortran/10 find library/main.f9078
-rw-r--r--test cases/fortran/11 compiles links runs/meson.build20
-rw-r--r--test cases/fortran/12 submodule/a1.f9025
-rw-r--r--test cases/fortran/12 submodule/a2.f9010
-rw-r--r--test cases/fortran/12 submodule/a3.f9010
-rw-r--r--test cases/fortran/12 submodule/child.f9010
-rw-r--r--test cases/fortran/12 submodule/meson.build7
-rw-r--r--test cases/fortran/12 submodule/parent.f9023
-rw-r--r--test cases/fortran/13 coarray/main.f909
-rw-r--r--test cases/fortran/13 coarray/meson.build10
-rw-r--r--test cases/fortran/15 include/inc1.f905
-rw-r--r--test cases/fortran/15 include/inc2.f902
-rw-r--r--test cases/fortran/15 include/main.f908
-rw-r--r--test cases/fortran/15 include/meson.build4
-rw-r--r--test cases/fortran/4 self dependency/meson.build4
-rw-r--r--test cases/fortran/4 self dependency/selfdep.f9015
-rw-r--r--test cases/fortran/4 self dependency/src/selfdep_mod.f906
-rw-r--r--test cases/fortran/4 self dependency/subprojects/sub1/main.f906
-rw-r--r--test cases/fortran/4 self dependency/subprojects/sub1/meson.build3
-rw-r--r--test cases/fortran/5 static/main.f9010
-rw-r--r--test cases/fortran/5 static/static_hello.f9018
-rw-r--r--test cases/fortran/6 dynamic/dynamic.f9018
-rw-r--r--test cases/fortran/6 dynamic/main.f909
-rw-r--r--test cases/fortran/7 generated/prog.f908
-rw-r--r--test cases/fortran/8 module names/mod1.f904
-rw-r--r--test cases/fortran/8 module names/mod2.f904
-rw-r--r--test cases/fortran/8 module names/test.f9011
-rw-r--r--test cases/fortran/9 cpp/fortran.f12
-rw-r--r--test cases/fortran/9 cpp/meson.build8
-rw-r--r--test cases/frameworks/17 mpi/meson.build12
-rw-r--r--test cases/frameworks/25 hdf5/main.c30
-rw-r--r--test cases/frameworks/25 hdf5/main.cpp29
-rw-r--r--test cases/frameworks/25 hdf5/main.f9017
-rw-r--r--test cases/frameworks/25 hdf5/meson.build43
-rw-r--r--test cases/frameworks/26 netcdf/main.c14
-rw-r--r--test cases/frameworks/26 netcdf/main.cpp15
-rw-r--r--test cases/frameworks/26 netcdf/main.f9019
-rw-r--r--test cases/frameworks/26 netcdf/meson.build35
-rw-r--r--test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake9
-rw-r--r--test cases/linuxlike/13 cmake dependency/meson.build6
-rw-r--r--test cases/linuxlike/14 static dynamic linkage/main.c7
-rw-r--r--test cases/linuxlike/14 static dynamic linkage/meson.build20
-rwxr-xr-xtest cases/linuxlike/14 static dynamic linkage/verify_static.py16
-rw-r--r--test cases/osx/2 library versions/meson.build32
-rw-r--r--test cases/osx/2 library versions/require_pkgconfig.py9
-rw-r--r--test cases/osx/5 extra frameworks/installed_files.txt2
-rw-r--r--test cases/osx/5 extra frameworks/meson.build13
-rw-r--r--test cases/osx/5 extra frameworks/prog.c3
-rw-r--r--test cases/osx/5 extra frameworks/stat.c1
-rw-r--r--test cases/osx/6 multiframework/main.m5
-rw-r--r--test cases/osx/6 multiframework/meson.build13
-rw-r--r--test cases/rewrite/1 basic/addSrc.json94
-rw-r--r--test cases/rewrite/1 basic/addTgt.json9
-rw-r--r--test cases/rewrite/1 basic/added.txt5
-rw-r--r--test cases/rewrite/1 basic/info.json57
-rw-r--r--test cases/rewrite/1 basic/meson.build20
-rw-r--r--test cases/rewrite/1 basic/removed.txt5
-rw-r--r--test cases/rewrite/1 basic/rmSrc.json88
-rw-r--r--test cases/rewrite/1 basic/rmTgt.json17
-rw-r--r--test cases/rewrite/2 subdirs/addSrc.json13
-rw-r--r--test cases/rewrite/2 subdirs/addTgt.json10
-rw-r--r--test cases/rewrite/2 subdirs/info.json12
-rw-r--r--test cases/rewrite/2 subdirs/meson.build1
-rw-r--r--test cases/rewrite/2 subdirs/rmTgt.json7
-rw-r--r--test cases/rewrite/2 subdirs/sub1/after.txt1
-rw-r--r--test cases/rewrite/2 subdirs/sub2/meson.build1
-rw-r--r--test cases/rewrite/3 kwargs/add.json29
-rw-r--r--test cases/rewrite/3 kwargs/defopts_delete.json18
-rw-r--r--test cases/rewrite/3 kwargs/defopts_set.json24
-rw-r--r--test cases/rewrite/3 kwargs/delete.json20
-rw-r--r--test cases/rewrite/3 kwargs/info.json20
-rw-r--r--test cases/rewrite/3 kwargs/meson.build7
-rw-r--r--test cases/rewrite/3 kwargs/remove.json29
-rw-r--r--test cases/rewrite/3 kwargs/remove_regex.json20
-rw-r--r--test cases/rewrite/3 kwargs/set.json34
-rw-r--r--test cases/rewrite/4 same name targets/addSrc.json8
-rw-r--r--test cases/rewrite/4 same name targets/info.json12
-rw-r--r--test cases/rewrite/4 same name targets/meson.build6
-rw-r--r--test cases/rewrite/4 same name targets/sub1/meson.build3
-rw-r--r--test cases/rewrite/5 sorting/meson.build33
-rw-r--r--test cases/unit/17 prebuilt shared/patron.c1
-rw-r--r--test cases/unit/20 subproj dep variables/meson.build3
-rw-r--r--test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build3
-rw-r--r--test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap1
-rw-r--r--test cases/unit/47 native file binary/meson.build (renamed from test cases/unit/46 native file binary/meson.build)0
-rw-r--r--test cases/unit/47 native file binary/meson_options.txt (renamed from test cases/unit/46 native file binary/meson_options.txt)0
-rw-r--r--test cases/unit/48 reconfigure/main.c (renamed from test cases/unit/46 reconfigure/main.c)0
-rw-r--r--test cases/unit/48 reconfigure/meson.build (renamed from test cases/unit/46 reconfigure/meson.build)0
-rw-r--r--test cases/unit/48 reconfigure/meson_options.txt (renamed from test cases/unit/46 reconfigure/meson_options.txt)0
-rw-r--r--test cases/unit/49 testsetup default/envcheck.py (renamed from test cases/unit/47 testsetup default/envcheck.py)0
-rw-r--r--test cases/unit/49 testsetup default/meson.build (renamed from test cases/unit/47 testsetup default/meson.build)0
-rw-r--r--test cases/unit/50 pkgconfig csharp library/meson.build (renamed from test cases/unit/48 pkgconfig csharp library/meson.build)0
-rw-r--r--test cases/unit/50 pkgconfig csharp library/somelib.cs (renamed from test cases/unit/48 pkgconfig csharp library/somelib.cs)0
-rw-r--r--test cases/unit/51 ldflagdedup/bob.c (renamed from test cases/unit/49 ldflagdedup/bob.c)0
-rw-r--r--test cases/unit/51 ldflagdedup/meson.build (renamed from test cases/unit/49 ldflagdedup/meson.build)0
-rw-r--r--test cases/unit/51 ldflagdedup/prog.c (renamed from test cases/unit/49 ldflagdedup/prog.c)0
-rw-r--r--test cases/unit/52 pkgconfig static link order/meson.build (renamed from test cases/unit/50 pkgconfig static link order/meson.build)0
-rw-r--r--test cases/unit/53 clang-format/.clang-format (renamed from test cases/unit/51 clang-format/.clang-format)0
-rw-r--r--test cases/unit/53 clang-format/meson.build (renamed from test cases/unit/51 clang-format/meson.build)0
-rw-r--r--test cases/unit/53 clang-format/prog_expected_c (renamed from test cases/unit/51 clang-format/prog_expected_c)0
-rw-r--r--test cases/unit/53 clang-format/prog_orig_c (renamed from test cases/unit/51 clang-format/prog_orig_c)0
-rw-r--r--test cases/unit/54 introspect buildoptions/subprojects/projectBad/meson.build9
-rw-r--r--test cases/unit/54 introspect buildoptions/subprojects/projectBad/meson_options.txt (renamed from test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson_options.txt)0
-rw-r--r--test cases/unit/55 dedup compiler libs/app/app.c13
-rw-r--r--test cases/unit/55 dedup compiler libs/app/meson.build2
-rw-r--r--test cases/unit/55 dedup compiler libs/liba/liba.c18
-rw-r--r--test cases/unit/55 dedup compiler libs/liba/liba.h8
-rw-r--r--test cases/unit/55 dedup compiler libs/liba/meson.build8
-rw-r--r--test cases/unit/55 dedup compiler libs/libb/libb.c7
-rw-r--r--test cases/unit/55 dedup compiler libs/libb/libb.h6
-rw-r--r--test cases/unit/55 dedup compiler libs/libb/meson.build6
-rw-r--r--test cases/unit/55 dedup compiler libs/meson.build7
-rw-r--r--test cases/unit/55 introspection/meson.build20
-rw-r--r--test cases/unit/55 introspection/sharedlib/meson.build2
-rw-r--r--test cases/unit/55 introspection/sharedlib/shared.cpp9
-rw-r--r--test cases/unit/55 introspection/sharedlib/shared.hpp10
-rw-r--r--test cases/unit/55 introspection/staticlib/meson.build2
-rw-r--r--test cases/unit/55 introspection/staticlib/static.c5
-rw-r--r--test cases/unit/55 introspection/staticlib/static.h3
-rw-r--r--test cases/unit/55 introspection/t1.cpp13
-rw-r--r--test cases/unit/55 introspection/t2.cpp8
-rw-r--r--test cases/unit/55 introspection/t3.cpp16
-rw-r--r--test cases/unit/56 introspect buildoptions/c_compiler.py (renamed from test cases/unit/51 introspect buildoptions/c_compiler.py)0
-rw-r--r--test cases/unit/56 introspect buildoptions/main.c6
-rw-r--r--test cases/unit/56 introspect buildoptions/meson.build (renamed from test cases/unit/51 introspect buildoptions/meson.build)5
-rw-r--r--test cases/unit/56 introspect buildoptions/meson_options.txt (renamed from test cases/unit/51 introspect buildoptions/meson_options.txt)0
-rw-r--r--test cases/unit/56 introspect buildoptions/subprojects/evilFile.txt0
-rw-r--r--test cases/unit/56 introspect buildoptions/subprojects/projectA/meson.build (renamed from test cases/unit/51 introspect buildoptions/subprojects/projectA/meson.build)0
-rw-r--r--test cases/unit/56 introspect buildoptions/subprojects/projectA/meson_options.txt (renamed from test cases/unit/51 introspect buildoptions/subprojects/projectA/meson_options.txt)0
-rw-r--r--test cases/unit/56 introspect buildoptions/subprojects/projectBad/meson.build (renamed from test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson.build)0
-rw-r--r--test cases/unit/56 introspect buildoptions/subprojects/projectBad/meson_options.txt1
-rw-r--r--test cases/unit/57 native file override/crossfile16
-rw-r--r--test cases/unit/57 native file override/meson.build10
-rw-r--r--test cases/unit/57 native file override/meson_options.txt13
-rw-r--r--test cases/unit/57 native file override/nativefile16
-rw-r--r--test cases/unit/58 pkgconfig relative paths/pkgconfig/librelativepath.pc9
-rw-r--r--test cases/warning/1 version for string div/a/b.c3
-rw-r--r--test cases/warning/1 version for string div/meson.build3
-rwxr-xr-xtools/cmake2meson.py52
358 files changed, 12987 insertions, 3418 deletions
diff --git a/.gitignore b/.gitignore
index fc09082..b408190 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+.mypy_cache/
/.project
/.pydevproject
/.settings
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 39e41e9..483f1eb 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -50,6 +50,11 @@ jobs:
backend: ninja
steps:
+ - task: UsePythonVersion@0
+ inputs:
+ versionSpec: '3.5'
+ addToPath: true
+ architecture: 'x64'
- template: ci/azure-steps.yml
- job: cygwin
@@ -76,12 +81,14 @@ jobs:
libglib2.0-devel,^
libgtk3-devel,^
ninja,^
- python3-pip,^
+ python35-pip,^
vala,^
zlib-devel
displayName: Install Dependencies
- script: |
+ set BOOST_ROOT=
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
+ cp /usr/bin/python3.5 /usr/bin/python3
env.exe -- python3 run_tests.py --backend=ninja
displayName: Run Tests
- task: CopyFiles@2
@@ -136,6 +143,7 @@ jobs:
git ^
mercurial ^
mingw-w64-$(MSYS2_ARCH)-cmake ^
+ mingw-w64-$(MSYS2_ARCH)-ninja ^
mingw-w64-$(MSYS2_ARCH)-pkg-config ^
mingw-w64-$(MSYS2_ARCH)-python2 ^
mingw-w64-$(MSYS2_ARCH)-python3 ^
@@ -143,8 +151,8 @@ jobs:
%TOOLCHAIN%
displayName: Install Dependencies
- script: |
+ set BOOST_ROOT=
set PATH=%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem
- %MSYS2_ROOT%\usr\bin\bash -lc "wget https://github.com/mesonbuild/cidata/raw/master/ninja.exe; mv ninja.exe /$MSYSTEM/bin"
set PATHEXT=%PATHEXT%;.py
if %compiler%==clang ( set CC=clang && set CXX=clang++ )
%MSYS2_ROOT%\usr\bin\bash -lc "MSYSTEM= python3 run_tests.py --backend=ninja"
diff --git a/ci/azure-steps.yml b/ci/azure-steps.yml
index 71642f0..7e0f6fe 100644
--- a/ci/azure-steps.yml
+++ b/ci/azure-steps.yml
@@ -71,6 +71,8 @@ steps:
Start-Process "boost_$boost_filename-msvc-$boost_abi_tag-$boost_bitness.exe" -ArgumentList "/dir=$(System.WorkFolder)\boost_$boost_filename /silent" -Wait
$env:BOOST_ROOT = "$(System.WorkFolder)\boost_$boost_filename"
$env:Path = "$env:Path;$env:BOOST_ROOT\lib$boost_bitness-msvc-$boost_abi_tag"
+ } else {
+ $env:BOOST_ROOT = ""
}
# install D compiler and dub packages
@@ -142,9 +144,16 @@ steps:
MSBuild /version
}
+ echo "=== PATH BEGIN ==="
+ echo ($env:Path).Replace(';',"`n")
+ echo "=== PATH END ==="
+ echo ""
+ echo "Locating Python:"
where.exe python
python --version
+ echo ""
+ echo "=== Start running tests ==="
python run_tests.py --backend $(backend)
- task: PublishTestResults@2
diff --git a/ci/install-dmd.ps1 b/ci/install-dmd.ps1
index fc8226c..aeacdf2 100644
--- a/ci/install-dmd.ps1
+++ b/ci/install-dmd.ps1
@@ -68,4 +68,4 @@ $dmd_bin = Join-Path $dmd_install "dmd2\windows\bin"
$Env:Path = $Env:Path + ";" + $dmd_bin
#echo "Testing DMD..."
-& dmd.exe --version 2>&1>$null
+& dmd.exe --version
diff --git a/ciimage/Dockerfile b/ciimage/Dockerfile
index 520ce0f..980ed53 100644
--- a/ciimage/Dockerfile
+++ b/ciimage/Dockerfile
@@ -20,6 +20,7 @@ RUN sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list" \
&& apt-get -y install --no-install-recommends wine-stable \
&& apt-get -y install llvm-dev libclang-dev \
&& apt-get -y install libgcrypt11-dev \
+&& apt-get -y install libhdf5-dev \
&& dub fetch urld && dub build urld --compiler=gdc \
&& dub fetch dubtestproject \
&& dub build dubtestproject:test1 --compiler=ldc2 \
diff --git a/data/com.mesonbuild.install.policy b/data/com.mesonbuild.install.policy
index 6fba47c..65bf3ef 100644
--- a/data/com.mesonbuild.install.policy
+++ b/data/com.mesonbuild.install.policy
@@ -17,7 +17,6 @@
</defaults>
<annotate key="org.freedesktop.policykit.exec.path">/usr/bin/python3</annotate>
<annotate key="org.freedesktop.policykit.exec.argv1">/usr/bin/meson</annotate>
- <annotate key="org.freedesktop.policykit.exec.argv2">install</annotate>
</action>
</policyconfig>
diff --git a/data/shell-completions/bash/meson b/data/shell-completions/bash/meson
new file mode 100644
index 0000000..993885b
--- /dev/null
+++ b/data/shell-completions/bash/meson
@@ -0,0 +1,416 @@
+_meson() {
+ command="${COMP_WORDS[1]}"
+ case "$command" in
+ setup |\
+ configure |\
+ install |\
+ introspect |\
+ init |\
+ test |\
+ wrap |\
+ subprojects |\
+ help)
+ _meson-$command "${COMP_WORDS[@]:1}"
+ ;;
+ *)
+ _meson-setup "${COMP_WORDS[@]}"
+ ;;
+ esac
+} &&
+complete -F _meson meson
+
+_meson_complete_option() {
+ option_string=$1
+
+ if [[ $# -eq 2 ]] && ! [[ "$option_string" == *=* ]]; then
+ option_string="$option_string=$2"
+ fi
+
+ if [[ "$option_string" == *=* ]]; then
+ _meson_complete_option_value "$option_string"
+ else
+ _meson_complete_option_name "$option_string"
+ fi
+}
+
+_meson_complete_option_name() {
+ option=$1
+ options=($(python3 -c 'import sys, json
+for option in json.load(sys.stdin):
+ print(option["name"])
+' <<< "$(_meson_get_options)"))
+ compopt -o nospace
+ COMPREPLY=($(compgen -W '${options[@]}' -S= -- "$option"))
+}
+
+_meson_complete_option_value() {
+ cur=$1
+ option_name=${cur%%=*}
+ option_value=${cur#*=}
+
+ if _meson_complete_filedir "$option_name" "$option_value"; then
+ return
+ fi
+
+# TODO: support all the option types
+ options=($(python3 -c 'import sys, json
+for option in json.load(sys.stdin):
+ if option["name"] != "'$option_name'":
+ continue
+ choices = []
+ if option["type"] == "boolean":
+ choices.append("true")
+ choices.append("false")
+ elif option["type"] == "combo":
+ for choice in option["choices"]:
+ choices.append(choice)
+ for choice in choices:
+ if choice.startswith("'$cur'"):
+ print(choice)
+' <<< "$(_meson_get_options)"))
+ COMPREPLY=("${options[@]}")
+}
+
+_meson_get_options() {
+ local options
+ for builddir in "${COMP_WORDS[@]}"; do
+ if [ -d "$builddir" ]; then
+ break
+ fi
+ builddir=.
+ done
+ options=$(meson introspect "$builddir" --buildoptions 2>/dev/null) &&
+ echo "$options" ||
+ echo '[]'
+}
+
+_meson_complete_filedir() {
+ _filedir_in() {
+ pushd "$1" &>/dev/null
+ local COMPREPLY=()
+ _filedir
+ echo "${COMPREPLY[@]}"
+ popd &>/dev/null
+ }
+
+ option=$1
+ cur=$2
+ case $option in
+ prefix |\
+ libdir |\
+ libexecdir |\
+ bindir |\
+ sbindir |\
+ includedir |\
+ datadir |\
+ mandir |\
+ infodir |\
+ localedir |\
+ sysconfdir |\
+ localstatedir |\
+ sharedstatedir)
+ _filedir -d
+ ;;
+ cross-file)
+ _filedir
+ COMPREPLY+=($(_filedir_in "$XDG_DATA_DIRS"/meson/cross))
+ COMPREPLY+=($(_filedir_in /usr/local/share/meson/cross))
+ COMPREPLY+=($(_filedir_in /usr/share/meson/cross))
+ COMPREPLY+=($(_filedir_in "$XDG_DATA_HOME"/meson/cross))
+ COMPREPLY+=($(_filedir_in ~/.local/share/meson/cross))
+ ;;
+ *)
+ return 1;;
+ esac
+ return 0
+}
+
+_meson-setup() {
+
+ shortopts=(
+ h
+ D
+ v
+ )
+
+ longopts=(
+ help
+ prefix
+ libdir
+ libexecdir
+ bindir
+ sbindir
+ includedir
+ datadir
+ mandir
+ infodir
+ localedir
+ sysconfdir
+ localstatedir
+ sharedstatedir
+ backend
+ buildtype
+ strip
+ unity
+ werror
+ layout
+ default-library
+ warnlevel
+ stdsplit
+ errorlogs
+ cross-file
+ version
+ wrap-mode
+ )
+
+ local cur prev
+ if _get_comp_words_by_ref cur prev &>/dev/null &&
+ [ "${prev:0:2}" = '--' ] && _meson_complete_option "${prev:2}" "$cur"; then
+ return
+ elif _get_comp_words_by_ref cur prev &>/dev/null &&
+ [ "${prev:0:1}" = '-' ] && [ "${prev:1:2}" != '-' ] && _meson_complete_option "${prev:1}"; then
+ return
+ elif _get_comp_words_by_ref -n '=' cur prev &>/dev/null; then
+ if [ $prev == -D ]; then
+ _meson_complete_option "$cur"
+ return
+ fi
+ else
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ fi
+
+ if [[ "$cur" == "--"* ]]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ elif [[ "$cur" == "-"* ]]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}"))
+ else
+ _filedir -d
+ if [ -z "$cur" ]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}'))
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}'))
+ fi
+
+ if [ $COMP_CWORD -eq 1 ]; then
+ COMPREPLY+=($(compgen -W 'setup configure test introspect' -- "$cur"))
+ fi
+ fi
+}
+
+_meson-configure() {
+
+ shortopts=(
+ h
+ D
+ )
+
+ longopts=(
+ help
+ clearcache
+ )
+
+ local cur prev
+ if _get_comp_words_by_ref -n '=' cur prev &>/dev/null; then
+ if [ $prev == -D ]; then
+ _meson_complete_option "$cur"
+ return
+ fi
+ else
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ fi
+
+ if [[ "$cur" == "--"* ]]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ elif [[ "$cur" == "-"* ]]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}"))
+ else
+ for dir in "${COMP_WORDS[@]}"; do
+ if [ -d "$dir" ]; then
+ break
+ fi
+ dir=.
+ done
+ if [ ! -d "$dir/meson-private" ]; then
+ _filedir -d
+ fi
+
+ if [ -z "$cur" ]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}'))
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}'))
+ fi
+ fi
+}
+
+_meson-test() {
+ shortopts=(
+ q
+ v
+ t
+ C
+ )
+
+ longopts=(
+ quiet
+ verbose
+ timeout-multiplier
+ repeat
+ no-rebuild
+ gdb
+ list
+ wrapper --wrap
+ no-suite
+ suite
+ no-stdsplit
+ print-errorlogs
+ benchmark
+ logbase
+ num-processes
+ setup
+ test-args
+ )
+
+ local cur prev
+ if _get_comp_words_by_ref -n ':' cur prev &>/dev/null; then
+ case $prev in
+ --repeat)
+ # number, can't be completed
+ return
+ ;;
+ --wrapper)
+ _command_offset $COMP_CWORD
+ return
+ ;;
+ -C)
+ _filedir -d
+ return
+ ;;
+ --suite | --no-suite)
+ for i in "${!COMP_WORDS[@]}"; do
+ opt="${COMP_WORDS[i]}"
+ dir="${COMP_WORDS[i+1]}"
+ case "$opt" in
+ -C)
+ break
+ ;;
+ esac
+ dir=.
+ done
+ suites=($(python3 -c 'import sys, json;
+for test in json.load(sys.stdin):
+ for suite in test["suite"]:
+ print(suite)
+ ' <<< "$(meson introspect "$dir" --tests)"))
+# TODO
+ COMPREPLY+=($(compgen -W "${suites[*]}" -- "$cur"))
+ return
+ ;;
+ --logbase)
+ # free string, can't be completed
+ return
+ ;;
+ --num-processes)
+ # number, can't be completed
+ return
+ ;;
+ -t | --timeout-multiplier)
+ # number, can't be completed
+ return
+ ;;
+ --setup)
+ # TODO
+ return
+ ;;
+ --test-args)
+ return
+ ;;
+ esac
+ else
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ fi
+
+ if [[ "$cur" == "--"* ]]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ elif [[ "$cur" == "-"* && ${#cur} -gt 1 ]]; then
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}"))
+ else
+ for dir in "${COMP_WORDS[@]}"; do
+ if [ -d "$dir" ]; then
+ break
+ fi
+ dir=.
+ done
+ if [ ! -d "$dir/meson-private" ]; then
+ _filedir -d
+ fi
+
+ for i in "${!COMP_WORDS[@]}"; do
+ opt="${COMP_WORDS[i]}"
+ dir="${COMP_WORDS[i+1]}"
+ case "$opt" in
+ -C)
+ break
+ ;;
+ esac
+ dir=.
+ done
+ tests=($(python3 -c 'import sys, json;
+for test in json.load(sys.stdin):
+ print(test["name"])
+' <<< "$(meson introspect "$dir" --tests)"))
+ COMPREPLY+=($(compgen -W "${tests[*]}" -- "$cur"))
+
+ if [ -z "$cur" ]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}"))
+ fi
+ fi
+}
+
+_meson-introspect() {
+ shortopts=(
+ h
+ )
+
+ longopts=(
+ targets
+ installed
+ buildsystem-files
+ buildoptions
+ tests
+ benchmarks
+ dependencies
+ projectinfo
+ )
+
+ local cur prev
+ if ! _get_comp_words_by_ref cur prev &>/dev/null; then
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ fi
+
+ if [[ "$cur" == "--"* ]]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ elif [[ "$cur" == "-"* ]]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}' -- "${cur:2}"))
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}' -- "${cur:1}"))
+ else
+ for dir in "${COMP_WORDS[@]}"; do
+ if [ -d "$dir" ]; then
+ break
+ fi
+ dir=.
+ done
+ if [ ! -d "$dir/meson-private" ]; then
+ _filedir -d
+ fi
+
+ if [ -z "$cur" ]; then
+ COMPREPLY+=($(compgen -P '--' -W '${longopts[*]}'))
+ COMPREPLY+=($(compgen -P '-' -W '${shortopts[*]}'))
+ fi
+ fi
+}
+
+_meson-wrap() {
+ : TODO
+}
diff --git a/data/shell-completions/zsh/_meson b/data/shell-completions/zsh/_meson
index 481d04c..49860d5 100644
--- a/data/shell-completions/zsh/_meson
+++ b/data/shell-completions/zsh/_meson
@@ -49,8 +49,7 @@ local -a meson_commands=(
# TODO: implement build option completion
(( $+functions[__meson_build_options] )) || __meson_build_options() {}
-# TODO: implement target name completion
-(( $+functions[__meson_targets] )) || __meson_targets() {}
+
# `meson introspect` currently can provide that information in JSON.
# We can:
# 1) pipe its output to python3 -m json.tool | grep "$alovelyregex" | cut <...>
@@ -170,7 +169,6 @@ local -a meson_commands=(
local -a specs=(
'--targets[list top level targets]'
'--installed[list all installed files and directories]'
- '--target-files[list source files for a given target]:target:__meson_targets'
'--buildsystem-files[list files that belong to the build system]'
'--buildoptions[list all build options]'
'--tests[list all unit tests]'
diff --git a/docs/genrelnotes.py b/docs/genrelnotes.py
new file mode 100755
index 0000000..e5ff432
--- /dev/null
+++ b/docs/genrelnotes.py
@@ -0,0 +1,63 @@
#!/usr/bin/env python3

# Copyright 2019 The Meson development team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

'''Generate the release-notes page for a new Meson release.

Concatenates all pending snippet files from markdown/snippets/ into
Release-notes-for-<to_version>.md, stages the result with git, and
inserts a sitemap entry for the new page.

Usage: genrelnotes.py <from_version> <to_version>
'''

import sys, os, subprocess
from glob import glob

relnote_template = '''---
title: Release %s
short-description: Release notes for %s
...

# New features

'''


def add_to_sitemap(from_version, to_version):
    '''Insert a sitemap entry for the new release-notes page.

    The entry is written directly above the entry of the previous
    release (matched by *from_version*) so the sitemap keeps its
    reverse-chronological ordering.  Must be run from the markdown/
    directory, as the sitemap lives one level up.
    '''
    sitemapfile = '../sitemap.txt'
    with open(sitemapfile) as sf:
        lines = sf.readlines()
    with open(sitemapfile, 'w') as sf:
        for line in lines:
            if 'Release-notes' in line and from_version in line:
                sf.write(line.replace(from_version, to_version))
            sf.write(line)

def generate(from_version, to_version):
    '''Build Release-notes-for-<to_version>.md from the snippet files.

    Every consumed snippet is removed via `git rm`, the generated notes
    file is staged via `git add`, and the sitemap is updated.
    '''
    ofilename = 'Release-notes-for-%s.md' % to_version
    with open(ofilename, 'w') as ofile:
        ofile.write(relnote_template % (to_version, to_version))
        # glob() returns entries in arbitrary filesystem order; sort
        # so the generated notes are deterministic across runs.
        for snippetfile in sorted(glob('snippets/*.md')):
            with open(snippetfile) as sf:
                snippet = sf.read()
            ofile.write(snippet)
            # Guarantee a blank line between consecutive snippets.
            if not snippet.endswith('\n'):
                ofile.write('\n')
            ofile.write('\n')
            subprocess.check_call(['git', 'rm', snippetfile])
    subprocess.check_call(['git', 'add', ofilename])
    add_to_sitemap(from_version, to_version)

if __name__ == '__main__':
    if len(sys.argv) != 3:
        print(sys.argv[0], 'from_version to_version')
        sys.exit(1)
    from_version = sys.argv[1]
    to_version = sys.argv[2]
    os.chdir('markdown')
    generate(from_version, to_version)
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index 288bd79..5db6e02 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -40,9 +40,9 @@ Installation options are all relative to the prefix, except:
| localstatedir | var | Localstate data directory |
| sharedstatedir | com | Architecture-independent data directory |
| werror | false | Treat warnings as errors |
-| warning_level {1, 2, 3} | 1 | Set the warning level. From 1 = lowest to 3 = highest |
+| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest |
| layout {mirror,flat} | mirror | Build directory layout. |
-| default-library {shared, static, both} | shared | Default library type. |
+| default_library {shared, static, both} | shared | Default library type. |
| backend {ninja, vs,<br>vs2010, vs2015, vs2017, xcode} | | Backend to use (default: ninja). |
| stdsplit | | Split stdout and stderr in test logs. |
| errorlogs | | Whether to print the logs from failing tests. |
diff --git a/docs/markdown/CMake-module.md b/docs/markdown/CMake-module.md
new file mode 100644
index 0000000..4cc97cf
--- /dev/null
+++ b/docs/markdown/CMake-module.md
@@ -0,0 +1,71 @@
+# CMake module
+
+This module provides helper tools for generating cmake package files.
+
+
+## Usage
+
+To use this module, just do: **`cmake = import('cmake')`**. The
+following functions will then be available as methods on the object
+with the name `cmake`. You can, of course, replace the name `cmake`
+with anything else.
+
+### cmake.write_basic_package_version_file()
+
+This function is the equivalent of the corresponding [CMake function](https://cmake.org/cmake/help/v3.11/module/CMakePackageConfigHelpers.html#generating-a-package-version-file),
+it generates a `name` package version file.
+
+* `name`: the name of the package.
+* `version`: the version of the generated package file.
+* `compatibility`: a string indicating the kind of compatibility, the accepted values are
+`AnyNewerVersion`, `SameMajorVersion`, `SameMinorVersion` or `ExactVersion`.
+It defaults to `AnyNewerVersion`. Depending on your cmake installation some kind of
+compatibility may not be available.
+* `install_dir`: optional installation directory, it defaults to `$(libdir)/cmake/$(name)`
+
+
+Example:
+
+```meson
+cmake = import('cmake')
+
+cmake.write_basic_package_version_file(name: 'myProject', version: '1.0.0')
+```
+
+### cmake.configure_package_config_file()
+
+This function is the equivalent of the corresponding [CMake function](https://cmake.org/cmake/help/v3.11/module/CMakePackageConfigHelpers.html#generating-a-package-configuration-file),
+it generates a `name` package configuration file from the `input` template file. Just like the cmake function
+in this file the `@PACKAGE_INIT@` statement will be replaced by the appropriate piece of cmake code.
+The equivalent `PATH_VARS` argument is given through the `configuration` parameter.
+
+* `name`: the name of the package.
+* `input`: the template file that will be used for the variable substitutions contained in `configuration`.
+* `install_dir`: optional installation directory, it defaults to `$(libdir)/cmake/$(name)`.
+* `configuration`: a `configuration_data` object that will be used for variable substitution in the template file.
+
+
+Example:
+
+meson.build:
+
+```meson
+cmake = import('cmake')
+
+conf = configuration_data()
+conf.set_quoted('VAR', 'variable value')
+
+cmake.configure_package_config_file(
+ name: 'myProject',
+ input: 'myProject.cmake.in',
+ configuration: conf
+)
+```
+
+myProject.cmake.in:
+
+```text
+@PACKAGE_INIT@
+
+set(MYVAR VAR)
+```
diff --git a/docs/markdown/Configuring-a-build-directory.md b/docs/markdown/Configuring-a-build-directory.md
index 0c7487f..ddda6fe 100644
--- a/docs/markdown/Configuring-a-build-directory.md
+++ b/docs/markdown/Configuring-a-build-directory.md
@@ -33,7 +33,7 @@ sample output for a simple project.
optimization 3 [0, g, 1, 2, 3, s] Optimization level
strip false [true, false] Strip targets on install
unity off [on, off, subprojects] Unity build
- warning_level 1 [1, 2, 3] Compiler warning level to use
+ warning_level 1 [0, 1, 2, 3] Compiler warning level to use
werror false [true, false] Treat warnings as errors
Backend options:
@@ -111,3 +111,8 @@ you would issue the following command.
Then you would run your build command (usually `ninja`), which would
cause Meson to detect that the build setup has changed and do all the
work required to bring your build tree up to date.
+
+Since 0.50.0, it is also possible to get a list of all build options
+by invoking `meson configure` with the project source directory or
+the path to the root `meson.build`. In this case, meson will print the
+default values of all options similar to the example output from above.
diff --git a/docs/markdown/Continuous-Integration.md b/docs/markdown/Continuous-Integration.md
index 93c772d..5eec67d 100644
--- a/docs/markdown/Continuous-Integration.md
+++ b/docs/markdown/Continuous-Integration.md
@@ -154,7 +154,7 @@ matrix:
- os: osx
install:
- - export PATH="`pwd`/build:${PATH}"
+ - export PATH="$(pwd)/build:${PATH}"
- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update && brew install python3 ninja; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then wget https://github.com/ninja-build/ninja/releases/download/v1.7.2/ninja-linux.zip && unzip -q ninja-linux.zip -d build; fi
- pip3 install meson
diff --git a/docs/markdown/Creating-Linux-binaries.md b/docs/markdown/Creating-Linux-binaries.md
index 084e157..c3b4b64 100644
--- a/docs/markdown/Creating-Linux-binaries.md
+++ b/docs/markdown/Creating-Linux-binaries.md
@@ -119,7 +119,7 @@ with the following content:
#!/bin/bash
cd "${0%/*}"
-export LD_LIBRARY_PATH="`pwd`/lib"
+export LD_LIBRARY_PATH="$(pwd)/lib"
bin/myapp
```
diff --git a/docs/markdown/Creating-OSX-packages.md b/docs/markdown/Creating-OSX-packages.md
index 14b2af8..bda06a3 100644
--- a/docs/markdown/Creating-OSX-packages.md
+++ b/docs/markdown/Creating-OSX-packages.md
@@ -53,10 +53,8 @@ install_data('Info.plist', install_dir : 'Contents')
```
The format of `Info.plist` can be found in the link or the sample
-project linked above. Be careful, the sample code on the linked page
-is malformed, it is missing a less than character (<) before
-`!DOCTYPE`. The simplest way to get an icon in the `icns` format is to
-save your image as a tiff an then use the `tiff2icns` helper
+project linked above. The simplest way to get an icon in the `icns`
+format is to save your image as a tiff and then use the `tiff2icns` helper
application that comes with XCode.
Some applications assume that the working directory of the app process
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index 7d316ed..36620eb 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -150,7 +150,7 @@ binaries are not actually compatible. In such cases you may use the
needs_exe_wrapper = true
```
-The last bit is the definition of host and target machines. Every
+The next bit is the definition of host and target machines. Every
cross build definition must have one or both of them. If it had
neither, the build would not be a cross build but a native build. You
do not need to define the build machine, as all necessary information
@@ -186,6 +186,20 @@ If you do not define your host machine, it is assumed to be the build
machine. Similarly if you do not specify target machine, it is assumed
to be the host machine.
+Additionally, you can define the paths that you want to install to in your
+cross file. This may be especially useful when cross compiling an entire
+operating system, or for operating systems to use internally for consistency.
+
+```ini
+[paths]
+prefix = '/my/prefix'
+libdir = 'lib/i386-linux-gnu'
+bindir = 'bin'
+```
+
+This will be overwritten by any options passed on the command line.
+
+
## Starting a cross build
diff --git a/docs/markdown/Cuda-module.md b/docs/markdown/Cuda-module.md
new file mode 100644
index 0000000..f161eac
--- /dev/null
+++ b/docs/markdown/Cuda-module.md
@@ -0,0 +1,191 @@
+---
+short-description: CUDA module
+authors:
+ - name: Olexa Bilaniuk
+ years: [2019]
+ has-copyright: false
+...
+
+# Unstable CUDA Module
+_Since: 0.50.0_
+
+This module provides helper functionality related to the CUDA Toolkit and
+building code using it.
+
+
+**Note**: this module is unstable. It is only provided as a technology preview.
+Its API may change in arbitrary ways between releases or it might be removed
+from Meson altogether.
+
+
+## Importing the module
+
+The module may be imported as follows:
+
+``` meson
+cuda = import('unstable-cuda')
+```
+
+It offers several useful functions that are enumerated below.
+
+
+## Functions
+
+### `nvcc_arch_flags()`
+_Since: 0.50.0_
+
+``` meson
+cuda.nvcc_arch_flags(nvcc_or_version, ...,
+ detected: string_or_array)
+```
+
+Returns a list of `-gencode` flags that should be passed to `cuda_args:` in
+order to compile a "fat binary" for the architectures/compute capabilities
+enumerated in the positional argument(s). The flags shall be acceptable to
+the NVCC compiler object `nvcc_or_version`, or its version string.
+
+A set of architectures and/or compute capabilities may be specified by:
+
+- The single positional argument `'All'`, `'Common'` or `'Auto'`
+- As (an array of)
+ - Architecture names (`'Kepler'`, `'Maxwell+Tegra'`, `'Turing'`) and/or
+ - Compute capabilities (`'3.0'`, `'3.5'`, `'5.3'`, `'7.5'`)
+
+A suffix of `+PTX` requests PTX code generation for the given architecture.
+A compute capability given as `A.B(X.Y)` requests PTX generation for an older
+virtual architecture `X.Y` before binary generation for a newer architecture
+`A.B`.
+
+Multiple architectures and compute capabilities may be passed in using
+
+- Multiple positional arguments
+- Lists of strings
+- Space (` `), comma (`,`) or semicolon (`;`)-separated strings
+
+The single-word architectural sets `'All'`, `'Common'` or `'Auto'` cannot be
+mixed with architecture names or compute capabilities. Their interpretation is:
+
+| Name | Compute Capability |
+|-------------------|--------------------|
+| `'All'` | All CCs supported by given NVCC compiler. |
+| `'Common'` | Relatively common CCs supported by given NVCC compiler. Generally excludes Tegra and Tesla devices. |
+| `'Auto'` | The CCs provided by the `detected:` keyword, filtered for support by given NVCC compiler. |
+
+As a special case, when `nvcc_arch_flags()` is invoked with
+
+- an NVCC `compiler` object `nvcc`,
+- `'Auto'` mode and
+- no `detected:` keyword,
+
+Meson uses `nvcc`'s architecture auto-detection results.
+
+The supported architecture names and their corresponding compute capabilities
+are:
+
+| Name | Compute Capability |
+|-------------------|--------------------|
+| `'Fermi'` | 2.0, 2.1(2.0) |
+| `'Kepler'` | 3.0, 3.5 |
+| `'Kepler+Tegra'` | 3.2 |
+| `'Kepler+Tesla'` | 3.7 |
+| `'Maxwell'` | 5.0, 5.2 |
+| `'Maxwell+Tegra'` | 5.3 |
+| `'Pascal'` | 6.0, 6.1 |
+| `'Pascal+Tegra'` | 6.2 |
+| `'Volta'` | 7.0 |
+| `'Xavier'` | 7.2 |
+| `'Turing'` | 7.5 |
+
+
+Examples:
+
+ cuda.nvcc_arch_flags('10.0', '3.0', '3.5', '5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', ['3.0', '3.5', '5.0+PTX'])
+ cuda.nvcc_arch_flags('10.0', [['3.0', '3.5'], '5.0+PTX'])
+ cuda.nvcc_arch_flags('10.0', '3.0 3.5 5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', '3.0,3.5,5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', '3.0;3.5;5.0+PTX')
+ cuda.nvcc_arch_flags('10.0', 'Kepler 5.0+PTX')
+ # Returns ['-gencode', 'arch=compute_30,code=sm_30',
+ # '-gencode', 'arch=compute_35,code=sm_35',
+ # '-gencode', 'arch=compute_50,code=sm_50',
+ # '-gencode', 'arch=compute_50,code=compute_50']
+
+ cuda.nvcc_arch_flags('10.0', '3.5(3.0)')
+ # Returns ['-gencode', 'arch=compute_30,code=sm_35']
+
+ cuda.nvcc_arch_flags('8.0', 'Common')
+ # Returns ['-gencode', 'arch=compute_30,code=sm_30',
+ # '-gencode', 'arch=compute_35,code=sm_35',
+ # '-gencode', 'arch=compute_50,code=sm_50',
+ # '-gencode', 'arch=compute_52,code=sm_52',
+ # '-gencode', 'arch=compute_60,code=sm_60',
+ # '-gencode', 'arch=compute_61,code=sm_61',
+ # '-gencode', 'arch=compute_61,code=compute_61']
+
+ cuda.nvcc_arch_flags('9.2', 'Auto', detected: '6.0 6.0 6.0 6.0')
+ cuda.nvcc_arch_flags('9.2', 'Auto', detected: ['6.0', '6.0', '6.0', '6.0'])
+ # Returns ['-gencode', 'arch=compute_60,code=sm_60']
+
+ cuda.nvcc_arch_flags(nvcc, 'All')
+ # Returns ['-gencode', 'arch=compute_20,code=sm_20',
+ # '-gencode', 'arch=compute_20,code=sm_21',
+ # '-gencode', 'arch=compute_30,code=sm_30',
+ # '-gencode', 'arch=compute_32,code=sm_32',
+ # '-gencode', 'arch=compute_35,code=sm_35',
+ # '-gencode', 'arch=compute_37,code=sm_37',
+ # '-gencode', 'arch=compute_50,code=sm_50', # nvcc.version() < 7.0
+ # '-gencode', 'arch=compute_52,code=sm_52',
+ # '-gencode', 'arch=compute_53,code=sm_53', # nvcc.version() >= 7.0
+ # '-gencode', 'arch=compute_60,code=sm_60',
+ # '-gencode', 'arch=compute_61,code=sm_61', # nvcc.version() >= 8.0
+ # '-gencode', 'arch=compute_70,code=sm_70',
+ # '-gencode', 'arch=compute_72,code=sm_72', # nvcc.version() >= 9.0
+ # '-gencode', 'arch=compute_75,code=sm_75'] # nvcc.version() >= 10.0
+
+_Note:_ This function is intended to closely replicate CMake's FindCUDA module
+function `CUDA_SELECT_NVCC_ARCH_FLAGS(out_variable, [list of CUDA compute architectures])`
+
+
+
+### `nvcc_arch_readable()`
+_Since: 0.50.0_
+
+``` meson
+cuda.nvcc_arch_readable(nvcc_or_version, ...,
+ detected: string_or_array)
+```
+
+Has precisely the same interface as [`nvcc_arch_flags()`](#nvcc_arch_flags),
+but rather than returning a list of flags, it returns a "readable" list of
+architectures that will be compiled for. The output of this function is solely
+intended for informative message printing.
+
+ archs = '3.0 3.5 5.0+PTX'
+ readable = cuda.nvcc_arch_readable(nvcc, archs)
+ message('Building for architectures ' + ' '.join(readable))
+
+This will print
+
+ Message: Building for architectures sm30 sm35 sm50 compute50
+
+_Note:_ This function is intended to closely replicate CMake's FindCUDA module function
+`CUDA_SELECT_NVCC_ARCH_FLAGS(out_variable, [list of CUDA compute architectures])`
+
+
+
+### `min_driver_version()`
+_Since: 0.50.0_
+
+``` meson
+cuda.min_driver_version(nvcc_or_version)
+```
+
+Returns the minimum NVIDIA proprietary driver version required, on the host
+system, by kernels compiled with the given NVCC compiler or its version string.
+
+The output of this function is generally intended for informative message
+printing, but could be used for assertions or to conditionally enable
+features known to exist within the minimum NVIDIA driver required.
+
+
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index e3fedc4..47fce8b 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -148,6 +148,14 @@ it automatically.
cmake_dep = dependency('ZLIB', method : 'cmake', modules : ['ZLIB::ZLIB'])
```
+It is also possible to reuse existing `Find<name>.cmake` files with the
+`cmake_module_path` property. Using this property is equivalent to setting the
+`CMAKE_MODULE_PATH` variable in CMake. The path(s) given to `cmake_module_path`
+should all be relative to the project source directory. Absolute paths
+should only be used if the CMake files are not stored in the project itself.
+
+Additional CMake parameters can be specified with the `cmake_args` property.
+
### Some notes on Dub
Please understand that meson is only able to find dependencies that
@@ -247,6 +255,16 @@ libraries that have been compiled for single-threaded use instead.
`method` may be `auto`, `config-tool`, `pkg-config`, `cmake` or `extraframework`.
+## Fortran Coarrays
+
+*(added 0.50.0)*
+
+ Coarrays are a Fortran language intrinsic feature, enabled by
+`dependency('coarray')`.
+
+GCC will use OpenCoarrays if present to implement coarrays, while Intel and NAG
+use internal coarray support.
+
## GL
This finds the OpenGL library in a way appropriate to the platform.
@@ -269,6 +287,20 @@ e = executable('testprog', 'test.cc', dependencies : gtest_dep)
test('gtest test', e)
```
+## HDF5
+
+*(added 0.50.0)*
+
+HDF5 is supported for C, C++ and Fortran. Because dependencies are
+language-specific, you must specify the requested language using the
+`language` keyword argument, i.e.,
+ * `dependency('hdf5', language: 'c')` for the C HDF5 headers and libraries
+ * `dependency('hdf5', language: 'cpp')` for the C++ HDF5 headers and libraries
+ * `dependency('hdf5', language: 'fortran')` for the Fortran HDF5 headers and libraries
+
+Meson uses pkg-config to find HDF5. The standard low-level HDF5 functions and the `HL` high-level HDF5 functions are linked for each language.
+
+
## libwmf
*(added 0.44.0)*
@@ -322,6 +354,20 @@ are not in your path, they can be specified by setting the standard
environment variables `MPICC`, `MPICXX`, `MPIFC`, `MPIF90`, or
`MPIF77`, during configuration.
+## NetCDF
+
+*(added 0.50.0)*
+
+NetCDF is supported for C, C++ and Fortran. Because NetCDF dependencies are
+language-specific, you must specify the requested language using the
+`language` keyword argument, i.e.,
+ * `dependency('netcdf', language: 'c')` for the C NetCDF headers and libraries
+ * `dependency('netcdf', language: 'cpp')` for the C++ NetCDF headers and libraries
+ * `dependency('netcdf', language: 'fortran')` for the Fortran NetCDF headers and libraries
+
+Meson uses pkg-config to find NetCDF.
+
+
## OpenMP
*(added 0.46.0)*
@@ -400,7 +446,7 @@ include path of the given module(s) to the compiler flags. (since v0.47.0)
**Note** using private headers in your project is a bad idea, do so at your own
risk.
-`method` may be `auto`, `pkgconfig` or `qmake`.
+`method` may be `auto`, `pkg-config` or `qmake`.
## SDL2
diff --git a/docs/markdown/External-commands.md b/docs/markdown/External-commands.md
index 9336ec3..4c8c8e4 100644
--- a/docs/markdown/External-commands.md
+++ b/docs/markdown/External-commands.md
@@ -16,6 +16,14 @@ output = r.stdout().strip()
errortxt = r.stderr().strip()
```
+Additionally, since 0.50.0, you can pass the command [`environment`](Reference-manual.html#environment-object) object:
+
+```meson
+env = environment()
+env.set('FOO', 'bar')
+run_command('command', 'arg1', 'arg2', env: env)
+```
+
The `run_command` function returns an object that can be queried for
return value and text written to stdout and stderr. The `strip` method
call is used to strip trailing and leading whitespace from
diff --git a/docs/markdown/Generating-sources.md b/docs/markdown/Generating-sources.md
index 306bee3..fe7d7ef 100644
--- a/docs/markdown/Generating-sources.md
+++ b/docs/markdown/Generating-sources.md
@@ -117,7 +117,7 @@ idep_foo = declare_dependency(
```
See [dependencies](Dependencies.md#declaring-your-own), and
-[reference](Reference-manual.md#decalre_dependency) for more information.
+[reference](Reference-manual.md#declare_dependency) for more information.
## Using generator()
diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index c75392c..7bbec5d 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -4,39 +4,138 @@ short-description: Meson's API to integrate Meson support into an IDE
# IDE integration
-Meson has exporters for Visual Studio and XCode, but writing a custom backend for every IDE out there is not a scalable approach. To solve this problem, Meson provides an API that makes it easy for any IDE or build tool to integrate Meson builds and provide an experience comparable to a solution native to the IDE.
+Meson has exporters for Visual Studio and XCode, but writing a custom backend
+for every IDE out there is not a scalable approach. To solve this problem,
+Meson provides an API that makes it easy for any IDE or build tools to
+integrate Meson builds and provide an experience comparable to a solution
+native to the IDE.
-The basic tool for this is `meson introspect`.
+All the resources required for such an IDE integration can be found in
+the `meson-info` directory in the build directory.
-The first thing to do when setting up a Meson project in an IDE is to select the source and build directories. For this example we assume that the source resides in an Eclipse-like directory called `workspace/project` and the build tree is nested inside it as `workspace/project/build`. First we initialise Meson by running the following command in the source directory.
+The first thing to do when setting up a Meson project in an IDE is to select
+the source and build directories. For this example we assume that the source
+resides in an Eclipse-like directory called `workspace/project` and the build
+tree is nested inside it as `workspace/project/build`. First, we initialize
+Meson by running the following command in the source directory.
meson builddir
-For the remainder of the document we assume that all commands are executed inside the build directory unless otherwise specified.
+With this command meson will configure the project and also generate
+introspection information that is stored in `intro-*.json` files in the
+`meson-info` directory. The introspection dump will be automatically updated
+when meson is (re)configured, or the build options change. Thus, an IDE can
+watch for changes in this directory to know when something changed.
-The first thing you probably want is to get a list of top level targets. For that we use the introspection tool. It comes with extensive command line help so we recommend using that in case problems appear.
+The `meson-info` directory should contain the following files:
- meson introspect --targets
+ File | Description
+ ------------------------------- | ---------------------------------------------------------------------
+ `intro-benchmarks.json` | Lists all benchmarks
+ `intro-buildoptions.json` | Contains a full list of meson configuration options for the project
+ `intro-buildsystem_files.json` | Full list of all meson build files
+ `intro-dependencies.json` | Lists all dependencies used in the project
+ `intro-installed.json` | Contains mapping of files to their installed location
+ `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.)
+ `intro-targets.json` | Full list of all build targets
+ `intro-tests.json` | Lists all tests with instructions how to run them
-The JSON formats will not be specified in this document. The easiest way of learning them is to look at sample output from the tool.
+The content of the JSON files is further specified in the remainder of this document.
-Once you have a list of targets, you probably need the list of source files that comprise the target. To get this list for a target, say `exampletarget`, issue the following command.
+## The `targets` section
- meson introspect --target-files exampletarget
+The most important file for an IDE is probably `intro-targets.json`. Here each
+target with its sources and compiler parameters is specified. The JSON format
+for one target is defined as follows:
-In order to make code completion work, you need the compiler flags for each compilation step. Meson does not provide this itself, but the Ninja tool Meson uses to build does provide it. To find out the compile steps necessary to build target foo, issue the following command.
+```json
+{
+ "name": "Name of the target",
+ "id": "The internal ID meson uses",
+ "type": "<TYPE>",
+ "defined_in": "/Path/to/the/targets/meson.build",
+ "subproject": null,
+ "filename": ["list", "of", "generated", "files"],
+ "build_by_default": true / false,
+ "target_sources": [],
+ "installed": true / false,
+}
+```
- ninja -t commands foo
+If the key `installed` is set to `true`, the key `install_filename` will also
+be present. It stores the installation location for each file in `filename`.
+If one file in `filename` is not installed, its corresponding install location
+is set to `null`.
-Note that if the target has dependencies (such as generated sources), then the commands for those show up in this list as well, so you need to do some filtering. Alternatively you can grab every command invocation in the [Clang tools db](https://clang.llvm.org/docs/JSONCompilationDatabase.html) format that is written to a file called `compile_commands.json` in the build directory.
+The `subproject` key specifies the name of the subproject this target was
+defined in, or `null` if the target was defined in the top level project.
-## Build Options
+A target usually generates only one file. However, it is possible for custom
+targets to have multiple outputs.
+
+### Target sources
+
+The `intro-targets.json` file also stores a list of all source objects of the
+target in the `target_sources`. With this information, an IDE can provide code
+completion for all source files.
-The next thing to display is the list of options that can be set. These include build type and so on. Here's how to extract them.
+```json
+{
+ "language": "language ID",
+ "compiler": ["The", "compiler", "command"],
+ "parameters": ["list", "of", "compiler", "parameters"],
+ "sources": ["list", "of", "all", "source", "files", "for", "this", "language"],
+ "generated_sources": ["list", "of", "all", "source", "files", "that", "where", "generated", "somewhere", "else"]
+}
+```
- meson introspect --buildoptions
+It should be noted that the compiler parameters stored in the `parameters`
+differ from the actual parameters used to compile the file. This is because
+the parameters are optimized for the usage in an IDE to provide autocompletion
+support, etc. It is thus not recommended to use this introspection information
+for actual compilation.
+
+### Possible values for `type`
+
+The following table shows all valid types for a target.
+
+ value of `type` | Description
+ ---------------- | -------------------------------------------------------------------------------------------------
+ `executable` | This target will generate an executable file
+ `static library` | Target for a static library
+ `shared library` | Target for a shared library
+ `shared module` | A shared library that is meant to be used with dlopen rather than linking into something else
+ `custom` | A custom target
+ `run` | A Meson run target
+ `jar` | A Java JAR target
+
+### Using `--targets` without a build directory
+
+It is also possible to get most targets without a build directory. This can be
+done by running `meson introspect --targets /path/to/meson.build`.
+
+The generated output is similar to running the introspection with a build
+directory or reading the `intro-targets.json`. However, there are some key
+differences:
+
+- The paths in `filename` now are _relative_ to the future build directory
+- The `install_filename` key is completely missing
+- There is only one entry in `target_sources`:
+ - With the language set to `unknown`
+ - Empty lists for `compiler` and `parameters` and `generated_sources`
+ - The `sources` list _should_ contain all sources of the target
+
+There is no guarantee that the sources list in `target_sources` is correct.
+There might be differences, due to internal limitations. It is also not
+guaranteed that all targets will be listed in the output. It might even be
+possible that targets are listed, which won't exist when meson is run normally.
+This can happen if a target is defined inside an if statement.
+Use this feature with care.
-This command returns a list of all supported buildoptions with the format:
+## Build Options
+
+The list of all build options (build type, warning level, etc.) is stored in
+the `intro-buildoptions.json` file. Here is the JSON format for each option.
```json
{
@@ -56,7 +155,8 @@ The supported types are:
- integer
- array
-For the type `combo` the key `choices` is also present. Here all valid values for the option are stored.
+For the type `combo` the key `choices` is also present. Here all valid values
+for the option are stored.
The possible values for `section` are:
@@ -74,25 +174,81 @@ Since Meson 0.50.0 it is also possible to get the default buildoptions
without a build directory by providing the root `meson.build` instead of a
build directory to `meson introspect --buildoptions`.
-Running `--buildoptions` without a build directory produces the same output as running
-it with a freshly configured build directory.
+Running `--buildoptions` without a build directory produces the same output as
+running it with a freshly configured build directory.
+
+However, this behavior is not guaranteed if subprojects are present. Due to
+internal limitations all subprojects are processed even if they are never used
+in a real meson run. Because of this, options for the subprojects can differ.
-However, this behavior is not guaranteed if subprojects are present. Due to internal
-limitations all subprojects are processed even if they are never used in a real meson run.
-Because of this options for the subprojects can differ.
+## The dependencies section
+
+The list of all _found_ dependencies can be acquired from
+`intro-dependencies.json`. Here, the name, compiler and linker arguments for
+a dependency are listed.
+
+### Scanning for dependencies with `--scan-dependencies`
+
+It is also possible to get most dependencies used without a build directory.
+This can be done by running `meson introspect --scan-dependencies /path/to/meson.build`.
+
+The output format is as follows:
+
+```json
+[
+ {
+ "name": "The name of the dependency",
+ "required": true,
+ "conditional": false,
+ "has_fallback": false
+ }
+]
+```
+
+The `required` keyword specifies whether the dependency is marked as required
+in the `meson.build` (all dependencies are required by default). The
+`conditional` key indicates whether the `dependency()` function was called
+inside a conditional block. In a real meson run these dependencies might not be
+used, thus they _may_ not be required, even if the `required` key is set. The
+`has_fallback` key just indicates whether a fallback was directly set in the
+`dependency()` function.
## Tests
-Compilation and unit tests are done as usual by running the `ninja` and `ninja test` commands. A JSON formatted result log can be found in `workspace/project/builddir/meson-logs/testlog.json`.
+Compilation and unit tests are done as usual by running the `ninja` and
+`ninja test` commands. A JSON formatted result log can be found in
+`workspace/project/builddir/meson-logs/testlog.json`.
+
+When these tests fail, the user probably wants to run the failing test in a
+debugger. To make this as integrated as possible, extract the tests from the
+`intro-tests.json` and `intro-benchmarks.json` files. This provides you with
+all the information needed to run the test: what command to execute, command
+line arguments and environment variable settings.
+
+```json
+{
+ "name": "name of the test",
+ "workdir": "the working directory (can be null)",
+ "timeout": "the test timeout",
+ "suite": ["list", "of", "test", "suites"],
+ "is_parallel": true / false,
+ "cmd": ["command", "to", "run"],
+ "env": {
+ "VARIABLE1": "value 1",
+ "VARIABLE2": "value 2"
+ }
+}
+```
-When these tests fail, the user probably wants to run the failing test in a debugger. To make this as integrated as possible, extract the test test setups with this command.
+# Programmatic interface
- meson introspect --tests
+Meson also provides the `meson introspect` for project introspection via the
+command line. Use `meson introspect -h` to see all available options.
-This provides you with all the information needed to run the test: what command to execute, command line arguments and environment variable settings.
+This API can also work without a build directory for the `--projectinfo` command.
# Existing integrations
- [Gnome Builder](https://wiki.gnome.org/Apps/Builder)
- [Eclipse CDT](https://www.eclipse.org/cdt/) (experimental)
-- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs) \ No newline at end of file
+- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs)
diff --git a/docs/markdown/Native-environments.md b/docs/markdown/Native-environments.md
index a9719a7..f0d41eb 100644
--- a/docs/markdown/Native-environments.md
+++ b/docs/markdown/Native-environments.md
@@ -43,6 +43,23 @@ rust = '/usr/local/bin/rust'
llvm-config = '/usr/local/llvm-svn/bin/llvm-config'
```
+### Paths and Directories
+
+As of 0.50.0 paths and directories such as libdir can be defined in the native
+file in a paths section
+
+```ini
+[paths]
+libdir = 'mylibdir'
+prefix = '/my prefix'
+```
+
+These values will only be loaded when not cross compiling. Any arguments on the
+command line will override any options in the native file. For example, passing
+`--libdir=otherlibdir` would result in a prefix of `/my prefix` and a libdir of
+`otherlibdir`.
+
+
## Loading multiple native files
Unlike cross file, native files allow layering. More than one native file can be
diff --git a/docs/markdown/Pkgconfig-module.md b/docs/markdown/Pkgconfig-module.md
index 7e93524..da18957 100644
--- a/docs/markdown/Pkgconfig-module.md
+++ b/docs/markdown/Pkgconfig-module.md
@@ -51,7 +51,7 @@ keyword arguments.
e.g. `datadir=${prefix}/share`. The names `prefix`, `libdir` and
`installdir` are reserved and may not be used.
- `version` a string describing the version of this library, used to set the
- `Version:` field. Defaults to the project version if unspecified.
+ `Version:` field. (*since 0.46.0*) Defaults to the project version if unspecified.
- `d_module_versions` a list of module version flags used when compiling
D sources referred to by this pkg-config file
diff --git a/docs/markdown/Porting-from-autotools.md b/docs/markdown/Porting-from-autotools.md
index 5c4c35d..b60ecfe 100644
--- a/docs/markdown/Porting-from-autotools.md
+++ b/docs/markdown/Porting-from-autotools.md
@@ -450,9 +450,9 @@ AM_CPPFLAGS = \
`meson.build`:
```meson
-add_global_arguments('-DG_LOG_DOMAIN="As"', language : 'c')
-add_global_arguments('-DAS_COMPILATION', language : 'c')
-add_global_arguments('-DLOCALSTATEDIR="/var"', language : 'c')
+add_project_arguments('-DG_LOG_DOMAIN="As"', language : 'c')
+add_project_arguments('-DAS_COMPILATION', language : 'c')
+add_project_arguments('-DLOCALSTATEDIR="/var"', language : 'c')
```
### Tests
diff --git a/docs/markdown/Precompiled-headers.md b/docs/markdown/Precompiled-headers.md
index 57b2923..8dfb438 100644
--- a/docs/markdown/Precompiled-headers.md
+++ b/docs/markdown/Precompiled-headers.md
@@ -70,13 +70,16 @@ has multiple languages, you can specify multiple pch files like this.
```meson
executable('multilang', sources : srclist,
- c_pch : 'pch/c_pch.h', cpp_pch : 'pch/cpp_pch.h'])
+ c_pch : 'pch/c_pch.h', cpp_pch : 'pch/cpp_pch.h')
```
Using precompiled headers with MSVC
--
+Since Meson version 0.50.0, precompiled headers with MSVC work just like
+with GCC. Meson will automatically create the matching pch implementation
+file for you.
-MSVC is a bit trickier, because in addition to the header file, it
+Before version 0.50.0, in addition to the header file, Meson
also requires a corresponding source file. If your header is called
`foo_pch.h`, the corresponding source file is usually called
`foo_pch.cpp` and it resides in the same `pch` subdirectory as the
diff --git a/docs/markdown/Python-module.md b/docs/markdown/Python-module.md
index a50a33d..edd10ba 100644
--- a/docs/markdown/Python-module.md
+++ b/docs/markdown/Python-module.md
@@ -61,6 +61,17 @@ added methods.
### Methods
+#### `path()`
+
+```meson
+str py_installation.path()
+```
+
+*Added 0.50.0*
+
+Works like the path method of other `ExternalProgram` objects. Was not
+provided prior to 0.50.0 due to a bug.
+
#### `extension_module()`
``` meson
@@ -220,7 +231,7 @@ It exposes the same methods as its parent class.
[shared_module]: Reference-manual.md#shared_module
[external program]: Reference-manual.md#external-program-object
[dependency]: Reference-manual.md#dependency
-[install_data]: Reference-manual.md#install-data
-[configure_file]: Reference-manual.md#configure-file
+[install_data]: Reference-manual.md#install_data
+[configure_file]: Reference-manual.md#configure_file
[dependency object]: Reference-manual.md#dependency-object
[buildtarget object]: Reference-manual.md#build-target-object
diff --git a/docs/markdown/Quick-guide.md b/docs/markdown/Quick-guide.md
index 549dcfc..8c6da69 100644
--- a/docs/markdown/Quick-guide.md
+++ b/docs/markdown/Quick-guide.md
@@ -82,7 +82,7 @@ build and install Meson projects are the following.
```console
$ cd /path/to/source/root
-$ CFLAGS=... CXXFLAGS=... LDFLAGS=.. meson --prefix /usr --buildtype=plain builddir
+$ meson --prefix /usr --buildtype=plain builddir -Dc_args=... -Dcpp_args=... -Dc_link_args=... -Dcpp_link_args=...
$ ninja -v -C builddir
$ ninja -C builddir test
$ DESTDIR=/path/to/staging/root ninja -C builddir install
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index 0ddd4a9..7668fa0 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -231,6 +231,11 @@ the `@variable@` syntax.
- `input` the input file name. If it's not specified in configuration
mode, all the variables in the `configuration:` object (see above)
are written to the `output:` file.
+- `install` *(added 0.50.0)* When true, this generated file is installed during
+the install step, and `install_dir` must be set and not empty. When false, this
+generated file is not installed regardless of the value of `install_dir`.
+When omitted it defaults to true when `install_dir` is set and not empty,
+false otherwise.
- `install_dir` the subdirectory to install the generated file to
(e.g. `share/myproject`), if omitted or given the value of empty
string, the file is not installed.
@@ -261,6 +266,9 @@ following.
- `build_by_default` *(added 0.38)* causes, when set to true, to
have this target be built by default, that is, when invoking plain
`ninja`; the default value is false
+ *(changed in 0.50)* if `build_by_default` is explicitly set to false, `install`
+ will no longer override it. If `build_by_default` is not set, `install` will
+ still determine its default.
- `build_always` (deprecated) if `true` this target is always considered out of
date and is rebuilt every time. Equivalent to setting both
`build_always_stale` and `build_by_default` to true.
@@ -565,6 +573,8 @@ be passed to [shared and static libraries](#library).
- `d_module_versions` list of module version identifiers set when compiling D sources
- `d_debug` list of module debug identifiers set when compiling D sources
- `pie` *(added 0.49.0)* build a position-independent executable
+- `native`, is a boolean controlling whether the target is compiled for the
+ build or host machines. Defaults to false, building for the host machine.
The list of `sources`, `objects`, and `dependencies` is always
flattened, which means you can freely nest and add lists while
@@ -1107,8 +1117,8 @@ This function prints its argument to stdout prefixed with WARNING:.
The first argument to this function must be a string defining the name
of this project. It is followed by programming languages that the
project uses. Supported values for languages are `c`, `cpp` (for
-`C++`), `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`) and
-`vala`. In versions before `0.40.0` you must have at least one
+`C++`), `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`),
+`vala` and `rust`. In versions before `0.40.0` you must have at least one
language listed.
The project name can be any string you want, it's not used for
@@ -1173,12 +1183,14 @@ and Meson will set three environment variables `MESON_SOURCE_ROOT`,
directory, build directory and subdirectory the target was defined in,
respectively.
-This function has one keyword argument.
+This function supports the following keyword arguments:
- `check` takes a boolean. If `true`, the exit status code of the command will
be checked, and the configuration will fail if it is non-zero. The default is
`false`.
Since 0.47.0
+ - `env` an [environment object](#environment-object) to use a custom environment
+ Since 0.50.0
See also [External commands](External-commands.md).
@@ -1394,10 +1406,7 @@ executable to run. The executable can be an [executable build target
object](#build-target-object) returned by
[`executable()`](#executable) or an [external program
object](#external-program-object) returned by
-[`find_program()`](#find_program). The executable's exit code is used
-by the test harness to record the outcome of the test, for example
-exit code zero indicates success. For more on the Meson test harness
-protocol read [Unit Tests](Unit-tests.md).
+[`find_program()`](#find_program).
Keyword arguments are the following:
@@ -1434,6 +1443,12 @@ Keyword arguments are the following:
before test is executed even if they have `build_by_default : false`.
Since 0.46.0
+- `protocol` specifies how the test results are parsed and can be one
+ of `exitcode` (the executable's exit code is used by the test harness
+ to record the outcome of the test) or `tap` ([Test Anything
+ Protocol](https://www.testanything.org/)). For more on the Meson test
+ harness protocol read [Unit Tests](Unit-tests.md). Since 0.50.0
+
Defined tests can be run in a backend-agnostic way by calling
`meson test` inside the build dir, or by using backend-specific
commands, such as `ninja test` or `msbuild RUN_TESTS.vcxproj`.
@@ -1709,7 +1724,9 @@ the following methods:
instead of a not-found dependency. *Since 0.50.0* the `has_headers` keyword
argument can be a list of header files that must be found as well, using
`has_header()` method. All keyword arguments prefixed with `header_` will be
- passed down to `has_header()` method with the prefix removed.
+ passed down to `has_header()` method with the prefix removed. *Since 0.51.0*
+ the `static` keyword (boolean) can be set to `true` to limit the search to
+ static libraries and `false` for dynamic/shared.
- `first_supported_argument(list_of_strings)`, given a list of
strings, returns the first argument that passes the `has_argument`
@@ -2007,11 +2024,13 @@ A build target is either an [executable](#executable),
previous versions. The default will eventually be changed to `true`
in a future version.
-- `extract_objects()` returns an opaque value representing the
- generated object files of arguments, usually used to take single
- object files and link them to unit tests or to compile some source
- files with custom flags. To use the object file(s) in another build
- target, use the `objects:` keyword argument.
+- `extract_objects(source1, source2, ...)` takes as its arguments
+ a number of source files as [`string`](#string-object) or
+ [`files()`](#files) and returns an opaque value representing the
+ object files generated for those source files. This is typically used
+ to take single object files and link them to unit tests or to compile
+ some source files with custom flags. To use the object file(s)
+ in another build target, use the `objects:` keyword argument.
- `full_path()` returns a full path pointing to the result target file.
NOTE: In most cases using the object itself will do the same job as
@@ -2121,8 +2140,8 @@ an external dependency with the following methods:
partial dependency with the same rules. So , given:
```meson
- dep1 = declare_dependency(compiler_args : '-Werror=foo', link_with : 'libfoo')
- dep2 = declare_dependency(compiler_args : '-Werror=bar', dependencies : dep1)
+ dep1 = declare_dependency(compile_args : '-Werror=foo', link_with : 'libfoo')
+ dep2 = declare_dependency(compile_args : '-Werror=bar', dependencies : dep1)
dep3 = dep2.partial_dependency(compile_args : true)
```
@@ -2165,7 +2184,7 @@ and has the following methods:
This object is returned by [`environment()`](#environment) and stores
detailed information about how environment variables should be set
during tests. It should be passed as the `env` keyword argument to
-tests. It has the following methods.
+tests and other functions. It has the following methods.
- `append(varname, value1, value2, ...)` appends the given values to
the old value of the environment variable, e.g. `env.append('FOO',
@@ -2242,7 +2261,8 @@ sample piece of code with [`compiler.run()`](#compiler-object) or
[`run_command()`](#run_command). It has the following methods:
- `compiled()` if true, the compilation succeeded, if false it did not
- and the other methods return unspecified data
+ and the other methods return unspecified data. This is only available
+ for `compiler.run()` results.
- `returncode()` the return code of executing the compiled binary
- `stderr()` the standard error produced when the command was run
- `stdout()` the standard out produced when the command was run
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index fa913f5..d3a6815 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -13,6 +13,7 @@ These are return values of the `get_id` (Compiler family) and
| clang | The Clang compiler | gcc |
| clang-cl | The Clang compiler (MSVC compatible driver) | msvc |
| dmd | D lang reference compiler | |
+| flang | Flang Fortran compiler | |
| g95 | The G95 Fortran compiler | |
| gcc | The GNU Compiler Collection | gcc |
| intel | Intel compiler | msvc on windows, otherwise gcc |
@@ -23,7 +24,7 @@ These are return values of the `get_id` (Compiler family) and
| nagfor | The NAG Fortran compiler | |
| open64 | The Open64 Fortran Compiler | |
| pathscale | The Pathscale Fortran compiler | |
-| pgi | The Portland Fortran compiler | |
+| pgi | Portland PGI C/C++/Fortran compilers | |
| rustc | Rust compiler | |
| sun | Sun Fortran compiler | |
| valac | Vala compiler | |
@@ -99,24 +100,30 @@ future releases.
These are the parameter names for passing language specific arguments to your build target.
-| Language | Parameter name |
-| ----- | ----- |
-| C | c_args |
-| C++ | cpp_args |
-| C# | cs_args |
-| D | d_args |
-| Fortran | fortran_args |
-| Java | java_args |
-| Objective C | objc_args |
-| Objective C++ | objcpp_args |
-| Rust | rust_args |
-| Vala | vala_args |
+| Language | compiler name | linker name |
+| ------------- | ------------- | ----------------- |
+| C | c_args | c_link_args |
+| C++ | cpp_args | cpp_link_args |
+| C# | cs_args | cs_link_args |
+| D | d_args | d_link_args |
+| Fortran | fortran_args | fortran_link_args |
+| Java | java_args | java_link_args |
+| Objective C | objc_args | objc_link_args |
+| Objective C++ | objcpp_args | objcpp_link_args |
+| Rust | rust_args | rust_link_args |
+| Vala | vala_args | vala_link_args |
## Compiler and linker flag environment variables
These environment variables will be used to modify the compiler and
linker flags.
+It is recommended that you **do not use these**. They are provided purely
+for backwards compatibility with other build systems. There are many caveats to
+their use, especially when rebuilding the project. It is **highly** recommended
+that you use [the command line arguments](#language-arguments-parameters-names)
+instead.
+
| Name | Comment |
| ----- | ------- |
| CFLAGS | Flags for the C compiler |
@@ -189,3 +196,18 @@ which are supported by MSVC, GCC, Clang, and other compilers.
|----------------------|
| dllexport |
| dllimport |
+
+
+## Dependency lookup methods
+
+These are the values that can be passed to `dependency` function's
+`method` keyword argument.
+
+| Name | Comment |
+| ----- | ------- |
+| auto | Automatic method selection |
+| pkg-config | Use Pkg-Config |
+| cmake | Look up as a CMake module |
+| config-tool | Use a custom dep tool such as `cups-config` |
+| system | System provided (e.g. OpenGL) |
+| extraframework | A macOS/iOS framework |
diff --git a/docs/markdown/Release-notes-for-0.49.0.md b/docs/markdown/Release-notes-for-0.49.0.md
index 4ed4be2..9889a39 100644
--- a/docs/markdown/Release-notes-for-0.49.0.md
+++ b/docs/markdown/Release-notes-for-0.49.0.md
@@ -3,6 +3,8 @@ title: Release 0.49
short-description: Release notes for 0.49
...
+# New features
+
## Libgcrypt dependency now supports libgcrypt-config
Earlier, `dependency('libgcrypt')` could only detect the library with pkg-config
diff --git a/docs/markdown/Release-notes-for-0.50.0.md b/docs/markdown/Release-notes-for-0.50.0.md
index cb4fe0d..44e8573 100644
--- a/docs/markdown/Release-notes-for-0.50.0.md
+++ b/docs/markdown/Release-notes-for-0.50.0.md
@@ -1,17 +1,336 @@
---
-title: Release 0.50
-short-description: Release notes for 0.49 (preliminary)
+title: Release 0.50.0
+short-description: Release notes for 0.50.0
...
# New features
-This page is a placeholder for the eventual release notes.
+## Added `cmake_module_path` and `cmake_args` to dependency
-Notable new features should come with release note updates. This is
-done by creating a file snippet called `snippets/featurename.md` and
-whose contents should look like this:
+The CMake dependency backend can now make use of existing `Find<name>.cmake`
+files by setting the `CMAKE_MODULE_PATH` with the new `dependency()` property
+`cmake_module_path`. The paths given to `cmake_module_path` should be relative
+to the project source directory.
- ## Feature name
+Furthermore the property `cmake_args` was added to give CMake additional
+parameters.
- A short description explaining the new feature and how it should be used.
+## Added PGI compiler support
+Nvidia / PGI C, C++ and Fortran
+[no-cost](https://www.pgroup.com/products/community.htm) compilers are
+now supported. They have been tested on Linux so far.
+
+
+
+## Fortran Coarray
+
+Fortran 2008 / 2018 coarray support was added via `dependency('coarray')`
+
+## Libdir defaults to `lib` when cross compiling
+
+Previously `libdir` defaulted to the value of the build machine such
+as `lib/x86_64-linux-gnu`, which is almost always incorrect when cross
+compiling. It now defaults to plain `lib` when cross compiling. Native
+builds remain unchanged and will point to the current system's library
+dir.
+
+## Native and Cross File Paths and Directories
+
+A new `[paths]` section has been added to native and cross files. This
+can be used to set paths such as prefix and libdir in a persistent way.
+
+## Add warning_level 0 option
+
+Adds support for a warning level 0 which does not enable any static
+analysis checks from the compiler
+
+## A builtin target to run clang-format
+
+If you have `clang-format` installed and there is a `.clang-format`
+file in the root of your master project, Meson will generate a run
+target called `clang-format` so you can reformat all files with one
+command:
+
+```meson
+ninja clang-format
+```
+
+
+## Added `.path()` method to object returned by `python.find_installation()`
+
+`ExternalProgram` objects as well as the object returned by the
+`python3` module provide this method, but the new `python` module did
+not.
+
+## Fix ninja console log from generators with multiple output nodes
+
+This resolves [issue #4760](https://github.com/mesonbuild/meson/issues/4760)
+where a generator with multiple output nodes printed an empty string to the console
+
+## `introspect --buildoptions` can now be used without configured build directory
+
+It is now possible to run `meson introspect --buildoptions /path/to/meson.build`
+without a configured build directory.
+
+Running `--buildoptions` without a build directory produces the same
+output as running it with a freshly configured build directory.
+
+However, this behavior is not guaranteed if subprojects are
+present. Due to internal limitations all subprojects are processed
+even if they are never used in a real meson run. Because of this,
+options for the subprojects can differ.
+
+## `include_directories` accepts a string
+
+The `include_directories` keyword argument now accepts plain strings
+rather than an include directory object. Meson will transparently
+expand it so that a declaration like this:
+
+```meson
+executable(..., include_directories: 'foo')
+```
+
+Is equivalent to this:
+
+```meson
+foo_inc = include_directories('foo')
+executable(..., include_directories: foo_inc)
+```
+
+## Fortran submodule support
+
+Initial support for Fortran `submodule` was added, where the submodule is in
+the same or different file than the parent `module`.
+The submodule hierarchy specified in the source Fortran code `submodule`
+statements are used by Meson to resolve source file dependencies.
+For example:
+
+```fortran
+submodule (ancestor:parent) child
+```
+
+
+## Add `subproject_dir` to `--projectinfo` introspection output
+
+This allows applications interfacing with Meson (such as IDEs) to know about
+an overridden subproject directory.
+
+## Find library with its headers
+
+The `find_library()` method can now also verify if the library's headers are
+found in a single call, using the `has_header()` method internally.
+
+```meson
+# Aborts if the 'z' library is found but not its header file
+zlib = find_library('z', has_headers : 'zlib.h')
+# Returns not-found if the 'z' library is found but not its header file
+zlib = find_library('z', has_headers : 'zlib.h', required : false)
+```
+
+Any keyword argument with the `header_` prefix passed to `find_library()` will
+be passed to the `has_header()` method with the prefix removed.
+
+```meson
+libfoo = find_library('foo',
+ has_headers : ['foo.h', 'bar.h'],
+ header_prefix : '#include <baz.h>',
+ header_include_directories : include_directories('.'))
+```
+
+## NetCDF
+
+NetCDF support for C, C++ and Fortran is added via pkg-config.
+
+## Added the Flang compiler
+
+[Flang](https://github.com/flang-compiler/flang/releases) Fortran
+compiler support was added. As with other Fortran compilers, flang is
+specified using `FC=flang meson ..` or similar.
+
+## New `not_found_message` for `dependency()`
+
+You can now specify a `not_found_message` that will be printed if the
+specified dependency was not found. The point is to convert constructs
+that look like this:
+
+```meson
+d = dependency('something', required: false)
+if not d.found()
+ message('Will not be able to do something.')
+endif
+```
+
+Into this:
+
+```meson
+d = dependency('something',
+ required: false,
+ not_found_message: 'Will not be able to do something.')
+```
+
+Or constructs like this:
+
+```meson
+d = dependency('something', required: false)
+if not d.found()
+ error('Install something by doing XYZ.')
+endif
+```
+
+into this:
+
+```meson
+d = dependency('something',
+ not_found_message: 'Install something by doing XYZ.')
+```
+
+Which works, because the default value of `required` is `true`.
+
+## Cuda support
+
+Compiling Cuda source code is now supported, though only with the
+Ninja backend. This has been tested only on Linux for now.
+
+Because NVidia's Cuda compiler does not produce `.d` dependency files,
+dependency tracking does not work.
+
+## `run_command()` accepts `env` kwarg
+
+You can pass [`environment`](Reference-manual.html#environment-object)
+object to [`run_command`](Reference-manual.html#run-command), just
+like to `test`:
+
+```meson
+env = environment()
+env.set('FOO', 'bar')
+run_command('command', 'arg1', 'arg2', env: env)
+```
+
+## `extract_objects:` accepts `File` arguments
+
+The `extract_objects` function now supports File objects to tell it
+what to extract. Previously, file paths could only be passed as strings.
+
+## Changed the JSON format of the introspection
+
+All paths used in the meson introspection JSON format are now absolute. This
+affects the `filename` key in the targets introspection and the output of
+`--buildsystem-files`.
+
+Furthermore, the `filename` and `install_filename` keys in the targets
+introspection are now lists of strings with identical length.
+
+The `--target-files` option is now deprecated, since the same information
+can be acquired from the `--targets` introspection API.
+
+## Meson file rewriter
+
+This release adds the functionality to perform some basic modification
+on the `meson.build` files from the command line. The currently
+supported operations are:
+
+- For build targets:
+ - Add/Remove source files
+ - Add/Remove targets
+ - Modify a select set of kwargs
+ - Print some JSON information
+- For dependencies:
+ - Modify a select set of kwargs
+- For the project function:
+ - Modify a select set of kwargs
+ - Modify the default options list
+
+For more information see the rewriter documentation.
+
+## `introspect --scan-dependencies` can now be used to scan for dependencies used in a project
+
+It is now possible to run `meson introspect --scan-dependencies
+/path/to/meson.build` without a configured build directory to scan for
+dependencies.
+
+The output format is as follows:
+
+```json
+[
+ {
+ "name": "The name of the dependency",
+ "required": true,
+ "conditional": false,
+ "has_fallback": false
+ }
+]
+```
+
+The `required` keyword specifies whether the dependency is marked as required
+in the `meson.build` (all dependencies are required by default). The
+`conditional` key indicates whether the `dependency()` function was called
+inside a conditional block. In a real meson run these dependencies might not be
+used, thus they _may_ not be required, even if the `required` key is set. The
+`has_fallback` key just indicates whether a fallback was directly set in the
+`dependency()` function.
+
+## `introspect --targets` can now be used without configured build directory
+
+It is now possible to run `meson introspect --targets /path/to/meson.build`
+without a configured build directory.
+
+The generated output is similar to running the introspection with a build
+directory. However, there are some key differences:
+
+- The paths in `filename` now are _relative_ to the future build directory
+- The `install_filename` key is completely missing
+- There is only one entry in `target_sources`:
+ - With the language set to `unknown`
+ - Empty lists for `compiler` and `parameters` and `generated_sources`
+ - The `sources` list _should_ contain all sources of the target
+
+There is no guarantee that the sources list in `target_sources` is correct.
+There might be differences, due to internal limitations. It is also not
+guaranteed that all targets will be listed in the output. It might even be
+possible that targets are listed, which won't exist when meson is run normally.
+This can happen if a target is defined inside an if statement.
+Use this feature with care.
+
+## Added option to introspect multiple parameters at once
+
+Meson introspect can now print the results of multiple introspection
+commands in a single call. The results are then printed as a single JSON
+object.
+
+The format for a single command was not changed to keep backward
+compatibility.
+
+Furthermore the option `-a,--all`, `-i,--indent` and `-f,--force-object-output`
+were added to print all introspection information in one go, format the
+JSON output (the default is still compact JSON) and force use the new
+output format, even if only one introspection command was given.
+
+A complete introspection dump is also stored in the `meson-info`
+directory. This dump will be (re)generated each time meson updates the
+configuration of the build directory.
+
+Additionally the format of `meson introspect target` was changed:
+
+ - New: the `sources` key. It stores the source files of a target and their compiler parameters.
+ - New: the `defined_in` key. It stores the meson file where a target is defined
+ - New: the `subproject` key. It stores the name of the subproject where a target is defined.
+ - Added new target types (`jar`, `shared module`).
+
+## `meson configure` can now print the default options of an unconfigured project
+
+With this release, it is also possible to get a list of all build options
+by invoking `meson configure` with the project source directory or
+the path to the root `meson.build`. In this case, meson will print the
+default values of all options.
+
+## HDF5
+
+HDF5 support is added via pkg-config.
+
+## Added the `meson-info.json` introspection file
+
+Meson now generates a `meson-info.json` file in the `meson-info` directory
+to provide introspection information about the latest meson run. This file
+is updated when the build configuration is changed and the build files are
+(re)generated.
diff --git a/docs/markdown/Rewriter.md b/docs/markdown/Rewriter.md
new file mode 100644
index 0000000..b6301d6
--- /dev/null
+++ b/docs/markdown/Rewriter.md
@@ -0,0 +1,236 @@
+---
+short-description: Automatic modification of the build system files
+...
+
+# Meson file rewriter
+
+Since version 0.50.0, meson has the functionality to perform some basic
+modification on the `meson.build` files from the command line. The currently
+supported operations are:
+
+- For build targets:
+ - Add/Remove source files
+ - Add/Remove targets
+ - Modify a select set of kwargs
+ - Print some JSON information
+- For dependencies:
+ - Modify a select set of kwargs
+- For the project function:
+ - Modify a select set of kwargs
+ - Modify the default options list
+
+The rewriter has both a normal command line interface and a "script mode". The
+normal CLI is mostly designed for everyday use. The "script mode", on the
+other hand, is meant to be used by external programs (IDEs, graphical
+frontends, etc.)
+
+The rewriter itself is considered stable, however the user interface and the
+"script mode" API might change in the future. These changes may also break
+backwards compatibility to older releases.
+
+We are also open to suggestions for API improvements.
+
+## Using the rewriter
+
+All rewriter functions are accessed via `meson rewrite`. The meson rewriter
+assumes that it is run inside the project root directory. If this isn't the
+case, use `--sourcedir` to specify the actual project source directory.
+
+### Adding and removing sources
+
+The most common operations will probably be the adding and removing of source
+files to a build target. This can be easily done with:
+
+```bash
+meson rewrite target <target name/id> {add/rm} [list of sources]
+```
+
+For instance, given the following example
+
+```meson
+src = ['main.cpp', 'fileA.cpp']
+
+exe1 = executable('testExe', src)
+```
+
+the source `fileB.cpp` can be added with:
+
+```bash
+meson rewrite target testExe add fileB.cpp
+```
+
+After executing this command, the new `meson.build` will look like this:
+
+```meson
+src = ['main.cpp', 'fileA.cpp', 'fileB.cpp']
+
+exe1 = executable('testExe', src)
+```
+
+In this case, `exe1` could also have been used for the target name. This is
+possible because the rewriter also searches for assignments and unique meson
+IDs, which can be acquired with introspection. If there are multiple targets
+with the same name, meson will do nothing and print an error message.
+
+For more information see the help output of the rewriter target command.
+
+### Setting the project version
+
+It is also possible to set kwargs of specific functions with the rewriter. The
+general command for setting or removing kwargs is:
+
+```bash
+meson rewrite kwargs {set/delete} <function type> <function ID> <key1> <value1> <key2> <value2> ...
+```
+
+For instance, setting the project version can be achieved with this command:
+
+```bash
+meson rewrite kwargs set project / version 1.0.0
+```
+
+Currently, only the following function types are supported:
+
+- dependency
+- target (any build target, the function ID is the target name/ID)
+- project (the function ID must be `/` since project() can only be called once)
+
+For more information see the help output of the rewriter kwargs command.
+
+### Setting the project default options
+
+For setting and deleting default options, use the following command:
+
+```bash
+meson rewrite default-options {set/delete} <opt1> <value1> <opt2> <value2> ...
+```
+
+## Limitations
+
+Rewriting a meson file is not guaranteed to keep the indentation of the modified
+functions. Additionally, comments inside a modified statement will be removed.
+Furthermore, all source files will be sorted alphabetically.
+
+For instance adding `e.c` to srcs in the following code
+
+```meson
+# Important comment
+
+srcs = [
+'a.c', 'c.c', 'f.c',
+# something important about b
+ 'b.c', 'd.c', 'g.c'
+]
+
+# COMMENT
+```
+
+would result in the following code:
+
+```meson
+# Important comment
+
+srcs = [
+ 'a.c',
+ 'b.c',
+ 'c.c',
+ 'd.c',
+ 'e.c',
+ 'f.c',
+ 'g.c'
+]
+
+# COMMENT
+```
+
+## Using the "script mode"
+
+The "script mode" should be the preferred API for third party programs, since
+it offers more flexibility and higher API stability. The "scripts" are stored
+in JSON format and executed with `meson rewrite command <JSON file or string>`.
+
+The JSON format is defined as follows:
+
+```json
+[
+ {
+ "type": "function to execute",
+ ...
+ }, {
+ "type": "other function",
+ ...
+ },
+ ...
+]
+```
+
+Each object in the main array must have a `type` entry which specifies which
+function should be executed.
+
+Currently, the following functions are supported:
+
+- target
+- kwargs
+- default_options
+
+### Target modification format
+
+The format for the type `target` is defined as follows:
+
+```json
+{
+ "type": "target",
+ "target": "target ID/name/assignment variable",
+ "operation": "one of ['src_add', 'src_rm', 'target_rm', 'target_add', 'info']",
+ "sources": ["list", "of", "source", "files", "to", "add, remove"],
+  "subdir": "subdir where the new target should be added (only has an effect for operation 'target_add')",
+  "target_type": "function name of the new target -- same as in the CLI (only has an effect for operation 'target_add')"
+}
+```
+
+The keys `sources`, `subdir` and `target_type` are optional.
+
+### kwargs modification format
+
+The format for the type `kwargs` is defined as follows:
+
+```json
+{
+ "type": "kwargs",
+ "function": "one of ['dependency', 'target', 'project']",
+ "id": "function ID",
+ "operation": "one of ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']",
+ "kwargs": {
+ "key1": "value1",
+ "key2": "value2",
+ ...
+ }
+}
+```
+
+### Default options modification format
+
+The format for the type `default_options` is defined as follows:
+
+```json
+{
+ "type": "default_options",
+ "operation": "one of ['set', 'delete']",
+ "options": {
+ "opt1": "value1",
+ "opt2": "value2",
+ ...
+ }
+}
+```
+
+For operation `delete`, the values of the `options` can be anything (including `null`)
+
+## Extracting information
+
+The rewriter also offers operation `info` for the types `target` and `kwargs`.
+When this operation is used, meson will print a JSON dump to stderr, containing
+all available information to the rewriter about the build target / function
+kwargs in question.
+
+The output format is currently experimental and may change in the future.
diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md
index 426e87d..910513c 100644
--- a/docs/markdown/Running-Meson.md
+++ b/docs/markdown/Running-Meson.md
@@ -4,49 +4,45 @@ short-description: Building a project with Meson
# Running Meson
-There are two different ways of invoking Meson. First, you can run it
-directly from the source tree with the command
-`/path/to/source/meson.py`. Meson may also be installed in which case
-the command is simply `meson`. In this manual we only use the latter
-format for simplicity.
+There are two different ways of invoking Meson. First, you can run it directly
+from the source tree with the command `/path/to/source/meson.py`. Meson may
+also be installed in which case the command is simply `meson`. In this manual
+we only use the latter format for simplicity.
-Additionally, the invocation can pass options to meson.
-The list of options is documented [here](Builtin-options.md).
+Additionally, the invocation can pass options to meson. The list of options is
+documented [here](Builtin-options.md).
-At the time of writing only a command line version of Meson is
-available. This means that Meson must be invoked using the
-terminal. If you wish to use the MSVC compiler, you need to run Meson
-under "Visual Studio command prompt".
+At the time of writing only a command line version of Meson is available. This
+means that Meson must be invoked using the terminal. If you wish to use the
+MSVC compiler, you need to run Meson under "Visual Studio command prompt".
-Configuring the source
-==
+## Configuring the build directory
-Let us assume that we have a source tree that has a Meson build
-system. This means that at the topmost directory has a file called
-`meson.build`. We run the following commands to get the build started.
+Let us assume that we have a source tree that has a Meson build system. This
+means that the topmost directory has a file called `meson.build`. We run the
+following commands to get the build started.
+```sh
+cd /path/to/source/root
+meson setup builddir
+```
- cd /path/to/source/root
- mkdir builddir
- cd builddir
- meson ..
+We invoke Meson with the `setup` command, giving it the location of the build
+directory. Meson uses [out of source
+builds](http://voices.canonical.com/jussi.pakkanen/2013/04/16/why-you-should-consider-using-separate-build-directories/).
-First we create a directory to hold all files generated during the
-build. Then we go into it and invoke Meson, giving it the location of
-the source root.
+Hint: The syntax of meson is `meson [command] [arguments] [options]`. The
+`setup` command takes a `builddir` and a `srcdir` argument. If no `srcdir` is
+given Meson will deduce the `srcdir` based on `pwd` and the location of
+`meson.build`.
-Hint: The syntax of meson is `meson [options] [srcdir] [builddir]`,
-but you may omit either `srcdir` or `builddir`. Meson will deduce the
-`srcdir` by the location of `meson.build`. The other one will be your
-`pwd`.
+Meson then loads the build configuration file and writes the corresponding
+build backend in the build directory. By default Meson generates a *debug
+build*, which turns on basic warnings and debug information and disables
+compiler optimizations.
-Meson then loads the build configuration file and writes the
-corresponding build backend in the build directory. By default Meson
-generates a *debug build*, which turns on basic warnings and debug
-information and disables compiler optimizations.
-
-You can specify a different type of build with the `--buildtype`
-command line argument. It can have one of the following values.
+You can specify a different type of build with the `--buildtype` command line
+argument. It can have one of the following values.
| value | meaning |
| ------ | -------- |
@@ -55,122 +51,123 @@ command line argument. It can have one of the following values.
| `debugoptimized` | debug info is generated and the code is optimized (on most compilers this means `-g -O2`) |
| `release` | full optimization, no debug info |
-The build directory is mandatory. The reason for this is that it
-simplifies the build process immensely. Meson will not under any
-circumstances write files inside the source directory (if it does, it
-is a bug and should be fixed). This means that the user does not need
-to add a bunch of files to their revision control's ignore list. It
-also means that you can create arbitrarily many build directories for
-any given source tree. If we wanted to test building the source code
-with the Clang compiler instead of the system default, we could just
-type the following commands.
-
- cd /path/to/source/root
- mkdir buildclang
- cd buildclang
- CC=clang CXX=clang++ meson ..
-
-This separation is even more powerful if your code has multiple
-configuration options (such as multiple data backends). You can create
-a separate subdirectory for each of them. You can also have build
-directories for optimized builds, code coverage, static analysis and
-so on. They are all neatly separated and use the same source
-tree. Changing between different configurations is just a question of
-changing to the corresponding directory.
-
-Unless otherwise mentioned, all following command line invocations are
-meant to be run in the build directory.
-
-By default Meson will use the Ninja backend to build your project. If
-you wish to use any of the other backends, you need to pass the
-corresponding argument during configuration time. As an example, here
-is how you would use Meson to generate a Visual studio solution.
-
- meson <source dir> <build dir> --backend=vs2010
-
-You can then open the generated solution with Visual Studio and
-compile it in the usual way. A list of backends can be obtained with
-`meson --help`.
-
-Environment Variables
---
-
-Sometimes you want to add extra compiler flags, this can be done by
-passing them in environment variables when calling meson. See [the
-reference
-tables](Reference-tables.md#compiler-and-linker-flag-envrionment-variables)
-for a list of all the environment variables. Be aware however these
-environment variables are only used for the native compiler and will
-not affect the compiler used for cross-compiling, where the flags
-specified in the cross file will be used.
-
-Furthermore it is possible to stop meson from adding flags itself by
-using the `--buildtype=plain` option, in this case you must provide
-the full compiler and linker arguments needed.
-
-Building the source
-==
+The build directory is mandatory. The reason for this is that it simplifies the
+build process immensely. Meson will not under any circumstances write files
+inside the source directory (if it does, it is a bug and should be fixed). This
+means that the user does not need to add a bunch of files to their revision
+control's ignore list. It also means that you can create arbitrarily many build
+directories for any given source tree.
+
+For example, if we wanted to test building the source code with the Clang
+compiler instead of the system default, we could just type the following
+commands:
+
+```sh
+cd /path/to/source/root
+CC=clang CXX=clang++ meson setup buildclang
+```
+
+This separation is even more powerful if your code has multiple configuration
+options (such as multiple data backends). You can create a separate
+subdirectory for each of them. You can also have build directories for
+optimized builds, code coverage, static analysis and so on. They are all neatly
+separated and use the same source tree. Changing between different
+configurations is just a question of changing to the corresponding directory.
+
+Unless otherwise mentioned, all following command line invocations are meant to
+be run in the source directory.
+
+By default Meson will use the Ninja backend to build your project. If you wish
+to use any of the other backends, you need to pass the corresponding argument
+during configuration time. As an example, here is how you would use Meson to
+generate a Visual studio solution.
+
+```sh
+meson setup <build dir> --backend=vs2010
+```
+
+You can then open the generated solution with Visual Studio and compile it in
+the usual way. A list of backends can be obtained with `meson setup --help`.
+
+## Environment variables
+
+Sometimes you want to add extra compiler flags, this can be done by passing
+them in environment variables when calling meson. See [the reference
+tables](Reference-tables.md#compiler-and-linker-flag-envrionment-variables) for
+a list of all the environment variables. Be aware however these environment
+variables are only used for the native compiler and will not affect the
+compiler used for cross-compiling, where the flags specified in the cross file
+will be used.
+
+Furthermore it is possible to stop meson from adding flags itself by using the
+`--buildtype=plain` option, in this case you must provide the full compiler and
+linker arguments needed.
+
+## Building from the source
If you are not using an IDE, Meson uses the [Ninja build
-system](https://ninja-build.org/) to actually build the code. To start
-the build, simply type the following command.
+system](https://ninja-build.org/) to actually build the code. To start the
+build, simply type the following command.
- ninja
+```sh
+ninja -C builddir
+```
-The main usability difference between Ninja and Make is that Ninja
-will automatically detect the number of CPUs in your computer and
-parallelize itself accordingly. You can override the amount of
-parallel processes used with the command line argument `-j <num
-processes>`.
+The main usability difference between Ninja and Make is that Ninja will
+automatically detect the number of CPUs in your computer and parallelize itself
+accordingly. You can override the amount of parallel processes used with the
+command line argument `-j <num processes>`.
-It should be noted that after the initial configure step `ninja` is
-the only command you ever need to type to compile. No matter how you
-alter your source tree (short of moving it to a completely new
-location), Meson will detect the changes and regenerate itself
-accordingly. This is especially handy if you have multiple build
-directories. Often one of them is used for development (the "debug"
-build) and others only every now and then (such as a "static analysis"
-build). Any configuration can be built just by `cd`'ing to the
-corresponding directory and running Ninja.
+It should be noted that after the initial configure step `ninja` is the only
+command you ever need to type to compile. No matter how you alter your source
+tree (short of moving it to a completely new location), Meson will detect the
+changes and regenerate itself accordingly. This is especially handy if you have
+multiple build directories. Often one of them is used for development (the
+"debug" build) and others only every now and then (such as a "static analysis"
+build). Any configuration can be built just by `cd`'ing to the corresponding
+directory and running Ninja.
-Running tests
-==
+## Running tests
-Meson provides native support for running tests. The command to do
-that is simple.
+Meson provides native support for running tests. The command to do that is
+simple.
- ninja test
+```sh
+ninja -C builddir test
+```
-Meson does not force the use of any particular testing framework. You
-are free to use GTest, Boost Test, Check or even custom executables.
+Meson does not force the use of any particular testing framework. You are free
+to use GTest, Boost Test, Check or even custom executables.
-Installing
-==
+## Installing
Installing the built software is just as simple.
- ninja install
+```sh
+ninja -C builddir install
+```
Note that Meson will only install build targets explicitly tagged as
-installable, as detailed in the [installing targets documentation](Installing.md).
+installable, as detailed in the [installing targets
+documentation](Installing.md).
-By default Meson installs to `/usr/local`. This can be changed by
-passing the command line argument `--prefix /your/prefix` to Meson
-during configure time. Meson also supports the `DESTDIR` variable used
-in e.g. building packages. It is used like this:
+By default Meson installs to `/usr/local`. This can be changed by passing the
+command line argument `--prefix /your/prefix` to Meson during configure time.
+Meson also supports the `DESTDIR` variable used in e.g. building packages. It
+is used like this:
- DESTDIR=/path/to/staging ninja install
+```sh
+DESTDIR=/path/to/staging ninja -C builddir install
+```
-Command line help
-==
+## Command line help
-Meson has a standard command line help feature. It can be accessed
-with the following command.
+Meson has a standard command line help feature. It can be accessed with the
+following command.
meson --help
-Exit status
-==
+## Exit status
-Meson exits with status 0 if successful, 1 for problems with the command line or
-meson.build file, and 2 for internal errors.
+Meson exits with status 0 if successful, 1 for problems with the command line
+or meson.build file, and 2 for internal errors.
diff --git a/docs/markdown/Style-guide.md b/docs/markdown/Style-guide.md
index 9008592..ee2ecfe 100644
--- a/docs/markdown/Style-guide.md
+++ b/docs/markdown/Style-guide.md
@@ -34,3 +34,35 @@ Try to keep cross compilation arguments away from your build files as
much as possible. Keep them in the cross file instead. This adds
portability, since all changes needed to compile to a different
platform are isolated in one place.
+
+# Sorting source paths
+
+The source file arrays should all be sorted. This makes it easier to spot
+errors and often reduces merge conflicts. Furthermore, the paths should be
+sorted with a natural sorting algorithm, so that numbers are sorted in an
+intuitive way (`1, 2, 3, 10, 20` instead of `1, 10, 2, 20, 3`).
+
+Numbers should also be sorted before characters (`a111` before `ab0`).
+Furthermore, strings should be sorted case insensitive.
+
+Additionally, if a path contains a directory it should be sorted before
+normal files. This rule also applies recursively for subdirectories.
+
+The following example shows correct source list definition:
+
+```meson
+sources = files([
+ 'aaa/a1.c',
+ 'aaa/a2.c',
+ 'bbb/subdir1/b1.c',
+ 'bbb/subdir2/b2.c',
+ 'bbb/subdir10/b3.c',
+ 'bbb/subdir20/b4.c',
+ 'bbb/b5.c',
+ 'bbb/b6.c',
+ 'f1.c',
+ 'f2.c',
+ 'f10.c',
+ 'f20.c'
+])
+```
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index a8e7273..9e61739 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -51,9 +51,15 @@ By default Meson uses as many concurrent processes as there are cores on the tes
$ MESON_TESTTHREADS=5 ninja test
```
-## Skipped tests
+## Skipped tests and hard errors
-Sometimes a test can only determine at runtime that it can not be run. The GNU standard approach in this case is to exit the program with error code 77. Meson will detect this and report these tests as skipped rather than failed. This behavior was added in version 0.37.0.
+Sometimes a test can only determine at runtime that it can not be run.
+
+For the default `exitcode` testing protocol, the GNU standard approach in this case is to exit the program with error code 77. Meson will detect this and report these tests as skipped rather than failed. This behavior was added in version 0.37.0.
+
+For TAP-based tests, skipped tests should print a single line starting with `1..0 # SKIP`.
+
+In addition, sometimes a test fails set up so that it should fail even if it is marked as an expected failure. The GNU standard approach in this case is to exit the program with error code 99. Again, Meson will detect this and report these tests as `ERROR`, ignoring the setting of `should_fail`. This behavior was added in version 0.50.0.
## Testing tool
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md
index 57257f9..c0f82f5 100644
--- a/docs/markdown/Users.md
+++ b/docs/markdown/Users.md
@@ -7,21 +7,27 @@ title: Users
If you have a project that uses Meson that you want to add to this list, please [file a pull-request](https://github.com/mesonbuild/meson/edit/master/docs/markdown/Users.md) for it. All the software on this list is tested for regressions before release, so it's highly recommended that projects add themselves here. Some additional projects are
listed in the [`meson` GitHub topic](https://github.com/topics/meson).
+ - [2048.cpp](https://github.com/plibither8/2048.cpp), a fully featured terminal version of the game "2048" written in C++
+ - [Akira](https://github.com/akiraux/Akira), a native Linux app for UI and UX design built in Vala and Gtk
- [AQEMU](https://github.com/tobimensch/aqemu), a Qt GUI for QEMU virtual machines, since version 0.9.3
- [Arduino sample project](https://github.com/jpakkane/mesonarduino)
- - [bolt](https://gitlab.freedesktop.org/bolt/bolt) Userspace daemon to enable security levels for Thunderboltâ„¢ 3 on Linux
+ - [bolt](https://gitlab.freedesktop.org/bolt/bolt), userspace daemon to enable security levels for Thunderboltâ„¢ 3 on Linux
- [Budgie Desktop](https://github.com/budgie-desktop/budgie-desktop), a desktop environment built on GNOME technologies
+ - [Cage](https://github.com/Hjdskes/cage), a Wayland kiosk
- [casync](https://github.com/systemd/casync), Content-Addressable Data Synchronization Tool
- [cinnamon-desktop](https://github.com/linuxmint/cinnamon-desktop), the cinnamon desktop library
+ - [dav1d](https://code.videolan.org/videolan/dav1d), an AV1 decoder
- [dbus-broker](https://github.com/bus1/dbus-broker), Linux D-Bus Message Broker
- - [Dpdk](http://dpdk.org/browse/dpdk), Data plane development kit, a set of libraries and drivers for fast packet processing
+ - [DPDK](http://dpdk.org/browse/dpdk), Data Plane Development Kit, a set of libraries and drivers for fast packet processing
- [DXVK](https://github.com/doitsujin/dxvk), a Vulkan-based Direct3D 11 implementation for Linux using Wine
- [elementary OS](https://github.com/elementary/), Linux desktop oriented distribution
- - [Emeus](https://github.com/ebassi/emeus), Constraint based layout manager for GTK+
- - [ESP8266 Arduino sample project](https://github.com/trilader/arduino-esp8266-meson) Sample project for using the ESP8266 Arduino port with Meson
+ - [Emeus](https://github.com/ebassi/emeus), constraint based layout manager for GTK+
+ - [ESP8266 Arduino sample project](https://github.com/trilader/arduino-esp8266-meson), sample project for using the ESP8266 Arduino port with Meson
+ - [FeedReader](https://github.com/jangernert/FeedReader), a modern desktop application designed to complement existing web-based RSS accounts
- [Fractal](https://wiki.gnome.org/Apps/Fractal/), a Matrix messaging client for GNOME
- [Frida](https://github.com/frida/frida-core), a dynamic binary instrumentation toolkit
- [fwupd](https://github.com/hughsie/fwupd), a simple daemon to allow session software to update firmware
+ - [GameMode](https://github.com/FeralInteractive/gamemode), a daemon/lib combo for Linux that allows games to request a set of optimisations be temporarily applied to the host OS
- [Geary](https://wiki.gnome.org/Apps/Geary), an email application built around conversations, for the GNOME 3 desktop.
- [GLib](https://gitlab.gnome.org/GNOME/glib), cross-platform C library used by GTK+ and GStreamer (not the default yet)
- [GNOME Boxes](https://gitlab.gnome.org/GNOME/gnome-boxes), a GNOME hypervisor
@@ -39,10 +45,16 @@ listed in the [`meson` GitHub topic](https://github.com/topics/meson).
- [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO
- [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux
- [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C
- - [IGT](https://cgit.freedesktop.org/xorg/app/intel-gpu-tools/), Linux kernel graphics driver test suite.
+ - [IGT](https://cgit.freedesktop.org/xorg/app/intel-gpu-tools/), Linux kernel graphics driver test suite
+ - [iSH](https://github.com/tbodt/ish), Linux shell for iOS
+ - [json](https://github.com/nlohmann/json), JSON for Modern C++
- [JsonCpp](https://github.com/open-source-parsers/jsoncpp), a C++ library for interacting with JSON
- [Json-glib](https://gitlab.gnome.org/GNOME/json-glib), GLib-based JSON manipulation library
+ - [Kiwix libraries](https://github.com/kiwix/kiwix-lib)
+ - [Knot Resolver](https://gitlab.labs.nic.cz/knot/knot-resolver), Full caching DNS resolver implementation
- [Ksh](https://github.com/att/ast), a Korn Shell
+ - [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network
+ - [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android
- [Libdrm](https://cgit.freedesktop.org/drm/libdrm/), a library for abstracting DRM kernel interfaces
- [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management
- [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface
@@ -55,30 +67,42 @@ lookup based on OpenStreetMap data
format files
- [Libva](https://github.com/intel/libva), an implementation for the VA (VIdeo Acceleration) API
- [Libzim](https://github.com/openzim/libzim), the reference implementation for the ZIM file format
- - [Kiwix libraries](https://github.com/kiwix/kiwix-lib)
- - [Mesa](https://cgit.freedesktop.org/mesa/mesa/), An open source graphics driver project
+ - [Marker](https://github.com/fabiocolacio/Marker), a GTK-3 markdown editor
+ - [Mesa](https://cgit.freedesktop.org/mesa/mesa/), an open source graphics driver project
- [MiracleCast](https://github.com/albfan/miraclecast), connect external monitors to your system via Wifi-Display specification aka Miracast
- [Nautilus](https://gitlab.gnome.org/GNOME/nautilus), the GNOME file manager
- [Nemo](https://github.com/linuxmint/nemo), the file manager for the Cinnamon desktop environment
+ - [oomd](https://github.com/facebookincubator/oomd), a userspace Out-Of-Memory (OOM) killer for Linux systems
+ - [OpenH264](https://github.com/cisco/openh264), open source H.264 codec
+ - [OpenHMD](https://github.com/OpenHMD/OpenHMD), a free and open source API and drivers for immersive technology, such as head mounted displays with built in head tracking
- [Orc](http://cgit.freedesktop.org/gstreamer/orc/), the Optimized Inner Loop Runtime Compiler (not the default yet)
+ - [OTS](https://github.com/khaledhosny/ots), the OpenType Sanitizer, parses and serializes OpenType files (OTF, TTF) and WOFF and WOFF2 font files, validating and sanitizing them as it goes. Used by Chromium and Firefox
- [Outlier](https://github.com/kerolasa/outlier), a small Hello World style meson example project
- [Pango](https://git.gnome.org/browse/pango/), an Internationalized text layout and rendering library (not the default yet)
- [Parzip](https://github.com/jpakkane/parzip), a multithreaded reimplementation of Zip
+ - [Peek](https://github.com/phw/peek), simple animated GIF screen recorder with an easy to use interface
- [PipeWire](https://github.com/PipeWire/pipewire), a framework for video and audio for containerized applications
- [Pithos](https://github.com/pithos/pithos), a Pandora Radio client
- [Pitivi](https://github.com/pitivi/pitivi/), a nonlinear video editor
+ - [Playerctl](https://github.com/acrisci/playerctl), mpris command-line controller and library for spotify, vlc, audacious, bmp, cmus, and others
- [Polari](https://gitlab.gnome.org/GNOME/polari), an IRC client
- [radare2](https://github.com/radare/radare2), unix-like reverse engineering framework and commandline tools (not the default)
- [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP
+ - [sway](https://github.com/swaywm/sway), i3-compatible Wayland compositor
- [Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool
- [systemd](https://github.com/systemd/systemd), the init system
- [szl](https://github.com/dimkr/szl), a lightweight, embeddable scripting language
- [Taisei Project](https://taisei-project.org/), an open-source Touhou Project clone and fangame
- - [xi-gtk](https://github.com/eyelash/xi-gtk), a GTK+ front-end for the Xi editor
- - [Xorg](https://cgit.freedesktop.org/xorg/xserver/), the X.org display server (not the default yet)
+ - [Terminology](https://github.com/billiob/terminology), a terminal emulator based on the Enlightenment Foundation Libraries
+ - [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3
- [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala
- [Wayland and Weston](https://lists.freedesktop.org/archives/wayland-devel/2016-November/031984.html), a next generation display server (not merged yet)
- [wlroots](https://github.com/swaywm/wlroots), a modular Wayland compositor library
+ - [wxFormBuilder](https://github.com/wxFormBuilder/wxFormBuilder), RAD tool for wxWidgets GUI design
+ - [xi-gtk](https://github.com/eyelash/xi-gtk), a GTK+ front-end for the Xi editor
+ - [Xorg](https://cgit.freedesktop.org/xorg/xserver/), the X.org display server (not the default yet)
+ - [zathura](https://github.com/pwmt/zathura), a highly customizable and functional document viewer based on the
+girara user interface library and several document libraries
- [ZStandard](https://github.com/facebook/zstd/commit/4dca56ed832c6a88108a2484a8f8ff63d8d76d91), a compression algorithm developed at Facebook (not used by default)
Note that a more up-to-date list of GNOME projects that use Meson can be found [here](https://wiki.gnome.org/Initiatives/GnomeGoals/MesonPorting).
diff --git a/docs/markdown/Vala.md b/docs/markdown/Vala.md
index 2184ebc..9e95851 100644
--- a/docs/markdown/Vala.md
+++ b/docs/markdown/Vala.md
@@ -18,7 +18,7 @@ dependencies = [
sources = files('app.vala')
-executable('app_name', sources, dependencies : dependencies)
+executable('app_name', sources, dependencies: dependencies)
```
You must always specify the `glib-2.0` and `gobject-2.0` libraries as
@@ -53,7 +53,7 @@ This first example is a simple addition to the `meson.build` file because:
* the library has a `pkg-config` file, `gtk+-3.0.pc`
* the VAPI is distributed with Vala and so installed with the Vala compiler
* the VAPI is installed in Vala's standard search path
- * the VAPI has the same name as the `pkg-config` file, `gtk+-3.0.vapi`
+ * the VAPI, `gtk+-3.0.vapi`, has the same name as the `pkg-config` file
Everything works seamlessly in the background and only a single extra line is
needed:
@@ -69,7 +69,7 @@ dependencies = [
sources = files('app.vala')
-executable('app_name', sources, dependencies : dependencies)
+executable('app_name', sources, dependencies: dependencies)
```
GTK+ is the graphical toolkit used by GNOME, elementary OS and other desktop
@@ -89,7 +89,7 @@ installed. When setting a minimum version of GLib, Meson will also pass this to
the Vala compiler using the `--target-glib` option.
This is needed when using GTK+'s user interface definition files with Vala's
-`[GtkTemplate]`, `[GtkChild]` and `[GtkCallback]` annotations. This requires
+`[GtkTemplate]`, `[GtkChild]` and `[GtkCallback]` attributes. This requires
`--target-glib 2.38`, or a newer version, to be passed to Vala. With Meson this
is simply done with:
@@ -104,7 +104,7 @@ dependencies = [
sources = files('app.vala')
-executable('app_name', sources, dependencies : dependencies)
+executable('app_name', sources, dependencies: dependencies)
```
Using `[GtkTemplate]` also requires the GTK+ user interface definition files to
@@ -128,7 +128,7 @@ sources += import( 'gnome' ).compile_resources(
source_dir: 'src/resources',
)
-executable('app_name', sources, dependencies : dependencies)
+executable('app_name', sources, dependencies: dependencies)
```
@@ -146,10 +146,11 @@ the VAPI search path. In Meson this is done with the `add_project_arguments()`
function:
```meson
-project('vala app', 'c', 'vala')
+project('vala app', 'vala', 'c')
+
+vapi_dir = join_paths(meson.current_source_dir(), 'vapi')
-add_project_arguments(['--vapidir', join_paths(meson.current_source_dir(), 'vapi')],
- language: 'vala')
+add_project_arguments(['--vapidir', vapi_dir], language: 'vala')
dependencies = [
dependency('glib-2.0'),
@@ -159,13 +160,13 @@ dependencies = [
sources = files('app.vala')
-executable('app_name', sources, dependencies : dependencies)
+executable('app_name', sources, dependencies: dependencies)
```
If the VAPI is for an external library then make sure that the VAPI name
corresponds to the pkg-config file name.
-The [`vala-extra-vapis` repository](https://github.com/nemequ/vala-extra-vapis)
+The [`vala-extra-vapis` repository](https://gitlab.gnome.org/GNOME/vala-extra-vapis)
is a community maintained repository of VAPIs that are not distributed.
Developers use the repository to share early work on new bindings and
improvements to existing bindings. So the VAPIs can frequently change. It is
@@ -179,7 +180,7 @@ with the `vala-extra-vapis` repository.
### Libraries without pkg-config files
A library that does not have a corresponding pkg-config file may mean
`dependency()` is unsuitable for finding the C and Vala interface files. In this
-case it is necessary to use `find_library()`.
+case it is necessary to use the `find_library()` method of the compiler object.
The first example uses Vala's POSIX binding. There is no pkg-config file because
POSIX includes the standard C library on Unix systems. All that is needed is the
@@ -198,7 +199,7 @@ dependencies = [
sources = files('app.vala')
-executable('app_name', sources, dependencies : dependencies)
+executable('app_name', sources, dependencies: dependencies)
```
The next example shows how to link with a C library where no additional VAPI is
@@ -217,8 +218,39 @@ dependencies = [
sources = files('app.vala')
-executable('app_name', sources, dependencies : dependencies)
+executable('app_name', sources, dependencies: dependencies)
```
+The `required: false` means the build will continue when using another C library
+that does not separate the maths library. See [Add math library (-lm)
+portably](howtox.md#add-math-library-lm-portably).
+
+The final example shows how to use a library that does not have a pkg-config
+file and the VAPI is in the `vapi` directory of your project source files:
+```meson
+project('vala app', 'vala', 'c')
+
+vapi_dir = join_paths(meson.current_source_dir(), 'vapi')
+
+add_project_arguments(['--vapidir', vapi_dir], language: 'vala')
+
+dependencies = [
+ dependency('glib-2.0'),
+ dependency('gobject-2.0'),
+ meson.get_compiler('c').find_library('foo'),
+ meson.get_compiler('vala').find_library('foo', dir: vapi_dir),
+]
+
+sources = files('app.vala')
+
+executable('app_name', sources, dependencies: dependencies)
+```
+The `find_library()` method of the C compiler object will try to find the C
+header files and the library to link with.
+
+The `find_library()` method of the Vala compiler object needs to have the `dir`
+keyword added to include the project VAPI directory. This is not added
+automatically by `add_project_arguments()`.
+
## Building libraries
@@ -260,7 +292,7 @@ Meson can generate a GIR as part of the build. For a Vala library the
`vala_gir` option has to be set for the `library`:
```meson
-foo_lib = library('foo', 'foo.vala',
+foo_lib = shared_library('foo', 'foo.vala',
vala_gir: 'Foo-1.0.gir',
dependencies: [glib_dep, gobject_dep],
install: true,
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index adcec7c..8ae4fde 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -52,7 +52,9 @@ executable(..., dependencies : thread_dep)
## Set extra compiler and linker flags from the outside (when e.g. building distro packages)
-The behavior is the same as with other build systems, with environment variables during first invocation.
+The behavior is the same as with other build systems, with environment
+variables during first invocation. Do not use these when you need to rebuild
+the source.
```console
$ CFLAGS=-fsomething LDFLAGS=-Wl,--linker-flag meson <options>
@@ -201,3 +203,20 @@ executable(..., dependencies : m_dep)
```meson
executable(..., install : true, install_dir : get_option('libexecdir'))
```
+
+## Use existing `Find<name>.cmake` files
+
+Meson can use the CMake `find_package()` ecosystem if CMake is installed.
+To find a dependency with custom `Find<name>.cmake`, set the `cmake_module_path`
+property to the path in your project where the CMake scripts are stored.
+
+Example for a `FindCmakeOnlyDep.cmake` in a `cmake` subdirectory:
+
+```meson
+cm_dep = dependency('CmakeOnlyDep', cmake_module_path : 'cmake')
+```
+
+The `cmake_module_path` property is only needed for custom CMake scripts. System
+wide CMake scripts are found automatically.
+
+More information can be found [here](Dependencies.md#cmake)
diff --git a/docs/markdown/i18n-module.md b/docs/markdown/i18n-module.md
index 88f059b..9053edc 100644
--- a/docs/markdown/i18n-module.md
+++ b/docs/markdown/i18n-module.md
@@ -29,6 +29,7 @@ argument which is the name of the gettext module.
[source](https://github.com/mesonbuild/meson/blob/master/mesonbuild/modules/i18n.py)
 for their value
* `install`: (*Added 0.43.0*) if false, do not install the built translations.
+* `install_dir`: (*Added 0.50.0*) override default install location, default is `localedir`
This function also defines targets for maintainers to use:
**Note**: These output to the source directory
diff --git a/docs/markdown/legal.md b/docs/markdown/legal.md
index 474d129..bd86ff4 100644
--- a/docs/markdown/legal.md
+++ b/docs/markdown/legal.md
@@ -1,12 +1,23 @@
# Legal information
-Meson is copyrighted by all members of the Meson development team. Meson is licensed under the [Apache 2 license].
+Meson is copyrighted by all members of the Meson development team.
+Meson is licensed under the [Apache 2 license].
Meson is a registered trademark of Jussi Pakkanen.
+## Meson logo licensing
+
+Meson's logo is (C) Jussi Pakkanen and used by the Meson project with
+specific permission. It is not licensed under the same terms as the
+rest of the project.
+
+If you are a third party and want to use the Meson logo, you must
+first obtain written permission from Jussi Pakkanen.
+
## Website licensing
-The meson website content is released under [Creative Commons Attribution-ShareAlike 4.0 International].
+The meson website content is released under [Creative Commons
+Attribution-ShareAlike 4.0 International].
All code samples on the website are released under [CC0 1.0 Universal].
diff --git a/docs/markdown/snippets/clangformat.md b/docs/markdown/snippets/clangformat.md
deleted file mode 100644
index 8983243..0000000
--- a/docs/markdown/snippets/clangformat.md
+++ /dev/null
@@ -1,11 +0,0 @@
-## A builtin target to run clang-format
-
-If you have `clang-format` installed and there is a `.clang-format`
-file in the root of your master project, Meson will generate a run
-target called `clang-format` so you can reformat all files with one
-command:
-
-```meson
-ninja clang-format
-```
-
diff --git a/docs/markdown/snippets/crosslib.md b/docs/markdown/snippets/crosslib.md
deleted file mode 100644
index 14fcc81..0000000
--- a/docs/markdown/snippets/crosslib.md
+++ /dev/null
@@ -1,7 +0,0 @@
-## Libdir defaults to `lib` when cross compiling
-
-Previously `libdir` defaulted to the value of the build machine such
-as `lib/x86_64-linux-gnu`, which is almost always incorrect when cross
-compiling. It now defaults to plain `lib` when cross compiling. Native
-builds remain unchanged and will point to the current system's library
-dir.
diff --git a/docs/markdown/snippets/find_library_header.md b/docs/markdown/snippets/find_library_header.md
deleted file mode 100644
index 55597ab..0000000
--- a/docs/markdown/snippets/find_library_header.md
+++ /dev/null
@@ -1,21 +0,0 @@
-## Find library with its headers
-
-The `find_library()` method can now also verify if the library's headers are
-found in a single call, using the `has_header()` method internally.
-
-```meson
-# Aborts if the 'z' library is found but not its header file
-zlib = find_library('z', has_headers : 'zlib.h')
-# Returns not-found if the 'z' library is found but not its header file
-zlib = find_library('z', has_headers : 'zlib.h', required : false)
-```
-
-Any keyword argument with the `header_` prefix passed to `find_library()` will
-be passed to the `has_header()` method with the prefix removed.
-
-```meson
-libfoo = find_library('foo',
- has_headers : ['foo.h', 'bar.h'],
- header_prefix : '#include <baz.h>',
- header_include_directories : include_directories('.'))
-```
diff --git a/docs/markdown/snippets/find_library_static.md b/docs/markdown/snippets/find_library_static.md
new file mode 100644
index 0000000..a1b7fa9
--- /dev/null
+++ b/docs/markdown/snippets/find_library_static.md
@@ -0,0 +1,6 @@
+## Add keyword `static` to `find_library`
+
+`find_library` has learned the `static` keyword. The keyword must be a boolean,
+where `true` only searches for static libraries and `false` only searches for
+dynamic/shared. Leaving the keyword unset will keep the old behavior of first
+searching for dynamic and then falling back to static.
diff --git a/docs/markdown/snippets/fortran-include.md b/docs/markdown/snippets/fortran-include.md
new file mode 100644
index 0000000..a811765
--- /dev/null
+++ b/docs/markdown/snippets/fortran-include.md
@@ -0,0 +1,12 @@
+## Fortran `include` statements recursively parsed
+
+While non-standard and generally not recommended, some legacy Fortran programs use `include` directives to inject code inline.
+Since v0.51, Meson can handle Fortran `include` directives recursively.
+
+DO NOT list `include` files as sources for a target, as in general their syntax is not correct as a standalone target.
+In general `include` files are meant to be injected inline as if they were copy and pasted into the source file.
+
+`include` was never standard and was superseded by Fortran 90 `module`.
+
+The `include` file is only recognized by Meson if it has a Fortran file suffix, such as `.f` `.F` `.f90` `.F90` or similar.
+This is to avoid deeply nested scanning of large external legacy C libraries that only interface to Fortran by `include biglib.h` or similar.
diff --git a/docs/markdown/snippets/includestr.md b/docs/markdown/snippets/includestr.md
deleted file mode 100644
index fd4c130..0000000
--- a/docs/markdown/snippets/includestr.md
+++ /dev/null
@@ -1,16 +0,0 @@
-## `include_directories` accepts a string
-
-The `include_directories` keyword argument now accepts plain strings
-rather than an include directory object. Meson will transparently
-expand it so that a declaration like this:
-
-```meson
-executable(..., include_directories: 'foo')
-```
-
-Is equivalent to this:
-
-```meson
-foo_inc = include_directories('foo')
-executable(..., include_directories: inc)
-```
diff --git a/docs/markdown/snippets/introspect_buildoptions_no_bd.md b/docs/markdown/snippets/introspect_buildoptions_no_bd.md
deleted file mode 100644
index f008474..0000000
--- a/docs/markdown/snippets/introspect_buildoptions_no_bd.md
+++ /dev/null
@@ -1,11 +0,0 @@
-## `introspect --buildoptions` can now be used without configured build directory
-
-It is now possible to run `meson introspect --buildoptions /path/to/meson.build`
-without a configured build directory.
-
-Running `--buildoptions` without a build directory produces the same output as running
-it with a freshly configured build directory.
-
-However, this behavior is not guaranteed if subprojects are present. Due to internal
-limitations all subprojects are processed even if they are never used in a real meson run.
-Because of this options for the subprojects can differ. \ No newline at end of file
diff --git a/docs/markdown/snippets/notfound_message.md b/docs/markdown/snippets/notfound_message.md
deleted file mode 100644
index d73c6b2..0000000
--- a/docs/markdown/snippets/notfound_message.md
+++ /dev/null
@@ -1,38 +0,0 @@
-## New `not_found_message` for dependency
-
-You can now specify a `not_found_message` that will be printed if the
-specified dependency was not found. The point is to convert constructs
-that look like this:
-
-```meson
-d = dependency('something', required: false)
-if not d.found()
- message('Will not be able to do something.')
-endif
-```
-
-Into this:
-
-```meson
-d = dependency('something',
- required: false,
- not_found_message: 'Will not be able to do something.')
-```
-
-Or constructs like this:
-
-```meson
-d = dependency('something', required: false)
-if not d.found()
- error('Install something by doing XYZ.')
-endif
-```
-
-into this:
-
-```meson
-d = dependency('something',
- not_found_message: 'Install something by doing XYZ.')
-```
-
-Which works, because the default value of `required` is `true`.
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index b8c41b4..bea2a31 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -30,6 +30,7 @@ index.md
Subprojects.md
Disabler.md
Modules.md
+ CMake-module.md
Dlang-module.md
Gnome-module.md
Hotdoc-module.md
@@ -43,6 +44,7 @@ index.md
RPM-module.md
Simd-module.md
Windows-module.md
+ Cuda-module.md
Java.md
Vala.md
D.md
@@ -58,6 +60,7 @@ index.md
Reference-manual.md
Reference-tables.md
Style-guide.md
+ Rewriter.md
FAQ.md
Reproducible-builds.md
howtox.md
diff --git a/docs/theme/extra/images/favicon.png b/docs/theme/extra/images/favicon.png
index 5e7b941..6800fe8 100644
--- a/docs/theme/extra/images/favicon.png
+++ b/docs/theme/extra/images/favicon.png
Binary files differ
diff --git a/docs/theme/extra/images/meson_logo.png b/docs/theme/extra/images/meson_logo.png
index 2720af5..6800fe8 100644
--- a/docs/theme/extra/images/meson_logo.png
+++ b/docs/theme/extra/images/meson_logo.png
Binary files differ
diff --git a/graphics/meson_logo.svg b/graphics/meson_logo.svg
index 5e647fe..d5b47bc 100644
--- a/graphics/meson_logo.svg
+++ b/graphics/meson_logo.svg
@@ -7,17 +7,60 @@
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
xmlns:svg="http://www.w3.org/2000/svg"
xmlns="http://www.w3.org/2000/svg"
- xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
- width="744.09448819"
- height="1052.3622047"
- id="svg2"
+ width="210mm"
+ height="297mm"
+ viewBox="0 0 210 297"
version="1.1"
- inkscape:version="0.91 r13725"
+ id="svg1769"
+ inkscape:version="0.92.3 (2405546, 2018-03-11)"
sodipodi:docname="meson_logo.svg">
<defs
- id="defs4" />
+ id="defs1763">
+ <marker
+ inkscape:stockid="Arrow1Lend"
+ orient="auto"
+ refY="0.0"
+ refX="0.0"
+ id="marker4405"
+ style="overflow:visible;"
+ inkscape:isstock="true">
+ <path
+ id="path4403"
+ d="M 0.0,0.0 L 5.0,-5.0 L -12.5,0.0 L 5.0,5.0 L 0.0,0.0 z "
+ style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1;fill:#000000;fill-opacity:1"
+ transform="scale(0.8) rotate(180) translate(12.5,0)" />
+ </marker>
+ <marker
+ inkscape:stockid="Arrow1Lstart"
+ orient="auto"
+ refY="0.0"
+ refX="0.0"
+ id="Arrow1Lstart"
+ style="overflow:visible"
+ inkscape:isstock="true">
+ <path
+ id="path2389"
+ d="M 0.0,0.0 L 5.0,-5.0 L -12.5,0.0 L 5.0,5.0 L 0.0,0.0 z "
+ style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1;fill:#000000;fill-opacity:1"
+ transform="scale(0.8) translate(12.5,0)" />
+ </marker>
+ <marker
+ inkscape:stockid="Arrow1Lend"
+ orient="auto"
+ refY="0.0"
+ refX="0.0"
+ id="Arrow1Lend"
+ style="overflow:visible;"
+ inkscape:isstock="true">
+ <path
+ id="path2392"
+ d="M 0.0,0.0 L 5.0,-5.0 L -12.5,0.0 L 5.0,5.0 L 0.0,0.0 z "
+ style="fill-rule:evenodd;stroke:#000000;stroke-width:1pt;stroke-opacity:1;fill:#000000;fill-opacity:1"
+ transform="scale(0.8) rotate(180) translate(12.5,0)" />
+ </marker>
+ </defs>
<sodipodi:namedview
id="base"
pagecolor="#ffffff"
@@ -26,51 +69,25 @@
inkscape:pageopacity="0.0"
inkscape:pageshadow="2"
inkscape:zoom="1.4"
- inkscape:cx="333.91453"
- inkscape:cy="840.95374"
- inkscape:document-units="px"
+ inkscape:cx="369.10197"
+ inkscape:cy="819.78077"
+ inkscape:document-units="mm"
inkscape:current-layer="layer1"
showgrid="false"
- inkscape:window-width="1147"
- inkscape:window-height="710"
- inkscape:window-x="65"
- inkscape:window-y="24"
- inkscape:window-maximized="1"
- showguides="true"
- inkscape:guide-bbox="true">
- <inkscape:grid
- type="xygrid"
- id="grid3755" />
- <sodipodi:guide
- orientation="0,1"
- position="121.42857,983.92857"
- id="guide3805" />
- <sodipodi:guide
- orientation="0,1"
- position="133.57143,703.92857"
- id="guide3807" />
- <sodipodi:guide
- orientation="0,1"
- position="141.42857,892.14286"
- id="guide3809" />
- <sodipodi:guide
- orientation="0,1"
- position="340,757.14285"
- id="guide3811" />
- <sodipodi:guide
- position="340,1017.8571"
- orientation="1,0"
- id="guide4163" />
- </sodipodi:namedview>
+ inkscape:window-width="1226"
+ inkscape:window-height="910"
+ inkscape:window-x="536"
+ inkscape:window-y="82"
+ inkscape:window-maximized="0" />
<metadata
- id="metadata7">
+ id="metadata1766">
<rdf:RDF>
<cc:Work
rdf:about="">
<dc:format>image/svg+xml</dc:format>
<dc:type
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
- <dc:title />
+ <dc:title></dc:title>
</cc:Work>
</rdf:RDF>
</metadata>
@@ -78,21 +95,246 @@
inkscape:label="Layer 1"
inkscape:groupmode="layer"
id="layer1">
- <image
- y="362.68222"
- x="91.928558"
- id="image3780"
- xlink:href="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABH0AAAPYCAIAAADIAhIwAAAAA3NCSVQICAjb4U/gAAAgAElEQVR4 nO3d0XLjyA1AUWlr//+XmQdvPB5bkkmK6AbQ5zylajeJLJFg3yYt37dtuwEAABDmn9kvAAAAoDnd BQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTS XQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs 3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE 0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABA LN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAA xNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAA QCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEA AMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcA AEAs3QXkcr/f7/f77FcBAHCl+7Zts18DsJCPptq27WhcGVYAQF26C7hY9N0qUwsAKEd3AW+Z9Uyg 2QUAFKK7gGMS/vKVOQYAJKe7gN8lbK1nzDQAICHdBfxRqK9eM9kAgFR0F6yuTWv9ZL4BRe2czKYc FKK7YEWNW+sZsw7IYMz4NfEgId0F/S1YWS8YekCQ5MPW9IO5dBc0kfx6X4WRCBxSdPaadTCe7oKq il7sqzMzYSlLTVrzDULpLihjqct/foYnZPNzSG7bZnK+w6CDC+kuSM2KITPzEwYwBpMw8eBNugvS scioy0SFPUy5Bow7OEp3QQpWIc0YrWCsrcCsg/10F8xkXdKbActSDLRlmXWwh+6CaaxRlmXw0oY5 xieTDV7TXTCBlQpfmcOUY4jxkGkGL+guGMpihRcMZDIzvtjJKIOHdBcMYsnCTsYyqZhdnGCOwU// zn4B0J9VC4d8HjAWLsxldgFcyP0uiGXhwlWMa8YwtbiKqQVf6S6IYu1CBEObIEYWEYws+KS7IIQV DNFMb65iXhHKsIIPugsuZgXDSGY4bzKyGMCkgpvuggtZvjCLSc5R5hWDGVOgu+Cw+/2vE8fyhQwM c/YztZjCmGJxugvO+EgvaxdSMc95wbwiA2OKlekuOMMKhrRMdb4xr0jFjGJZuguOsYIhP4OdD+YV OZlRrOmf2S8AKrGIoQQHKjeHAYk5OFmT7oK9XCcoxOG6OAcAQDaeM4RdLGKoyIRfjUlFIQYUq9Fd 8AvrGKoz51dgUlGR6cRSPGcIr1jK0IDDuD0fMUU5dFmK7oKnXA9ow8HcmA+X0hzArEN3wWOuBEB+ JhVAFboLHrCUoR9HdTP3+91nClCI7oLvLGWA5IwpOnE8swjdBX8x/WnM4d2Dz5F+HNWs4N/ZLwCy MPSB/EwqgKL8/S643SxlWImxX5QxRXumE715zhCsZoDsjCmA6nQXq7OaYTWO+XJ8ZAAN6C7W5VuY gfyMKdbhaKc33cWiDHdW5vivwifFahzzNKa7WJGxDuRnUrEmRz5d6S6WY6ADyXkKmsU5/mlJd7EW oxw+OBcAYCTdxUIsNIH8TCq4ORHoSHexChMcvnFSJORDgU9OB5rRXQDrsqxJxccB0JjuYglWM0By 
xhT85LygE90FsDTLmgx8CgDt6S76s6CB15wjc3n/4QUnCG3oLpozr4HMzCiAReguAJhDdMEezhR6 0F10ZlIDQAMu6DSguwBgAutIgKXoLtqypgEAIAndBYB9itG84XCUs4bqdBc9mc4A0IyLO6XpLhoy l4HMzCiABekuAABqsG1BXbqLbkxkIDMzCmBNuotWLGgAoDfXeorSXQAwiPUiwLJ0FwC3myQA6jCv qEh30YcpDGRmRsGFnFCUo7sAIJw1IsDidBdNWNMAwFJc+qlFdwEAAMTSXXRgxwsu4VQK4o2FIE4u CtFdAAAAsXQX5dnrAjIzoyCUU4wqdBcAAIVJL0rQXdRm1MK1nFNARWYX+ekuAIhiLQjDON1ITndR mAkLAHyyMCAz3QXAXyxcruKdhPGcd6Slu6jKYAUAoArdBQDXszcEszj7yEl3UZKRCqGcYgBwLd0F AL8QolCLc5aEdBf1GKYAANSiuwB4wAbHV9u2Hfr3vXsAfKO7KMZqBgD4lQUD2eguALiS1R4AP+ku AB7TDwBwFd1FJVaBAMBOlg2korsA4DLWeZCKU5I8dBdlGJ0AABSluwB4yn7HId4uSMiJSRK6CwAA IJbuogabVUByxhQAL+guAAA6sy1CBroLAAAglu6iANtUQHLGFACv6S4AAJqzOcJ0uovsDEoAAKrT XQAA9Gcnl7l0F6kZkQAANKC7AABYgv1cJtJd5GU4AiUYVgD8SneRlHUMAHA5Cwxm0V0AvGKNAgDv 010AAACxdBcZ2V8HAIJYZjCF7gIAAIilu0jHLhQAAM3oLgAAgFi6i1zc7AIAoB/dBcAvbIi84M2B ipy5jKe7SMQQBACgJd0FAAAQS3cBAADE0l1k4SFDAGAYCw8G010AAACxdBcAv7Mx/JC3BYCddBcp WLsAAINZfjCS7iK1bdtmvwQAAHiX7iI1G1EAADSguwAAWJQdXobRXcxn5EEJTlUAOE13AcAZQhSA /XQXAABALN3FZDaMAQBoT3cBALAuW8CMobsAAABi6S5mssMEFGV8AXCI7gIAAIiluwAAAGLpLgD2 8nAd0JLhxgC6i2nMOAAAFqG7AOAY20YAHKW7ANhr27bZLwEAStJdzGG3GADIw8qEaLoLgL2sSwDg HN3FBJZuAAAsRXcBsJff7wKAc3QXAHu5WQ0A5+guADhAfAJwgu5iNEsWAABWo7sAAMDWMLF0FwAA QCzdxVB2kgCAtCxUiKO7AAAAYukuAAD4j1teBNFdjGOQAQCwJt0FAHvZPwLgHN0FAAB/2GEhgu5i ECMMAKjCuoXL6S4AAIBYugsAACCW7mIEN+sBgFqsXriW7gIAAIiluwAA4AG3vLiQ7iKcmQVtbNs2 +yUAQEm6CwAAHrN9zFV0F7FMKwAA0F0AAACxdBcAADzl4R0uobsIZE4BAA1Y0vA+3QUAABBLdwEA wC/c8uJNugsAACCW7gIAgN+55cU7dBdRzCYAAPiguwAAAGLpLkK42QUA9GOFw2m6CwAA9pJenKO7 AAAAYukurmcfCABozFKHE3QXFzOJoKtt22a/BACoSncBwC72lYBPBgJH6S4AAIBYuguAXWzuAnxl KnKI7uJKBhDQmN9wA+A03QXALqoD4Bs7zuynu7iM0QMAAA/pLgAAOMm+MzvpLgB2sbYAgNN0FwDs ojwBOE13AQDAeTZl2EN3cQ0TBwAAntFdAAAAsXQXAAC8xYM//Ep3cQGzBgAAXtBdAADwLtvQvKa7 AAAAYukuAACAWLqLd7mrDgBwsyjiJd0FAAAQS3cBAADE0l0AAHANjxryjO4CAACIpbt4i00dAICv rI54SHcBAADE0l2cZzsHVrPyWb/yzw4cZWLwk+4CAACIpbsAAABi6S4AALiYRw35RncBcICVBMBO 
BiZf6S4AAIBYuouTbOEAALxmvcQn3QXAMQsuIxb8kQG4lu4CAIAoNm74oLsAACCQ9OKmuzjH+IDF GQIAcIjuAgAAiKW7AOAVN/cAeJ/u4jBLEACAQyyf0F0AAACxdBeHbds2+yUAAEAluovD3CgHbkYB wEHG5uJ0FwAAQCzdxTG2aoClGHoAXEJ3AQDACLZyVqa7AAAAYukuDrBJA3xlJgDATroLAB4TlgBc RXcBAMAgNnSWpbsAAABi6S72sj0DAADn6C4Azmu8I9P4RwPmMl7WpLsAAABi6S4A+M5uNBDKkFmQ 7gLgLVYPAPAr3cUu1lUAAHCa7gKAv9hpAuByugsAACCW7gLgXW4QARxlcq5GdwHAH1ZCAETQXfzO KgQAAN6huwDgP7aZAAiiu/iFVQiwh1kBcJTJuRTdBQAAEEt3AcDtZuMZgEi6i1esQoBFGHcAhNJd AAAwh02fdeguAFZn3QNANN3FUxYiAABwCd0FAAAQS3cBcI2iN8mLvmwAatFdAKxLdAEwhu7iMWsR AAC4iu4CAACIpbt4wM0u4Jxa06PWqwWgNN0FwIpEFwAj6S4AAIBY/85+AQAwmptdi9u27fM/OxiY 7n6/fz0m6Up3AXCl/AsI6+zFfTs+nx2uP49kRw7wjuxXR8ZzXQHelPzKYsqt7JKD0yFEhOSTk/fp Lr5zOQHel/biYsQtK+iYdERxobSTk0t4zhCAVVgic7nPhbKjC3hNdwGBfm7dWZosIuFveTn2VrNt 2+eHPuBoFGDAa+mui0zngsEl9s8Wh1xX2a4vjrQFfaTXrEPRIccJ2SYnF9JdfOc6wZvOTZWPA+/h t4d93UX+uoFNfnkuMQ6bNU0/Ah14HDX9oCWO7uIvrhC8Y9g8caBWkeES42hZU4Zj7+bw47gkhy4R /pn9AoAOtm0bealwWarifr/PXXda9TKXYcVRplZjugt415SFxeDS4x2zlhGWL8tKNRxSvRhKMLu6 0l384TznKPHDTuPHi4FGHuYkcNNdwDlJiivDa2CnkSEkupaVZDTBmwyxlnQXcFiqZU2qF8NroSuJ z/9x65VlZZ4GmV8bMIbuAo6xeuAd0eklupZlNAHJ+R55/mOxwh5pJ4YDuJZrDySfPmlH0zeOVQ6p cmCzk/tdwF6ZLwCZXxs/fb0xdXolev+/614XJTn9gRL+nf0CgAIsa4jwNb32H2NCC4CKPGfI7WYd w0uFpoQjuYHXx5uPmJ8KzaibY5iDah3evOY5Q+CVWhO/1qvloRdPD1qw8pOzHqjC/S5uN6sZnig6 HxzPsIKiA+pmRnFE3eOcn9zvAh6rO+vrvnJgJ6c5i1DpnegunNI8YE0DpGVAARXpLuC7BmuaBj8C 8FCDs7vBjwCcoLuAnqxsgA8e6wAy0F3AXzrlSqefBbidPamzjYJDf7AObBy0obsAAMb5iC7pBavR XcAf/dYB/X4ioA0Dip3c8upBd63OmUx7VjYAVGfB1oDuAv7TuE8a/2hAUZbRsBrdBdxuygRIr9+Y 8gUbHKLVq9NdS3MCsw6LGyCVz6FkOrGTQ6U63QUAMIFlNCxFdwGrXPsX+TGhn23bep+/vX864IPu AgBS81Q80IDugtXZZwWSW2FMrfAz8iYbENXpLmAhVjYAwBS6a112TQDIb53tknV+UliT7oKlLXiZ X/BHhrqcsEAbugvWZUEDZLbgjFrwR4Z16C5Y1MpX95V/dsjPGQq0pLsW5Ze7ACAh2ckL1m+l6S4A IBErS6Al3QUrsp8KADCS7gIA0ll5e2jlnx0a012wHFf0mzcBABhLd63Io/MAZGZnBJ6xiqtLd8Fa rGaA5IypmzcBOtJdAAAAsXQXLMQGKgBU51HDonTXcpyryxJd33hDAIBhdBcAkIUNkU/eCp5xbBSl 
uwCAFKwmgcZ0FwAwn+gCetNda/HLXcuyoHnI2wJJOBkf8rZAJ7oLAJhJXcAhttGL0l3QnzUNQFEG OLShuwAAoBK3vCrSXQDANO7nAIvQXdCcNQ1AacY49KC7FuKWNACpKApgHboLAAAglu6CzuwlAzRg mPOT55jK0V0AwARaAliK7gIAAIilu6Ate8l7eJeAEgwrqE53rcJDwAAAnVjd1aK7oCc7owDNGOxQ mu4CAEaTEMBqdBc0ZEEDACvwqGEhugtYnUwFqjCvoC7dBQAMJR6ABeku6MaCBgAgG921BM/+AgDA RLoLAKAMDzVAUboLAAAglu6CVuyDAslt2+bpd2BBuqs/lzf4lV6FkZxxcCErvSp0V3NORQDyUFzA snRXZ6JrNRY0ACsw7aEi3QUAAIXZai9BdwHcbvaPAYBIuguakA0AAGnprrbccQYAgCR0FwBAMZ5x gHJ0F3TgAnwJbyMAEER39eQhQwAAyEN3AQAAxNJdAAD1eDQaatFdUJ5LLwAszu+Y5Ke7AP4QsQBA BN0FAIxgX+Ny3lIoRHdBbS66AAD56a6GPOALALAaK8DkdBcU5mYXAEAJugvgL2oWIjizgnhjoQrd 1Y1bzAAACxLhyekuAAAoz+Z7crqrFecbAKzGXQ4oQXcBfGcRAwBcS3dBVdoAAKAK3QUAxLJPBKC7 AAAAYukuKMnmcTTvMFzI1z7BGM61zHQXABDLRkY07zDkp7v6sMMBAAA56S4AINC2bXYGB3DLC5LT XVCPiytQi6kFoLuasJUIAIA1YVq6C+oxUgH4yX1Fbg6DxHQXAABALN0FxWzbZisLyM+kgik8FJOW 7urACQZAKl+jy0VqJLkLaekugMcsX+BNTiIYz3mXlu6CSgxTID+TCuAn3QUAhPCEIcAn3VWeqxoA mW3b5lI1kvuNkNO/s18AANDH10X/x38WXQA33QWF2MIEKjK7AG6eM6zOJiIAAJ8sDtPSXQBP2aeH Q5wyAM/oLgCAVgQwJKS7AAAAYukuqMHm5SzeeQDgfbqrML83CQM40QCoxZUrJ90FAAAQS3cBABfw UG4qPg7IRncBAADE0l1VeXJ3KbYtJ/LmAwDv010AwLvsUAC8prsAAABi6S4AAIBYugsAoCEPf0Iq uqskX6oBAACF6C4AAGjFHn1CugsAACCW7oLsPKA/nY8AKMr4gjx0FwAAQCzdBQAA3fgVr2x0Vz3O IgAAqEV3AQBAQzbrU9FdxTh/AID9fLUGJKG7IDXXSwCABnQXAPAWO0QAv9JdAAAAsXQXAAD05KsB 8tBdlThzVuPRHSA/kwpgD90FANCZNoYMdFcZbnYBAHCUNWQSugsAOMmNFICddBcAAEAs3QVJ2UUG AC7hUcMMdBcAAEAs3VWDXQoAsnFbHmA/3QUZWc0AAHSiuwAAoDkPT02nuwAAAGLpLkjHQ4ZAfiYV wCG6CwAAIJbuAgBozv1JmE53AfzOkgW+ckYAHKW7AAAAYumuAnzv51LsIgMAESwp59JdAAAAsXQX AHCA2/IAJ+guAACAWLorO0/iLsUuMgBAS7oLANjL9hDAOboLAAAglu6CLOwiAwB0pbsAdhHGAMBp ugsA2MXuQ2k+PphLdwEAAMTSXQB72S1mZY5/aMAfKJpId0EKFjQAAI3prtTsSQAAQAO6C+AAdyZZ kyMf4E26CwAAIJbugvlsJAOZmVEA79NdAMdYgwIAR+kuAOApGw2d+DRhIt0FAAAQS3fBZHYfK/Kp sQiHOsBVdBcAAKzCn4edRXfl5awAYCI3uwAupLsAzrAkBQD2010AAACxdBcA8J07ul35ZGEW3QVw kuULALCT7oKZLNwBgMF8edsUugsA+IstIYDL6S4AgIXoaphCdyXl/i+UYPlCP45qgAi6CwAAIJbu 
AgAAiKW7YBoP8/Tgc6QTxzNAEN0FAAAQS3cBvMstAqAWUwtf4Tae7gIAbjdrcYBIugsAACCW7gIA AIiluwAu4AEtqnMMA4TSXQAAALF0FwCszs0uWJCvNBxMdwFcw8oVAHhGd8Ec1uhAEsYRwAC6KyO3 fQEYQ3Qty0cPg+kugMtYxwAAD+kuAFiUnQKAYXQXwJUsZKnCsQowku4CAACIpbvS8aUaUJ3bCOTn KAUYTHcBwFpEF8B4ugvgeta1pOXgBJhCdwHAKkQXwCy6CwCWILoAJtJdufhSDWjDGhcA+KS7YAIr cmAwYwdgLt0FEMVKlwy2bXMoAkynuwAAAGLpLoBA7jMwlyMQIAndBaNZBgFjmDYAeeiuRHyZIbRk 7ct4fqeLPRwkMJLuAghnccNIjjeAhHQXAABALN0FMIJbEAzg8UKAtHQXwCAWxIRygAFkprsAxrEy JohDCyA53QUwlPUxl3NQAef4Mu2RdFcWjntYh1UyF3I4AZSguwAmsFbmEg4k9rC3CxnoLoA5rJh5 k0OInRwqkIHuApjGYojTHDwAtegugJmsnjnBYQNcxWOow+gugMmsoQGgPd2Vgp0GWJz0Yqdt2xwt XMXyA0b6d/YLAOB2+396WQbxkNYCghgvw+gugETUF99YEgH04DlDgHQ8S8YHhwEQzU7fMO53ASTl 3tfKFBdAM7oLIDX1tRrFBdCS7prPcgr4lfpageICaMzvdwGUYV3emA8XoDf3uwAq+bo6d/urB8UF sALdBVCVhw+rU1wA6/CcIUBtvnS+Ip8akIf9uzHc7wLowL2vKuQWwJrc7wLow12UzHw6ZOOAhJHc 75rM5jRwuc+1lAmTgaUtADf3uwAac4NlLu8/UIV9ugHc7wJozu2v8eQWAN/oLoBVCLA4QouKjAIY SXcBLMeXH15CawGd3O93Yy2U7prJogeY6OH19ed1d81JZfEBwLV0FwB//OyNRTJMaAEQSncBcMCz u2Rf/2mhNpNbAIyhuwB417d6+fYFHq9TbTytBcB4uguAKC8K59k/erPHtm17+L+gtQB+5as1Quku ABL5esnfU1Bf/52Pf2TRAEBCuguApPYUlMqCcwr9Hib08M/sFwBrsUYEAFiQ7gIAAIiluwAAAGLp rmk8Vw0AAIvQXQAAa7H5yzOOjTi6CwAAIJbuAgAAiKW7pvF94gAAsAjdBQAA3G5uDETSXdP4tUUA AFKxQI2juwAAAGLprmncxgUAgEXoLgAAgFi6C4by2DQAwIJ0F4wmvQAAVqO7AAAAYukuAACAWLoL AAAglu6C0fwJAQCA1eguGM33agAArEZ3AQAA/7FBHER3AQAAxNJdMIGdJAAgJ7+IHkR3AQAAxNJd AAAAsXQXAABALN0Fc/gVLwCAdeguAACAWLoLAAAglu6CaTxqCACk4kvk4+guAACAWLoLAAAglu4C AFiIp9xhCt0FM7n4AQCsQHcBAADE0l0wmVteAADt6S4AgFXY7INZdBcAAEAs3QUAABBLdwEAAMTS XTCfp+0BAHrTXQAAALF0FwDAEjxeARPpLkjBtRAAmGvbttkvoTPdBQAAEEt3QRZueQEAdKW7AAAA YukuAACAWLoLEvGoIQBBXGJ4zZdqRNNdAAAAsXQXAABALN0FuXgOBACgH90FAAAQS3cBAADE0l2Q jkcNAbiWKwtMp7sAAABi6S4AAFiaP941gO4CAACIpbsgIw/iA3AV1xTIQHcBAMC6PGQ4hu4CAACI pbsAANrykCEkobumMQcBAJjOonQM3QVJGYIAvMmlBPLQXQAAALF0F+RlnxKA01xEIBXdBQAAEEt3 AQAAxNJdAADdeMiQ/fzd5DF0F6TmwgnAUa4dHOKAGUN3AQAAxNJdkN39frcRBcBOLhmQk+6CGtQX 
AEBdugsqkV4AvOAyAWnpLijGjS8AHnJ1gMx0F5SkvgD4ykWB03yP/Bi6CwpzlQXg5nIAFeguqM2N L4DFuQpACboLOnDRBViQrTcoRHdBE66+AEsx86EW3QWtuAwDrMC0h3J01zS+OoYgLsYAvZnzUJHu goZckgFa8kg51KW7oCfXZoBmTHWCOLTG0F3QmfoC6MEwh+p0F/Tnag1Qlx00BnCMDfDv7BcAjPAx T32bC0AhlsLQie6Chdzvd+kFkJbQgsZ0F6zFjS+APIQWrMPm92QGLhM5/QFmsQAgG6uCaLprMmOX uUwAgJFc98nMqiCU5wxhaR47BIimtUhu2zZH6QDud83nQCcPAwHgKq7vlGMZEMr9LuAPX3gIsNPD gam1gGf83WTgL/5AJ8AedqmAQ9zvAh7we18At91PAdiuAn6lu4Cn1BewuF8HoOICdtJdwC/U1we/ /AZ8klvAUboL2EV93aQXLE9uAadZQ6RgjlOO0QG05+rMalzcQ7nfBZzhzg/QldwCIugu4KSfSxMl Bs187rBUf9JYSgHT2bFOwfWAfswWKG3PhSntae6qCuekPal70F0puEKwCAMHkjtxPfp6Xr/+r3/e OgsaBS6mcI6r8xi6KwtXC1Zz+fDxK2dwQp6rz7Zt317MizM6z8uGBlw9x7BMycIlBD59m0tff7Hk kjPF3AMXHeCTy+IYuisLl0CYyzCkN1cZ4BlXwDF0VxauiFDFz9txBilpubgAe7iQDWC5kIirIzRg qJKBCwpwgktYKH+/CwD6UFwAOf0z+wUAtGLVC0BRLmGhdFci7u1CD/f/m/1CAIAsdBdAFPXFYI43 gLR0Vy5ueQEAMIvtmzi+VwMAyrNUAkjO/S6AQG5iAwA33QUA1bnZBVzFdmEc3QUQyIKYaI4xgBJ0 Vzq2GaAZy2LiOLoAqtBdAFCS6AIoRHcBhLM+BiA/T12F0l0ZOeihH+kFACvTXQCDSC8u5HACLne/ 382WOP5uclLbtjnuAQAYw/NW0dzvAhjHfgqXcCABlKO7AABgaW52DaC7AIZypwIAFqS78rLxAAAA PegugNHc8uIdjh/gWvb6x9BdABNYOgPAUnRXarYfAACgAd0FMIdbXpzgsAEoSncBAMCiPF01jO7K zskAwAc3uwDq0l0AAACxdFcBbnkB4GYXQGm6C2AaK2kApnMxGkN3AQDAirZtE13D6K4aPGoIXbng sYfjBKA63QUAAOuyvz+G7gIAAIiluwAAYDkft7nc7BpGd5XhrAAAgKJ0F8BkvjIBANrTXQDzSS8A RvIg1Xi6CwAAFiK6ptBdlThJoDG3vACgMd0FAKlpcuBC9vFn0V3FOFUAAKAc3QUAABBLd9Xjlhd0 5XEyAEJZRk6kuwAAAGLprpLsVQAAcIgF5Fy6CwAAIJbuAkjEr3gBEMHNrul0V1VOHoAVSHGAHnQX AAB0Zr8+A91VmFMIoDc3uwDa0F0AANCWnfokdFdtTiQAAJ6xVsxDd9nbn+0AAAiZSURBVAEAQEOi KxXdVZ4zCqAlv9wF0InuAgCAbmzNZ6O7OnBeAQDwyeIwId3VhLMLoBMPGQI08+/sFwAAAFzDXnxa 7nf14TQDAFiZ1WBm7ncBAEBhcqsE97tacdYBAEBCugsAAKqy7V6F7urGuQcAANnoroakF9Tl/AWA lnQXAACUZLeuEN3Vk5MQAKA3671adFdbTkUAgKXc7/fZL4GndBcAABRjh70c3dXZtm3OSSjECQvA O1xHMtNdAABQib6qSHf158wEAKjOiq463QUAAKl9jS4BVpTuWoLzE6AWcxv4JLp60F2rcJYCAJTz uYS73++Wc6XpLgAAyMidrk5010KcrgCFGNqwMn8NqB/dtRbnMABAclZrLekuAADI4iO67vf77BfC 
xfx+3rqcz5CNgcxPZjUsxYWgMfe71uXEBgDIw9qsN921NKc3QHIGNazAb+CvQHetzkkOADDRz8WY B4xb0l1IL4DUTGlo7NsJfr/fRVdXuovbzUUdAGCsn88WfhaXhVlLuov/OMMBAMZ4se6yJOtKd/GH 8xwAINqzFZdv1+hNd/EXZztAQoYzQHW6i+/stQAABHn2O1209+/sF0BS27YZBAAAV1Fci3O/i6fc 9QLIw0yG0nx1Ie538crHLLAfAwO49AJ09TC6jP3V3H3k7CG9IJppzB6mMZRjvPPBc4bsYmRAKKcY QD++q4yvdBd7mR0QxJkF0I/Zzje6i2MMEbiWc4pDHDBQglOVn3yvBof5sg0AgJ8e5tb97vsUuN3c 7+I0EwTe5zziBIcN5CS6eM2hwFvc9YJ3mMCcZvxCHoY5e7jfxVt82Qac5twBaMAwZyfdxQVMHDjK WcObHEIAtXjOkCt57gX2MHi5iqkLExnmHOJ+F1cygOBXThMu5HCCWZx9HKW7uJgxBAA05pfbOUd3 cT3DCJ5xdgDU9bC4PO7LTrqLEBaX8JPzggi23mEAJxrv011EMZ4AgOp+LS4LHnbyfYbEcvMdPhi2 RDNv4VrmNtdyv4tYZhbcnAgM4TCDCzmhuJzuIpzJxeKcAgCF/Hyw0M1kLuE5Q8YxtliQGcssRi4c 8nBcf55Hhjnv010MZR3AUgxY5jJyYY9ns1p0cS3dxQSWAqzAdCUD8xYeMqIZT3cxjdUAjRmt5GHY wgeTmbl0F5NZENCS0UoqJi0rM5BJQneRgjUBnZirJGTMsiDTmFR0F4lYFtCAoUpmxiyLMIpJSHeR jmUBdZmo5GfG0ps5TFq6i6SsDKjIRKUEA5aWTGCS012kZnFAFWYptZiudGICU4LuogZLBDIzSKnI XKUB45dCdBf1WCuQhxFKdSYqRRm/lKO7KMxygbnMT9owTinB1KU03UUTFg2MZHLSjylKQoYtnegu urF0IJqxSW+mKNMZs7Sku+jM6oHLmZmswPBkPNOV9nQXS7CG4BIGJuswNolmorIa3cWiLCnYz5yk sfv96UrAnORyxikr013wh0UG35iQYDB29THfxny+ZincdBfs8bkfbP2xCIMRfjIAT9u27ePd+3Yp +Tpqrn17P/4fjTJIxTkJ17Ai6cFIhD1MvGemzBCJBSU4UWE065WETEI4wTS7mR7AbroLUvjYrfz6 8IkFzRhmIFxlnallbgAn6C4oZp2VTSijD0K12UUyK4Cr6C5oovTKZgzjDnKKi7TGZ/3XN6rxjwmd 6C5YwrJVZsRBKr4BAliW8QcrapxhZhoAkJDuAjpkmFEGvPD1z2cBTKG7gMce/pXPVIwvAKAK3QUc 9jDD3hwmL9Lu81ftzSsAoCjdBQAAVfm6mip8TgAAALH+mf0CAAAAmtNdAAAAsXQXAABALN0FAAAQ S3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAA sXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAA EEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAA ALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBcAAEAs3QUA ABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cBAADE0l0A AACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJdAAAAsXQXAABALN0F 
AAAQS3cBAADE0l0AAACxdBcAAEAs3QUAABBLdwEAAMTSXQAAALF0FwAAQCzdBQAAEEt3AQAAxNJd AAAAsXQXAABALN0FAAAQS3cBAADE0l0AAACxdBfAYff7ffZLAAAquW/bNvs1AAAAdOZ+FwAAQCzd BQAAEEt3AQAAxNJdAAAAsXQXAABALN0FAAAQS3cl4i8CAQBAS/5+FwAAQCz3uwAAAGLpLgAAgFi6 CwAAIJbuAgAAiKW7AAAAYukuAACAWLoLAAAglu4CAACIpbsAAABi6S4AAIBYugsAACCW7gIAAIil uwCAJu73++yXAPDYfdu22a8BAACgM/e7AAAAYukuAACAWLoLAAAglu4CAACIpbsAAABi6S4AAIBY ugsAACCW7gIAAIiluwAAAGLpLgAAgFi6CwAAIJbuAgAAiKW7AAAAYukuAACAWLoLAAAglu4CAACI pbsAAABi6S4AAIBYugsAACCW7gIAAIiluwAAAGLpLgAAgFi6CwAAIJbuAgAAiKW7AAAAYukuAACA WLoLAAAglu4CAACIpbsAAABi6S4AAIBYugsAACCW7gIAAIiluwAAAGLpLgAAgFi6CwAAIJbuAgAA iKW7AAAAYukuAACAWLoLAAAglu4CAACI9T/cpLHUlkYrNQAAAABJRU5ErkJggg== "
- height="367.39426"
- width="428.99997" />
+ <g
+ transform="matrix(0.20036166,0,0,0.20036166,28.277456,17.580208)"
+ id="g1745-3">
+ <path
+ sodipodi:nodetypes="cccccscscccccccccsccscsccc"
+ inkscape:connector-curvature="0"
+ id="path904-6-67"
+ d="m 33.334478,102.25658 -1.276087,23.80689 -0.0061,0.003 c -0.08934,1.57587 -0.55403,1.79754 -0.644545,1.88447 -0.420443,0.40386 -0.969288,0.27991 -1.431912,0.32133 -0.189719,0.0171 -0.274798,1.15962 0.04972,1.15962 0.403098,0 2.284665,-0.0926 2.707616,-0.0918 0.34244,5.3e-4 2.446519,0.0918 2.947819,0.0918 0.243445,0 0.370705,-1.03417 0.105626,-1.06723 -0.334434,-0.0417 -1.200125,-0.31005 -1.274063,-0.75434 -0.01017,-0.0614 -0.02059,-0.35182 -0.02945,-0.91973 l 0.703497,-13.30253 10.175621,17.27235 9.996785,-19.87488 1.204659,15.57675 c 0.0199,0.79989 -0.03947,1.50889 -0.07995,1.58096 -0.158144,0.28144 -0.595899,0.45144 -1.099501,0.58649 -0.3945,0.10571 -0.411138,0.90798 0.161424,0.90798 1.064556,0 2.202444,-0.25797 2.625396,-0.25717 0.342437,5.3e-4 3.41059,0.24547 4.337927,0.21604 0.374592,-0.0113 0.485723,-0.78623 0.09622,-0.87103 -0.660138,-0.14372 -1.863341,-0.68218 -2.085916,-1.20514 -0.04562,-0.10717 -0.451347,-1.17783 -0.52673,-1.76188 L 56.99415,102.32777 45.888836,124.45753 Z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccscccccccsccccscccccccccccccccccc"
+ inkscape:connector-curvature="0"
+ id="path906-7-5"
+ d="m 66.900973,102.70528 c -0.312317,-0.0246 -0.418637,0.62952 -0.05394,0.71112 1.198705,0.26826 1.543521,1.04959 1.766094,1.57255 0.04894,0.11494 0.445239,1.36695 0.44487,1.85292 -0.0047,6.12333 0.345802,12.15388 0.135426,18.22902 0.100256,1.2438 -0.193648,2.39 -0.509132,2.93519 -0.18505,0.31978 -0.884081,0.66664 -1.440569,0.67016 -0.227745,0.001 -0.331782,0.75651 -0.0072,0.75651 4.310608,-0.025 8.883044,0.006 12.816334,-0.002 l 2.7e-4,-0.0188 c 0.43124,-0.43502 1.5967,-2.08483 1.82958,-3.27069 0.18948,-0.52535 0.18474,-0.98124 -0.50683,-0.0127 -0.4636,0.64927 -0.59031,0.90175 -1.32429,1.151 -1.82289,0.3852 -4.59972,0.38331 -6.355561,0.0383 -0.986536,-0.19387 -1.839515,-0.67769 -1.853692,-2.07823 v -7.99646 h 5.990643 c 0.739256,0 1.31716,1.03942 1.3397,1.60391 0.0185,0.46295 0.74548,0.48307 0.75459,0.008 l 0.026,-4.50003 c 0.004,-0.53369 -0.7806,-0.56271 -0.7806,0.008 0,0.73447 -0.921892,0.73171 -1.42577,0.76963 -1.94137,0 -3.97512,0.0127 -5.953786,0.0127 v -9.80142 h 7.151596 l -2.7e-4,0.008 c 0.85429,0.46268 1.26608,1.01883 1.29024,1.6238 0.0185,0.46295 0.62893,0.48309 0.63805,0.008 l 0.0264,-3.43815 h -1.88473 -7.235748 c -0.478738,-0.0866 -0.972265,-0.15524 -1.413794,-0.20809 -1.617876,-0.19366 -2.63534,-0.4144 -3.398684,-0.6258 -0.02257,-0.006 -0.04421,-0.01 -0.06502,-0.0113 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccccccccccccscsccccccc"
+ inkscape:connector-curvature="0"
+ id="path908-5-3"
+ d="m 94.757975,103.02543 c -0.11408,2e-4 -0.22964,0.003 -0.34695,0.006 -2.58595,0.0843 -6.698092,2.61617 -6.539972,6.86354 0.08677,2.33091 2.746652,5.60719 5.031322,7.5844 2.17308,1.88065 5.586685,3.44697 5.546319,6.10751 -0.051,2.28483 -1.805601,4.27404 -5.270909,4.27404 -2.16525,-0.11423 -3.141615,-0.16572 -4.533845,-1.52671 -0.56065,-0.552 -0.693139,-1.85564 -0.705019,-2.51927 l -1.239776,-0.008 -0.0264,4.0512 c 1.89697,1.15258 4.30363,1.78611 6.4362,1.74849 5.35562,-0.13555 7.674295,-3.30385 7.756735,-6.55447 0.20533,-4.55913 -3.157431,-6.63582 -5.844601,-8.78074 -2.23068,-1.78054 -4.734939,-3.06638 -4.734939,-6.03352 0,-1.27234 2.035285,-3.41774 4.601495,-3.57283 1.467869,-4.8e-4 2.663536,0.16339 3.582067,1.19225 0.523978,0.58691 0.91614,1.19386 0.92798,1.8575 l 0.983328,0.0553 -0.0352,-4.07423 c -0.581995,0.0175 -1.212055,-0.0524 -1.771445,-0.16946 -0.0364,-0.008 -0.0666,-0.01 -0.0969,-0.0127 -0.97967,-0.18924 -2.20726,-0.49013 -3.71961,-0.4877 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="ssscsssccs"
+ inkscape:connector-curvature="0"
+ id="path910-3-5"
+ d="m 119.7033,102.99102 c -7.99363,0 -13.58284,5.8792 -13.58284,14.60148 0,6.01803 5.49465,12.43311 14.00323,12.43311 9.22108,0 13.84717,-7.21361 13.81967,-13.4888 -0.0281,-6.41474 -4.43284,-13.54579 -14.24006,-13.54579 z m 10.30799,14.57345 c -0.12272,8.03534 -4.17824,10.62033 -9.57356,10.69443 -6.14582,0.0844 -10.59665,-6.47237 -10.61036,-12.01292 -0.12109,-10.12036 6.15743,-11.49973 9.07137,-11.58569 8.43605,0.16046 11.18568,8.11371 11.11255,12.90418 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="csccsccscscccccsccccccc"
+ inkscape:connector-curvature="0"
+ id="path917-5-62"
+ d="m 139.89961,102.70719 -0.37324,23.7168 c -0.014,0.89103 -0.17468,1.31752 -0.31805,1.554 -0.24039,0.39655 -0.86073,0.68583 -1.47275,0.6826 -0.26436,-0.001 -0.30089,0.82643 0.0236,0.82643 0.75747,0 2.67875,-0.21072 3.1017,-0.20992 0.34242,5.3e-4 1.8727,0.15573 3.08741,0.15657 0.61256,0 0.41837,-0.76043 0.095,-0.81871 -0.27055,-0.0488 -1.24066,-0.44168 -1.31459,-0.82412 -0.0284,-0.1467 -0.15685,-1.69371 -0.16868,-2.66273 -0.059,-4.83509 -0.071,-9.53141 -0.0523,-14.44595 l 21.14337,19.71286 -0.0541,-23.80413 c 0,-0.68938 0.17375,-1.20182 0.37143,-1.55761 0.28835,-0.51901 0.91397,-0.60476 1.40691,-0.64762 0.25291,-0.022 0.36676,-0.86034 -0.0658,-0.86034 -0.53725,0 -2.50677,0.0341 -3.07048,0.0331 -0.45641,-7.3e-4 -2.79852,-0.16948 -3.46655,-0.18148 -0.59648,-0.01 -0.51589,0.90058 0.0537,0.89521 1.1222,-0.01 1.47204,0.66995 1.7443,1.68232 0.0443,0.16517 0.21934,0.83734 0.29759,2.23392 l 0.13745,15.66736 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ </g>
+ <path
+ style="fill:#39207c;fill-opacity:1;stroke:none;stroke-width:1.71902049;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="M 181.30469 74.429688 C 161.46687 85.630004 149.15002 106.61558 149.03711 129.41016 C 169.79161 140.72574 194.16258 140.33981 213.04883 129.5293 C 213.05119 129.47468 213.05406 129.41962 213.05859 129.36523 C 213.03478 106.69297 200.93117 85.75333 181.30469 74.429688 z M 191.98438 98.699219 C 194.92554 100.69787 194.48939 105.50984 194.01562 108.80469 C 193.41277 112.43416 189.93299 118.40107 185.41211 122.29492 C 185.99691 123.1334 186.3389 124.16595 186.33398 125.26758 C 183.53249 126.68366 180.75263 125.08608 177.39062 122.20703 C 174.02862 119.32795 170.40256 113.55973 169.19922 107.63086 C 168.13189 107.74388 167.03897 107.50536 166.10156 106.91406 C 166.3464 103.58956 169.87486 101.6149 173.00195 100.44922 C 176.12905 99.283537 184.18515 98.888979 189.86523 100.9082 C 190.30813 99.978311 191.04179 99.196513 191.98438 98.701172 L 191.98438 98.699219 z "
+ transform="scale(0.26458333)"
+ id="path817-6-2-9-7-9-93-9-8-1" />
+ <g
+ transform="matrix(0.20036166,0,0,0.20036166,99.991867,3.1795752)"
+ id="g1745-3-3">
+ <path
+ sodipodi:nodetypes="cccccscscccccccccsccscsccc"
+ inkscape:connector-curvature="0"
+ id="path904-6-67-6"
+ d="m 33.334478,102.25658 -1.276087,23.80689 -0.0061,0.003 c -0.08934,1.57587 -0.55403,1.79754 -0.644545,1.88447 -0.420443,0.40386 -0.969288,0.27991 -1.431912,0.32133 -0.189719,0.0171 -0.274798,1.15962 0.04972,1.15962 0.403098,0 2.284665,-0.0926 2.707616,-0.0918 0.34244,5.3e-4 2.446519,0.0918 2.947819,0.0918 0.243445,0 0.370705,-1.03417 0.105626,-1.06723 -0.334434,-0.0417 -1.200125,-0.31005 -1.274063,-0.75434 -0.01017,-0.0614 -0.02059,-0.35182 -0.02945,-0.91973 l 0.703497,-13.30253 10.175621,17.27235 9.996785,-19.87488 1.204659,15.57675 c 0.0199,0.79989 -0.03947,1.50889 -0.07995,1.58096 -0.158144,0.28144 -0.595899,0.45144 -1.099501,0.58649 -0.3945,0.10571 -0.411138,0.90798 0.161424,0.90798 1.064556,0 2.202444,-0.25797 2.625396,-0.25717 0.342437,5.3e-4 3.41059,0.24547 4.337927,0.21604 0.374592,-0.0113 0.485723,-0.78623 0.09622,-0.87103 -0.660138,-0.14372 -1.863341,-0.68218 -2.085916,-1.20514 -0.04562,-0.10717 -0.451347,-1.17783 -0.52673,-1.76188 L 56.99415,102.32777 45.888836,124.45753 Z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccscccccccsccccscccccccccccccccccc"
+ inkscape:connector-curvature="0"
+ id="path906-7-5-7"
+ d="m 66.900973,102.70528 c -0.312317,-0.0246 -0.418637,0.62952 -0.05394,0.71112 1.198705,0.26826 1.543521,1.04959 1.766094,1.57255 0.04894,0.11494 0.445239,1.36695 0.44487,1.85292 -0.0047,6.12333 0.345802,12.15388 0.135426,18.22902 0.100256,1.2438 -0.193648,2.39 -0.509132,2.93519 -0.18505,0.31978 -0.884081,0.66664 -1.440569,0.67016 -0.227745,0.001 -0.331782,0.75651 -0.0072,0.75651 4.310608,-0.025 8.883044,0.006 12.816334,-0.002 l 2.7e-4,-0.0188 c 0.43124,-0.43502 1.5967,-2.08483 1.82958,-3.27069 0.18948,-0.52535 0.18474,-0.98124 -0.50683,-0.0127 -0.4636,0.64927 -0.59031,0.90175 -1.32429,1.151 -1.82289,0.3852 -4.59972,0.38331 -6.355561,0.0383 -0.986536,-0.19387 -1.839515,-0.67769 -1.853692,-2.07823 v -7.99646 h 5.990643 c 0.739256,0 1.31716,1.03942 1.3397,1.60391 0.0185,0.46295 0.74548,0.48307 0.75459,0.008 l 0.026,-4.50003 c 0.004,-0.53369 -0.7806,-0.56271 -0.7806,0.008 0,0.73447 -0.921892,0.73171 -1.42577,0.76963 -1.94137,0 -3.97512,0.0127 -5.953786,0.0127 v -9.80142 h 7.151596 l -2.7e-4,0.008 c 0.85429,0.46268 1.26608,1.01883 1.29024,1.6238 0.0185,0.46295 0.62893,0.48309 0.63805,0.008 l 0.0264,-3.43815 h -1.88473 -7.235748 c -0.478738,-0.0866 -0.972265,-0.15524 -1.413794,-0.20809 -1.617876,-0.19366 -2.63534,-0.4144 -3.398684,-0.6258 -0.02257,-0.006 -0.04421,-0.01 -0.06502,-0.0113 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccccccccccccscsccccccc"
+ inkscape:connector-curvature="0"
+ id="path908-5-3-5"
+ d="m 94.757975,103.02543 c -0.11408,2e-4 -0.22964,0.003 -0.34695,0.006 -2.58595,0.0843 -6.698092,2.61617 -6.539972,6.86354 0.08677,2.33091 2.746652,5.60719 5.031322,7.5844 2.17308,1.88065 5.586685,3.44697 5.546319,6.10751 -0.051,2.28483 -1.805601,4.27404 -5.270909,4.27404 -2.16525,-0.11423 -3.141615,-0.16572 -4.533845,-1.52671 -0.56065,-0.552 -0.693139,-1.85564 -0.705019,-2.51927 l -1.239776,-0.008 -0.0264,4.0512 c 1.89697,1.15258 4.30363,1.78611 6.4362,1.74849 5.35562,-0.13555 7.674295,-3.30385 7.756735,-6.55447 0.20533,-4.55913 -3.157431,-6.63582 -5.844601,-8.78074 -2.23068,-1.78054 -4.734939,-3.06638 -4.734939,-6.03352 0,-1.27234 2.035285,-3.41774 4.601495,-3.57283 1.467869,-4.8e-4 2.663536,0.16339 3.582067,1.19225 0.523978,0.58691 0.91614,1.19386 0.92798,1.8575 l 0.983328,0.0553 -0.0352,-4.07423 c -0.581995,0.0175 -1.212055,-0.0524 -1.771445,-0.16946 -0.0364,-0.008 -0.0666,-0.01 -0.0969,-0.0127 -0.97967,-0.18924 -2.20726,-0.49013 -3.71961,-0.4877 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="ssscsssccs"
+ inkscape:connector-curvature="0"
+ id="path910-3-5-3"
+ d="m 119.7033,102.99102 c -7.99363,0 -13.58284,5.8792 -13.58284,14.60148 0,6.01803 5.49465,12.43311 14.00323,12.43311 9.22108,0 13.84717,-7.21361 13.81967,-13.4888 -0.0281,-6.41474 -4.43284,-13.54579 -14.24006,-13.54579 z m 10.30799,14.57345 c -0.12272,8.03534 -4.17824,10.62033 -9.57356,10.69443 -6.14582,0.0844 -10.59665,-6.47237 -10.61036,-12.01292 -0.12109,-10.12036 6.15743,-11.49973 9.07137,-11.58569 8.43605,0.16046 11.18568,8.11371 11.11255,12.90418 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="csccsccscscccccsccccccc"
+ inkscape:connector-curvature="0"
+ id="path917-5-62-5"
+ d="m 139.89961,102.70719 -0.37324,23.7168 c -0.014,0.89103 -0.17468,1.31752 -0.31805,1.554 -0.24039,0.39655 -0.86073,0.68583 -1.47275,0.6826 -0.26436,-0.001 -0.30089,0.82643 0.0236,0.82643 0.75747,0 2.67875,-0.21072 3.1017,-0.20992 0.34242,5.3e-4 1.8727,0.15573 3.08741,0.15657 0.61256,0 0.41837,-0.76043 0.095,-0.81871 -0.27055,-0.0488 -1.24066,-0.44168 -1.31459,-0.82412 -0.0284,-0.1467 -0.15685,-1.69371 -0.16868,-2.66273 -0.059,-4.83509 -0.071,-9.53141 -0.0523,-14.44595 l 21.14337,19.71286 -0.0541,-23.80413 c 0,-0.68938 0.17375,-1.20182 0.37143,-1.55761 0.28835,-0.51901 0.91397,-0.60476 1.40691,-0.64762 0.25291,-0.022 0.36676,-0.86034 -0.0658,-0.86034 -0.53725,0 -2.50677,0.0341 -3.07048,0.0331 -0.45641,-7.3e-4 -2.79852,-0.16948 -3.46655,-0.18148 -0.59648,-0.01 -0.51589,0.90058 0.0537,0.89521 1.1222,-0.01 1.47204,0.66995 1.7443,1.68232 0.0443,0.16517 0.21934,0.83734 0.29759,2.23392 l 0.13745,15.66736 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ </g>
+ <path
+ style="fill:#39207c;fill-opacity:1;stroke:none;stroke-width:0.99843764;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="M 373.3418 81.814453 C 361.82049 88.32 354.6691 100.50911 354.60352 113.74805 C 366.6581 120.32034 380.81178 120.09534 391.78125 113.81641 C 391.78261 113.78466 391.78414 113.75422 391.78516 113.72266 C 391.77004 100.55476 384.7423 88.391622 373.34375 81.814453 L 373.3418 81.814453 z M 379.54688 95.910156 C 381.25515 97.071012 381.00171 99.865586 380.72656 101.7793 C 380.37643 103.88736 378.35627 107.35361 375.73047 109.61523 C 376.0701 110.10224 376.26642 110.70195 376.26367 111.3418 C 374.63651 112.16428 373.02302 111.23666 371.07031 109.56445 C 369.1176 107.89224 367.01143 104.54125 366.3125 101.09766 C 365.69258 101.1633 365.05814 101.02509 364.51367 100.68164 C 364.65588 98.75071 366.70521 97.602831 368.52148 96.925781 C 370.33776 96.248751 375.01731 96.020561 378.31641 97.193359 C 378.57364 96.653261 378.99941 96.199807 379.54688 95.912109 L 379.54688 95.910156 z "
+ transform="scale(0.26458333)"
+ id="path817-6-2-9-7-9-93-9-8-1-2" />
+ <rect
+ style="fill:#05030c;fill-opacity:1;stroke:#a6a6a6;stroke-width:0.75042528;stroke-miterlimit:4;stroke-dasharray:0.75042529, 3.00170116;stroke-dashoffset:0;stroke-opacity:1"
+ id="rect1921"
+ width="114.10837"
+ height="46.072655"
+ x="34.699532"
+ y="83.369179" />
+ <g
+ id="g2039"
+ transform="translate(-52.349702,9.4494047)">
+ <path
+ sodipodi:nodetypes="cccccscscccccccccsccscsccc"
+ inkscape:connector-curvature="0"
+ id="path904-6-67-2"
+ d="m 94.6371,104.71582 -0.255679,4.76999 -0.0012,6e-4 c -0.0179,0.31574 -0.111006,0.36016 -0.129142,0.37758 -0.08424,0.0809 -0.194208,0.0561 -0.2869,0.0644 -0.03801,0.003 -0.05506,0.23234 0.01,0.23234 0.08076,0 0.457759,-0.0186 0.542502,-0.0184 0.06861,1e-4 0.490189,0.0184 0.59063,0.0184 0.04878,0 0.07428,-0.20721 0.02116,-0.21383 -0.06701,-0.008 -0.240459,-0.0621 -0.255274,-0.15114 -0.002,-0.0123 -0.0041,-0.0705 -0.0059,-0.18428 l 0.140953,-2.66532 2.038805,3.46072 2.002972,-3.98216 0.241368,3.12098 c 0.004,0.16027 -0.0079,0.30232 -0.01602,0.31676 -0.03169,0.0564 -0.119396,0.0905 -0.220298,0.11751 -0.07904,0.0212 -0.08238,0.18193 0.03234,0.18193 0.213296,0 0.441285,-0.0517 0.526029,-0.0515 0.06861,1.1e-4 0.683354,0.0492 0.869154,0.0433 0.075,-0.002 0.0973,-0.15753 0.0193,-0.17452 -0.13227,-0.0288 -0.37334,-0.13669 -0.41794,-0.24147 -0.009,-0.0215 -0.09043,-0.23599 -0.105534,-0.35301 l -0.600761,-4.65456 -2.22508,4.43396 z"
+ style="fill:#fffff1;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccscccccccsccccscccccccccccccccccc"
+ inkscape:connector-curvature="0"
+ id="path906-7-5-0"
+ d="m 101.36254,104.80572 c -0.0626,-0.005 -0.0839,0.12614 -0.0108,0.14248 0.24017,0.0538 0.30926,0.2103 0.35386,0.31508 0.01,0.023 0.0892,0.27389 0.0891,0.37126 -9.5e-4,1.22688 0.0693,2.43517 0.0271,3.65239 0.0201,0.24921 -0.0388,0.47887 -0.10201,0.5881 -0.0371,0.0641 -0.17714,0.13357 -0.28864,0.13428 -0.0456,2e-4 -0.0665,0.15157 -0.001,0.15157 0.86368,-0.005 1.77982,10e-4 2.5679,-4e-4 l 6e-5,-0.004 c 0.0864,-0.0872 0.31992,-0.41772 0.36658,-0.65532 0.038,-0.10526 0.037,-0.19661 -0.10155,-0.003 -0.0929,0.13009 -0.11828,0.18068 -0.26534,0.23062 -0.36524,0.0772 -0.92161,0.0768 -1.27341,0.008 -0.19767,-0.0388 -0.36857,-0.13578 -0.37141,-0.4164 v -1.60218 h 1.20029 c 0.14812,0 0.26391,0.20826 0.26843,0.32136 0.004,0.0928 0.14936,0.0968 0.15119,0.002 l 0.005,-0.90163 c 8e-4,-0.10693 -0.1564,-0.11274 -0.1564,0.002 0,0.14716 -0.18471,0.14661 -0.28567,0.15421 -0.38898,0 -0.79646,0.003 -1.19291,0.003 v -1.96383 h 1.4329 l -5e-5,0.002 c 0.17117,0.0927 0.25367,0.20413 0.25851,0.32534 0.004,0.0928 0.12602,0.0968 0.12785,0.002 l 0.005,-0.68888 h -0.37763 -1.44977 c -0.0959,-0.0173 -0.1948,-0.0311 -0.28327,-0.0417 -0.32416,-0.0388 -0.52802,-0.083 -0.68096,-0.12539 -0.005,-10e-4 -0.009,-0.002 -0.013,-0.002 z"
+ style="fill:#fffff1;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccccccccccccscsccccccc"
+ inkscape:connector-curvature="0"
+ id="path908-5-3-2"
+ d="m 106.94401,104.86987 c -0.0229,4e-5 -0.046,6e-4 -0.0695,10e-4 -0.51812,0.0169 -1.34204,0.52418 -1.31036,1.37519 0.0174,0.46703 0.55033,1.12347 1.00809,1.51962 0.4354,0.37681 1.11936,0.69065 1.11127,1.22372 -0.0102,0.45779 -0.36178,0.85635 -1.05609,0.85635 -0.43383,-0.0229 -0.62946,-0.0332 -0.90841,-0.30589 -0.11233,-0.1106 -0.13888,-0.3718 -0.14126,-0.50477 l -0.2484,-0.002 -0.005,0.8117 c 0.38008,0.23094 0.86228,0.35787 1.28957,0.35033 1.07306,-0.0272 1.53763,-0.66196 1.55415,-1.31326 0.0411,-0.91348 -0.63263,-1.32957 -1.17104,-1.75933 -0.44694,-0.35675 -0.9487,-0.61438 -0.9487,-1.20888 0,-0.25493 0.4078,-0.68479 0.92197,-0.71586 0.2941,-1e-4 0.53367,0.0327 0.71771,0.23888 0.10498,0.1176 0.18356,0.23921 0.18593,0.37217 l 0.19702,0.0111 -0.007,-0.81632 c -0.11661,0.004 -0.24285,-0.0105 -0.35493,-0.034 -0.007,-0.002 -0.0133,-0.002 -0.0194,-0.003 -0.19629,-0.0379 -0.44225,-0.0982 -0.74527,-0.0977 z"
+ style="fill:#fffff1;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="ssscsssccs"
+ inkscape:connector-curvature="0"
+ id="path910-3-5-37"
+ d="m 111.9421,104.86297 c -1.60162,0 -2.72148,1.17797 -2.72148,2.92558 0,1.20578 1.10092,2.49112 2.80571,2.49112 1.84755,0 2.77444,-1.44533 2.76893,-2.70264 -0.006,-1.28527 -0.88817,-2.71406 -2.85316,-2.71406 z m 2.06533,2.91997 c -0.0246,1.60997 -0.83716,2.1279 -1.91818,2.14275 -1.23138,0.0169 -2.12316,-1.29682 -2.12591,-2.40693 -0.0243,-2.02773 1.23372,-2.3041 1.81756,-2.32133 1.69026,0.0322 2.24118,1.62568 2.22653,2.58551 z"
+ style="fill:#fffff1;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="csccsccscscccccsccccccc"
+ inkscape:connector-curvature="0"
+ id="path917-5-62-59"
+ d="m 115.98867,104.80611 -0.0748,4.75193 c -0.003,0.17853 -0.035,0.26398 -0.0637,0.31137 -0.0482,0.0795 -0.17246,0.13741 -0.29509,0.13676 -0.053,-2e-4 -0.0603,0.16559 0.005,0.16559 0.15177,0 0.53672,-0.0422 0.62147,-0.0421 0.0686,1e-4 0.37521,0.0312 0.61859,0.0314 0.12274,0 0.0838,-0.15236 0.019,-0.16404 -0.0542,-0.01 -0.24858,-0.0885 -0.26339,-0.16512 -0.006,-0.0294 -0.0314,-0.33936 -0.0338,-0.53351 -0.0118,-0.96877 -0.0142,-1.90973 -0.0105,-2.89442 l 4.23632,3.94971 -0.0108,-4.76944 c 0,-0.13812 0.0348,-0.2408 0.0744,-0.31208 0.0578,-0.10399 0.18312,-0.12118 0.28189,-0.12976 0.0507,-0.004 0.0735,-0.17238 -0.0132,-0.17238 -0.10764,0 -0.50226,0.007 -0.61521,0.007 -0.0915,-1.5e-4 -0.56071,-0.034 -0.69456,-0.0364 -0.11951,-0.002 -0.10336,0.18044 0.0108,0.17936 0.22485,-0.002 0.29494,0.13424 0.34949,0.33708 0.009,0.0331 0.0439,0.16777 0.0596,0.44759 l 0.0275,3.13914 z"
+ style="fill:#fffff1;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ id="path817-6-2-9-7-9-93-9-8-1-22"
+ d="m 107.65089,86.340169 c -5.24875,2.963417 -8.507587,8.515851 -8.537461,14.546921 5.491291,2.99391 11.939451,2.8918 16.936431,0.0315 5.3e-4,-0.0144 0.001,-0.029 0.003,-0.0434 -0.006,-5.998704 -3.20866,-11.538984 -8.4015,-14.535031 z m 2.82567,6.421313 c 0.77818,0.52881 0.66278,1.801977 0.53743,2.67374 -0.1595,0.960297 -1.08019,2.539037 -2.27634,3.569287 0.15473,0.22185 0.24521,0.49505 0.24391,0.78652 -0.74123,0.374671 -1.47673,-0.048 -2.36626,-0.80977 -0.88953,-0.76176 -1.84893,-2.287933 -2.16731,-3.856613 -0.2824,0.0299 -0.57157,-0.03321 -0.81959,-0.189654 0.0648,-0.879607 0.99835,-1.402069 1.82573,-1.710489 0.82737,-0.30842 2.95888,-0.412813 4.46174,0.121439 0.11718,-0.246033 0.3113,-0.452884 0.56069,-0.583943 z"
+ style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.45482418;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ inkscape:connector-curvature="0" />
+ </g>
+ <g
+ transform="matrix(0.20036166,0,0,0.20036166,28.237911,51.343593)"
+ id="g1745-3-8">
+ <path
+ sodipodi:nodetypes="cccccscscccccccccsccscsccc"
+ inkscape:connector-curvature="0"
+ id="path904-6-67-9"
+ d="m 33.334478,102.25658 -1.276087,23.80689 -0.0061,0.003 c -0.08934,1.57587 -0.55403,1.79754 -0.644545,1.88447 -0.420443,0.40386 -0.969288,0.27991 -1.431912,0.32133 -0.189719,0.0171 -0.274798,1.15962 0.04972,1.15962 0.403098,0 2.284665,-0.0926 2.707616,-0.0918 0.34244,5.3e-4 2.446519,0.0918 2.947819,0.0918 0.243445,0 0.370705,-1.03417 0.105626,-1.06723 -0.334434,-0.0417 -1.200125,-0.31005 -1.274063,-0.75434 -0.01017,-0.0614 -0.02059,-0.35182 -0.02945,-0.91973 l 0.703497,-13.30253 10.175621,17.27235 9.996785,-19.87488 1.204659,15.57675 c 0.0199,0.79989 -0.03947,1.50889 -0.07995,1.58096 -0.158144,0.28144 -0.595899,0.45144 -1.099501,0.58649 -0.3945,0.10571 -0.411138,0.90798 0.161424,0.90798 1.064556,0 2.202444,-0.25797 2.625396,-0.25717 0.342437,5.3e-4 3.41059,0.24547 4.337927,0.21604 0.374592,-0.0113 0.485723,-0.78623 0.09622,-0.87103 -0.660138,-0.14372 -1.863341,-0.68218 -2.085916,-1.20514 -0.04562,-0.10717 -0.451347,-1.17783 -0.52673,-1.76188 L 56.99415,102.32777 45.888836,124.45753 Z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccscccccccsccccscccccccccccccccccc"
+ inkscape:connector-curvature="0"
+ id="path906-7-5-73"
+ d="m 66.900973,102.70528 c -0.312317,-0.0246 -0.418637,0.62952 -0.05394,0.71112 1.198705,0.26826 1.543521,1.04959 1.766094,1.57255 0.04894,0.11494 0.445239,1.36695 0.44487,1.85292 -0.0047,6.12333 0.345802,12.15388 0.135426,18.22902 0.100256,1.2438 -0.193648,2.39 -0.509132,2.93519 -0.18505,0.31978 -0.884081,0.66664 -1.440569,0.67016 -0.227745,0.001 -0.331782,0.75651 -0.0072,0.75651 4.310608,-0.025 8.883044,0.006 12.816334,-0.002 l 2.7e-4,-0.0188 c 0.43124,-0.43502 1.5967,-2.08483 1.82958,-3.27069 0.18948,-0.52535 0.18474,-0.98124 -0.50683,-0.0127 -0.4636,0.64927 -0.59031,0.90175 -1.32429,1.151 -1.82289,0.3852 -4.59972,0.38331 -6.355561,0.0383 -0.986536,-0.19387 -1.839515,-0.67769 -1.853692,-2.07823 v -7.99646 h 5.990643 c 0.739256,0 1.31716,1.03942 1.3397,1.60391 0.0185,0.46295 0.74548,0.48307 0.75459,0.008 l 0.026,-4.50003 c 0.004,-0.53369 -0.7806,-0.56271 -0.7806,0.008 0,0.73447 -0.921892,0.73171 -1.42577,0.76963 -1.94137,0 -3.97512,0.0127 -5.953786,0.0127 v -9.80142 h 7.151596 l -2.7e-4,0.008 c 0.85429,0.46268 1.26608,1.01883 1.29024,1.6238 0.0185,0.46295 0.62893,0.48309 0.63805,0.008 l 0.0264,-3.43815 h -1.88473 -7.235748 c -0.478738,-0.0866 -0.972265,-0.15524 -1.413794,-0.20809 -1.617876,-0.19366 -2.63534,-0.4144 -3.398684,-0.6258 -0.02257,-0.006 -0.04421,-0.01 -0.06502,-0.0113 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccccccccccccscsccccccc"
+ inkscape:connector-curvature="0"
+ id="path908-5-3-6"
+ d="m 94.757975,103.02543 c -0.11408,2e-4 -0.22964,0.003 -0.34695,0.006 -2.58595,0.0843 -6.698092,2.61617 -6.539972,6.86354 0.08677,2.33091 2.746652,5.60719 5.031322,7.5844 2.17308,1.88065 5.586685,3.44697 5.546319,6.10751 -0.051,2.28483 -1.805601,4.27404 -5.270909,4.27404 -2.16525,-0.11423 -3.141615,-0.16572 -4.533845,-1.52671 -0.56065,-0.552 -0.693139,-1.85564 -0.705019,-2.51927 l -1.239776,-0.008 -0.0264,4.0512 c 1.89697,1.15258 4.30363,1.78611 6.4362,1.74849 5.35562,-0.13555 7.674295,-3.30385 7.756735,-6.55447 0.20533,-4.55913 -3.157431,-6.63582 -5.844601,-8.78074 -2.23068,-1.78054 -4.734939,-3.06638 -4.734939,-6.03352 0,-1.27234 2.035285,-3.41774 4.601495,-3.57283 1.467869,-4.8e-4 2.663536,0.16339 3.582067,1.19225 0.523978,0.58691 0.91614,1.19386 0.92798,1.8575 l 0.983328,0.0553 -0.0352,-4.07423 c -0.581995,0.0175 -1.212055,-0.0524 -1.771445,-0.16946 -0.0364,-0.008 -0.0666,-0.01 -0.0969,-0.0127 -0.97967,-0.18924 -2.20726,-0.49013 -3.71961,-0.4877 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="ssscsssccs"
+ inkscape:connector-curvature="0"
+ id="path910-3-5-1"
+ d="m 119.7033,102.99102 c -7.99363,0 -13.58284,5.8792 -13.58284,14.60148 0,6.01803 5.49465,12.43311 14.00323,12.43311 9.22108,0 13.84717,-7.21361 13.81967,-13.4888 -0.0281,-6.41474 -4.43284,-13.54579 -14.24006,-13.54579 z m 10.30799,14.57345 c -0.12272,8.03534 -4.17824,10.62033 -9.57356,10.69443 -6.14582,0.0844 -10.59665,-6.47237 -10.61036,-12.01292 -0.12109,-10.12036 6.15743,-11.49973 9.07137,-11.58569 8.43605,0.16046 11.18568,8.11371 11.11255,12.90418 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="csccsccscscccccsccccccc"
+ inkscape:connector-curvature="0"
+ id="path917-5-62-2"
+ d="m 139.89961,102.70719 -0.37324,23.7168 c -0.014,0.89103 -0.17468,1.31752 -0.31805,1.554 -0.24039,0.39655 -0.86073,0.68583 -1.47275,0.6826 -0.26436,-0.001 -0.30089,0.82643 0.0236,0.82643 0.75747,0 2.67875,-0.21072 3.1017,-0.20992 0.34242,5.3e-4 1.8727,0.15573 3.08741,0.15657 0.61256,0 0.41837,-0.76043 0.095,-0.81871 -0.27055,-0.0488 -1.24066,-0.44168 -1.31459,-0.82412 -0.0284,-0.1467 -0.15685,-1.69371 -0.16868,-2.66273 -0.059,-4.83509 -0.071,-9.53141 -0.0523,-14.44595 l 21.14337,19.71286 -0.0541,-23.80413 c 0,-0.68938 0.17375,-1.20182 0.37143,-1.55761 0.28835,-0.51901 0.91397,-0.60476 1.40691,-0.64762 0.25291,-0.022 0.36676,-0.86034 -0.0658,-0.86034 -0.53725,0 -2.50677,0.0341 -3.07048,0.0331 -0.45641,-7.3e-4 -2.79852,-0.16948 -3.46655,-0.18148 -0.59648,-0.01 -0.51589,0.90058 0.0537,0.89521 1.1222,-0.01 1.47204,0.66995 1.7443,1.68232 0.0443,0.16517 0.21934,0.83734 0.29759,2.23392 l 0.13745,15.66736 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ </g>
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.45482418;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 47.930655,53.45624 c -5.248756,2.963417 -8.507589,8.51585 -8.537463,14.546917 5.491294,2.993913 11.939447,2.891803 16.936434,0.03152 5.29e-4,-0.01445 0.0013,-0.02902 0.0026,-0.04341 -0.0063,-5.9987 -3.208668,-11.53898 -8.401507,-14.535027 z m 2.825668,6.421313 c 0.778182,0.52881 0.662784,1.801977 0.537433,2.673739 -0.159504,0.960298 -1.080196,2.539043 -2.276346,3.569291 0.154729,0.221847 0.245213,0.495043 0.243912,0.786516 -0.741228,0.374671 -1.476732,-0.04802 -2.366264,-0.809771 -0.889529,-0.761756 -1.848925,-2.287931 -2.167308,-3.856611 -0.282398,0.0299 -0.571566,-0.03321 -0.819589,-0.189654 0.06478,-0.879607 0.998352,-1.402069 1.825728,-1.710489 0.827378,-0.30842 2.958888,-0.412813 4.461743,0.121439 0.117184,-0.246033 0.311298,-0.452884 0.560691,-0.583943 z"
+ id="path817-6-2-9-7-9-93-9-8-1-9" />
+ <g
+ transform="matrix(0.20036166,0,0,0.20036166,98.564859,40.45489)"
+ id="g1745-3-3-3">
+ <path
+ sodipodi:nodetypes="cccccscscccccccccsccscsccc"
+ inkscape:connector-curvature="0"
+ id="path904-6-67-6-1"
+ d="m 33.334478,102.25658 -1.276087,23.80689 -0.0061,0.003 c -0.08934,1.57587 -0.55403,1.79754 -0.644545,1.88447 -0.420443,0.40386 -0.969288,0.27991 -1.431912,0.32133 -0.189719,0.0171 -0.274798,1.15962 0.04972,1.15962 0.403098,0 2.284665,-0.0926 2.707616,-0.0918 0.34244,5.3e-4 2.446519,0.0918 2.947819,0.0918 0.243445,0 0.370705,-1.03417 0.105626,-1.06723 -0.334434,-0.0417 -1.200125,-0.31005 -1.274063,-0.75434 -0.01017,-0.0614 -0.02059,-0.35182 -0.02945,-0.91973 l 0.703497,-13.30253 10.175621,17.27235 9.996785,-19.87488 1.204659,15.57675 c 0.0199,0.79989 -0.03947,1.50889 -0.07995,1.58096 -0.158144,0.28144 -0.595899,0.45144 -1.099501,0.58649 -0.3945,0.10571 -0.411138,0.90798 0.161424,0.90798 1.064556,0 2.202444,-0.25797 2.625396,-0.25717 0.342437,5.3e-4 3.41059,0.24547 4.337927,0.21604 0.374592,-0.0113 0.485723,-0.78623 0.09622,-0.87103 -0.660138,-0.14372 -1.863341,-0.68218 -2.085916,-1.20514 -0.04562,-0.10717 -0.451347,-1.17783 -0.52673,-1.76188 L 56.99415,102.32777 45.888836,124.45753 Z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccscccccccsccccscccccccccccccccccc"
+ inkscape:connector-curvature="0"
+ id="path906-7-5-7-9"
+ d="m 66.900973,102.70528 c -0.312317,-0.0246 -0.418637,0.62952 -0.05394,0.71112 1.198705,0.26826 1.543521,1.04959 1.766094,1.57255 0.04894,0.11494 0.445239,1.36695 0.44487,1.85292 -0.0047,6.12333 0.345802,12.15388 0.135426,18.22902 0.100256,1.2438 -0.193648,2.39 -0.509132,2.93519 -0.18505,0.31978 -0.884081,0.66664 -1.440569,0.67016 -0.227745,0.001 -0.331782,0.75651 -0.0072,0.75651 4.310608,-0.025 8.883044,0.006 12.816334,-0.002 l 2.7e-4,-0.0188 c 0.43124,-0.43502 1.5967,-2.08483 1.82958,-3.27069 0.18948,-0.52535 0.18474,-0.98124 -0.50683,-0.0127 -0.4636,0.64927 -0.59031,0.90175 -1.32429,1.151 -1.82289,0.3852 -4.59972,0.38331 -6.355561,0.0383 -0.986536,-0.19387 -1.839515,-0.67769 -1.853692,-2.07823 v -7.99646 h 5.990643 c 0.739256,0 1.31716,1.03942 1.3397,1.60391 0.0185,0.46295 0.74548,0.48307 0.75459,0.008 l 0.026,-4.50003 c 0.004,-0.53369 -0.7806,-0.56271 -0.7806,0.008 0,0.73447 -0.921892,0.73171 -1.42577,0.76963 -1.94137,0 -3.97512,0.0127 -5.953786,0.0127 v -9.80142 h 7.151596 l -2.7e-4,0.008 c 0.85429,0.46268 1.26608,1.01883 1.29024,1.6238 0.0185,0.46295 0.62893,0.48309 0.63805,0.008 l 0.0264,-3.43815 h -1.88473 -7.235748 c -0.478738,-0.0866 -0.972265,-0.15524 -1.413794,-0.20809 -1.617876,-0.19366 -2.63534,-0.4144 -3.398684,-0.6258 -0.02257,-0.006 -0.04421,-0.01 -0.06502,-0.0113 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccccccccccccscsccccccc"
+ inkscape:connector-curvature="0"
+ id="path908-5-3-5-4"
+ d="m 94.757975,103.02543 c -0.11408,2e-4 -0.22964,0.003 -0.34695,0.006 -2.58595,0.0843 -6.698092,2.61617 -6.539972,6.86354 0.08677,2.33091 2.746652,5.60719 5.031322,7.5844 2.17308,1.88065 5.586685,3.44697 5.546319,6.10751 -0.051,2.28483 -1.805601,4.27404 -5.270909,4.27404 -2.16525,-0.11423 -3.141615,-0.16572 -4.533845,-1.52671 -0.56065,-0.552 -0.693139,-1.85564 -0.705019,-2.51927 l -1.239776,-0.008 -0.0264,4.0512 c 1.89697,1.15258 4.30363,1.78611 6.4362,1.74849 5.35562,-0.13555 7.674295,-3.30385 7.756735,-6.55447 0.20533,-4.55913 -3.157431,-6.63582 -5.844601,-8.78074 -2.23068,-1.78054 -4.734939,-3.06638 -4.734939,-6.03352 0,-1.27234 2.035285,-3.41774 4.601495,-3.57283 1.467869,-4.8e-4 2.663536,0.16339 3.582067,1.19225 0.523978,0.58691 0.91614,1.19386 0.92798,1.8575 l 0.983328,0.0553 -0.0352,-4.07423 c -0.581995,0.0175 -1.212055,-0.0524 -1.771445,-0.16946 -0.0364,-0.008 -0.0666,-0.01 -0.0969,-0.0127 -0.97967,-0.18924 -2.20726,-0.49013 -3.71961,-0.4877 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="ssscsssccs"
+ inkscape:connector-curvature="0"
+ id="path910-3-5-3-7"
+ d="m 119.7033,102.99102 c -7.99363,0 -13.58284,5.8792 -13.58284,14.60148 0,6.01803 5.49465,12.43311 14.00323,12.43311 9.22108,0 13.84717,-7.21361 13.81967,-13.4888 -0.0281,-6.41474 -4.43284,-13.54579 -14.24006,-13.54579 z m 10.30799,14.57345 c -0.12272,8.03534 -4.17824,10.62033 -9.57356,10.69443 -6.14582,0.0844 -10.59665,-6.47237 -10.61036,-12.01292 -0.12109,-10.12036 6.15743,-11.49973 9.07137,-11.58569 8.43605,0.16046 11.18568,8.11371 11.11255,12.90418 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="csccsccscscccccsccccccc"
+ inkscape:connector-curvature="0"
+ id="path917-5-62-5-8"
+ d="m 139.89961,102.70719 -0.37324,23.7168 c -0.014,0.89103 -0.17468,1.31752 -0.31805,1.554 -0.24039,0.39655 -0.86073,0.68583 -1.47275,0.6826 -0.26436,-0.001 -0.30089,0.82643 0.0236,0.82643 0.75747,0 2.67875,-0.21072 3.1017,-0.20992 0.34242,5.3e-4 1.8727,0.15573 3.08741,0.15657 0.61256,0 0.41837,-0.76043 0.095,-0.81871 -0.27055,-0.0488 -1.24066,-0.44168 -1.31459,-0.82412 -0.0284,-0.1467 -0.15685,-1.69371 -0.16868,-2.66273 -0.059,-4.83509 -0.071,-9.53141 -0.0523,-14.44595 l 21.14337,19.71286 -0.0541,-23.80413 c 0,-0.68938 0.17375,-1.20182 0.37143,-1.55761 0.28835,-0.51901 0.91397,-0.60476 1.40691,-0.64762 0.25291,-0.022 0.36676,-0.86034 -0.0658,-0.86034 -0.53725,0 -2.50677,0.0341 -3.07048,0.0331 -0.45641,-7.3e-4 -2.79852,-0.16948 -3.46655,-0.18148 -0.59648,-0.01 -0.51589,0.90058 0.0537,0.89521 1.1222,-0.01 1.47204,0.66995 1.7443,1.68232 0.0443,0.16517 0.21934,0.83734 0.29759,2.23392 l 0.13745,15.66736 z"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26458332px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ </g>
+ <path
+ inkscape:connector-curvature="0"
+ style="fill:#000000;fill-opacity:1;stroke:none;stroke-width:0.26416996;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 97.353009,58.922056 c -3.04835,1.721259 -4.94049,4.946295 -4.95784,8.449097 3.18944,1.738919 6.93427,1.679388 9.836611,0.0181 2.7e-4,-0.0084 7.9e-4,-0.01646 0.001,-0.02479 -0.004,-3.484006 -1.86342,-6.70217 -4.879291,-8.442379 z m 1.64176,3.729488 c 0.45198,0.307143 0.38492,1.046541 0.31212,1.552878 -0.0926,0.557757 -0.62713,1.474869 -1.32188,2.073256 0.0899,0.128855 0.14181,0.287528 0.14108,0.456822 -0.43052,0.217614 -0.85742,-0.02782 -1.37408,-0.470257 -0.51665,-0.442439 -1.07391,-1.329055 -1.25884,-2.240172 -0.16402,0.01736 -0.33188,-0.01921 -0.47594,-0.110072 0.0376,-0.510892 0.57985,-0.814601 1.0604,-0.993738 0.48056,-0.17913 1.71869,-0.239506 2.59158,0.0708 0.0681,-0.142901 0.18071,-0.262877 0.32556,-0.338997 z"
+ id="path817-6-2-9-7-9-93-9-8-1-2-4" />
+ <g
+ id="g2084"
+ transform="translate(-9.8273809,24.379464)">
+ <path
+ sodipodi:nodetypes="cccccscscccccccccsccscsccc"
+ inkscape:connector-curvature="0"
+ id="path904-6-67-6-1-0"
+ d="m 114.69321,80.219973 -0.25568,4.769988 -10e-4,6.01e-4 c -0.0179,0.315744 -0.11101,0.360158 -0.12914,0.377576 -0.0842,0.08092 -0.19421,0.05608 -0.2869,0.06438 -0.038,0.0034 -0.0551,0.232343 0.01,0.232343 0.0808,0 0.45776,-0.01855 0.5425,-0.01839 0.0686,1.06e-4 0.49019,0.01839 0.59063,0.01839 0.0488,0 0.0743,-0.207208 0.0212,-0.213832 -0.067,-0.0084 -0.24046,-0.06212 -0.25528,-0.15114 -0.002,-0.0123 -0.004,-0.07049 -0.006,-0.184279 l 0.14096,-2.665317 2.0388,3.460717 2.00297,-3.982164 0.24137,3.120983 c 0.004,0.160267 -0.008,0.302324 -0.016,0.316764 -0.0317,0.05639 -0.11939,0.09045 -0.2203,0.11751 -0.079,0.02118 -0.0824,0.181924 0.0323,0.181924 0.21329,0 0.44128,-0.05169 0.52602,-0.05153 0.0686,1.07e-4 0.68336,0.04918 0.86916,0.04329 0.0751,-0.0023 0.0973,-0.157531 0.0193,-0.174521 -0.13227,-0.0288 -0.37334,-0.136683 -0.41794,-0.241464 -0.009,-0.02147 -0.0904,-0.235992 -0.10554,-0.353014 l -0.60076,-4.654553 -2.22508,4.433955 z"
+ style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccscccccccsccccscccccccccccccccccc"
+ inkscape:connector-curvature="0"
+ id="path906-7-5-7-9-3"
+ d="m 121.41865,80.309875 c -0.0626,-0.0049 -0.0839,0.126132 -0.0108,0.142482 0.24018,0.05375 0.30926,0.210297 0.35386,0.315078 0.01,0.02303 0.0892,0.273885 0.0891,0.371254 -9.4e-4,1.226881 0.0693,2.435172 0.0271,3.652397 0.0201,0.24921 -0.0388,0.478865 -0.10201,0.5881 -0.0371,0.06407 -0.17714,0.133569 -0.28864,0.134274 -0.0456,2e-4 -0.0665,0.151576 -0.001,0.151576 0.86368,-0.005 1.77982,0.0012 2.5679,-4.01e-4 l 6e-5,-0.0038 c 0.0864,-0.08716 0.31991,-0.41772 0.36657,-0.655321 0.038,-0.10526 0.037,-0.196603 -0.10154,-0.0025 -0.0929,0.130088 -0.11828,0.180676 -0.26534,0.230616 -0.36524,0.07718 -0.92161,0.0768 -1.27341,0.0077 -0.19767,-0.03884 -0.36857,-0.135783 -0.37141,-0.416398 v -1.602184 h 1.20029 c 0.14812,0 0.26391,0.20826 0.26843,0.321362 0.004,0.09276 0.14936,0.09679 0.15119,0.0016 l 0.005,-0.901633 c 8e-4,-0.106931 -0.1564,-0.112746 -0.1564,0.0016 0,0.147159 -0.18472,0.146606 -0.28567,0.154204 -0.38898,0 -0.79646,0.0025 -1.19291,0.0025 v -1.963828 h 1.4329 l -5e-5,0.0016 c 0.17117,0.0927 0.25367,0.204134 0.25851,0.325347 0.004,0.09276 0.12602,0.09679 0.12784,0.0016 l 0.005,-0.688874 h -0.37762 -1.44977 c -0.0959,-0.01735 -0.19481,-0.0311 -0.28327,-0.04169 -0.32416,-0.0388 -0.52802,-0.08303 -0.68097,-0.125386 -0.005,-0.0012 -0.009,-0.002 -0.013,-0.0023 z"
+ style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="cccccccccccccscsccccccc"
+ inkscape:connector-curvature="0"
+ id="path908-5-3-5-4-6"
+ d="m 127.00013,80.374021 c -0.0229,4e-5 -0.046,6.01e-4 -0.0695,0.0012 -0.51813,0.01689 -1.34204,0.524181 -1.31036,1.375191 0.0174,0.467025 0.55032,1.123466 1.00808,1.519623 0.43541,0.37681 1.11936,0.69064 1.11127,1.22371 -0.0102,0.457793 -0.36177,0.856354 -1.05608,0.856354 -0.43384,-0.02289 -0.62946,-0.0332 -0.90841,-0.305894 -0.11234,-0.1106 -0.13888,-0.371799 -0.14126,-0.504765 l -0.24841,-0.0016 -0.005,0.811705 c 0.38008,0.230933 0.86228,0.357868 1.28956,0.350331 1.07306,-0.02716 1.53764,-0.661965 1.55416,-1.313265 0.0411,-0.913475 -0.63263,-1.329564 -1.17104,-1.759324 -0.44694,-0.356752 -0.9487,-0.614385 -0.9487,-1.208886 0,-0.254928 0.40779,-0.684784 0.92196,-0.715858 0.29411,-9.6e-5 0.53367,0.03274 0.71771,0.238881 0.10499,0.117595 0.18356,0.239204 0.18593,0.372172 l 0.19703,0.01108 -0.007,-0.816319 c -0.11661,0.0035 -0.24285,-0.0105 -0.35493,-0.03395 -0.007,-0.0016 -0.0133,-0.002 -0.0194,-0.0025 -0.19629,-0.03792 -0.44225,-0.0982 -0.74527,-0.09772 z"
+ style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="ssscsssccs"
+ inkscape:connector-curvature="0"
+ id="path910-3-5-3-7-1"
+ d="m 131.99821,80.367127 c -1.60162,0 -2.72148,1.177966 -2.72148,2.925577 0,1.205782 1.10092,2.491118 2.80571,2.491118 1.84755,0 2.77444,-1.445331 2.76893,-2.702638 -0.006,-1.285268 -0.88817,-2.714057 -2.85316,-2.714057 z m 2.06533,2.91996 c -0.0246,1.609974 -0.83716,2.127907 -1.91818,2.142754 -1.23138,0.01691 -2.12316,-1.296815 -2.12591,-2.406928 -0.0243,-2.027733 1.23372,-2.304105 1.81756,-2.321329 1.69026,0.03215 2.24118,1.625677 2.22653,2.585503 z"
+ style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ <path
+ sodipodi:nodetypes="csccsccscscccccsccccccc"
+ inkscape:connector-curvature="0"
+ id="path917-5-62-5-8-0"
+ d="m 136.04478,80.310258 -0.0748,4.751938 c -0.003,0.178528 -0.035,0.26398 -0.0637,0.311362 -0.0482,0.07945 -0.17246,0.137414 -0.29508,0.136766 -0.053,-2e-4 -0.0603,0.165585 0.005,0.165585 0.15176,0 0.53671,-0.04222 0.62146,-0.04206 0.0686,1.07e-4 0.37522,0.0312 0.6186,0.03137 0.12273,0 0.0838,-0.152361 0.019,-0.164038 -0.0542,-0.0098 -0.24858,-0.0885 -0.26339,-0.165122 -0.006,-0.02939 -0.0314,-0.339355 -0.0338,-0.533509 -0.0118,-0.968767 -0.0142,-1.909729 -0.0105,-2.894415 l 4.23632,3.949702 -0.0108,-4.769435 c 0,-0.138126 0.0348,-0.240799 0.0744,-0.312086 0.0578,-0.103989 0.18313,-0.12117 0.28189,-0.129758 0.0507,-0.0044 0.0735,-0.172379 -0.0132,-0.172379 -0.10764,0 -0.50226,0.0068 -0.61521,0.0066 -0.0914,-1.46e-4 -0.56071,-0.03396 -0.69456,-0.03636 -0.11951,-0.002 -0.10336,0.180442 0.0108,0.179366 0.22484,-0.002 0.29494,0.134232 0.34949,0.337072 0.009,0.03309 0.0439,0.167771 0.0596,0.447592 l 0.0275,3.139139 z"
+ style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.05301235px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1" />
+ </g>
<path
- style="fill:#389f00;fill-opacity:1;stroke:none;stroke-opacity:1"
- d="m 252.71941,348.36581 c -17.21481,0 -47.14006,-0.84934 -60.97457,-13.17571 -13.83451,-12.32637 -16.84556,-17.45227 -19.09172,-35.51933 -2.08905,-16.80337 -2.6481,-33.71944 -1.49162,-50.37795 3.07225,-20.81514 5.13462,-47.62599 12.05413,-67.63597 6.23548,-30.42037 13.58284,-55.16491 25.9691,-83.71646 5.06527,-13.287784 16.76769,-22.557428 30.27757,-25.948958 15.50334,-3.30801 29.68022,-2.91134 46.74465,-1.2459 14.78626,1.27468 30.83798,17.066244 32.88238,34.597818 2.02303,17.34845 1.73385,22.0552 3.2728,37.6258 -0.19443,8.76313 6.34424,22.76616 16.45974,22.40304 9.56867,-0.34349 16.48036,-12.71843 17.51509,-20.93495 2.35599,-18.70837 -0.6551,-19.81684 2.94334,-37.3491 3.59844,-17.532268 14.18885,-33.043544 30.48873,-36.540734 16.26809,-2.12031 28.63094,-2.71731 46.02053,0.49697 18.07396,3.34079 26.93793,12.81813 33.62878,26.592994 6.73685,13.86957 10.49571,27.84178 16.21017,43.09018 4.88864,13.04479 8.56125,29.18304 10.94268,42.92888 3.32552,15.18288 3.65237,21.38717 6.77781,36.57323 0.77923,16.65538 4.45192,19.80725 5.02299,35.71533 0.35582,9.91195 -0.29524,39.12315 -2.43742,46.76046 -2.93782,13.52423 -7.12707,26.02687 -16.79636,33.20923 -12.64781,8.53102 -33.08952,11.60259 -47.55104,12.32486 -18.33849,0.12627 -30.65715,1.24014 -48.96624,-1.08017 -14.10098,-2.78518 -21.50029,-13.82094 -20.71192,-29.39227 1.06365,-21.00843 21.00671,-22.03777 35.65227,-22.0668 14.64556,-0.029 35.24942,-0.73393 37.06929,-19.42192 2.88023,-18.22229 0.38467,-36.76259 -2.58508,-54.95764 -2.30841,-16.30474 -8.74904,-35.18114 -15.47418,-50.2434 -1.96378,-11.35394 -19.50871,-15.47995 -23.80866,-4.84186 -5.41476,8.88334 -4.61169,19.77501 -7.41008,29.84511 -2.83889,10.21585 -1.44479,19.7209 -5.44492,32.54371 -4.76333,15.2693 -4.62217,18.45204 -20.66557,24.86079 -15.69676,2.99362 -39.52174,2.95141 -55.28152,0.43903 -14.06764,-2.44948 -22.74154,-14.03686 -24.98313,-27.54206 -3.16922,-11.4823 -2.40407,-23.59284 -4.92263,-35.45854 -2.5605,-12.06331 -6.37931,-35.47293 -22.0005,-29.70057 
-11.4991,4.24916 -17.39709,28.66088 -22.07262,43.97383 -4.41,20.15236 -9.59637,40.99697 -6.08472,61.67032 -2.34204,29.06652 17.85275,26.14283 32.49264,29.17728 12.50724,0 38.93213,0.75389 40.84562,17.16664 1.14528,9.82353 1.53864,23.62381 -11.36782,30.99327 -9.67605,5.54646 -25.93319,4.16152 -43.14799,4.16152 z M 425.78708,171.50203 c -1.14992,-1.69089 1.64273,0.31937 0,0 z"
- id="path3783-1"
inkscape:connector-curvature="0"
- sodipodi:nodetypes="zzscccccscsszcsssccsccccszccccsscccsscccsczcc"
- inkscape:export-filename="/home/jpakkane/workspace/meson/graphics/meson_logo_big.png"
- inkscape:export-xdpi="300"
- inkscape:export-ydpi="300" />
+ style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:0.26416996;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
+ d="m 96.975039,102.57831 c -3.04835,1.72126 -4.94049,4.94629 -4.95784,8.44909 3.18944,1.73892 6.93427,1.67939 9.836611,0.0181 2.6e-4,-0.008 7.9e-4,-0.0165 0.001,-0.0248 -0.004,-3.484 -1.863431,-6.70217 -4.879301,-8.44238 z m 1.64176,3.72948 c 0.45198,0.30715 0.38492,1.04655 0.31212,1.55288 -0.0926,0.55776 -0.62713,1.47487 -1.32188,2.07326 0.0899,0.12885 0.14181,0.28753 0.14108,0.45682 -0.43052,0.21761 -0.85742,-0.0278 -1.37408,-0.47026 -0.51665,-0.44244 -1.07391,-1.32905 -1.25884,-2.24017 -0.16403,0.0174 -0.33188,-0.0192 -0.47594,-0.11007 0.0376,-0.51089 0.57985,-0.8146 1.0604,-0.99374 0.48056,-0.17913 1.71868,-0.2395 2.59158,0.0708 0.0681,-0.1429 0.18071,-0.26288 0.32556,-0.339 z"
+ id="path817-6-2-9-7-9-93-9-8-1-2-4-6" />
</g>
</svg>
diff --git a/graphics/meson_logo_big.png b/graphics/meson_logo_big.png
index d285135..e2abe1b 100644
--- a/graphics/meson_logo_big.png
+++ b/graphics/meson_logo_big.png
Binary files differ
diff --git a/man/meson.1 b/man/meson.1
index 702ac4d..3b67aab 100644
--- a/man/meson.1
+++ b/man/meson.1
@@ -1,4 +1,4 @@
-.TH MESON "1" "December 2018" "meson 0.49.0" "User Commands"
+.TH MESON "1" "March 2019" "meson 0.50.0" "User Commands"
.SH NAME
meson - a high productivity build system
.SH DESCRIPTION
@@ -37,7 +37,7 @@ backend of Meson is Ninja, which can be invoked like this.
You only need to run the Meson command once: when you first configure
your build dir. After that you just run the build command. Meson will
-autodetect changes in your source tree and regenerates all files
+autodetect changes in your source tree and regenerate all files
needed to build the project.
The setup command is the default operation. If no actual command is
diff --git a/mesonbuild/ast/__init__.py b/mesonbuild/ast/__init__.py
new file mode 100644
index 0000000..48de523
--- /dev/null
+++ b/mesonbuild/ast/__init__.py
@@ -0,0 +1,33 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+__all__ = [
+ 'AstConditionLevel',
+ 'AstInterpreter',
+ 'AstIDGenerator',
+ 'AstIndentationGenerator',
+ 'AstVisitor',
+ 'AstPrinter',
+ 'IntrospectionInterpreter',
+ 'build_target_functions',
+]
+
+from .interpreter import AstInterpreter
+from .introspection import IntrospectionInterpreter, build_target_functions
+from .visitor import AstVisitor
+from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator
+from .printer import AstPrinter
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
new file mode 100644
index 0000000..01277f0
--- /dev/null
+++ b/mesonbuild/ast/interpreter.py
@@ -0,0 +1,235 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+from .visitor import AstVisitor
+from .. import interpreterbase, mparser, mesonlib
+from .. import environment
+
+from ..interpreterbase import InvalidArguments, BreakRequest, ContinueRequest
+
+import os, sys
+from typing import List
+
+class DontCareObject(interpreterbase.InterpreterObject):
+ pass
+
+class MockExecutable(interpreterbase.InterpreterObject):
+ pass
+
+class MockStaticLibrary(interpreterbase.InterpreterObject):
+ pass
+
+class MockSharedLibrary(interpreterbase.InterpreterObject):
+ pass
+
+class MockCustomTarget(interpreterbase.InterpreterObject):
+ pass
+
+class MockRunTarget(interpreterbase.InterpreterObject):
+ pass
+
+ADD_SOURCE = 0
+REMOVE_SOURCE = 1
+
+class AstInterpreter(interpreterbase.InterpreterBase):
+ def __init__(self, source_root: str, subdir: str, visitors: List[AstVisitor] = []):
+ super().__init__(source_root, subdir)
+ self.visitors = visitors
+ self.visited_subdirs = {}
+ self.assignments = {}
+ self.assign_vals = {}
+ self.reverse_assignment = {}
+ self.funcs.update({'project': self.func_do_nothing,
+ 'test': self.func_do_nothing,
+ 'benchmark': self.func_do_nothing,
+ 'install_headers': self.func_do_nothing,
+ 'install_man': self.func_do_nothing,
+ 'install_data': self.func_do_nothing,
+ 'install_subdir': self.func_do_nothing,
+ 'configuration_data': self.func_do_nothing,
+ 'configure_file': self.func_do_nothing,
+ 'find_program': self.func_do_nothing,
+ 'include_directories': self.func_do_nothing,
+ 'add_global_arguments': self.func_do_nothing,
+ 'add_global_link_arguments': self.func_do_nothing,
+ 'add_project_arguments': self.func_do_nothing,
+ 'add_project_link_arguments': self.func_do_nothing,
+ 'message': self.func_do_nothing,
+ 'generator': self.func_do_nothing,
+ 'error': self.func_do_nothing,
+ 'run_command': self.func_do_nothing,
+ 'assert': self.func_do_nothing,
+ 'subproject': self.func_do_nothing,
+ 'dependency': self.func_do_nothing,
+ 'get_option': self.func_do_nothing,
+ 'join_paths': self.func_do_nothing,
+ 'environment': self.func_do_nothing,
+ 'import': self.func_do_nothing,
+ 'vcs_tag': self.func_do_nothing,
+ 'add_languages': self.func_do_nothing,
+ 'declare_dependency': self.func_do_nothing,
+ 'files': self.func_do_nothing,
+ 'executable': self.func_do_nothing,
+ 'static_library': self.func_do_nothing,
+ 'shared_library': self.func_do_nothing,
+ 'library': self.func_do_nothing,
+ 'build_target': self.func_do_nothing,
+ 'custom_target': self.func_do_nothing,
+ 'run_target': self.func_do_nothing,
+ 'subdir': self.func_subdir,
+ 'set_variable': self.func_do_nothing,
+ 'get_variable': self.func_do_nothing,
+ 'is_variable': self.func_do_nothing,
+ 'disabler': self.func_do_nothing,
+ 'gettext': self.func_do_nothing,
+ 'jar': self.func_do_nothing,
+ 'warning': self.func_do_nothing,
+ 'shared_module': self.func_do_nothing,
+ 'option': self.func_do_nothing,
+ 'both_libraries': self.func_do_nothing,
+ 'add_test_setup': self.func_do_nothing,
+ 'find_library': self.func_do_nothing,
+ 'subdir_done': self.func_do_nothing,
+ })
+
+ def func_do_nothing(self, node, args, kwargs):
+ return True
+
+ def load_root_meson_file(self):
+ super().load_root_meson_file()
+ for i in self.visitors:
+ self.ast.accept(i)
+
+ def func_subdir(self, node, args, kwargs):
+ args = self.flatten_args(args)
+ if len(args) != 1 or not isinstance(args[0], str):
+ sys.stderr.write('Unable to evaluate subdir({}) in AstInterpreter --> Skipping\n'.format(args))
+ return
+
+ prev_subdir = self.subdir
+ subdir = os.path.join(prev_subdir, args[0])
+ absdir = os.path.join(self.source_root, subdir)
+ buildfilename = os.path.join(subdir, environment.build_filename)
+ absname = os.path.join(self.source_root, buildfilename)
+ symlinkless_dir = os.path.realpath(absdir)
+ if symlinkless_dir in self.visited_subdirs:
+ sys.stderr.write('Trying to enter {} which has already been visited --> Skipping\n'.format(args[0]))
+ return
+ self.visited_subdirs[symlinkless_dir] = True
+
+ if not os.path.isfile(absname):
+ sys.stderr.write('Unable to find build file {} --> Skipping\n'.format(buildfilename))
+ return
+ with open(absname, encoding='utf8') as f:
+ code = f.read()
+ assert(isinstance(code, str))
+ try:
+ codeblock = mparser.Parser(code, subdir).parse()
+ except mesonlib.MesonException as me:
+ me.file = buildfilename
+ raise me
+
+ self.subdir = subdir
+ for i in self.visitors:
+ codeblock.accept(i)
+ self.evaluate_codeblock(codeblock)
+ self.subdir = prev_subdir
+
+ def method_call(self, node):
+ return True
+
+ def evaluate_arithmeticstatement(self, cur):
+ return 0
+
+ def evaluate_plusassign(self, node):
+ assert(isinstance(node, mparser.PlusAssignmentNode))
+ if node.var_name not in self.assignments:
+ self.assignments[node.var_name] = []
+ self.assignments[node.var_name] += [node.value] # Save a reference to the value node
+ if hasattr(node.value, 'ast_id'):
+ self.reverse_assignment[node.value.ast_id] = node
+ self.assign_vals[node.var_name] += [self.evaluate_statement(node.value)]
+
+ def evaluate_indexing(self, node):
+ return 0
+
+ def unknown_function_called(self, func_name):
+ pass
+
+ def reduce_arguments(self, args):
+ assert(isinstance(args, mparser.ArgumentNode))
+ if args.incorrect_order():
+ raise InvalidArguments('All keyword arguments must be after positional arguments.')
+ return args.arguments, args.kwargs
+
+ def evaluate_comparison(self, node):
+ return False
+
+ def evaluate_foreach(self, node):
+ try:
+ self.evaluate_codeblock(node.block)
+ except ContinueRequest:
+ pass
+ except BreakRequest:
+ pass
+
+ def evaluate_if(self, node):
+ for i in node.ifs:
+ self.evaluate_codeblock(i.block)
+ if not isinstance(node.elseblock, mparser.EmptyNode):
+ self.evaluate_codeblock(node.elseblock)
+
+ def get_variable(self, varname):
+ return 0
+
+ def assignment(self, node):
+ assert(isinstance(node, mparser.AssignmentNode))
+ self.assignments[node.var_name] = [node.value] # Save a reference to the value node
+ if hasattr(node.value, 'ast_id'):
+ self.reverse_assignment[node.value.ast_id] = node
+ self.assign_vals[node.var_name] = [self.evaluate_statement(node.value)] # Evaluate the value just in case
+
+ def flatten_args(self, args, include_unknown_args: bool = False):
+ # Resolve mparser.ArrayNode if needed
+ flattend_args = []
+ temp_args = []
+ if isinstance(args, mparser.ArrayNode):
+ args = [x for x in args.args.arguments]
+ elif isinstance(args, mparser.ArgumentNode):
+ args = [x for x in args.arguments]
+ for i in args:
+ if isinstance(i, mparser.ArrayNode):
+ temp_args += [x for x in i.args.arguments]
+ else:
+ temp_args += [i]
+ for i in temp_args:
+ if isinstance(i, mparser.ElementaryNode) and not isinstance(i, mparser.IdNode):
+ flattend_args += [i.value]
+ elif isinstance(i, (str, bool, int, float)) or include_unknown_args:
+ flattend_args += [i]
+ return flattend_args
+
+ def flatten_kwargs(self, kwargs: object, include_unknown_args: bool = False):
+ flattend_kwargs = {}
+ for key, val in kwargs.items():
+ if isinstance(val, mparser.ElementaryNode):
+ flattend_kwargs[key] = val.value
+ elif isinstance(val, (mparser.ArrayNode, mparser.ArgumentNode)):
+ flattend_kwargs[key] = self.flatten_args(val, include_unknown_args)
+ elif isinstance(val, (str, bool, int, float)) or include_unknown_args:
+ flattend_kwargs[key] = val
+ return flattend_kwargs
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
new file mode 100644
index 0000000..5745d29
--- /dev/null
+++ b/mesonbuild/ast/introspection.py
@@ -0,0 +1,270 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module provides IntrospectionInterpreter, which extracts project
+# information from meson.build files without configuring a build directory
+
+from . import AstInterpreter
+from .. import compilers, environment, mesonlib, optinterpreter
+from .. import coredata as cdata
+from ..interpreterbase import InvalidArguments
+from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary
+from ..mparser import ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode
+import os
+
+build_target_functions = ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']
+
+class IntrospectionHelper:
+ # mimic an argparse namespace
+ def __init__(self, cross_file):
+ self.cross_file = cross_file
+ self.native_file = None
+ self.cmd_line_options = {}
+
+class IntrospectionInterpreter(AstInterpreter):
+ # Interpreter to detect the options without a build directory
+ # Most of the code is stolen from interpreter.Interpreter
+ def __init__(self, source_root, subdir, backend, visitors=[], cross_file=None, subproject='', subproject_dir='subprojects', env=None):
+ super().__init__(source_root, subdir, visitors=visitors)
+
+ options = IntrospectionHelper(cross_file)
+ self.cross_file = cross_file
+ if env is None:
+ self.environment = environment.Environment(source_root, None, options)
+ else:
+ self.environment = env
+ self.subproject = subproject
+ self.subproject_dir = subproject_dir
+ self.coredata = self.environment.get_coredata()
+ self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+ self.backend = backend
+ self.default_options = {'backend': self.backend}
+ self.project_data = {}
+ self.targets = []
+ self.dependencies = []
+ self.project_node = None
+
+ self.funcs.update({
+ 'add_languages': self.func_add_languages,
+ 'dependency': self.func_dependency,
+ 'executable': self.func_executable,
+ 'jar': self.func_jar,
+ 'library': self.func_library,
+ 'project': self.func_project,
+ 'shared_library': self.func_shared_lib,
+ 'shared_module': self.func_shared_module,
+ 'static_library': self.func_static_lib,
+ 'both_libraries': self.func_both_lib,
+ })
+
+ def func_project(self, node, args, kwargs):
+ if self.project_node:
+ raise InvalidArguments('Second call to project()')
+ self.project_node = node
+ if len(args) < 1:
+ raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
+
+ proj_name = args[0]
+ proj_vers = kwargs.get('version', 'undefined')
+ proj_langs = self.flatten_args(args[1:])
+ if isinstance(proj_vers, ElementaryNode):
+ proj_vers = proj_vers.value
+ if not isinstance(proj_vers, str):
+ proj_vers = 'undefined'
+ self.project_data = {'descriptive_name': proj_name, 'version': proj_vers}
+
+ if os.path.exists(self.option_file):
+ oi = optinterpreter.OptionInterpreter(self.subproject)
+ oi.process(self.option_file)
+ self.coredata.merge_user_options(oi.options)
+
+ def_opts = self.flatten_args(kwargs.get('default_options', []))
+ self.project_default_options = mesonlib.stringlistify(def_opts)
+ self.project_default_options = cdata.create_options_dict(self.project_default_options)
+ self.default_options.update(self.project_default_options)
+ self.coredata.set_default_options(self.default_options, self.subproject, self.environment)
+
+ if not self.is_subproject() and 'subproject_dir' in kwargs:
+ spdirname = kwargs['subproject_dir']
+ if isinstance(spdirname, ElementaryNode):
+ self.subproject_dir = spdirname.value
+ if not self.is_subproject():
+ self.project_data['subprojects'] = []
+ subprojects_dir = os.path.join(self.source_root, self.subproject_dir)
+ if os.path.isdir(subprojects_dir):
+ for i in os.listdir(subprojects_dir):
+ if os.path.isdir(os.path.join(subprojects_dir, i)):
+ self.do_subproject(i)
+
+ self.coredata.init_backend_options(self.backend)
+ options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
+
+ self.coredata.set_options(options)
+ self.func_add_languages(None, proj_langs, None)
+
+ def do_subproject(self, dirname):
+ subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
+ subpr = os.path.join(subproject_dir_abs, dirname)
+ try:
+ subi = IntrospectionInterpreter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment)
+ subi.analyze()
+ subi.project_data['name'] = dirname
+ self.project_data['subprojects'] += [subi.project_data]
+ except:
+ return
+
+ def func_add_languages(self, node, args, kwargs):
+ args = self.flatten_args(args)
+ need_cross_compiler = self.environment.is_cross_build()
+ for lang in sorted(args, key=compilers.sort_clink):
+ lang = lang.lower()
+ if lang not in self.coredata.compilers:
+ self.environment.detect_compilers(lang, need_cross_compiler)
+
+ def func_dependency(self, node, args, kwargs):
+ args = self.flatten_args(args)
+ if not args:
+ return
+ name = args[0]
+ has_fallback = 'fallback' in kwargs
+ required = kwargs.get('required', True)
+ condition_level = node.condition_level if hasattr(node, 'condition_level') else 0
+ if isinstance(required, ElementaryNode):
+ required = required.value
+ self.dependencies += [{
+ 'name': name,
+ 'required': required,
+ 'has_fallback': has_fallback,
+ 'conditional': condition_level > 0,
+ 'node': node
+ }]
+
+ def build_target(self, node, args, kwargs, targetclass):
+ args = self.flatten_args(args)
+ if not args or not isinstance(args[0], str):
+ return
+ kwargs = self.flatten_kwargs(kwargs, True)
+ name = args[0]
+ srcqueue = [node]
+ if 'sources' in kwargs:
+ srcqueue += kwargs['sources']
+
+ source_nodes = []
+ while srcqueue:
+ curr = srcqueue.pop(0)
+ arg_node = None
+ if isinstance(curr, FunctionNode):
+ arg_node = curr.args
+ elif isinstance(curr, ArrayNode):
+ arg_node = curr.args
+ elif isinstance(curr, IdNode):
+ # Try to resolve the ID and append the node to the queue
+ id = curr.value
+ if id in self.assignments and self.assignments[id]:
+ tmp_node = self.assignments[id][0]
+ if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)):
+ srcqueue += [tmp_node]
+ elif isinstance(curr, ArithmeticNode):
+ srcqueue += [curr.left, curr.right]
+ if arg_node is None:
+ continue
+ elemetary_nodes = list(filter(lambda x: isinstance(x, (str, StringNode)), arg_node.arguments))
+ srcqueue += list(filter(lambda x: isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode)), arg_node.arguments))
+ # Pop the first element if the function is a build target function
+ if isinstance(curr, FunctionNode) and curr.func_name in build_target_functions:
+ elemetary_nodes.pop(0)
+ if elemetary_nodes:
+ source_nodes += [curr]
+
+ # Make sure nothing can crash when creating the build class
+ kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in ['install', 'build_by_default', 'build_always']}
+ is_cross = False
+ objects = []
+ empty_sources = [] # Passing the unresolved sources list causes errors
+ target = targetclass(name, self.subdir, self.subproject, is_cross, empty_sources, objects, self.environment, kwargs_reduced)
+
+ new_target = {
+ 'name': target.get_basename(),
+ 'id': target.get_id(),
+ 'type': target.get_typename(),
+ 'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)),
+ 'subdir': self.subdir,
+ 'build_by_default': target.build_by_default,
+ 'installed': target.should_install(),
+ 'outputs': target.get_outputs(),
+ 'sources': source_nodes,
+ 'kwargs': kwargs,
+ 'node': node,
+ }
+
+ self.targets += [new_target]
+ return new_target
+
+ def build_library(self, node, args, kwargs):
+ default_library = self.coredata.get_builtin_option('default_library')
+ if default_library == 'shared':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif default_library == 'static':
+ return self.build_target(node, args, kwargs, StaticLibrary)
+ elif default_library == 'both':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_executable(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, Executable)
+
+ def func_static_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, StaticLibrary)
+
+ def func_shared_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_both_lib(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, SharedLibrary)
+
+ def func_shared_module(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, SharedModule)
+
+ def func_library(self, node, args, kwargs):
+ return self.build_library(node, args, kwargs)
+
+ def func_jar(self, node, args, kwargs):
+ return self.build_target(node, args, kwargs, Jar)
+
+ def func_build_target(self, node, args, kwargs):
+ if 'target_type' not in kwargs:
+ return
+ target_type = kwargs.pop('target_type')
+ if isinstance(target_type, ElementaryNode):
+ target_type = target_type.value
+ if target_type == 'executable':
+ return self.build_target(node, args, kwargs, Executable)
+ elif target_type == 'shared_library':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif target_type == 'static_library':
+ return self.build_target(node, args, kwargs, StaticLibrary)
+ elif target_type == 'both_libraries':
+ return self.build_target(node, args, kwargs, SharedLibrary)
+ elif target_type == 'library':
+ return self.build_library(node, args, kwargs)
+ elif target_type == 'jar':
+ return self.build_target(node, args, kwargs, Jar)
+
+ def is_subproject(self):
+ return self.subproject != ''
+
+ def analyze(self):
+ self.load_root_meson_file()
+ self.sanity_check_ast()
+ self.parse_project()
+ self.run()
diff --git a/mesonbuild/ast/postprocess.py b/mesonbuild/ast/postprocess.py
new file mode 100644
index 0000000..8e8732f
--- /dev/null
+++ b/mesonbuild/ast/postprocess.py
@@ -0,0 +1,116 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module contains AST post-processing visitors that annotate nodes
+# with indentation levels, unique IDs and condition nesting levels
+
+from . import AstVisitor
+from .. import mparser
+
+class AstIndentationGenerator(AstVisitor):
+ def __init__(self):
+ self.level = 0
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ # Store the current level in the node
+ node.level = self.level
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_DictNode(self, node: mparser.DictNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_MethodNode(self, node: mparser.MethodNode):
+ self.visit_default_func(node)
+ node.source_object.accept(self)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.args.accept(self)
+ self.level -= 1
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.items.accept(self)
+ node.block.accept(self)
+ self.level -= 1
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ self.level += 1
+ node.elseblock.accept(self)
+ self.level -= 1
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ self.visit_default_func(node)
+ self.level += 1
+ node.condition.accept(self)
+ node.block.accept(self)
+ self.level -= 1
+
+class AstIDGenerator(AstVisitor):
+ def __init__(self):
+ self.counter = {}
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ name = type(node).__name__
+ if name not in self.counter:
+ self.counter[name] = 0
+ node.ast_id = name + '#' + str(self.counter[name])
+ self.counter[name] += 1
+
+class AstConditionLevel(AstVisitor):
+ def __init__(self):
+ self.condition_level = 0
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ node.condition_level = self.condition_level
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ self.visit_default_func(node)
+ self.condition_level += 1
+ node.items.accept(self)
+ node.block.accept(self)
+ self.condition_level -= 1
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ self.condition_level += 1
+ node.elseblock.accept(self)
+ self.condition_level -= 1
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ self.visit_default_func(node)
+ self.condition_level += 1
+ node.condition.accept(self)
+ node.block.accept(self)
+ self.condition_level -= 1
diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py
new file mode 100644
index 0000000..60e0b0d
--- /dev/null
+++ b/mesonbuild/ast/printer.py
@@ -0,0 +1,203 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module implements AstPrinter, a visitor that renders a parsed
+# Meson AST back into meson.build source text
+
+from .. import mparser
+from . import AstVisitor
+import re
+
+arithmic_map = {
+ 'add': '+',
+ 'sub': '-',
+ 'mod': '%',
+ 'mul': '*',
+ 'div': '/'
+}
+
+class AstPrinter(AstVisitor):
+ def __init__(self, indent: int = 2, arg_newline_cutoff: int = 5):
+ self.result = ''
+ self.indent = indent
+ self.arg_newline_cutoff = arg_newline_cutoff
+ self.ci = ''
+ self.is_newline = True
+ self.last_level = 0
+
+ def post_process(self):
+ self.result = re.sub(r'\s+\n', '\n', self.result)
+
+ def append(self, data: str, node: mparser.BaseNode):
+ level = 0
+ if node and hasattr(node, 'level'):
+ level = node.level
+ else:
+ level = self.last_level
+ self.last_level = level
+ if self.is_newline:
+ self.result += ' ' * (level * self.indent)
+ self.result += data
+ self.is_newline = False
+
+ def append_padded(self, data: str, node: mparser.BaseNode):
+ if self.result[-1] not in [' ', '\n']:
+ data = ' ' + data
+ self.append(data + ' ', node)
+
+ def newline(self):
+ self.result += '\n'
+ self.is_newline = True
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode):
+ self.append('true' if node.value else 'false', node)
+
+ def visit_IdNode(self, node: mparser.IdNode):
+ self.append(node.value, node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode):
+ self.append(str(node.value), node)
+
+ def visit_StringNode(self, node: mparser.StringNode):
+ self.append("'" + node.value + "'", node)
+
+ def visit_ContinueNode(self, node: mparser.ContinueNode):
+ self.append('continue', node)
+
+ def visit_BreakNode(self, node: mparser.BreakNode):
+ self.append('break', node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode):
+ self.append('[', node)
+ node.args.accept(self)
+ self.append(']', node)
+
+ def visit_DictNode(self, node: mparser.DictNode):
+ self.append('{', node)
+ node.args.accept(self)
+ self.append('}', node)
+
+ def visit_OrNode(self, node: mparser.OrNode):
+ node.left.accept(self)
+ self.append_padded('or', node)
+ node.right.accept(self)
+
+ def visit_AndNode(self, node: mparser.AndNode):
+ node.left.accept(self)
+ self.append_padded('and', node)
+ node.right.accept(self)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode):
+ node.left.accept(self)
+ self.append_padded(mparser.comparison_map[node.ctype], node)
+ node.right.accept(self)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode):
+ node.left.accept(self)
+ self.append_padded(arithmic_map[node.operation], node)
+ node.right.accept(self)
+
+ def visit_NotNode(self, node: mparser.NotNode):
+ self.append_padded('not', node)
+ node.value.accept(self)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode):
+ for i in node.lines:
+ i.accept(self)
+ self.newline()
+
+ def visit_IndexNode(self, node: mparser.IndexNode):
+ self.append('[', node)
+ node.index.accept(self)
+ self.append(']', node)
+
+ def visit_MethodNode(self, node: mparser.MethodNode):
+ node.source_object.accept(self)
+ self.append('.' + node.name + '(', node)
+ node.args.accept(self)
+ self.append(')', node)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode):
+ self.append(node.func_name + '(', node)
+ node.args.accept(self)
+ self.append(')', node)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode):
+ self.append(node.var_name + ' = ', node)
+ node.value.accept(self)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode):
+ self.append(node.var_name + ' += ', node)
+ node.value.accept(self)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ varnames = [x.value for x in node.varnames]
+ self.append_padded('foreach', node)
+ self.append_padded(', '.join(varnames), node)
+ self.append_padded(':', node)
+ node.items.accept(self)
+ self.newline()
+ node.block.accept(self)
+ self.append('endforeach', node)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ prefix = ''
+ for i in node.ifs:
+ self.append_padded(prefix + 'if', node)
+ prefix = 'el'
+ i.accept(self)
+ if node.elseblock:
+ self.append('else', node)
+ node.elseblock.accept(self)
+ self.append('endif', node)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode):
+ self.append_padded('-', node)
+ node.value.accept(self)
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ node.condition.accept(self)
+ self.newline()
+ node.block.accept(self)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode):
+ node.condition.accept(self)
+ self.append_padded('?', node)
+ node.trueblock.accept(self)
+ self.append_padded(':', node)
+ node.falseblock.accept(self)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode):
+ break_args = True if (len(node.arguments) + len(node.kwargs)) > self.arg_newline_cutoff else False
+ for i in node.arguments + list(node.kwargs.values()):
+ if not isinstance(i, mparser.ElementaryNode):
+ break_args = True
+ if break_args:
+ self.newline()
+ for i in node.arguments:
+ i.accept(self)
+ self.append(', ', node)
+ if break_args:
+ self.newline()
+ for key, val in node.kwargs.items():
+ self.append(key, node)
+ self.append_padded(':', node)
+ val.accept(self)
+ self.append(', ', node)
+ if break_args:
+ self.newline()
+ if break_args:
+ self.result = re.sub(r', \n$', '\n', self.result)
+ else:
+ self.result = re.sub(r', $', '', self.result)
diff --git a/mesonbuild/ast/visitor.py b/mesonbuild/ast/visitor.py
new file mode 100644
index 0000000..c8769d4
--- /dev/null
+++ b/mesonbuild/ast/visitor.py
@@ -0,0 +1,140 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This module defines AstVisitor, the base class for traversing a parsed
+# Meson AST; subclasses override the visit_* methods they need
+
+from .. import mparser
+
+class AstVisitor:
+ def __init__(self):
+ pass
+
+ def visit_default_func(self, node: mparser.BaseNode):
+ pass
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode):
+ self.visit_default_func(node)
+
+ def visit_IdNode(self, node: mparser.IdNode):
+ self.visit_default_func(node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode):
+ self.visit_default_func(node)
+
+ def visit_StringNode(self, node: mparser.StringNode):
+ self.visit_default_func(node)
+
+ def visit_ContinueNode(self, node: mparser.ContinueNode):
+ self.visit_default_func(node)
+
+ def visit_BreakNode(self, node: mparser.BreakNode):
+ self.visit_default_func(node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode):
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_DictNode(self, node: mparser.DictNode):
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_EmptyNode(self, node: mparser.EmptyNode):
+ self.visit_default_func(node)
+
+ def visit_OrNode(self, node: mparser.OrNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_AndNode(self, node: mparser.AndNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode):
+ self.visit_default_func(node)
+ node.left.accept(self)
+ node.right.accept(self)
+
+ def visit_NotNode(self, node: mparser.NotNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode):
+ self.visit_default_func(node)
+ for i in node.lines:
+ i.accept(self)
+
+ def visit_IndexNode(self, node: mparser.IndexNode):
+ self.visit_default_func(node)
+ node.index.accept(self)
+
+ def visit_MethodNode(self, node: mparser.MethodNode):
+ self.visit_default_func(node)
+ node.source_object.accept(self)
+ node.args.accept(self)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode):
+ self.visit_default_func(node)
+ node.args.accept(self)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode):
+ self.visit_default_func(node)
+ node.items.accept(self)
+ node.block.accept(self)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode):
+ self.visit_default_func(node)
+ for i in node.ifs:
+ i.accept(self)
+ if node.elseblock:
+ node.elseblock.accept(self)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode):
+ self.visit_default_func(node)
+ node.value.accept(self)
+
+ def visit_IfNode(self, node: mparser.IfNode):
+ self.visit_default_func(node)
+ node.condition.accept(self)
+ node.block.accept(self)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode):
+ self.visit_default_func(node)
+ node.condition.accept(self)
+ node.trueblock.accept(self)
+ node.falseblock.accept(self)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode):
+ self.visit_default_func(node)
+ for i in node.arguments:
+ i.accept(self)
+ for i in node.commas:
+ pass
+ for val in node.kwargs.values():
+ val.accept(self)
diff --git a/mesonbuild/astinterpreter.py b/mesonbuild/astinterpreter.py
deleted file mode 100644
index a447a55..0000000
--- a/mesonbuild/astinterpreter.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# Copyright 2016 The Meson development team
-
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-
-# http://www.apache.org/licenses/LICENSE-2.0
-
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This class contains the basic functionality needed to run any interpreter
-# or an interpreter-based tool.
-
-from . import interpreterbase, mlog, mparser, mesonlib
-from . import environment
-
-from .interpreterbase import InterpreterException, InvalidArguments, BreakRequest, ContinueRequest
-
-import os, sys
-
-class DontCareObject(interpreterbase.InterpreterObject):
- pass
-
-class MockExecutable(interpreterbase.InterpreterObject):
- pass
-
-class MockStaticLibrary(interpreterbase.InterpreterObject):
- pass
-
-class MockSharedLibrary(interpreterbase.InterpreterObject):
- pass
-
-class MockCustomTarget(interpreterbase.InterpreterObject):
- pass
-
-class MockRunTarget(interpreterbase.InterpreterObject):
- pass
-
-ADD_SOURCE = 0
-REMOVE_SOURCE = 1
-
-class AstInterpreter(interpreterbase.InterpreterBase):
- def __init__(self, source_root, subdir):
- super().__init__(source_root, subdir)
- self.funcs.update({'project': self.func_do_nothing,
- 'test': self.func_do_nothing,
- 'benchmark': self.func_do_nothing,
- 'install_headers': self.func_do_nothing,
- 'install_man': self.func_do_nothing,
- 'install_data': self.func_do_nothing,
- 'install_subdir': self.func_do_nothing,
- 'configuration_data': self.func_do_nothing,
- 'configure_file': self.func_do_nothing,
- 'find_program': self.func_do_nothing,
- 'include_directories': self.func_do_nothing,
- 'add_global_arguments': self.func_do_nothing,
- 'add_global_link_arguments': self.func_do_nothing,
- 'add_project_arguments': self.func_do_nothing,
- 'add_project_link_arguments': self.func_do_nothing,
- 'message': self.func_do_nothing,
- 'generator': self.func_do_nothing,
- 'error': self.func_do_nothing,
- 'run_command': self.func_do_nothing,
- 'assert': self.func_do_nothing,
- 'subproject': self.func_do_nothing,
- 'dependency': self.func_do_nothing,
- 'get_option': self.func_do_nothing,
- 'join_paths': self.func_do_nothing,
- 'environment': self.func_do_nothing,
- 'import': self.func_do_nothing,
- 'vcs_tag': self.func_do_nothing,
- 'add_languages': self.func_do_nothing,
- 'declare_dependency': self.func_do_nothing,
- 'files': self.func_do_nothing,
- 'executable': self.func_do_nothing,
- 'static_library': self.func_do_nothing,
- 'shared_library': self.func_do_nothing,
- 'library': self.func_do_nothing,
- 'build_target': self.func_do_nothing,
- 'custom_target': self.func_do_nothing,
- 'run_target': self.func_do_nothing,
- 'subdir': self.func_do_nothing,
- 'set_variable': self.func_do_nothing,
- 'get_variable': self.func_do_nothing,
- 'is_variable': self.func_do_nothing,
- })
-
- def func_do_nothing(self, node, args, kwargs):
- return True
-
- def method_call(self, node):
- return True
-
- def evaluate_arithmeticstatement(self, cur):
- return 0
-
- def evaluate_plusassign(self, node):
- return 0
-
- def evaluate_indexing(self, node):
- return 0
-
- def unknown_function_called(self, func_name):
- pass
-
- def reduce_arguments(self, args):
- assert(isinstance(args, mparser.ArgumentNode))
- if args.incorrect_order():
- raise InvalidArguments('All keyword arguments must be after positional arguments.')
- return args.arguments, args.kwargs
-
- def evaluate_comparison(self, node):
- return False
-
- def evaluate_foreach(self, node):
- try:
- self.evaluate_codeblock(node.block)
- except ContinueRequest:
- pass
- except BreakRequest:
- pass
-
- def evaluate_if(self, node):
- for i in node.ifs:
- self.evaluate_codeblock(i.block)
- if not isinstance(node.elseblock, mparser.EmptyNode):
- self.evaluate_codeblock(node.elseblock)
-
- def get_variable(self, varname):
- return 0
-
- def assignment(self, node):
- pass
-
-class RewriterInterpreter(AstInterpreter):
- def __init__(self, source_root, subdir):
- super().__init__(source_root, subdir)
- self.asts = {}
- self.funcs.update({'files': self.func_files,
- 'executable': self.func_executable,
- 'static_library': self.func_static_lib,
- 'shared_library': self.func_shared_lib,
- 'library': self.func_library,
- 'build_target': self.func_build_target,
- 'custom_target': self.func_custom_target,
- 'run_target': self.func_run_target,
- 'subdir': self.func_subdir,
- 'set_variable': self.func_set_variable,
- 'get_variable': self.func_get_variable,
- 'is_variable': self.func_is_variable,
- })
-
- def func_executable(self, node, args, kwargs):
- if args[0] == self.targetname:
- if self.operation == ADD_SOURCE:
- self.add_source_to_target(node, args, kwargs)
- elif self.operation == REMOVE_SOURCE:
- self.remove_source_from_target(node, args, kwargs)
- else:
- raise NotImplementedError('Bleep bloop')
- return MockExecutable()
-
- def func_static_lib(self, node, args, kwargs):
- return MockStaticLibrary()
-
- def func_shared_lib(self, node, args, kwargs):
- return MockSharedLibrary()
-
- def func_library(self, node, args, kwargs):
- return self.func_shared_lib(node, args, kwargs)
-
- def func_custom_target(self, node, args, kwargs):
- return MockCustomTarget()
-
- def func_run_target(self, node, args, kwargs):
- return MockRunTarget()
-
- def func_subdir(self, node, args, kwargs):
- prev_subdir = self.subdir
- subdir = os.path.join(prev_subdir, args[0])
- self.subdir = subdir
- buildfilename = os.path.join(self.subdir, environment.build_filename)
- absname = os.path.join(self.source_root, buildfilename)
- if not os.path.isfile(absname):
- self.subdir = prev_subdir
- raise InterpreterException('Nonexistent build def file %s.' % buildfilename)
- with open(absname, encoding='utf8') as f:
- code = f.read()
- assert(isinstance(code, str))
- try:
- codeblock = mparser.Parser(code, self.subdir).parse()
- self.asts[subdir] = codeblock
- except mesonlib.MesonException as me:
- me.file = buildfilename
- raise me
- self.evaluate_codeblock(codeblock)
- self.subdir = prev_subdir
-
- def func_files(self, node, args, kwargs):
- if not isinstance(args, list):
- return [args]
- return args
-
- def transform(self):
- self.load_root_meson_file()
- self.asts[''] = self.ast
- self.sanity_check_ast()
- self.parse_project()
- self.run()
-
- def add_source(self, targetname, filename):
- self.operation = ADD_SOURCE
- self.targetname = targetname
- self.filename = filename
- self.transform()
-
- def remove_source(self, targetname, filename):
- self.operation = REMOVE_SOURCE
- self.targetname = targetname
- self.filename = filename
- self.transform()
-
- def add_source_to_target(self, node, args, kwargs):
- namespan = node.args.arguments[0].bytespan
- buildfilename = os.path.join(self.source_root, self.subdir, environment.build_filename)
- raw_data = open(buildfilename, 'r').read()
- updated = raw_data[0:namespan[1]] + (", '%s'" % self.filename) + raw_data[namespan[1]:]
- open(buildfilename, 'w').write(updated)
- sys.exit(0)
-
- def remove_argument_item(self, args, i):
- assert(isinstance(args, mparser.ArgumentNode))
- namespan = args.arguments[i].bytespan
- # Usually remove the comma after this item but if it is
- # the last argument, we need to remove the one before.
- if i >= len(args.commas):
- i -= 1
- if i < 0:
- commaspan = (0, 0) # Removed every entry in the list.
- else:
- commaspan = args.commas[i].bytespan
- if commaspan[0] < namespan[0]:
- commaspan, namespan = namespan, commaspan
- buildfilename = os.path.join(self.source_root, args.subdir, environment.build_filename)
- raw_data = open(buildfilename, 'r').read()
- intermediary = raw_data[0:commaspan[0]] + raw_data[commaspan[1]:]
- updated = intermediary[0:namespan[0]] + intermediary[namespan[1]:]
- open(buildfilename, 'w').write(updated)
- sys.exit(0)
-
- def hacky_find_and_remove(self, node_to_remove):
- for a in self.asts[node_to_remove.subdir].lines:
- if a.lineno == node_to_remove.lineno:
- if isinstance(a, mparser.AssignmentNode):
- v = a.value
- if not isinstance(v, mparser.ArrayNode):
- raise NotImplementedError('Not supported yet, bro.')
- args = v.args
- for i in range(len(args.arguments)):
- if isinstance(args.arguments[i], mparser.StringNode) and self.filename == args.arguments[i].value:
- self.remove_argument_item(args, i)
- raise NotImplementedError('Sukkess')
-
- def remove_source_from_target(self, node, args, kwargs):
- for i in range(1, len(node.args)):
- # Is file name directly in function call as a string.
- if isinstance(node.args.arguments[i], mparser.StringNode) and self.filename == node.args.arguments[i].value:
- self.remove_argument_item(node.args, i)
- # Is file name in a variable that gets expanded here.
- if isinstance(node.args.arguments[i], mparser.IdNode):
- avar = self.get_variable(node.args.arguments[i].value)
- if not isinstance(avar, list):
- raise NotImplementedError('Non-arrays not supported yet, sorry.')
- for entry in avar:
- if isinstance(entry, mparser.StringNode) and entry.value == self.filename:
- self.hacky_find_and_remove(entry)
- sys.exit('Could not find source %s in target %s.' % (self.filename, args[0]))
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index e8adc99..4d35d22 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -20,7 +20,7 @@ from .. import mesonlib
from .. import mlog
import json
import subprocess
-from ..mesonlib import MesonException, OrderedSet
+from ..mesonlib import MachineChoice, MesonException, OrderedSet
from ..mesonlib import classify_unity_sources
from ..mesonlib import File
from ..compilers import CompilerArgs, VisualStudioCCompiler
@@ -84,7 +84,7 @@ class ExecutableSerialisation:
class TestSerialisation:
def __init__(self, name, project, suite, fname, is_cross_built, exe_wrapper, is_parallel,
- cmd_args, env, should_fail, timeout, workdir, extra_paths):
+ cmd_args, env, should_fail, timeout, workdir, extra_paths, protocol):
self.name = name
self.project_name = project
self.suite = suite
@@ -100,6 +100,7 @@ class TestSerialisation:
self.timeout = timeout
self.workdir = workdir
self.extra_paths = extra_paths
+ self.protocol = protocol
class OptionProxy:
def __init__(self, name, value):
@@ -156,6 +157,8 @@ class Backend:
self.build = build
self.environment = build.environment
self.processed_targets = {}
+ self.build_dir = self.environment.get_build_dir()
+ self.source_dir = self.environment.get_source_dir()
self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(),
self.environment.get_build_dir())
@@ -183,9 +186,14 @@ class Backend:
self.environment.coredata.base_options)
def get_compiler_options_for_target(self, target):
- return OptionOverrideProxy(target.option_overrides,
- # no code depends on builtins for now
- self.environment.coredata.compiler_options)
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+
+ return OptionOverrideProxy(
+ target.option_overrides,
+ self.environment.coredata.compiler_options[for_machine])
def get_option_for_target(self, option_name, target):
if option_name in target.option_overrides:
@@ -343,8 +351,7 @@ class Backend:
exe_is_native = True
is_cross_built = (not exe_is_native) and \
self.environment.is_cross_build() and \
- self.environment.cross_info.need_cross_compiler() and \
- self.environment.cross_info.need_exe_wrapper()
+ self.environment.need_exe_wrapper()
if is_cross_built:
exe_wrapper = self.environment.get_exe_wrapper()
if not exe_wrapper.found():
@@ -509,6 +516,23 @@ class Backend:
args += compiler.get_pch_use_args(pchpath, p[0])
return includeargs + args
+ def create_msvc_pch_implementation(self, target, lang, pch_header):
+ # We have to include the language in the file name, otherwise
+ # pch.c and pch.cpp will both end up as pch.obj in VS backends.
+ impl_name = 'meson_pch-%s.%s' % (lang, lang)
+ pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name)
+ # Make sure to prepend the build dir, since the working directory is
+ # not defined. Otherwise, we might create the file in the wrong path.
+ pch_file = os.path.join(self.build_dir, pch_rel_to_build)
+ os.makedirs(os.path.dirname(pch_file), exist_ok=True)
+
+ content = '#include "%s"' % os.path.basename(pch_header)
+ pch_file_tmp = pch_file + '.tmp'
+ with open(pch_file_tmp, 'w') as f:
+ f.write(content)
+ mesonlib.replace_if_different(pch_file, pch_file_tmp)
+ return pch_rel_to_build
+
@staticmethod
def escape_extra_args(compiler, args):
# No extra escaping/quoting needed when not running on Windows
@@ -573,10 +597,14 @@ class Backend:
# Add compile args added using add_global_arguments()
# These override per-project arguments
commands += self.build.get_global_args(compiler, target.is_cross)
- if not target.is_cross:
- # Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
- # to override all the defaults, but not the per-target compile args.
- commands += self.environment.coredata.get_external_args(compiler.get_language())
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+ # Compile args added from the env: CFLAGS/CXXFLAGS, etc, or the cross
+ # file. We want these to override all the defaults, but not the
+ # per-target compile args.
+ commands += self.environment.coredata.get_external_args(for_machine, compiler.get_language())
# Always set -fPIC for shared libraries
if isinstance(target, build.SharedLibrary):
commands += compiler.get_pic_args()
@@ -640,11 +668,11 @@ class Backend:
def get_mingw_extra_paths(self, target):
paths = OrderedSet()
# The cross bindir
- root = self.environment.cross_info.get_root()
+ root = self.environment.properties.host.get_root()
if root:
paths.add(os.path.join(root, 'bin'))
# The toolchain bindir
- sys_root = self.environment.cross_info.get_sys_root()
+ sys_root = self.environment.properties.host.get_sys_root()
if sys_root:
paths.add(os.path.join(sys_root, 'bin'))
# Get program and library dirs from all target compilers
@@ -684,7 +712,7 @@ class Backend:
def write_test_file(self, datafile):
self.write_test_serialisation(self.build.get_tests(), datafile)
- def write_test_serialisation(self, tests, datafile):
+ def create_test_serialisation(self, tests):
arr = []
for t in tests:
exe = t.get_exe()
@@ -693,8 +721,7 @@ class Backend:
else:
cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(t.get_exe()))]
is_cross = self.environment.is_cross_build() and \
- self.environment.cross_info.need_cross_compiler() and \
- self.environment.cross_info.need_exe_wrapper()
+ self.environment.need_exe_wrapper()
if isinstance(exe, build.BuildTarget):
is_cross = is_cross and exe.is_cross
if isinstance(exe, dependencies.ExternalProgram):
@@ -725,14 +752,24 @@ class Backend:
elif isinstance(a, str):
cmd_args.append(a)
elif isinstance(a, build.Target):
- cmd_args.append(self.get_target_filename(a))
+ cmd_args.append(self.construct_target_rel_path(a, t.workdir))
else:
raise MesonException('Bad object in test command.')
ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
exe_wrapper, t.is_parallel, cmd_args, t.env,
- t.should_fail, t.timeout, t.workdir, extra_paths)
+ t.should_fail, t.timeout, t.workdir, extra_paths, t.protocol)
arr.append(ts)
- pickle.dump(arr, datafile)
+ return arr
+
+ def write_test_serialisation(self, tests, datafile):
+ pickle.dump(self.create_test_serialisation(tests), datafile)
+
+ def construct_target_rel_path(self, a, workdir):
+ if workdir is None:
+ return self.get_target_filename(a)
+ assert(os.path.isabs(workdir))
+ abs_path = self.get_target_filename_abs(a)
+ return os.path.relpath(abs_path, workdir)
def generate_depmf_install(self, d):
if self.build.dep_manifest_name is None:
@@ -765,7 +802,7 @@ class Backend:
def exe_object_to_cmd_array(self, exe):
if self.environment.is_cross_build() and \
isinstance(exe, build.BuildTarget) and exe.is_cross:
- if self.environment.exe_wrapper is None and self.environment.cross_info.need_exe_wrapper():
+ if self.environment.exe_wrapper is None and self.environment.need_exe_wrapper():
s = textwrap.dedent('''
Can not use target {} as a generator because it is cross-built
and no exe wrapper is defined or needs_exe_wrapper is true.
@@ -788,7 +825,7 @@ class Backend:
def replace_outputs(self, args, private_dir, output_list):
newargs = []
- regex = re.compile('@OUTPUT(\d+)@')
+ regex = re.compile(r'@OUTPUT(\d+)@')
for arg in args:
m = regex.search(arg)
while m is not None:
@@ -803,7 +840,7 @@ class Backend:
result = OrderedDict()
# Get all build and custom targets that must be built by default
for name, t in self.build.get_targets().items():
- if t.build_by_default or t.install:
+ if t.build_by_default:
result[name] = t
# Get all targets used as test executables and arguments. These must
# also be built by default. XXX: Sometime in the future these should be
@@ -928,7 +965,7 @@ class Backend:
dfilename = os.path.join(outdir, target.depfile)
i = i.replace('@DEPFILE@', dfilename)
elif '@PRIVATE_OUTDIR_' in i:
- match = re.search('@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
+ match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
if not match:
msg = 'Custom target {!r} has an invalid argument {!r}' \
''.format(target.name, i)
@@ -976,18 +1013,14 @@ class Backend:
cmd = s['exe'] + s['args']
subprocess.check_call(cmd, env=child_env)
- def create_install_data_files(self):
- install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
-
- if self.environment.is_cross_build():
- bins = self.environment.cross_info.config['binaries']
- if 'strip' not in bins:
+ def create_install_data(self):
+ strip_bin = self.environment.binaries.host.lookup_entry('strip')
+ if strip_bin is None:
+ if self.environment.is_cross_build():
mlog.warning('Cross file does not specify strip binary, result will not be stripped.')
- strip_bin = None
else:
- strip_bin = mesonlib.stringlistify(bins['strip'])
- else:
- strip_bin = self.environment.native_strip_bin
+ # TODO go through all candidates, like others
+ strip_bin = [self.environment.default_strip[0]]
d = InstallData(self.environment.get_source_dir(),
self.environment.get_build_dir(),
self.environment.get_prefix(),
@@ -1001,8 +1034,12 @@ class Backend:
self.generate_data_install(d)
self.generate_custom_install_script(d)
self.generate_subdir_install(d)
+ return d
+
+ def create_install_data_files(self):
+ install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
with open(install_data_file, 'wb') as ofile:
- pickle.dump(d, ofile)
+ pickle.dump(self.create_install_data(), ofile)
def generate_target_install(self, d):
for t in self.build.get_targets().values():
@@ -1064,7 +1101,8 @@ class Backend:
if num_outdirs == 1 and num_out > 1:
for output in t.get_outputs():
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode)
+ i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode,
+ optional=not t.build_by_default)
d.targets.append(i)
else:
for output, outdir in zip(t.get_outputs(), outdirs):
@@ -1072,7 +1110,8 @@ class Backend:
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
+ i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode,
+ optional=not t.build_by_default)
d.targets.append(i)
def generate_custom_install_script(self, d):
@@ -1148,3 +1187,53 @@ class Backend:
dst_dir = os.path.join(dst_dir, os.path.basename(src_dir))
d.install_subdirs.append([src_dir, dst_dir, sd.install_mode,
sd.exclude])
+
+ def get_introspection_data(self, target_id, target):
+ '''
+ Returns a list of source dicts with the following format for a given target:
+ [
+ {
+ "language": "<LANG>",
+ "compiler": ["result", "of", "comp.get_exelist()"],
+ "parameters": ["list", "of", "compiler", "parameters],
+ "sources": ["list", "of", "all", "<LANG>", "source", "files"],
+ "generated_sources": ["list", "of", "generated", "source", "files"]
+ }
+ ]
+
+ This is a limited fallback / reference implementation. The backend should override this method.
+ '''
+ if isinstance(target, (build.CustomTarget, build.BuildTarget)):
+ source_list_raw = target.sources + target.extra_files
+ source_list = []
+ for j in source_list_raw:
+ if isinstance(j, mesonlib.File):
+ source_list += [j.absolute_path(self.source_dir, self.build_dir)]
+ elif isinstance(j, str):
+ source_list += [os.path.join(self.source_dir, j)]
+ source_list = list(map(lambda x: os.path.normpath(x), source_list))
+
+ compiler = []
+ if isinstance(target, build.CustomTarget):
+ tmp_compiler = target.command
+ if not isinstance(tmp_compiler, list):
+ tmp_compiler = [tmp_compiler]
+ for j in tmp_compiler:
+ if isinstance(j, mesonlib.File):
+ compiler += [j.absolute_path(self.source_dir, self.build_dir)]
+ elif isinstance(j, str):
+ compiler += [j]
+ elif isinstance(j, (build.BuildTarget, build.CustomTarget)):
+ compiler += j.get_outputs()
+ else:
+ raise RuntimeError('Type "{}" is not supported in get_introspection_data. This is a bug'.format(type(j).__name__))
+
+ return [{
+ 'language': 'unknown',
+ 'compiler': compiler,
+ 'parameters': [],
+ 'sources': source_list,
+ 'generated_sources': []
+ }]
+
+ return []
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 372d7c2..21c6c08 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
+from typing import List
import os
import re
import shlex
@@ -19,7 +19,7 @@ import pickle
import subprocess
from collections import OrderedDict
import itertools
-from pathlib import PurePath
+from pathlib import PurePath, Path
from functools import lru_cache
from . import backends
@@ -29,9 +29,9 @@ from .. import build
from .. import mlog
from .. import dependencies
from .. import compilers
-from ..compilers import CompilerArgs, CCompiler, VisualStudioCCompiler
+from ..compilers import CompilerArgs, CCompiler, VisualStudioCCompiler, FortranCompiler
from ..linkers import ArLinker
-from ..mesonlib import File, MesonException, OrderedSet
+from ..mesonlib import File, MachineChoice, MesonException, OrderedSet
from ..mesonlib import get_compiler_for_source, has_path_sep
from .backends import CleanTrees
from ..build import InvalidArguments
@@ -150,6 +150,7 @@ class NinjaBackend(backends.Backend):
self.ninja_filename = 'build.ninja'
self.fortran_deps = {}
self.all_outputs = {}
+ self.introspection_data = {}
def create_target_alias(self, to_target, outfile):
# We need to use aliases for targets that might be used as directory
@@ -321,6 +322,52 @@ int dummy;
return False
return True
+ def create_target_source_introspection(self, target: build.Target, comp: compilers.Compiler, parameters, sources, generated_sources):
+ '''
+ Adds the source file introspection information for a language of a target
+
+ Internal introspection storage format:
+ self.introspection_data = {
+ '<target ID>': {
+ <id tuple>: {
+ 'language': 'lang',
+ 'compiler': ['comp', 'exe', 'list'],
+ 'parameters': ['UNIQUE', 'parameter', 'list'],
+ 'sources': [],
+ 'generated_sources': [],
+ }
+ }
+ }
+ '''
+ id = target.get_id()
+ lang = comp.get_language()
+ tgt = self.introspection_data[id]
+ # Find an existing entry or create a new one
+ id_hash = (lang, tuple(parameters))
+ src_block = tgt.get(id_hash, None)
+ if src_block is None:
+ # Convert parameters
+ if isinstance(parameters, CompilerArgs):
+ parameters = parameters.to_native(copy=True)
+ parameters = comp.compute_parameters_with_absolute_paths(parameters, self.build_dir)
+ # The new entry
+ src_block = {
+ 'language': lang,
+ 'compiler': comp.get_exelist(),
+ 'parameters': parameters,
+ 'sources': [],
+ 'generated_sources': [],
+ }
+ tgt[id_hash] = src_block
+ # Make source files absolute
+ sources = [x.absolute_path(self.source_dir, self.build_dir) if isinstance(x, File) else os.path.normpath(os.path.join(self.build_dir, x))
+ for x in sources]
+ generated_sources = [x.absolute_path(self.source_dir, self.build_dir) if isinstance(x, File) else os.path.normpath(os.path.join(self.build_dir, x))
+ for x in generated_sources]
+ # Add the source files
+ src_block['sources'] += sources
+ src_block['generated_sources'] += generated_sources
+
def generate_target(self, target, outfile):
if isinstance(target, build.CustomTarget):
self.generate_custom_target(target, outfile)
@@ -330,6 +377,8 @@ int dummy;
if name in self.processed_targets:
return
self.processed_targets[name] = True
+ # Initialize an empty introspection source list
+ self.introspection_data[name] = {}
# Generate rules for all dependency targets
self.process_target_dependencies(target, outfile)
# If target uses a language that cannot link to C objects,
@@ -374,12 +423,7 @@ int dummy;
# Generate rules for building the remaining source files in this target
outname = self.get_target_filename(target)
obj_list = []
- use_pch = self.environment.coredata.base_options.get('b_pch', False)
is_unity = self.is_unity(target)
- if use_pch and target.has_pch():
- pch_objects = self.generate_pch(target, outfile)
- else:
- pch_objects = []
header_deps = []
unity_src = []
unity_deps = [] # Generated sources that must be built before compiling a Unity target.
@@ -432,6 +476,12 @@ int dummy;
header_deps=header_deps)
obj_list.append(o)
+ use_pch = self.environment.coredata.base_options.get('b_pch', False)
+ if use_pch and target.has_pch():
+ pch_objects = self.generate_pch(target, outfile, header_deps=header_deps)
+ else:
+ pch_objects = []
+
# Generate compilation targets for C sources generated from Vala
# sources. This can be extended to other $LANG->C compilers later if
# necessary. This needs to be separate for at least Vala
@@ -536,8 +586,7 @@ int dummy;
# a serialized executable wrapper for that and check if the
# CustomTarget command needs extra paths first.
is_cross = self.environment.is_cross_build() and \
- self.environment.cross_info.need_cross_compiler() and \
- self.environment.cross_info.need_exe_wrapper()
+ self.environment.need_exe_wrapper()
if mesonlib.for_windows(is_cross, self.environment) or \
mesonlib.for_cygwin(is_cross, self.environment):
extra_bdeps = target.get_transitive_build_target_deps()
@@ -771,14 +820,16 @@ int dummy;
# Add possible java generated files to src list
generated_sources = self.get_target_generated_sources(target)
+ gen_src_list = []
for rel_src, gensrc in generated_sources.items():
dirpart, fnamepart = os.path.split(rel_src)
raw_src = File(True, dirpart, fnamepart)
if rel_src.endswith('.java'):
- src_list.append(raw_src)
+ gen_src_list.append(raw_src)
- for src in src_list:
- plain_class_path = self.generate_single_java_compile(src, target, compiler, outfile)
+ compile_args = self.determine_single_java_compile_args(target, compiler)
+ for src in src_list + gen_src_list:
+ plain_class_path = self.generate_single_java_compile(src, target, compiler, compile_args, outfile)
class_list.append(plain_class_path)
class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list]
manifest_path = os.path.join(self.get_target_private_dir(target), 'META-INF', 'MANIFEST.MF')
@@ -804,6 +855,8 @@ int dummy;
elem.add_dep(class_dep_list)
elem.add_item('ARGS', commands)
elem.write(outfile)
+ # Create introspection information
+ self.create_target_source_introspection(target, compiler, compile_args, src_list, gen_src_list)
def generate_cs_resource_tasks(self, target, outfile):
args = []
@@ -857,10 +910,11 @@ int dummy;
else:
outputs = [outname_rel]
generated_sources = self.get_target_generated_sources(target)
+ generated_rel_srcs = []
for rel_src in generated_sources.keys():
dirpart, fnamepart = os.path.split(rel_src)
if rel_src.lower().endswith('.cs'):
- rel_srcs.append(os.path.normpath(rel_src))
+ generated_rel_srcs.append(os.path.normpath(rel_src))
deps.append(os.path.normpath(rel_src))
for dep in target.get_external_deps():
@@ -868,19 +922,15 @@ int dummy;
commands += self.build.get_project_args(compiler, target.subproject, target.is_cross)
commands += self.build.get_global_args(compiler, target.is_cross)
- elem = NinjaBuildElement(self.all_outputs, outputs, 'cs_COMPILER', rel_srcs)
+ elem = NinjaBuildElement(self.all_outputs, outputs, 'cs_COMPILER', rel_srcs + generated_rel_srcs)
elem.add_dep(deps)
elem.add_item('ARGS', commands)
elem.write(outfile)
self.generate_generator_list_rules(target, outfile)
+ self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs)
- def generate_single_java_compile(self, src, target, compiler, outfile):
- deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets]
- generated_sources = self.get_target_generated_sources(target)
- for rel_src, gensrc in generated_sources.items():
- if rel_src.endswith('.java'):
- deps.append(rel_src)
+ def determine_single_java_compile_args(self, target, compiler):
args = []
args += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target))
args += self.build.get_global_args(compiler, target.is_cross)
@@ -895,6 +945,14 @@ int dummy;
for idir in i.get_incdirs():
sourcepath += os.path.join(self.build_to_src, i.curdir, idir) + os.pathsep
args += ['-sourcepath', sourcepath]
+ return args
+
+ def generate_single_java_compile(self, src, target, compiler, args, outfile):
+ deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets]
+ generated_sources = self.get_target_generated_sources(target)
+ for rel_src, gensrc in generated_sources.items():
+ if rel_src.endswith('.java'):
+ deps.append(rel_src)
rel_src = src.rel_to_builddir(self.build_to_src)
plain_class_path = src.fname[:-4] + 'class'
rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path)
@@ -1103,6 +1161,7 @@ int dummy;
element.add_item('ARGS', args)
element.add_dep(extra_dep_files)
element.write(outfile)
+ self.create_target_source_introspection(target, valac, args, all_files, [])
return other_src[0], other_src[1], vala_c_src
def generate_rust_target(self, target, outfile):
@@ -1194,6 +1253,7 @@ int dummy;
element.write(outfile)
if isinstance(target, build.SharedLibrary):
self.generate_shsym(outfile, target)
+ self.create_target_source_introspection(target, rustc, args, [main_rust_file], [])
def swift_module_file_name(self, target):
return os.path.join(self.get_target_private_dir(target),
@@ -1242,12 +1302,14 @@ int dummy;
module_name = self.target_swift_modulename(target)
swiftc = target.compilers['swift']
abssrc = []
+ relsrc = []
abs_headers = []
header_imports = []
for i in target.get_sources():
if swiftc.can_compile(i):
- relsrc = i.rel_to_builddir(self.build_to_src)
- abss = os.path.normpath(os.path.join(self.environment.get_build_dir(), relsrc))
+ rels = i.rel_to_builddir(self.build_to_src)
+ abss = os.path.normpath(os.path.join(self.environment.get_build_dir(), rels))
+ relsrc.append(rels)
abssrc.append(abss)
elif self.environment.is_header(i):
relh = i.rel_to_builddir(self.build_to_src)
@@ -1331,6 +1393,8 @@ int dummy;
elem.write(outfile)
else:
raise MesonException('Swift supports only executable and static library targets.')
+ # Introspection information
+ self.create_target_source_introspection(target, swiftc, compile_args + header_imports + module_includes, relsrc, rel_generated)
def generate_static_link_rules(self, is_cross, outfile):
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
@@ -1338,7 +1402,7 @@ int dummy;
if not is_cross:
self.generate_java_link(outfile)
if is_cross:
- if self.environment.cross_info.need_cross_compiler():
+ if self.environment.is_cross_build():
static_linker = self.build.static_cross_linker
else:
static_linker = self.build.static_linker
@@ -1381,11 +1445,7 @@ int dummy;
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
ctypes = [(self.build.compilers, False)]
if self.environment.is_cross_build():
- if self.environment.cross_info.need_cross_compiler():
- ctypes.append((self.build.cross_compilers, True))
- else:
- # Native compiler masquerades as the cross compiler.
- ctypes.append((self.build.compilers, True))
+ ctypes.append((self.build.cross_compilers, True))
else:
ctypes.append((self.build.cross_compilers, True))
for (complist, is_cross) in ctypes:
@@ -1396,24 +1456,18 @@ int dummy;
or langname == 'cs':
continue
crstr = ''
- cross_args = []
if is_cross:
crstr = '_CROSS'
- try:
- cross_args = self.environment.cross_info.config['properties'][langname + '_link_args']
- except KeyError:
- pass
rule = 'rule %s%s_LINKER\n' % (langname, crstr)
if compiler.can_linker_accept_rsp():
command_template = ''' command = {executable} @$out.rsp
rspfile = $out.rsp
- rspfile_content = $ARGS {output_args} $in $LINK_ARGS {cross_args} $aliasing
+ rspfile_content = $ARGS {output_args} $in $LINK_ARGS $aliasing
'''
else:
- command_template = ' command = {executable} $ARGS {output_args} $in $LINK_ARGS {cross_args} $aliasing\n'
+ command_template = ' command = {executable} $ARGS {output_args} $in $LINK_ARGS $aliasing\n'
command = command_template.format(
executable=' '.join(compiler.get_linker_exelist()),
- cross_args=' '.join(cross_args),
output_args=' '.join(compiler.get_linker_output_args('$out'))
)
description = ' description = Linking target $out.\n'
@@ -1536,12 +1590,11 @@ rule FORTRAN_DEP_HACK%s
if compiler.can_linker_accept_rsp():
command_template = ' command = {executable} @$out.rsp\n' \
' rspfile = $out.rsp\n' \
- ' rspfile_content = $ARGS {cross_args} {output_args} {compile_only_args} $in\n'
+ ' rspfile_content = $ARGS {output_args} {compile_only_args} $in\n'
else:
- command_template = ' command = {executable} $ARGS {cross_args} {output_args} {compile_only_args} $in\n'
+ command_template = ' command = {executable} $ARGS {output_args} {compile_only_args} $in\n'
command = command_template.format(
executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),
- cross_args=' '.join(compiler.get_cross_extra_flags(self.environment, False)) if is_cross else '',
output_args=' '.join(compiler.get_output_args('$out')),
compile_only_args=' '.join(compiler.get_compile_only_args())
)
@@ -1586,20 +1639,15 @@ rule FORTRAN_DEP_HACK%s
d = quote_func(d)
quoted_depargs.append(d)
- if is_cross:
- cross_args = compiler.get_cross_extra_flags(self.environment, False)
- else:
- cross_args = ''
if compiler.can_linker_accept_rsp():
command_template = ''' command = {executable} @$out.rsp
rspfile = $out.rsp
- rspfile_content = $ARGS {cross_args} {dep_args} {output_args} {compile_only_args} $in
+ rspfile_content = $ARGS {dep_args} {output_args} {compile_only_args} $in
'''
else:
- command_template = ' command = {executable} $ARGS {cross_args} {dep_args} {output_args} {compile_only_args} $in\n'
+ command_template = ' command = {executable} $ARGS {dep_args} {output_args} {compile_only_args} $in\n'
command = command_template.format(
executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),
- cross_args=' '.join(cross_args),
dep_args=' '.join(quoted_depargs),
output_args=' '.join(compiler.get_output_args('$out')),
compile_only_args=' '.join(compiler.get_compile_only_args())
@@ -1625,12 +1673,6 @@ rule FORTRAN_DEP_HACK%s
crstr = ''
rule = 'rule %s%s_PCH\n' % (langname, crstr)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
- cross_args = []
- if is_cross:
- try:
- cross_args = compiler.get_cross_extra_flags(self.environment, False)
- except KeyError:
- pass
quoted_depargs = []
for d in depargs:
@@ -1641,9 +1683,8 @@ rule FORTRAN_DEP_HACK%s
output = ''
else:
output = ' '.join(compiler.get_output_args('$out'))
- command = " command = {executable} $ARGS {cross_args} {dep_args} {output_args} {compile_only_args} $in\n".format(
+ command = " command = {executable} $ARGS {dep_args} {output_args} {compile_only_args} $in\n".format(
executable=' '.join(compiler.get_exelist()),
- cross_args=' '.join(cross_args),
dep_args=' '.join(quoted_depargs),
output_args=output,
compile_only_args=' '.join(compiler.get_compile_only_args())
@@ -1667,12 +1708,7 @@ rule FORTRAN_DEP_HACK%s
self.generate_compile_rule_for(langname, compiler, False, outfile)
self.generate_pch_rule_for(langname, compiler, False, outfile)
if self.environment.is_cross_build():
- # In case we are going a target-only build, make the native compilers
- # masquerade as cross compilers.
- if self.environment.cross_info.need_cross_compiler():
- cclist = self.build.cross_compilers
- else:
- cclist = self.build.compilers
+ cclist = self.build.cross_compilers
for langname, compiler in cclist.items():
if compiler.get_id() == 'clang':
self.generate_llvm_ir_compile_rule(compiler, True, outfile)
@@ -1712,11 +1748,11 @@ rule FORTRAN_DEP_HACK%s
outfilelist = genlist.get_outputs()
extra_dependencies = [os.path.join(self.build_to_src, i) for i in genlist.extra_depends]
for i in range(len(infilelist)):
+ curfile = infilelist[i]
if len(generator.outputs) == 1:
sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i])
else:
- sole_output = ''
- curfile = infilelist[i]
+ sole_output = '{}'.format(curfile)
infilename = curfile.rel_to_builddir(self.build_to_src)
base_args = generator.get_arglist(infilename)
outfiles = genlist.get_outputs_for(curfile)
@@ -1733,7 +1769,7 @@ rule FORTRAN_DEP_HACK%s
for x in args]
args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist)
# We have consumed output files, so drop them from the list of remaining outputs.
- if sole_output == '':
+ if len(generator.outputs) > 1:
outfilelist = outfilelist[len(generator.outputs):]
args = self.replace_paths(target, args, override_subdir=subdir)
cmdlist = exe_arr + self.replace_extra_args(args, genlist)
@@ -1756,7 +1792,11 @@ rule FORTRAN_DEP_HACK%s
elem.add_item('DEPFILE', depfile)
if len(extra_dependencies) > 0:
elem.add_dep(extra_dependencies)
- elem.add_item('DESC', 'Generating {!r}.'.format(sole_output))
+ if len(generator.outputs) == 1:
+ elem.add_item('DESC', 'Generating {!r}.'.format(sole_output))
+ else:
+ # since there are multiple outputs, we log the source that caused the rebuild
+ elem.add_item('DESC', 'Generating source from {!r}.'.format(sole_output))
if isinstance(exe, build.BuildTarget):
elem.add_dep(self.get_target_filename(exe))
elem.add_item('COMMAND', cmd)
@@ -1771,7 +1811,9 @@ rule FORTRAN_DEP_HACK%s
if compiler is None:
self.fortran_deps[target.get_basename()] = {}
return
- modre = re.compile(r"\s*module\s+(\w+)", re.IGNORECASE)
+
+ modre = re.compile(r"\s*\bmodule\b\s+(\w+)\s*$", re.IGNORECASE)
+ submodre = re.compile(r"\s*\bsubmodule\b\s+\((\w+:?\w+)\)\s+(\w+)\s*$", re.IGNORECASE)
module_files = {}
for s in target.get_sources():
# FIXME, does not work for Fortran sources generated by
@@ -1781,61 +1823,47 @@ rule FORTRAN_DEP_HACK%s
continue
filename = s.absolute_path(self.environment.get_source_dir(),
self.environment.get_build_dir())
- # Some Fortran editors save in weird encodings,
- # but all the parts we care about are in ASCII.
- with open(filename, errors='ignore') as f:
+ # Fortran keywords must be ASCII.
+ with open(filename, encoding='ascii', errors='ignore') as f:
for line in f:
modmatch = modre.match(line)
if modmatch is not None:
modname = modmatch.group(1).lower()
- if modname == 'procedure':
- # MODULE PROCEDURE construct
- continue
if modname in module_files:
raise InvalidArguments(
'Namespace collision: module %s defined in '
'two files %s and %s.' %
(modname, module_files[modname], s))
module_files[modname] = s
+ else:
+ submodmatch = submodre.match(line)
+ if submodmatch is not None:
+ submodname = submodmatch.group(2).lower()
+ if submodname in module_files:
+ raise InvalidArguments(
+ 'Namespace collision: submodule %s defined in '
+ 'two files %s and %s.' %
+ (submodname, module_files[submodname], s))
+ module_files[submodname] = s
+
self.fortran_deps[target.get_basename()] = module_files
- def get_fortran_deps(self, compiler, src, target):
- mod_files = []
- usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE)
- dirname = self.get_target_private_dir(target)
+ def get_fortran_deps(self, compiler: FortranCompiler, src: str, target) -> List[str]:
+ """
+ Find all modules and submodules needed by a target
+ """
+
+ dirname = Path(self.get_target_private_dir(target))
tdeps = self.fortran_deps[target.get_basename()]
- with open(src) as f:
- for line in f:
- usematch = usere.match(line)
- if usematch is not None:
- usename = usematch.group(1).lower()
- if usename not in tdeps:
- # The module is not provided by any source file. This
- # is due to:
- # a) missing file/typo/etc
- # b) using a module provided by the compiler, such as
- # OpenMP
- # There's no easy way to tell which is which (that I
- # know of) so just ignore this and go on. Ideally we
- # would print a warning message to the user but this is
- # a common occurrence, which would lead to lots of
- # distracting noise.
- continue
- mod_source_file = tdeps[usename]
- # Check if a source uses a module it exports itself.
- # Potential bug if multiple targets have a file with
- # the same name.
- if mod_source_file.fname == os.path.basename(src):
- continue
- mod_name = compiler.module_name_to_filename(
- usematch.group(1))
- mod_files.append(os.path.join(dirname, mod_name))
+ srcdir = Path(self.source_dir)
+
+ mod_files = _scan_fortran_file_deps(src, srcdir, dirname, tdeps, compiler)
return mod_files
def get_cross_stdlib_args(self, target, compiler):
if not target.is_cross:
return []
- if not self.environment.cross_info.has_stdlib(compiler.language):
+ if not self.environment.properties.host.has_stdlib(compiler.language):
return []
return compiler.get_no_stdinc_args()
@@ -2063,6 +2091,12 @@ rule FORTRAN_DEP_HACK%s
commands = self._generate_single_compile(target, compiler, is_generated)
commands = CompilerArgs(commands.compiler, commands)
+ # Create introspection information
+ if is_generated is False:
+ self.create_target_source_introspection(target, compiler, commands, [src], [])
+ else:
+ self.create_target_source_introspection(target, compiler, commands, [], [src])
+
build_dir = self.environment.get_build_dir()
if isinstance(src, File):
rel_src = src.rel_to_builddir(self.build_to_src)
@@ -2117,18 +2151,14 @@ rule FORTRAN_DEP_HACK%s
for modname, srcfile in self.fortran_deps[target.get_basename()].items():
modfile = os.path.join(self.get_target_private_dir(target),
compiler.module_name_to_filename(modname))
+
if srcfile == src:
depelem = NinjaBuildElement(self.all_outputs, modfile, 'FORTRAN_DEP_HACK' + crstr, rel_obj)
depelem.write(outfile)
commands += compiler.get_module_outdir_args(self.get_target_private_dir(target))
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
- for d in header_deps:
- if isinstance(d, File):
- d = d.rel_to_builddir(self.build_to_src)
- elif not self.has_dir_part(d):
- d = os.path.join(self.get_target_private_dir(target), d)
- element.add_dep(d)
+ self.add_header_deps(target, element, header_deps)
for d in extra_deps:
element.add_dep(d)
for d in order_deps:
@@ -2137,7 +2167,7 @@ rule FORTRAN_DEP_HACK%s
elif not self.has_dir_part(d):
d = os.path.join(self.get_target_private_dir(target), d)
element.add_orderdep(d)
- element.add_orderdep(pch_dep)
+ element.add_dep(pch_dep)
# Convert from GCC-style link argument naming to the naming used by the
# current compiler.
commands = commands.to_native()
@@ -2148,6 +2178,14 @@ rule FORTRAN_DEP_HACK%s
element.write(outfile)
return rel_obj
+ def add_header_deps(self, target, ninja_element, header_deps):
+ for d in header_deps:
+ if isinstance(d, File):
+ d = d.rel_to_builddir(self.build_to_src)
+ elif not self.has_dir_part(d):
+ d = os.path.join(self.get_target_private_dir(target), d)
+ ninja_element.add_dep(d)
+
def has_dir_part(self, fname):
# FIXME FIXME: The usage of this is a terrible and unreliable hack
if isinstance(fname, File):
@@ -2166,21 +2204,28 @@ rule FORTRAN_DEP_HACK%s
return [os.path.join(self.get_target_dir(lt), lt.get_filename()) for lt in target.link_targets]
def generate_msvc_pch_command(self, target, compiler, pch):
- if len(pch) != 2:
- raise MesonException('MSVC requires one header and one source to produce precompiled headers.')
header = pch[0]
- source = pch[1]
pchname = compiler.get_pch_name(header)
dst = os.path.join(self.get_target_private_dir(target), pchname)
commands = []
commands += self.generate_basic_compiler_args(target, compiler)
+
+ if len(pch) == 1:
+ # Auto generate PCH.
+ source = self.create_msvc_pch_implementation(target, compiler.get_language(), pch[0])
+ pch_header_dir = os.path.dirname(os.path.join(self.build_to_src, target.get_source_subdir(), header))
+ commands += compiler.get_include_args(pch_header_dir, False)
+ else:
+ source = os.path.join(self.build_to_src, target.get_source_subdir(), pch[1])
+
just_name = os.path.basename(header)
(objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
commands += pch_args
+ commands += self._generate_single_compile(target, compiler)
commands += self.get_compile_debugfile_args(compiler, target, objname)
dep = dst + '.' + compiler.get_depfile_suffix()
- return commands, dep, dst, [objname]
+ return commands, dep, dst, [objname], source
def generate_gcc_pch_command(self, target, compiler, pch):
commands = self._generate_single_compile(target, compiler)
@@ -2193,7 +2238,7 @@ rule FORTRAN_DEP_HACK%s
dep = dst + '.' + compiler.get_depfile_suffix()
return commands, dep, dst, [] # Gcc does not create an object file during pch generation.
- def generate_pch(self, target, outfile):
+ def generate_pch(self, target, outfile, header_deps=[]):
cstr = ''
pch_objects = []
if target.is_cross:
@@ -2209,8 +2254,7 @@ rule FORTRAN_DEP_HACK%s
raise InvalidArguments(msg)
compiler = target.compilers[lang]
if isinstance(compiler, VisualStudioCCompiler):
- src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1])
- (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch)
+ (commands, dep, dst, objs, src) = self.generate_msvc_pch_command(target, compiler, pch)
extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0])
elif compiler.id == 'intel':
# Intel generates on target generation
@@ -2224,6 +2268,7 @@ rule FORTRAN_DEP_HACK%s
elem = NinjaBuildElement(self.all_outputs, dst, rulename, src)
if extradep is not None:
elem.add_dep(extradep)
+ self.add_header_deps(target, elem, header_deps)
elem.add_item('ARGS', commands)
elem.add_item('DEPFILE', dep)
elem.write(outfile)
@@ -2235,14 +2280,14 @@ rule FORTRAN_DEP_HACK%s
targetdir = self.get_target_private_dir(target)
symname = os.path.join(targetdir, target_name + '.symbols')
elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file)
- if self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler():
- elem.add_item('CROSS', '--cross-host=' + self.environment.cross_info.config['host_machine']['system'])
+ if self.environment.is_cross_build():
+ elem.add_item('CROSS', '--cross-host=' + self.environment.machines.host.system)
elem.write(outfile)
def get_cross_stdlib_link_args(self, target, linker):
if isinstance(target, build.StaticLibrary) or not target.is_cross:
return []
- if not self.environment.cross_info.has_stdlib(linker.language):
+ if not self.environment.properties.host.has_stdlib(linker.language):
return []
return linker.get_no_stdlib_link_args()
@@ -2251,8 +2296,6 @@ rule FORTRAN_DEP_HACK%s
if isinstance(target, build.Executable):
# Currently only used with the Swift compiler to add '-emit-executable'
commands += linker.get_std_exe_link_args()
- # If gui_app is significant on this platform, add the appropriate linker arguments
- commands += linker.get_gui_app_args(target.gui_app)
# If export_dynamic, add the appropriate linker arguments
if target.export_dynamic:
commands += linker.gen_export_dynamic_link_args(self.environment)
@@ -2285,19 +2328,27 @@ rule FORTRAN_DEP_HACK%s
raise RuntimeError('Unknown build target type.')
return commands
+ def get_target_type_link_args_post_dependencies(self, target, linker):
+ commands = []
+ if isinstance(target, build.Executable):
+ # If gui_app is significant on this platform, add the appropriate linker arguments.
+ # Unfortunately this can't be done in get_target_type_link_args, because some misguided
+ # libraries (such as SDL2) add -mwindows to their link flags.
+ commands += linker.get_gui_app_args(target.gui_app)
+ return commands
+
def get_link_whole_args(self, linker, target):
target_args = self.build_target_link_arguments(linker, target.link_whole_targets)
return linker.get_link_whole_for(target_args) if len(target_args) else []
- @staticmethod
@lru_cache(maxsize=None)
- def guess_library_absolute_path(linker, libname, search_dirs, patterns):
+ def guess_library_absolute_path(self, linker, libname, search_dirs, patterns):
for d in search_dirs:
for p in patterns:
trial = CCompiler._get_trials_from_pattern(p, d, libname)
if not trial:
continue
- trial = CCompiler._get_file_from_list(trial)
+ trial = CCompiler._get_file_from_list(self.environment, trial)
if not trial:
continue
# Return the first result
@@ -2360,9 +2411,9 @@ rule FORTRAN_DEP_HACK%s
sharedlibs = self.guess_library_absolute_path(linker, libname,
search_dirs, shared_patterns)
if staticlibs:
- guessed_dependencies.append(os.path.realpath(staticlibs))
+ guessed_dependencies.append(staticlibs.resolve().as_posix())
if sharedlibs:
- guessed_dependencies.append(os.path.realpath(sharedlibs))
+ guessed_dependencies.append(sharedlibs.resolve().as_posix())
return guessed_dependencies + absolute_libs
@@ -2410,16 +2461,20 @@ rule FORTRAN_DEP_HACK%s
if not isinstance(target, build.StaticLibrary):
commands += self.get_link_whole_args(linker, target)
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+
if not isinstance(target, build.StaticLibrary):
# Add link args added using add_project_link_arguments()
commands += self.build.get_project_link_args(linker, target.subproject, target.is_cross)
# Add link args added using add_global_link_arguments()
# These override per-project link arguments
commands += self.build.get_global_link_args(linker, target.is_cross)
- if not target.is_cross:
- # Link args added from the env: LDFLAGS. We want these to
- # override all the defaults but not the per-target link args.
- commands += self.environment.coredata.get_external_link_args(linker.get_language())
+ # Link args added from the env: LDFLAGS. We want these to override
+ # all the defaults but not the per-target link args.
+ commands += self.environment.coredata.get_external_link_args(for_machine, linker.get_language())
# Now we will add libraries and library paths from various sources
@@ -2460,12 +2515,15 @@ rule FORTRAN_DEP_HACK%s
if need_threads:
commands += linker.thread_link_flags(self.environment)
+ # Add link args specific to this BuildTarget type that must not be overridden by dependencies
+ commands += self.get_target_type_link_args_post_dependencies(target, linker)
+
# Add link args for c_* or cpp_* build options. Currently this only
# adds c_winlibs and cpp_winlibs when building for Windows. This needs
# to be after all internal and external libraries so that unresolved
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
- commands += linker.get_option_link_args(self.environment.coredata.compiler_options)
+ commands += linker.get_option_link_args(self.environment.coredata.compiler_options[for_machine])
dep_targets = []
dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal))
@@ -2677,8 +2735,100 @@ rule FORTRAN_DEP_HACK%s
elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
elem.write(outfile)
+ def get_introspection_data(self, target_id, target):
+ if target_id not in self.introspection_data or len(self.introspection_data[target_id]) == 0:
+ return super().get_introspection_data(target_id, target)
+
+ result = []
+ for _, i in self.introspection_data[target_id].items():
+ result += [i]
+ return result
+
def load(build_dir):
filename = os.path.join(build_dir, 'meson-private', 'install.dat')
with open(filename, 'rb') as f:
obj = pickle.load(f)
return obj
+
+
+def _scan_fortran_file_deps(src: str, srcdir: Path, dirname: Path, tdeps, compiler) -> List[str]:
+ """
+ scan a Fortran file for dependencies. Needs to be distinct from target
+    to allow for recursion induced by `include` statements.
+
+ It makes a number of assumptions, including
+
+ * `use`, `module`, `submodule` name is not on a continuation line
+
+ Regex
+ -----
+
+ * `incre` works for `#include "foo.f90"` and `include "foo.f90"`
+ * `usere` works for legacy and Fortran 2003 `use` statements
+ * `submodre` is for Fortran >= 2008 `submodule`
+ """
+
+ incre = re.compile(r"#?include\s*['\"](\w+\.\w+)['\"]\s*$", re.IGNORECASE)
+ usere = re.compile(r"\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)", re.IGNORECASE)
+ submodre = re.compile(r"\s*\bsubmodule\b\s+\((\w+:?\w+)\)\s+(\w+)\s*$", re.IGNORECASE)
+
+ mod_files = []
+ src = Path(src)
+ with src.open(encoding='ascii', errors='ignore') as f:
+ for line in f:
+ # included files
+ incmatch = incre.match(line)
+ if incmatch is not None:
+ incfile = srcdir / incmatch.group(1)
+ if incfile.suffix.lower()[1:] in compiler.file_suffixes:
+ mod_files.extend(_scan_fortran_file_deps(incfile, srcdir, dirname, tdeps, compiler))
+ # modules
+ usematch = usere.match(line)
+ if usematch is not None:
+ usename = usematch.group(1).lower()
+ if usename == 'intrinsic': # this keeps the regex simpler
+ continue
+ if usename not in tdeps:
+ # The module is not provided by any source file. This
+ # is due to:
+ # a) missing file/typo/etc
+ # b) using a module provided by the compiler, such as
+ # OpenMP
+ # There's no easy way to tell which is which (that I
+ # know of) so just ignore this and go on. Ideally we
+ # would print a warning message to the user but this is
+ # a common occurrence, which would lead to lots of
+ # distracting noise.
+ continue
+ srcfile = srcdir / tdeps[usename].fname
+ if not srcfile.is_file():
+ if srcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif srcfile.samefile(src): # self-reference
+ continue
+
+ mod_name = compiler.module_name_to_filename(usename)
+ mod_files.append(str(dirname / mod_name))
+ else: # submodules
+ submodmatch = submodre.match(line)
+ if submodmatch is not None:
+ parents = submodmatch.group(1).lower().split(':')
+ assert len(parents) in (1, 2), (
+ 'submodule ancestry must be specified as'
+                        ' ancestor:parent but Meson found {}'.format(parents))
+ for parent in parents:
+ if parent not in tdeps:
+ raise MesonException("submodule {} relies on parent module {} that was not found.".format(submodmatch.group(2).lower(), parent))
+ submodsrcfile = srcdir / tdeps[parent].fname
+ if not submodsrcfile.is_file():
+ if submodsrcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif submodsrcfile.samefile(src): # self-reference
+ continue
+ mod_name = compiler.module_name_to_filename(parent)
+ mod_files.append(str(dirname / mod_name))
+ return mod_files
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 939f7b4..6d62553 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import copy
import os
import pickle
import xml.dom.minidom
@@ -25,7 +26,9 @@ from .. import dependencies
from .. import mlog
from .. import compilers
from ..compilers import CompilerArgs
-from ..mesonlib import MesonException, File, python_command, replace_if_different
+from ..mesonlib import (
+ MesonException, MachineChoice, File, python_command, replace_if_different
+)
from ..environment import Environment, build_filename
def autodetect_vs_version(build):
@@ -87,6 +90,7 @@ class Vs2010Backend(backends.Backend):
self.vs_version = '2010'
self.windows_target_platform_version = None
self.subdirs = {}
+ self.handled_target_deps = {}
def generate_custom_generator_commands(self, target, parent_node):
generator_output_files = []
@@ -303,19 +307,9 @@ class Vs2010Backend(backends.Backend):
prj[0], prj[1], prj[2])
ofile.write(prj_line)
target_dict = {target.get_id(): target}
- # Get direct deps
- all_deps = self.get_target_deps(target_dict)
# Get recursive deps
recursive_deps = self.get_target_deps(
target_dict, recursive=True)
- ofile.write('\tProjectSection(ProjectDependencies) = '
- 'postProject\n')
- regen_guid = self.environment.coredata.regen_guid
- ofile.write('\t\t{%s} = {%s}\n' % (regen_guid, regen_guid))
- for dep in all_deps.keys():
- guid = self.environment.coredata.target_guids[dep]
- ofile.write('\t\t{%s} = {%s}\n' % (guid, guid))
- ofile.write('\tEndProjectSection\n')
ofile.write('EndProject\n')
for dep, target in recursive_deps.items():
if prj[0] in default_projlist:
@@ -444,10 +438,26 @@ class Vs2010Backend(backends.Backend):
def quote_arguments(self, arr):
return ['"%s"' % i for i in arr]
- def add_project_reference(self, root, include, projid):
+ def add_project_reference(self, root, include, projid, link_outputs=False):
ig = ET.SubElement(root, 'ItemGroup')
pref = ET.SubElement(ig, 'ProjectReference', Include=include)
ET.SubElement(pref, 'Project').text = '{%s}' % projid
+ if not link_outputs:
+ # Do not link in generated .lib files from dependencies automatically.
+ # We only use the dependencies for ordering and link in the generated
+ # objects and .lib files manually.
+ ET.SubElement(pref, 'LinkLibraryDependencies').text = 'false'
+
+ def add_target_deps(self, root, target):
+ target_dict = {target.get_id(): target}
+ for name, dep in self.get_target_deps(target_dict).items():
+ if dep.get_id() in self.handled_target_deps[target.get_id()]:
+ # This dependency was already handled manually.
+ continue
+ relpath = self.get_target_dir_relative_to(dep, target)
+ vcxproj = os.path.join(relpath, dep.get_id() + '.vcxproj')
+ tid = self.environment.coredata.target_guids[dep.get_id()]
+ self.add_project_reference(root, vcxproj, tid)
def create_basic_crap(self, target, guid):
project_name = target.name
@@ -472,14 +482,14 @@ class Vs2010Backend(backends.Backend):
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType')
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -494,7 +504,6 @@ class Vs2010Backend(backends.Backend):
def gen_run_target_vcxproj(self, target, ofname, guid):
root = self.create_basic_crap(target, guid)
action = ET.SubElement(root, 'ItemDefinitionGroup')
- customstep = ET.SubElement(action, 'PostBuildEvent')
cmd_raw = [target.command] + target.args
cmd = python_command + \
[os.path.join(self.environment.get_script_dir(), 'commandrunner.py'),
@@ -509,18 +518,20 @@ class Vs2010Backend(backends.Backend):
elif isinstance(i, File):
relfname = i.rel_to_builddir(self.build_to_src)
cmd.append(os.path.join(self.environment.get_build_dir(), relfname))
+ elif isinstance(i, str):
+ # Escape embedded quotes, because we quote the entire argument below.
+ cmd.append(i.replace('"', '\\"'))
else:
cmd.append(i)
cmd_templ = '''"%s" ''' * len(cmd)
- ET.SubElement(customstep, 'Command').text = cmd_templ % tuple(cmd)
- ET.SubElement(customstep, 'Message').text = 'Running custom command.'
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_custom_build(root, 'run_target', cmd_templ % tuple(cmd))
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
+ self.add_target_deps(root, target)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
def gen_custom_target_vcxproj(self, target, ofname, guid):
root = self.create_basic_crap(target, guid)
- action = ET.SubElement(root, 'ItemDefinitionGroup')
- customstep = ET.SubElement(action, 'CustomBuildStep')
# We need to always use absolute paths because our invocation is always
# from the target dir, not the build root.
target.absolute_paths = True
@@ -537,11 +548,16 @@ class Vs2010Backend(backends.Backend):
extra_paths=extra_paths,
capture=ofilenames[0] if target.capture else None)
wrapper_cmd = self.environment.get_build_command() + ['--internal', 'exe', exe_data]
- ET.SubElement(customstep, 'Command').text = ' '.join(self.quote_arguments(wrapper_cmd))
- ET.SubElement(customstep, 'Outputs').text = ';'.join(ofilenames)
- ET.SubElement(customstep, 'Inputs').text = ';'.join([exe_data] + srcs + depend_files)
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ if target.build_always_stale:
+ # Use a nonexistent file to always consider the target out-of-date.
+ ofilenames += [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
+ 'outofdate.file'))]
+ self.add_custom_build(root, 'custom_target', ' '.join(self.quote_arguments(wrapper_cmd)),
+ deps=[exe_data] + srcs + depend_files, outputs=ofilenames)
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
self.generate_custom_generator_commands(target, root)
+ self.add_regen_dependency(root)
+ self.add_target_deps(root, target)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
@classmethod
@@ -553,18 +569,37 @@ class Vs2010Backend(backends.Backend):
return 'cpp'
raise MesonException('Could not guess language from source file %s.' % src)
- def add_pch(self, inc_cl, proj_to_src_dir, pch_sources, source_file):
+ def add_pch(self, pch_sources, lang, inc_cl):
if len(pch_sources) <= 1:
# We only need per file precompiled headers if we have more than 1 language.
return
- lang = Vs2010Backend.lang_from_source_file(source_file)
- header = os.path.join(proj_to_src_dir, pch_sources[lang][0])
- pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
- pch_file.text = header
+ self.use_pch(pch_sources, lang, inc_cl)
+
+ def create_pch(self, pch_sources, lang, inc_cl):
+ pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
+ pch.text = 'Create'
+ self.add_pch_files(pch_sources, lang, inc_cl)
+
+ def use_pch(self, pch_sources, lang, inc_cl):
+ header = self.add_pch_files(pch_sources, lang, inc_cl)
pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
pch_include.text = header + ';%(ForcedIncludeFiles)'
+
+ def add_pch_files(self, pch_sources, lang, inc_cl):
+ header = os.path.basename(pch_sources[lang][0])
+ pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
+ # When USING PCHs, MSVC will not do the regular include
+ # directory lookup, but simply use a string match to find the
+ # PCH to use. That means the #include directive must match the
+ # pch_file.text used during PCH CREATION verbatim.
+ # When CREATING a PCH, MSVC will do the include directory
+ # lookup to find the actual PCH header to use. Thus, the PCH
+ # header must either be in the include_directories of the target
+ # or be in the same directory as the PCH implementation.
+ pch_file.text = header
pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % lang
+ return header
def is_argument_with_msbuild_xml_entry(self, entry):
# Remove arguments that have a top level XML entry so
@@ -692,6 +727,7 @@ class Vs2010Backend(backends.Backend):
mlog.debug('Generating vcxproj %s.' % target.name)
entrypoint = 'WinMainCRTStartup'
subsystem = 'Windows'
+ self.handled_target_deps[target.get_id()] = []
if isinstance(target, build.Executable):
conftype = 'Application'
if not target.gui_app:
@@ -747,7 +783,7 @@ class Vs2010Backend(backends.Backend):
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
# Start configuration
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType').text = conftype
@@ -758,19 +794,6 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(type_config, 'WholeProgramOptimization').text = 'false'
# Let VS auto-set the RTC level
ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'Default'
- o_flags = split_o_flags_args(buildtype_args)
- if '/Oi' in o_flags:
- ET.SubElement(type_config, 'IntrinsicFunctions').text = 'true'
- if '/Ob1' in o_flags:
- ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'OnlyExplicitInline'
- elif '/Ob2' in o_flags:
- ET.SubElement(type_config, 'InlineFunctionExpansion').text = 'AnySuitable'
- # In modern MSVC parlance "/O1" means size optimization.
- # "/Os" has been deprecated.
- if '/O1' in o_flags:
- ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Size'
- else:
- ET.SubElement(type_config, 'FavorSizeOrSpeed').text = 'Speed'
# Incremental linking increases code size
if '/INCREMENTAL:NO' in buildtype_link_args:
ET.SubElement(type_config, 'LinkIncremental').text = 'false'
@@ -810,17 +833,8 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
elif '/RTCs' in buildtype_args:
ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
- # Optimization flags
- if '/Ox' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'Full'
- elif '/O2' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'MaxSpeed'
- elif '/O1' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'MinSpace'
- elif '/Od' in o_flags:
- ET.SubElement(type_config, 'Optimization').text = 'Disabled'
# End configuration
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
(gen_src, gen_hdrs, gen_objs, gen_langs) = self.split_sources(generated_files)
(custom_src, custom_hdrs, custom_objs, custom_langs) = self.split_sources(custom_target_output_files)
@@ -856,10 +870,14 @@ class Vs2010Backend(backends.Backend):
file_inc_dirs = dict((lang, []) for lang in target.compilers)
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
+ if self.environment.is_cross_build() and not target.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
for l, comp in target.compilers.items():
if l in file_args:
file_args[l] += compilers.get_base_compile_args(self.get_base_options_for_target(target), comp)
- file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options)
+ file_args[l] += comp.get_option_compile_args(self.environment.coredata.compiler_options[for_machine])
# Add compile args added using add_project_arguments()
for l, args in self.build.projects_args.get(target.subproject, {}).items():
@@ -870,13 +888,12 @@ class Vs2010Backend(backends.Backend):
for l, args in self.build.global_args.items():
if l in file_args:
file_args[l] += args
- if not target.is_cross:
- # Compile args added from the env: CFLAGS/CXXFLAGS, etc. We want these
- # to override all the defaults, but not the per-target compile args.
- for key, opt in self.environment.coredata.compiler_options.items():
- l, suffix = key.split('_', 1)
- if suffix == 'args' and l in file_args:
- file_args[l] += opt.value
+ # Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these
+ # to override all the defaults, but not the per-target compile args.
+ for key, opt in self.environment.coredata.compiler_options[for_machine].items():
+ l, suffix = key.split('_', 1)
+ if suffix == 'args' and l in file_args:
+ file_args[l] += opt.value
for args in file_args.values():
# This is where Visual Studio will insert target_args, target_defines,
# etc, which are added later from external deps (see below).
@@ -987,6 +1004,27 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(clconf, 'WarningLevel').text = 'Level' + str(1 + int(warning_level))
if self.get_option_for_target('werror', target):
ET.SubElement(clconf, 'TreatWarningAsError').text = 'true'
+ # Optimization flags
+ o_flags = split_o_flags_args(buildtype_args)
+ if '/Ox' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'Full'
+ elif '/O2' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'MaxSpeed'
+ elif '/O1' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'MinSpace'
+ elif '/Od' in o_flags:
+ ET.SubElement(clconf, 'Optimization').text = 'Disabled'
+ if '/Oi' in o_flags:
+ ET.SubElement(clconf, 'IntrinsicFunctions').text = 'true'
+ if '/Ob1' in o_flags:
+ ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'OnlyExplicitInline'
+ elif '/Ob2' in o_flags:
+ ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'AnySuitable'
+ # Size-preserving flags
+ if '/Os' in o_flags:
+ ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Size'
+ else:
+ ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Speed'
# Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
pch_sources = {}
if self.environment.coredata.base_options.get('b_pch', False):
@@ -997,24 +1035,22 @@ class Vs2010Backend(backends.Backend):
continue
pch_node.text = 'Use'
if compiler.id == 'msvc':
- if len(pch) != 2:
- raise MesonException('MSVC requires one header and one source to produce precompiled headers.')
- pch_sources[lang] = [pch[0], pch[1], lang]
+ if len(pch) == 1:
+ # Auto generate PCH.
+ src = os.path.join(down, self.create_msvc_pch_implementation(target, lang, pch[0]))
+ pch_header_dir = os.path.dirname(os.path.join(proj_to_src_dir, pch[0]))
+ else:
+ src = os.path.join(proj_to_src_dir, pch[1])
+ pch_header_dir = None
+ pch_sources[lang] = [pch[0], src, lang, pch_header_dir]
else:
# I don't know whether its relevant but let's handle other compilers
# used with a vs backend
- pch_sources[lang] = [pch[0], None, lang]
+ pch_sources[lang] = [pch[0], None, lang, None]
if len(pch_sources) == 1:
# If there is only 1 language with precompiled headers, we can use it for the entire project, which
# is cleaner than specifying it for each source file.
- pch_source = list(pch_sources.values())[0]
- header = os.path.join(proj_to_src_dir, pch_source[0])
- pch_file = ET.SubElement(clconf, 'PrecompiledHeaderFile')
- pch_file.text = header
- pch_include = ET.SubElement(clconf, 'ForcedIncludeFiles')
- pch_include.text = header + ';%(ForcedIncludeFiles)'
- pch_out = ET.SubElement(clconf, 'PrecompiledHeaderOutputFile')
- pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % pch_source[2]
+ self.use_pch(pch_sources, list(pch_sources)[0], clconf)
resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
ET.SubElement(resourcecompile, 'PreprocessorDefinitions')
@@ -1038,10 +1074,10 @@ class Vs2010Backend(backends.Backend):
# Add link args added using add_global_link_arguments()
# These override per-project link arguments
extra_link_args += self.build.get_global_link_args(compiler, target.is_cross)
- if not target.is_cross:
- # Link args added from the env: LDFLAGS. We want these to
- # override all the defaults but not the per-target link args.
- extra_link_args += self.environment.coredata.get_external_link_args(compiler.get_language())
+ # Link args added from the env: LDFLAGS, or the cross file. We want
+ # these to override all the defaults but not the per-target link
+ # args.
+ extra_link_args += self.environment.coredata.get_external_link_args(for_machine, compiler.get_language())
# Only non-static built targets need link args and link dependencies
extra_link_args += target.link_args
# External deps must be last because target link libraries may depend on them.
@@ -1064,7 +1100,7 @@ class Vs2010Backend(backends.Backend):
# to be after all internal and external libraries so that unresolved
# symbols from those can be found here. This is needed when the
# *_winlibs that we want to link to are static mingw64 libraries.
- extra_link_args += compiler.get_option_link_args(self.environment.coredata.compiler_options)
+ extra_link_args += compiler.get_option_link_args(self.environment.coredata.compiler_options[for_machine])
(additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native())
# Add more libraries to be linked if needed
@@ -1082,7 +1118,10 @@ class Vs2010Backend(backends.Backend):
trelpath = self.get_target_dir_relative_to(t, target)
tvcxproj = os.path.join(trelpath, t.get_id() + '.vcxproj')
tid = self.environment.coredata.target_guids[t.get_id()]
- self.add_project_reference(root, tvcxproj, tid)
+ self.add_project_reference(root, tvcxproj, tid, link_outputs=True)
+ # Mark the dependency as already handled to not have
+ # multiple references to the same target.
+ self.handled_target_deps[target.get_id()].append(t.get_id())
else:
# Other libraries go into AdditionalDependencies
if linkname not in additional_links:
@@ -1138,7 +1177,7 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename))
extra_files = target.extra_files
- if len(headers) + len(gen_hdrs) + len(extra_files) > 0:
+ if len(headers) + len(gen_hdrs) + len(extra_files) + len(pch_sources) > 0:
inc_hdrs = ET.SubElement(root, 'ItemGroup')
for h in headers:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
@@ -1148,6 +1187,9 @@ class Vs2010Backend(backends.Backend):
for h in target.extra_files:
relpath = os.path.join(down, h.rel_to_builddir(self.build_to_src))
ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+ for lang in pch_sources:
+ h = pch_sources[lang][0]
+ ET.SubElement(inc_hdrs, 'CLInclude', Include=os.path.join(proj_to_src_dir, h))
if len(sources) + len(gen_src) + len(pch_sources) > 0:
inc_src = ET.SubElement(root, 'ItemGroup')
@@ -1155,7 +1197,7 @@ class Vs2010Backend(backends.Backend):
relpath = os.path.join(down, s.rel_to_builddir(self.build_to_src))
inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1163,26 +1205,24 @@ class Vs2010Backend(backends.Backend):
for s in gen_src:
inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(inc_cl, proj_to_src_dir, pch_sources, s)
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
for lang in pch_sources:
- header, impl, suffix = pch_sources[lang]
+ impl = pch_sources[lang][1]
if impl:
- relpath = os.path.join(proj_to_src_dir, impl)
- inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
- pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
- pch.text = 'Create'
- pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
- pch_out.text = '$(IntDir)$(TargetName)-%s.pch' % suffix
- pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
- # MSBuild searches for the header relative from the implementation, so we have to use
- # just the file name instead of the relative path to the file.
- pch_file.text = os.path.basename(header)
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=impl)
+ self.create_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
- self.add_include_dirs(lang, inc_cl, file_inc_dirs)
+ pch_header_dir = pch_sources[lang][3]
+ if pch_header_dir:
+ inc_dirs = copy.deepcopy(file_inc_dirs)
+ inc_dirs[lang] = [pch_header_dir] + inc_dirs[lang]
+ else:
+ inc_dirs = file_inc_dirs
+ self.add_include_dirs(lang, inc_cl, inc_dirs)
if self.has_objects(objects, additional_objects, gen_objs):
inc_objs = ET.SubElement(root, 'ItemGroup')
@@ -1193,10 +1233,9 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(inc_objs, 'Object', Include=s)
self.add_generated_objects(inc_objs, gen_objs)
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
- # Reference the regen target.
- regen_vcxproj = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
- self.add_project_reference(root, regen_vcxproj, self.environment.coredata.regen_guid)
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
+ self.add_target_deps(root, target)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
def gen_regenproj(self, project_name, ofname):
@@ -1221,14 +1260,14 @@ class Vs2010Backend(backends.Backend):
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType').text = "Utility"
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -1248,32 +1287,14 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
regen_command = self.environment.get_build_command() + ['--internal', 'regencheck']
- private_dir = self.environment.get_scratch_dir()
- vcvars_command = self.get_vcvars_command()
- cmd_templ = '''setlocal
-call %s > NUL
-"%s" "%s"
-if %%errorlevel%% neq 0 goto :cmEnd
-:cmEnd
-endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
-:cmErrorLevel
-exit /b %%1
-:cmDone
-if %%errorlevel%% neq 0 goto :VCEnd'''
- igroup = ET.SubElement(root, 'ItemGroup')
- rulefile = os.path.join(self.environment.get_scratch_dir(), 'regen.rule')
- if not os.path.exists(rulefile):
- with open(rulefile, 'w', encoding='utf-8') as f:
- f.write("# Meson regen file.")
- custombuild = ET.SubElement(igroup, 'CustomBuild', Include=rulefile)
- message = ET.SubElement(custombuild, 'Message')
- message.text = 'Checking whether solution needs to be regenerated.'
- ET.SubElement(custombuild, 'Command').text = cmd_templ % \
- (vcvars_command, '" "'.join(regen_command), private_dir)
- ET.SubElement(custombuild, 'Outputs').text = Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())
- deps = self.get_regen_filelist()
- ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ cmd_templ = '''call %s > NUL
+"%s" "%s"'''
+ regen_command = cmd_templ % \
+ (self.get_vcvars_command(), '" "'.join(regen_command), self.environment.get_scratch_dir())
+ self.add_custom_build(root, 'regen', regen_command, deps=self.get_regen_filelist(),
+ outputs=[Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())],
+ msg='Checking whether solution needs to be regenerated.')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
@@ -1300,14 +1321,14 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType')
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -1326,27 +1347,16 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
- postbuild = ET.SubElement(action, 'PostBuildEvent')
- ET.SubElement(postbuild, 'Message')
# FIXME: No benchmarks?
test_command = self.environment.get_build_command() + ['test', '--no-rebuild']
if not self.environment.coredata.get_builtin_option('stdsplit'):
test_command += ['--no-stdsplit']
if self.environment.coredata.get_builtin_option('errorlogs'):
test_command += ['--print-errorlogs']
- cmd_templ = '''setlocal
-"%s"
-if %%errorlevel%% neq 0 goto :cmEnd
-:cmEnd
-endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
-:cmErrorLevel
-exit /b %%1
-:cmDone
-if %%errorlevel%% neq 0 goto :VCEnd'''
self.serialize_tests()
- ET.SubElement(postbuild, 'Command').text =\
- cmd_templ % ('" "'.join(test_command))
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_custom_build(root, 'run_tests', '"%s"' % ('" "'.join(test_command)))
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
def gen_installproj(self, target_name, ofname):
@@ -1373,14 +1383,14 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
pname.text = project_name
if self.windows_target_platform_version:
ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
ET.SubElement(type_config, 'ConfigurationType')
ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
ET.SubElement(type_config, 'UseOfMfc').text = 'false'
if self.platform_toolset:
ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.props')
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
direlem = ET.SubElement(root, 'PropertyGroup')
fver = ET.SubElement(direlem, '_ProjectFileVersion')
fver.text = self.project_file_version
@@ -1399,12 +1409,24 @@ if %%errorlevel%% neq 0 goto :VCEnd'''
ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
- postbuild = ET.SubElement(action, 'PostBuildEvent')
- ET.SubElement(postbuild, 'Message')
- # FIXME: No benchmarks?
- test_command = self.environment.get_build_command() + ['install', '--no-rebuild']
+ install_command = self.environment.get_build_command() + ['install', '--no-rebuild']
+ self.add_custom_build(root, 'run_install', '"%s"' % ('" "'.join(install_command)))
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ self.add_regen_dependency(root)
+ self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+ def add_custom_build(self, node, rulename, command, deps=None, outputs=None, msg=None):
+ igroup = ET.SubElement(node, 'ItemGroup')
+ rulefile = os.path.join(self.environment.get_scratch_dir(), rulename + '.rule')
+ if not os.path.exists(rulefile):
+ with open(rulefile, 'w', encoding='utf-8') as f:
+ f.write("# Meson regen file.")
+ custombuild = ET.SubElement(igroup, 'CustomBuild', Include=rulefile)
+ if msg:
+ message = ET.SubElement(custombuild, 'Message')
+ message.text = msg
cmd_templ = '''setlocal
-"%s"
+%s
if %%errorlevel%% neq 0 goto :cmEnd
:cmEnd
endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
@@ -1412,11 +1434,27 @@ endlocal & call :cmErrorLevel %%errorlevel%% & goto :cmDone
exit /b %%1
:cmDone
if %%errorlevel%% neq 0 goto :VCEnd'''
- ET.SubElement(postbuild, 'Command').text =\
- cmd_templ % ('" "'.join(test_command))
- ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets')
- self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+ ET.SubElement(custombuild, 'Command').text = cmd_templ % command
+ if not outputs:
+ # Use a nonexistent file to always consider the target out-of-date.
+ outputs = [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
+ 'outofdate.file'))]
+ ET.SubElement(custombuild, 'Outputs').text = ';'.join(outputs)
+ if deps:
+ ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
+
@staticmethod
def nonexistent_file(prefix):
    """Return a path starting with *prefix* that does not currently exist.

    Used as a fake output for CustomBuild rules that must always be
    considered out-of-date: pointing Outputs at a never-existing file
    forces MSBuild to re-run the command on every build.
    """
    i = 0
    file = prefix
    while os.path.exists(file):
        # Keep appending an increasing numeric suffix until a free name
        # is found.  NOTE: the original code never incremented ``i``,
        # which made this loop spin forever once both ``prefix`` and
        # ``prefix + '0'`` existed on disk.
        file = '%s%d' % (prefix, i)
        i += 1
    return file
def generate_debug_information(self, link):
# valid values for vs2015 is 'false', 'true', 'DebugFastLink'
ET.SubElement(link, 'GenerateDebugInformation').text = 'true'
+
def add_regen_dependency(self, root):
    """Add a project reference to REGEN.vcxproj so that the meson
    regeneration check runs before this project is built."""
    regen_project = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
    self.add_project_reference(root, regen_project, self.environment.coredata.regen_guid)
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index a550d91..990b824 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -733,6 +733,8 @@ class XCodeBackend(backends.Backend):
else:
product_name = target.get_basename()
ldargs += target.link_args
+ for dep in target.get_external_deps():
+ ldargs += dep.get_link_args()
ldstr = ' '.join(ldargs)
valid = self.buildconfmap[target_name][buildtype]
langargs = {}
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 5d0fefa..20f0cdb 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -36,6 +36,7 @@ pch_kwargs = set(['c_pch', 'cpp_pch'])
lang_arg_kwargs = set([
'c_args',
'cpp_args',
+ 'cuda_args',
'd_args',
'd_import_dirs',
'd_unittest',
@@ -111,8 +112,9 @@ class Build:
self.environment = environment
self.projects = {}
self.targets = OrderedDict()
- self.compilers = OrderedDict()
- self.cross_compilers = OrderedDict()
+ # Coredata holds the state. This is just here for convenience.
+ self.compilers = environment.coredata.compilers
+ self.cross_compilers = environment.coredata.cross_compilers
self.global_args = {}
self.projects_args = {}
self.global_link_args = {}
@@ -145,6 +147,10 @@ class Build:
def copy(self):
other = Build(self.environment)
for k, v in self.__dict__.items():
+ if k in ['compilers', 'cross_compilers']:
+ # These alias coredata's fields of the same name, and must not
+ # become copies.
+ continue
if isinstance(v, (list, dict, set, OrderedDict)):
other.__dict__[k] = v.copy()
else:
@@ -155,19 +161,13 @@ class Build:
for k, v in other.__dict__.items():
self.__dict__[k] = v
- def add_compiler(self, compiler):
def ensure_static_linker(self, compiler):
    """Detect and cache the static linker the first time a compiler that
    needs one shows up; later calls are no-ops."""
    if self.static_linker is not None:
        return
    if compiler.needs_static_linker():
        self.static_linker = self.environment.detect_static_linker(compiler)
- lang = compiler.get_language()
- if lang not in self.compilers:
- self.compilers[lang] = compiler
- def add_cross_compiler(self, compiler):
- if not self.cross_compilers:
def ensure_static_cross_linker(self, compiler):
    """Detect and cache the static linker for the cross (host) toolchain
    the first time a cross compiler that needs one shows up."""
    if self.static_cross_linker is not None:
        return
    if compiler.needs_static_linker():
        self.static_cross_linker = self.environment.detect_static_linker(compiler)
- lang = compiler.get_language()
- if lang not in self.cross_compilers:
- self.cross_compilers[lang] = compiler
def get_project(self):
return self.projects['']
@@ -345,6 +345,8 @@ a hard error in the future.''' % name)
self.install = False
self.build_always_stale = False
self.option_overrides = {}
+ if not hasattr(self, 'typename'):
+ raise RuntimeError('Target type is not set for target class "{}". This is a bug'.format(type(self).__name__))
def get_install_dir(self, environment):
# Find the installation directory.
@@ -366,6 +368,9 @@ a hard error in the future.''' % name)
def get_subdir(self):
return self.subdir
def get_typename(self):
    """Return this target's type name string, e.g. 'executable' or 'custom'."""
    return self.typename
+
@staticmethod
def _get_id_hash(target_id):
# We don't really need cryptographic security here.
@@ -404,6 +409,11 @@ a hard error in the future.''' % name)
self.build_by_default = kwargs['build_by_default']
if not isinstance(self.build_by_default, bool):
raise InvalidArguments('build_by_default must be a boolean value.')
+ elif kwargs.get('install', False):
+ # For backward compatibility, if build_by_default is not explicitly
+ # set, use the value of 'install' if it's enabled.
+ self.build_by_default = True
+
self.option_overrides = self.parse_overrides(kwargs)
def parse_overrides(self, kwargs):
@@ -712,9 +722,12 @@ class BuildTarget(Target):
def extract_objects(self, srclist):
obj_src = []
for src in srclist:
- if not isinstance(src, str):
- raise MesonException('Object extraction arguments must be strings.')
- src = File(False, self.subdir, src)
+ if isinstance(src, str):
+ src = File(False, self.subdir, src)
+ elif isinstance(src, File):
+ FeatureNew('File argument for extract_objects', '0.50.0').use(self.subproject)
+ else:
+ raise MesonException('Object extraction arguments must be strings or Files.')
# FIXME: It could be a generated source
if src not in self.sources:
raise MesonException('Tried to extract unknown source %s.' % src)
@@ -788,13 +801,13 @@ just like those detected with the dependency() function.''')
for linktarget in lwhole:
self.link_whole(linktarget)
- c_pchlist, cpp_pchlist, clist, cpplist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
- = extract_as_list(kwargs, 'c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cs_args', 'vala_args', 'objc_args',
+ c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
+ = extract_as_list(kwargs, 'c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args',
'objcpp_args', 'fortran_args', 'rust_args')
self.add_pch('c', c_pchlist)
self.add_pch('cpp', cpp_pchlist)
- compiler_args = {'c': clist, 'cpp': cpplist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist,
+ compiler_args = {'c': clist, 'cpp': cpplist, 'cuda': cudalist, 'cs': cslist, 'vala': valalist, 'objc': objclist, 'objcpp': objcpplist,
'fortran': fortranlist, 'rust': rustlist
}
for key, value in compiler_args.items():
@@ -1017,8 +1030,9 @@ This will become a hard error in a future Meson release.''')
# Deps of deps.
self.add_deps(dep.ext_deps)
elif isinstance(dep, dependencies.Dependency):
- self.external_deps.append(dep)
- self.process_sourcelist(dep.get_sources())
+ if dep not in self.external_deps:
+ self.external_deps.append(dep)
+ self.process_sourcelist(dep.get_sources())
elif isinstance(dep, BuildTarget):
raise InvalidArguments('''Tried to use a build target as a dependency.
You probably should put it in link_with instead.''')
@@ -1080,6 +1094,11 @@ You probably should put it in link_with instead.''')
pchlist = [pchlist[1], pchlist[0]]
else:
raise InvalidArguments('PCH argument %s is of unknown type.' % pchlist[0])
+
+ if (os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1])):
+ raise InvalidArguments('PCH files must be stored in the same folder.')
+
+ mlog.warning('PCH source files are deprecated, only a single header file should be used.')
elif len(pchlist) > 2:
raise InvalidArguments('PCH definition may have a maximum of 2 files.')
for f in pchlist:
@@ -1361,6 +1380,7 @@ class Executable(BuildTarget):
known_kwargs = known_exe_kwargs
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
+ self.typename = 'executable'
if 'pie' not in kwargs and 'b_pie' in environment.coredata.base_options:
kwargs['pie'] = environment.coredata.base_options['b_pie'].value
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
@@ -1450,6 +1470,7 @@ class StaticLibrary(BuildTarget):
known_kwargs = known_stlib_kwargs
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
+ self.typename = 'static library'
if 'pic' not in kwargs and 'b_staticpic' in environment.coredata.base_options:
kwargs['pic'] = environment.coredata.base_options['b_staticpic'].value
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
@@ -1509,6 +1530,7 @@ class SharedLibrary(BuildTarget):
known_kwargs = known_shlib_kwargs
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
+ self.typename = 'shared library'
self.soversion = None
self.ltversion = None
# Max length 2, first element is compatibility_version, second is current_version
@@ -1817,6 +1839,7 @@ class SharedModule(SharedLibrary):
if 'soversion' in kwargs:
raise MesonException('Shared modules must not specify the soversion kwarg.')
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
+ self.typename = 'shared module'
def get_default_install_dir(self, environment):
return environment.get_shared_module_dir()
@@ -1842,6 +1865,7 @@ class CustomTarget(Target):
])
def __init__(self, name, subdir, subproject, kwargs, absolute_paths=False):
+ self.typename = 'custom'
super().__init__(name, subdir, subproject, False)
self.dependencies = []
self.extra_depends = []
@@ -1869,9 +1893,6 @@ class CustomTarget(Target):
repr_str = "<{0} {1}: {2}>"
return repr_str.format(self.__class__.__name__, self.get_id(), self.command)
- def get_id(self):
- return self.name + self.type_suffix()
-
def get_target_dependencies(self):
deps = self.dependencies[:]
deps += self.extra_depends
@@ -2083,6 +2104,7 @@ class CustomTarget(Target):
class RunTarget(Target):
def __init__(self, name, command, args, dependencies, subdir, subproject):
+ self.typename = 'run'
super().__init__(name, subdir, subproject, False)
self.command = command
self.args = args
@@ -2110,6 +2132,14 @@ class RunTarget(Target):
def get_filename(self):
return self.name
def get_outputs(self):
    """Return the RunTarget's output names as a list.

    ``self.name`` may be a single string or already a list of strings;
    anything else indicates internal corruption and raises.
    """
    name = self.name
    if isinstance(name, list):
        return name
    if isinstance(name, str):
        return [name]
    raise RuntimeError('RunTarget: self.name is neither a list nor a string. This is a bug')
+
def type_suffix(self):
return "@run"
@@ -2117,6 +2147,7 @@ class Jar(BuildTarget):
known_kwargs = known_jar_kwargs
def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs):
+ self.typename = 'jar'
super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs)
for s in self.sources:
if not s.endswith('.java'):
@@ -2160,6 +2191,7 @@ class CustomTargetIndex:
"""
def __init__(self, target, output):
+ self.typename = 'custom'
self.target = target
self.output = output
@@ -2276,8 +2308,13 @@ def load(build_dir):
obj = pickle.load(f)
except FileNotFoundError:
raise MesonException(nonexisting_fail_msg)
- except pickle.UnpicklingError:
+ except (pickle.UnpicklingError, EOFError):
raise MesonException(load_fail_msg)
+ except AttributeError:
+ raise MesonException(
+ "Build data file {!r} references functions or classes that don't "
+ "exist. This probably means that it was generated with an old "
+ "version of meson. Try running meson {} --wipe".format(filename, build_dir))
if not isinstance(obj, Build):
raise MesonException(load_fail_msg)
return obj
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index 31b7b89..5de0e59 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -15,6 +15,7 @@
# Public symbols for compilers sub-package when using 'from . import compilers'
__all__ = [
'CompilerType',
+ 'Compiler',
'all_languages',
'base_options',
@@ -61,6 +62,7 @@ __all__ = [
'GnuDCompiler',
'GnuFortranCompiler',
'ElbrusFortranCompiler',
+ 'FlangFortranCompiler',
'GnuObjCCompiler',
'GnuObjCPPCompiler',
'IntelCompiler',
@@ -70,12 +72,15 @@ __all__ = [
'JavaCompiler',
'LLVMDCompiler',
'MonoCompiler',
+ 'CudaCompiler',
'VisualStudioCsCompiler',
'NAGFortranCompiler',
'ObjCCompiler',
'ObjCPPCompiler',
'Open64FortranCompiler',
'PathScaleFortranCompiler',
+ 'PGICCompiler',
+ 'PGICPPCompiler',
'PGIFortranCompiler',
'RustCompiler',
'CcrxCCompiler',
@@ -91,6 +96,7 @@ __all__ = [
# Bring symbols from each module into compilers sub-package namespace
from .compilers import (
CompilerType,
+ Compiler,
all_languages,
base_options,
clib_langs,
@@ -124,6 +130,7 @@ from .c import (
GnuCCompiler,
ElbrusCCompiler,
IntelCCompiler,
+ PGICCompiler,
CcrxCCompiler,
VisualStudioCCompiler,
)
@@ -136,6 +143,7 @@ from .cpp import (
GnuCPPCompiler,
ElbrusCPPCompiler,
IntelCPPCompiler,
+ PGICPPCompiler,
CcrxCPPCompiler,
VisualStudioCPPCompiler,
)
@@ -146,11 +154,13 @@ from .d import (
GnuDCompiler,
LLVMDCompiler,
)
+from .cuda import CudaCompiler
from .fortran import (
FortranCompiler,
G95FortranCompiler,
GnuFortranCompiler,
ElbrusFortranCompiler,
+ FlangFortranCompiler,
IntelFortranCompiler,
NAGFortranCompiler,
Open64FortranCompiler,
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index a1694d1..6ab14d2 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -19,13 +19,15 @@ import subprocess
import functools
import itertools
from pathlib import Path
+from typing import List
from .. import mlog
from .. import coredata
from . import compilers
from ..mesonlib import (
- EnvironmentException, MesonException, version_compare, Popen_safe, listify,
- for_windows, for_darwin, for_cygwin, for_haiku, for_openbsd,
+ EnvironmentException, MachineChoice, MesonException, Popen_safe, listify,
+ version_compare, for_windows, for_darwin, for_cygwin, for_haiku,
+ for_openbsd, darwin_get_object_archs
)
from .c_function_attributes import C_FUNC_ATTRIBUTES
@@ -33,6 +35,7 @@ from .compilers import (
get_largefile_args,
gnu_winlibs,
msvc_winlibs,
+ unixy_compiler_internal_libs,
vs32_instruction_set_args,
vs64_instruction_set_args,
ArmCompiler,
@@ -40,23 +43,24 @@ from .compilers import (
ClangCompiler,
Compiler,
CompilerArgs,
+ CompilerType,
CrossNoRunException,
GnuCompiler,
ElbrusCompiler,
IntelCompiler,
+ PGICompiler,
RunResult,
CcrxCompiler,
)
-gnu_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt')
-
class CCompiler(Compiler):
# TODO: Replace this manual cache with functools.lru_cache
library_dirs_cache = {}
program_dirs_cache = {}
find_library_cache = {}
- internal_libs = gnu_compiler_internal_libs
+ find_framework_cache = {}
+ internal_libs = unixy_compiler_internal_libs
@staticmethod
def attribute_check_func(name):
@@ -316,10 +320,7 @@ class CCompiler(Compiler):
# on OSX the compiler binary is the same but you need
# a ton of compiler flags to differentiate between
# arm and x86_64. So just compile.
- extra_flags += self.get_cross_extra_flags(environment, link=False)
extra_flags += self.get_compile_only_args()
- else:
- extra_flags += self.get_cross_extra_flags(environment, link=True)
# Is a valid executable output for all toolchains and platforms
binname += '.exe'
# Write binary check source
@@ -419,24 +420,25 @@ class CCompiler(Compiler):
# Select a CRT if needed since we're linking
if mode == 'link':
args += self.get_linker_debug_crt_args()
- # Read c_args/cpp_args/etc from the cross-info file (if needed)
- args += self.get_cross_extra_flags(env, link=(mode == 'link'))
- if not self.is_cross:
- if mode == 'preprocess':
- # Add CPPFLAGS from the env.
- args += env.coredata.get_external_preprocess_args(self.language)
- elif mode == 'compile':
- # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
- sys_args = env.coredata.get_external_args(self.language)
- # Apparently it is a thing to inject linker flags both
- # via CFLAGS _and_ LDFLAGS, even though the former are
- # also used during linking. These flags can break
- # argument checks. Thanks, Autotools.
- cleaned_sys_args = self.remove_linkerlike_args(sys_args)
- args += cleaned_sys_args
- elif mode == 'link':
- # Add LDFLAGS from the env
- args += env.coredata.get_external_link_args(self.language)
+ if env.is_cross_build() and not self.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+ if mode == 'preprocess':
+ # Add CPPFLAGS from the env.
+ args += env.coredata.get_external_preprocess_args(for_machine, self.language)
+ elif mode == 'compile':
+ # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
+ sys_args = env.coredata.get_external_args(for_machine, self.language)
+ # Apparently it is a thing to inject linker flags both
+ # via CFLAGS _and_ LDFLAGS, even though the former are
+ # also used during linking. These flags can break
+ # argument checks. Thanks, Autotools.
+ cleaned_sys_args = self.remove_linkerlike_args(sys_args)
+ args += cleaned_sys_args
+ elif mode == 'link':
+ # Add LDFLAGS from the env
+ args += env.coredata.get_external_link_args(for_machine, self.language)
args += self.get_compiler_check_args()
# extra_args must override all other arguments, so we add them last
args += extra_args
@@ -454,7 +456,7 @@ class CCompiler(Compiler):
return self.compiles(code, env, extra_args=extra_args,
dependencies=dependencies, mode='link')
- def run(self, code, env, *, extra_args=None, dependencies=None):
+ def run(self, code: str, env, *, extra_args=None, dependencies=None):
if self.is_cross and self.exe_wrapper is None:
raise CrossNoRunException('Can not run test applications in this cross environment.')
with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p:
@@ -757,7 +759,7 @@ class CCompiler(Compiler):
varname = 'has function ' + funcname
varname = varname.replace(' ', '_')
if self.is_cross:
- val = env.cross_info.config['properties'].get(varname, None)
+ val = env.properties.host.get(varname, None)
if val is not None:
if isinstance(val, bool):
return val
@@ -866,8 +868,7 @@ class CCompiler(Compiler):
}
#endif
'''
- args = self.get_cross_extra_flags(env, link=False)
- args += self.get_compiler_check_args()
+ args = self.get_compiler_check_args()
n = 'symbols_have_underscore_prefix'
with self.compile(code, args, 'compile', want_output=True) as p:
if p.returncode != 0:
@@ -900,7 +901,8 @@ class CCompiler(Compiler):
# is expensive. It's wrong in many edge cases, but it will match
# correctly-named libraries and hopefully no one on OpenBSD names
# their files libfoo.so.9a.7b.1.0
- patterns.append('lib{}.so.[0-9]*.[0-9]*')
+ for p in prefixes:
+ patterns.append(p + '{}.so.[0-9]*.[0-9]*')
return patterns
def get_library_naming(self, env, libtype, strict=False):
@@ -909,8 +911,10 @@ class CCompiler(Compiler):
priority
'''
stlibext = ['a']
- # We've always allowed libname to be both `foo` and `libfoo`,
- # and now people depend on it
+ # We've always allowed libname to be both `foo` and `libfoo`, and now
+ # people depend on it. Also, some people use prebuilt `foo.so` instead
+ # of `libfoo.so` for unknown reasons, and may also want to create
+ # `foo.so` by setting name_prefix to ''
if strict and not isinstance(self, VisualStudioCCompiler): # lib prefix is not usually used with msvc
prefixes = ['lib']
else:
@@ -933,18 +937,17 @@ class CCompiler(Compiler):
else:
# Linux/BSDs
shlibext = ['so']
- patterns = []
# Search priority
- if libtype in ('default', 'shared-static'):
- patterns += self._get_patterns(env, prefixes, shlibext, True)
- patterns += self._get_patterns(env, prefixes, stlibext, False)
+ if libtype == 'shared-static':
+ patterns = self._get_patterns(env, prefixes, shlibext, True)
+ patterns.extend([x for x in self._get_patterns(env, prefixes, stlibext, False) if x not in patterns])
elif libtype == 'static-shared':
- patterns += self._get_patterns(env, prefixes, stlibext, False)
- patterns += self._get_patterns(env, prefixes, shlibext, True)
+ patterns = self._get_patterns(env, prefixes, stlibext, False)
+ patterns.extend([x for x in self._get_patterns(env, prefixes, shlibext, True) if x not in patterns])
elif libtype == 'shared':
- patterns += self._get_patterns(env, prefixes, shlibext, True)
+ patterns = self._get_patterns(env, prefixes, shlibext, True)
elif libtype == 'static':
- patterns += self._get_patterns(env, prefixes, stlibext, False)
+ patterns = self._get_patterns(env, prefixes, stlibext, False)
else:
raise AssertionError('BUG: unknown libtype {!r}'.format(libtype))
return tuple(patterns)
@@ -972,14 +975,33 @@ class CCompiler(Compiler):
if '*' in pattern:
# NOTE: globbing matches directories and broken symlinks
# so we have to do an isfile test on it later
- return cls._sort_shlibs_openbsd(glob.glob(str(f)))
- return [f.as_posix()]
+ return [Path(x) for x in cls._sort_shlibs_openbsd(glob.glob(str(f)))]
+ return [f]
@staticmethod
- def _get_file_from_list(files):
+ def _get_file_from_list(env, files: List[str]) -> Path:
+ '''
+ We just check whether the library exists. We can't do a link check
+ because the library might have unresolved symbols that require other
+ libraries. On macOS we check if the library matches our target
+ architecture.
+ '''
+ # If not building on macOS for Darwin, do a simple file check
+ files = [Path(f) for f in files]
+ if not env.machines.host.is_darwin() or not env.machines.build.is_darwin():
+ for f in files:
+ if f.is_file():
+ return f
+ # Run `lipo` and check if the library supports the arch we want
for f in files:
- if os.path.isfile(f):
+ if not f.is_file():
+ continue
+ archs = darwin_get_object_archs(f)
+ if archs and env.machines.host.cpu_family in archs:
return f
+ else:
+ mlog.debug('Rejected {}, supports {} but need {}'
+ .format(f, archs, env.machines.host.cpu_family))
return None
@functools.lru_cache()
@@ -993,7 +1015,10 @@ class CCompiler(Compiler):
# First try if we can just add the library as -l.
# Gcc + co seem to prefer builtin lib dirs to -L dirs.
# Only try to find std libs if no extra dirs specified.
- if not extra_dirs or libname in self.internal_libs:
+ # The built-in search procedure will always favour .so and then always
+ # search for .a. This is only allowed if libtype is 'shared-static'
+ if ((not extra_dirs and libtype == 'shared-static') or
+ libname in self.internal_libs):
args = ['-l' + libname]
largs = self.linker_to_compiler_args(self.get_allow_undefined_link_args())
if self.links(code, env, extra_args=(args + largs)):
@@ -1020,13 +1045,10 @@ class CCompiler(Compiler):
trial = self._get_trials_from_pattern(p, d, libname)
if not trial:
continue
- # We just check whether the library exists. We can't do a link
- # check because the library might have unresolved symbols that
- # require other libraries.
- trial = self._get_file_from_list(trial)
+ trial = self._get_file_from_list(env, trial)
if not trial:
continue
- return [trial]
+ return [trial.as_posix()]
return None
def find_library_impl(self, libname, env, extra_dirs, code, libtype):
@@ -1045,10 +1067,74 @@ class CCompiler(Compiler):
return None
return value[:]
- def find_library(self, libname, env, extra_dirs, libtype='default'):
+ def find_library(self, libname, env, extra_dirs, libtype='shared-static'):
code = 'int main(int argc, char **argv) { return 0; }'
return self.find_library_impl(libname, env, extra_dirs, code, libtype)
+ def find_framework_paths(self, env):
+ '''
+ These are usually /Library/Frameworks and /System/Library/Frameworks,
+ unless you select a particular macOS SDK with the -isysroot flag.
+ You can also add to this by setting -F in CFLAGS.
+ '''
+ if self.id != 'clang':
+ raise MesonException('Cannot find framework path with non-clang compiler')
+ # Construct the compiler command-line
+ commands = self.get_exelist() + ['-v', '-E', '-']
+ commands += self.get_always_args()
+ # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env
+ if env.is_cross_build() and not self.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+ commands += env.coredata.get_external_args(for_machine, self.language)
+ mlog.debug('Finding framework path by running: ', ' '.join(commands), '\n')
+ os_env = os.environ.copy()
+ os_env['LC_ALL'] = 'C'
+ _, _, stde = Popen_safe(commands, env=os_env, stdin=subprocess.PIPE)
+ paths = []
+ for line in stde.split('\n'):
+ if '(framework directory)' not in line:
+ continue
+ # line is of the form:
+ # ` /path/to/framework (framework directory)`
+ paths.append(line[:-21].strip())
+ return paths
+
+ def find_framework_real(self, name, env, extra_dirs, allow_system):
+ code = 'int main(int argc, char **argv) { return 0; }'
+ link_args = []
+ for d in extra_dirs:
+ link_args += ['-F' + d]
+ # We can pass -Z to disable searching in the system frameworks, but
+ # then we must also pass -L/usr/lib to pick up libSystem.dylib
+ extra_args = [] if allow_system else ['-Z', '-L/usr/lib']
+ link_args += ['-framework', name]
+ if self.links(code, env, extra_args=(extra_args + link_args)):
+ return link_args
+
+ def find_framework_impl(self, name, env, extra_dirs, allow_system):
+ if isinstance(extra_dirs, str):
+ extra_dirs = [extra_dirs]
+ key = (tuple(self.exelist), name, tuple(extra_dirs), allow_system)
+ if key in self.find_framework_cache:
+ value = self.find_framework_cache[key]
+ else:
+ value = self.find_framework_real(name, env, extra_dirs, allow_system)
+ self.find_framework_cache[key] = value
+ if value is None:
+ return None
+ return value[:]
+
+ def find_framework(self, name, env, extra_dirs, allow_system=True):
+ '''
+ Finds the framework with the specified name, and returns link args for
+ the same or returns None when the framework is not found.
+ '''
+ if self.id != 'clang':
+ raise MesonException('Cannot find frameworks with non-clang compiler')
+ return self.find_framework_impl(name, env, extra_dirs, allow_system)
+
def thread_flags(self, env):
if for_haiku(self.is_cross, env) or for_darwin(self.is_cross, env):
return []
@@ -1071,7 +1157,7 @@ class CCompiler(Compiler):
# flags, so when we are testing a flag like "-Wno-forgotten-towel", also
# check the equivalent enable flag too "-Wforgotten-towel"
if arg.startswith('-Wno-'):
- args.append('-W' + arg[5:])
+ args.append('-W' + arg[5:])
if arg.startswith('-Wl,'):
mlog.warning('{} looks like a linker argument, '
'but has_argument and other similar methods only '
@@ -1128,7 +1214,8 @@ class ClangCCompiler(ClangCompiler, CCompiler):
CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
ClangCompiler.__init__(self, compiler_type)
default_warn_args = ['-Wall', '-Winvalid-pch']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -1162,7 +1249,8 @@ class ArmclangCCompiler(ArmclangCompiler, CCompiler):
CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
ArmclangCompiler.__init__(self, compiler_type)
default_warn_args = ['-Wall', '-Winvalid-pch']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -1190,7 +1278,8 @@ class GnuCCompiler(GnuCompiler, CCompiler):
CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
GnuCompiler.__init__(self, compiler_type, defines)
default_warn_args = ['-Wall', '-Winvalid-pch']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -1222,6 +1311,12 @@ class GnuCCompiler(GnuCompiler, CCompiler):
return ['-fpch-preprocess', '-include', os.path.basename(header)]
+class PGICCompiler(PGICompiler, CCompiler):
+ def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs):
+ CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
+ PGICompiler.__init__(self, CompilerType.PGI_STANDARD)
+
+
class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler):
def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs):
GnuCCompiler.__init__(self, exelist, version, compiler_type, is_cross, exe_wrapper, defines, **kwargs)
@@ -1254,7 +1349,8 @@ class IntelCCompiler(IntelCompiler, CCompiler):
IntelCompiler.__init__(self, compiler_type)
self.lang_header = 'c-header'
default_warn_args = ['-Wall', '-w3', '-diag-disable:remark']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra']}
@@ -1280,7 +1376,7 @@ class IntelCCompiler(IntelCompiler, CCompiler):
class VisualStudioCCompiler(CCompiler):
std_warn_args = ['/W3']
std_opt_args = ['/O2']
- ignore_libs = gnu_compiler_internal_libs
+ ignore_libs = unixy_compiler_internal_libs
internal_libs = ()
crt_args = {'none': [],
@@ -1296,7 +1392,8 @@ class VisualStudioCCompiler(CCompiler):
# /showIncludes is needed for build dependency tracking in Ninja
# See: https://ninja-build.org/manual.html#_deps
self.always_args = ['/nologo', '/showIncludes']
- self.warn_args = {'1': ['/W2'],
+ self.warn_args = {'0': ['/W1'],
+ '1': ['/W2'],
'2': ['/W3'],
'3': ['/W4']}
self.base_options = ['b_pch', 'b_ndebug', 'b_vscrt'] # FIXME add lto, pgo and the like
@@ -1482,6 +1579,15 @@ class VisualStudioCCompiler(CCompiler):
# msvc does not have a concept of system header dirs.
return ['-I' + path]
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '/I':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+ elif i[:9] == '/LIBPATH:':
+ parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+
+ return parameter_list
+
# Visual Studio is special. It ignores some arguments it does not
# understand and you can't tell it to error out on those.
# http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t
@@ -1657,6 +1763,9 @@ class CcrxCCompiler(CcrxCompiler, CCompiler):
def get_linker_output_args(self, outputname):
return ['-output=%s' % outputname]
+ def get_werror_args(self):
+ return ['-change_message=error']
+
def get_include_args(self, path, is_system):
if path == '':
path = '.'
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 31047b1..ceefefe 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -14,14 +14,18 @@
import abc, contextlib, enum, os.path, re, tempfile, shlex
import subprocess
+from typing import List, Tuple
from ..linkers import StaticLinker
from .. import coredata
from .. import mlog
from .. import mesonlib
from ..mesonlib import (
- EnvironmentException, MesonException, OrderedSet, version_compare,
- Popen_safe
+ EnvironmentException, MesonException, OrderedSet,
+ version_compare, Popen_safe
+)
+from ..envconfig import (
+ Properties,
)
"""This file contains the data files of all compilers Meson knows
@@ -36,6 +40,7 @@ lib_suffixes = ('a', 'lib', 'dll', 'dylib', 'so')
lang_suffixes = {
'c': ('c',),
'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'),
+ 'cuda': ('cu',),
# f90, f95, f03, f08 are for free-form fortran ('f90' recommended)
# f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended)
'fortran': ('f90', 'f95', 'f03', 'f08', 'f', 'for', 'ftn', 'fpp'),
@@ -57,7 +62,7 @@ clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'fortran',)
# List of languages that can be linked with C code directly by the linker
# used in build.py:process_compilers() and build.py:get_dynamic_linker()
# XXX: Add Rust to this?
-clink_langs = ('d',) + clib_langs
+clink_langs = ('d', 'cuda') + clib_langs
clink_suffixes = ()
for _l in clink_langs + ('vala',):
clink_suffixes += lang_suffixes[_l]
@@ -68,6 +73,7 @@ soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
# Environment variables that each lang uses.
cflags_mapping = {'c': 'CFLAGS',
'cpp': 'CXXFLAGS',
+ 'cuda': 'CUFLAGS',
'objc': 'OBJCFLAGS',
'objcpp': 'OBJCXXFLAGS',
'fortran': 'FFLAGS',
@@ -75,6 +81,9 @@ cflags_mapping = {'c': 'CFLAGS',
'vala': 'VALAFLAGS',
'rust': 'RUSTFLAGS'}
+# execinfo is a compiler lib on BSD
+unixy_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt', 'execinfo')
+
# All these are only for C-linkable languages; see `clink_langs` above.
def sort_clink(lang):
@@ -142,6 +151,14 @@ armclang_buildtype_args = {'plain': [],
                            'custom': [],
                            }
+cuda_buildtype_args = {'plain': [],
+                       'debug': [],
+                       'debugoptimized': [],
+                       'release': [],
+                       'minsize': [],
+                       'custom': [],
+                       }
+
arm_buildtype_args = {'plain': [],
'debug': ['-O0', '--debug'],
'debugoptimized': ['-O1', '--debug'],
@@ -166,6 +182,13 @@ msvc_buildtype_args = {'plain': [],
'custom': [],
}
+pgi_buildtype_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ }
apple_buildtype_linker_args = {'plain': [],
'debug': [],
'debugoptimized': [],
@@ -197,6 +220,13 @@ ccrx_buildtype_linker_args = {'plain': [],
'minsize': [],
'custom': [],
}
+pgi_buildtype_linker_args = {'plain': [],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
+ 'custom': [],
+ }
msvc_buildtype_linker_args = {'plain': [],
'debug': [],
@@ -330,6 +360,17 @@ msvc_optimization_args = {'0': [],
's': ['/O1'], # Implies /Os.
}
+cuda_optimization_args = {'0': [],
+ 'g': ['-O0'],
+ '1': ['-O1'],
+ '2': ['-O2'],
+                          '3': ['-O3'],
+                          's': ['-O3']
+ }
+
+cuda_debug_args = {False: [],
+ True: ['-g']}
+
clike_debug_args = {False: [],
True: ['-g']}
@@ -621,6 +662,9 @@ class CompilerArgs(list):
# Only UNIX shared libraries require this. Others have a fixed extension.
dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
+ # In generate_link() we add external libs without de-dup, but we must
+ # *always* de-dup these because they're special arguments to the linker
+ always_dedup_args = tuple('-l' + lib for lib in unixy_compiler_internal_libs)
compiler = None
def _check_args(self, args):
@@ -755,7 +799,7 @@ class CompilerArgs(list):
normal_flags = []
lflags = []
for i in iterable:
- if i.startswith('-l') or i.startswith('-L'):
+ if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
lflags.append(i)
else:
normal_flags.append(i)
@@ -884,6 +928,9 @@ class Compiler:
def compute_int(self, expression, low, high, guess, prefix, env, extra_args, dependencies):
raise EnvironmentException('%s does not support compute_int ' % self.get_id())
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ raise EnvironmentException('%s does not support compute_parameters_with_absolute_paths ' % self.get_id())
+
def has_members(self, typename, membernames, prefix, env, *, extra_args=None, dependencies=None):
raise EnvironmentException('%s does not support has_member(s) ' % self.get_id())
@@ -971,23 +1018,44 @@ class Compiler:
def get_options(self):
opts = {} # build afresh every time
-
- # Take default values from env variables.
- compile_args, link_args = self.get_args_from_envvars()
description = 'Extra arguments passed to the {}'.format(self.get_display_language())
opts.update({
self.language + '_args': coredata.UserArrayOption(
self.language + '_args',
description + ' compiler',
- compile_args, shlex_split=True, user_input=True, allow_dups=True),
+ [], shlex_split=True, user_input=True, allow_dups=True),
self.language + '_link_args': coredata.UserArrayOption(
self.language + '_link_args',
description + ' linker',
- link_args, shlex_split=True, user_input=True, allow_dups=True),
+ [], shlex_split=True, user_input=True, allow_dups=True),
})
return opts
+ def get_and_default_options(self, properties: Properties):
+ """
+ Take default values from env variables and/or config files.
+ """
+ opts = self.get_options()
+
+ if properties.fallback:
+ # Get from env vars.
+ compile_args, link_args = self.get_args_from_envvars()
+ else:
+ compile_args = []
+ link_args = []
+
+ for k, o in opts.items():
+ if k in properties:
+ # Get from configuration files.
+ o.set_value(properties[k])
+ elif k == self.language + '_args':
+ o.set_value(compile_args)
+ elif k == self.language + '_link_args':
+ o.set_value(link_args)
+
+ return opts
+
def get_option_compile_args(self, options):
return []
@@ -1042,18 +1110,6 @@ class Compiler:
'Language {} does not support has_multi_link_arguments.'.format(
self.get_display_language()))
- def get_cross_extra_flags(self, environment, link):
- extra_flags = []
- if self.is_cross and environment:
- if 'properties' in environment.cross_info.config:
- props = environment.cross_info.config['properties']
- lang_args_key = self.language + '_args'
- extra_flags += mesonlib.stringlistify(props.get(lang_args_key, []))
- lang_link_args_key = self.language + '_link_args'
- if link:
- extra_flags += mesonlib.stringlistify(props.get(lang_link_args_key, []))
- return extra_flags
-
def _get_compile_output(self, dirname, mode):
# In pre-processor mode, the output is sent to stdout and discarded
if mode == 'preprocess':
@@ -1309,6 +1365,8 @@ class CompilerType(enum.Enum):
CCRX_WIN = 40
+ PGI_STANDARD = 50
+
@property
def is_standard_compiler(self):
return self.name in ('GCC_STANDARD', 'CLANG_STANDARD', 'ICC_STANDARD')
@@ -1433,7 +1491,9 @@ class GnuLikeCompiler(abc.ABC):
self.compiler_type = compiler_type
self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage',
'b_ndebug', 'b_staticpic', 'b_pie']
- if not self.compiler_type.is_osx_compiler and not self.compiler_type.is_windows_compiler:
+ if (not self.compiler_type.is_osx_compiler and
+ not self.compiler_type.is_windows_compiler and
+ not mesonlib.is_openbsd()):
self.base_options.append('b_lundef')
if not self.compiler_type.is_windows_compiler:
self.base_options.append('b_asneeded')
@@ -1546,27 +1606,34 @@ class GnuLikeCompiler(abc.ABC):
return ['-Wl,--allow-shlib-undefined']
def get_gui_app_args(self, value):
- if self.compiler_type.is_windows_compiler and value:
- return ['-mwindows']
+ if self.compiler_type.is_windows_compiler:
+ return ['-mwindows' if value else '-mconsole']
return []
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
class GnuCompiler(GnuLikeCompiler):
"""
GnuCompiler represents an actual GCC in its many incarnations.
Compilers imitating GCC (Clang/Intel) should use the GnuLikeCompiler ABC.
"""
- def __init__(self, compiler_type, defines):
+ def __init__(self, compiler_type, defines: dict):
super().__init__(compiler_type)
self.id = 'gcc'
self.defines = defines or {}
self.base_options.append('b_colorout')
- def get_colorout_args(self, colortype):
+ def get_colorout_args(self, colortype: str) -> List[str]:
if mesonlib.version_compare(self.version, '>=4.9.0'):
return gnu_color_args[colortype][:]
return []
- def get_warn_args(self, level):
+ def get_warn_args(self, level: str) -> list:
args = super().get_warn_args(level)
if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
# -Wpedantic was added in 4.8.0
@@ -1574,23 +1641,71 @@ class GnuCompiler(GnuLikeCompiler):
             args[args.index('-Wpedantic')] = '-pedantic'
         return args
-    def has_builtin_define(self, define):
+    def has_builtin_define(self, define: str) -> bool:
         return define in self.defines
     def get_builtin_define(self, define):
         if define in self.defines:
             return self.defines[define]
-    def get_optimization_args(self, optimization_level):
+    def get_optimization_args(self, optimization_level: str):
         return gnu_optimization_args[optimization_level]
-    def get_pch_suffix(self):
+    def get_pch_suffix(self) -> str:
         return 'gch'
-    def openmp_flags(self):
+    def openmp_flags(self) -> List[str]:
         return ['-fopenmp']
+class PGICompiler:
+    def __init__(self, compiler_type=None):
+        self.id = 'pgi'
+        self.compiler_type = compiler_type
+
+        default_warn_args = ['-Minform=inform']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args,
+                          '3': default_warn_args}
+
+    def get_module_incdir_args(self) -> Tuple[str]:
+        return ('-module', )
+
+    def get_no_warn_args(self) -> List[str]:
+        return ['-silent']
+
+    def openmp_flags(self) -> List[str]:
+        return ['-mp']
+
+    def get_buildtype_args(self, buildtype: str) -> List[str]:
+        return pgi_buildtype_args[buildtype]
+
+    def get_buildtype_linker_args(self, buildtype: str) -> List[str]:
+        return pgi_buildtype_linker_args[buildtype]
+
+    def get_optimization_args(self, optimization_level: str):
+        return clike_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool):
+        return clike_debug_args[is_debug]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: List[str], build_dir: str):
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+        return parameter_list
+
+ def get_allow_undefined_link_args(self):
+ return []
+
+ def get_dependency_gen_args(self, outtarget, outfile):
+ return []
+
+ def get_always_args(self):
+ return []
+
+
class ElbrusCompiler(GnuCompiler):
# Elbrus compiler is nearly like GCC, but does not support
# PCH, LTO, sanitizers and color output as of version 1.21.x.
@@ -1705,7 +1819,7 @@ class ArmclangCompiler:
EnvironmentException('armlink version string not found')
# Using the regular expression from environment.search_version,
# which is used for searching compiler version
- version_regex = '(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
+ version_regex = r'(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
linker_ver = re.search(version_regex, ver_str)
if linker_ver:
linker_ver = linker_ver.group(0)
@@ -1779,6 +1893,13 @@ class ArmclangCompiler:
"""
return ['--symdefs=' + implibname]
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1, 19.0.0
class IntelCompiler(GnuLikeCompiler):
@@ -1798,7 +1919,7 @@ class IntelCompiler(GnuLikeCompiler):
def get_optimization_args(self, optimization_level):
return clike_optimization_args[optimization_level]
- def get_pch_suffix(self):
+ def get_pch_suffix(self) -> str:
return 'pchi'
def get_pch_use_args(self, pch_dir, header):
@@ -1808,7 +1929,7 @@ class IntelCompiler(GnuLikeCompiler):
def get_pch_name(self, header_name):
return os.path.basename(header_name) + '.' + self.get_pch_suffix()
- def openmp_flags(self):
+ def openmp_flags(self) -> List[str]:
if version_compare(self.version, '>=15.0.0'):
return ['-qopenmp']
else:
@@ -1845,7 +1966,8 @@ class ArmCompiler:
self.id = 'arm'
self.compiler_type = compiler_type
default_warn_args = []
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + [],
'3': default_warn_args + []}
# Assembly
@@ -1913,6 +2035,13 @@ class ArmCompiler:
def get_debug_args(self, is_debug):
return clike_debug_args[is_debug]
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
class CcrxCompiler:
def __init__(self, compiler_type):
if not self.is_cross:
@@ -1930,7 +2059,8 @@ class CcrxCompiler:
# Assembly
self.can_compile_suffixes.update('s')
default_warn_args = []
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + [],
'3': default_warn_args + []}
@@ -2006,3 +2136,10 @@ class CcrxCompiler:
continue
result.append(i)
return result
+
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:9] == '-include=':
+ parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+
+ return parameter_list
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index d702e83..67de684 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -23,10 +23,12 @@ from .c import CCompiler, VisualStudioCCompiler, ClangClCCompiler
from .compilers import (
gnu_winlibs,
msvc_winlibs,
+ CompilerType,
ClangCompiler,
GnuCompiler,
ElbrusCompiler,
IntelCompiler,
+ PGICompiler,
ArmCompiler,
ArmclangCompiler,
CcrxCompiler,
@@ -133,7 +135,8 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler):
CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
ClangCompiler.__init__(self, compiler_type)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -164,7 +167,8 @@ class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler):
CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
ArmclangCompiler.__init__(self, compiler_type)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -192,7 +196,8 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler):
CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs)
GnuCompiler.__init__(self, compiler_type, defines)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -232,6 +237,12 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler):
return ['-lstdc++']
+class PGICPPCompiler(PGICompiler, CPPCompiler):
+ def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs):
+ CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
+ PGICompiler.__init__(self, CompilerType.PGI_STANDARD)
+
+
class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler):
def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs):
GnuCPPCompiler.__init__(self, exelist, version, compiler_type, is_cross, exe_wrapper, defines, **kwargs)
@@ -264,7 +275,8 @@ class IntelCPPCompiler(IntelCompiler, CPPCompiler):
self.lang_header = 'c++-header'
default_warn_args = ['-Wall', '-w3', '-diag-disable:remark',
'-Wpch-messages', '-Wnon-virtual-dtor']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra']}
diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py
index a6c74d2..cd67da0 100644
--- a/mesonbuild/compilers/cs.py
+++ b/mesonbuild/compilers/cs.py
@@ -32,6 +32,7 @@ class CsCompiler(Compiler):
self.language = 'cs'
super().__init__(exelist, version)
self.id = id
+ self.is_cross = False
self.runner = runner
def get_display_language(self):
@@ -88,6 +89,15 @@ class CsCompiler(Compiler):
def get_pic_args(self):
return []
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+ if i[:5] == '-lib:':
+ parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:]))
+
+ return parameter_list
+
def name_string(self):
return ' '.join(self.exelist)
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
new file mode 100644
index 0000000..21fa498
--- /dev/null
+++ b/mesonbuild/compilers/cuda.py
@@ -0,0 +1,242 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re, os.path
+
+from .. import mlog
+from ..mesonlib import EnvironmentException, Popen_safe
+from .compilers import (Compiler, cuda_buildtype_args, cuda_optimization_args,
+ cuda_debug_args, CompilerType, get_gcc_soname_args)
+
+class CudaCompiler(Compiler):
+ def __init__(self, exelist, version, is_cross, exe_wrapper=None):
+ if not hasattr(self, 'language'):
+ self.language = 'cuda'
+ super().__init__(exelist, version)
+ self.is_cross = is_cross
+ self.exe_wrapper = exe_wrapper
+ self.id = 'nvcc'
+ default_warn_args = []
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
+ '2': default_warn_args + ['-Xcompiler=-Wextra'],
+ '3': default_warn_args + ['-Xcompiler=-Wextra',
+ '-Xcompiler=-Wpedantic']}
+
+ def needs_static_linker(self):
+ return False
+
+ def get_always_args(self):
+ return []
+
+ def get_display_language(self):
+ return 'Cuda'
+
+ def get_no_stdinc_args(self):
+ return []
+
+ def thread_link_flags(self, environment):
+ return ['-Xcompiler=-pthread']
+
+ def sanity_check(self, work_dir, environment):
+ mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))
+ mlog.debug('Is cross compiler: %s.' % str(self.is_cross))
+
+ sname = 'sanitycheckcuda.cu'
+ code = r'''
+ #include <cuda_runtime.h>
+ #include <stdio.h>
+
+ __global__ void kernel (void) {}
+
+ int main(void){
+ struct cudaDeviceProp prop;
+ int count, i;
+ cudaError_t ret = cudaGetDeviceCount(&count);
+ if(ret != cudaSuccess){
+ fprintf(stderr, "%d\n", (int)ret);
+ }else{
+ for(i=0;i<count;i++){
+ if(cudaGetDeviceProperties(&prop, i) == cudaSuccess){
+ fprintf(stdout, "%d.%d\n", prop.major, prop.minor);
+ }
+ }
+ }
+ fflush(stderr);
+ fflush(stdout);
+ return 0;
+ }
+ '''
+ binname = sname.rsplit('.', 1)[0]
+ binname += '_cross' if self.is_cross else ''
+ source_name = os.path.join(work_dir, sname)
+ binary_name = os.path.join(work_dir, binname + '.exe')
+ with open(source_name, 'w') as ofile:
+ ofile.write(code)
+
+ # The Sanity Test for CUDA language will serve as both a sanity test
+ # and a native-build GPU architecture detection test, useful later.
+ #
+ # For this second purpose, NVCC has very handy flags, --run and
+ # --run-args, that allow one to run an application with the
+ # environment set up properly. Of course, this only works for native
+ # builds; For cross builds we must still use the exe_wrapper (if any).
+ self.detected_cc = ''
+ flags = ['-w', '-cudart', 'static', source_name]
+ if self.is_cross and self.exe_wrapper is None:
+ # Linking cross built apps is painful. You can't really
+ # tell if you should use -nostdlib or not and for example
+ # on OSX the compiler binary is the same but you need
+ # a ton of compiler flags to differentiate between
+ # arm and x86_64. So just compile.
+ flags += self.get_compile_only_args()
+ flags += self.get_output_args(binary_name)
+
+ # Compile sanity check
+ cmdlist = self.exelist + flags
+ mlog.debug('Sanity check compiler command line: ', ' '.join(cmdlist))
+ pc, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+ mlog.debug('Sanity check compile stdout: ')
+ mlog.debug(stdo)
+ mlog.debug('-----\nSanity check compile stderr:')
+ mlog.debug(stde)
+ mlog.debug('-----')
+ if pc.returncode != 0:
+ raise EnvironmentException('Compiler {0} can not compile programs.'.format(self.name_string()))
+
+ # Run sanity check (if possible)
+ if self.is_cross:
+ if self.exe_wrapper is None:
+ return
+ else:
+ cmdlist = self.exe_wrapper + [binary_name]
+ else:
+ cmdlist = self.exelist + ['--run', '"' + binary_name + '"']
+ mlog.debug('Sanity check run command line: ', ' '.join(cmdlist))
+ pe, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+ mlog.debug('Sanity check run stdout: ')
+ mlog.debug(stdo)
+ mlog.debug('-----\nSanity check run stderr:')
+ mlog.debug(stde)
+ mlog.debug('-----')
+ pe.wait()
+ if pe.returncode != 0:
+ raise EnvironmentException('Executables created by {0} compiler {1} are not runnable.'.format(self.language, self.name_string()))
+
+ # Interpret the result of the sanity test.
+    # As mentioned above, it is not only a sanity test but also a GPU
+ # architecture detection test.
+ if stde == '':
+ self.detected_cc = stdo
+ else:
+ mlog.debug('cudaGetDeviceCount() returned ' + stde)
+
+ def get_compiler_check_args(self):
+ return super().get_compiler_check_args() + []
+
+ def has_header_symbol(self, hname, symbol, prefix, env, extra_args=None, dependencies=None):
+ if super().has_header_symbol(hname, symbol, prefix, env, extra_args, dependencies):
+ return True
+ if extra_args is None:
+ extra_args = []
+ fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
+ t = '''{prefix}
+ #include <{header}>
+ using {symbol};
+ int main () {{ return 0; }}'''
+ return self.compiles(t.format(**fargs), env, extra_args, dependencies)
+
+ @staticmethod
+ def _cook_link_args(args):
+ """
+ Converts GNU-style arguments -Wl,-arg,-arg
+ to NVCC-style arguments -Xlinker=-arg,-arg
+ """
+ return [re.sub('^-Wl,', '-Xlinker=', arg) for arg in args]
+
+ def get_output_args(self, target):
+ return ['-o', target]
+
+ def name_string(self):
+ return ' '.join(self.exelist)
+
+ def get_soname_args(self, *args):
+ rawargs = get_gcc_soname_args(CompilerType.GCC_STANDARD, *args)
+ return self._cook_link_args(rawargs)
+
+ def get_dependency_gen_args(self, outtarget, outfile):
+ return []
+
+ def get_compile_only_args(self):
+ return ['-c']
+
+ def get_no_optimization_args(self):
+ return ['-O0']
+
+ def get_optimization_args(self, optimization_level):
+ return cuda_optimization_args[optimization_level]
+
+ def get_debug_args(self, is_debug):
+ return cuda_debug_args[is_debug]
+
+ def get_werror_args(self):
+ return ['-Werror=cross-execution-space-call,deprecated-declarations,reorder']
+
+ def get_linker_exelist(self):
+ return self.exelist[:]
+
+ def get_linker_output_args(self, outputname):
+ return ['-o', outputname]
+
+ def get_warn_args(self, level):
+ return self.warn_args[level]
+
+ def get_buildtype_args(self, buildtype):
+ return cuda_buildtype_args[buildtype]
+
+ def get_include_args(self, path, is_system):
+ if path == '':
+ path = '.'
+ return ['-I' + path]
+
+ def get_std_shared_lib_link_args(self):
+ return ['-shared']
+
+ def depfile_for_object(self, objfile):
+ return objfile + '.' + self.get_depfile_suffix()
+
+ def get_depfile_suffix(self):
+ return 'd'
+
+ def get_buildtype_linker_args(self, buildtype):
+ return []
+
+ def get_std_exe_link_args(self):
+ return []
+
+ def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
+ rawargs = self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ return self._cook_link_args(rawargs)
+
+ def get_linker_search_args(self, dirname):
+ return ['-L' + dirname]
+
+ def linker_to_compiler_args(self, args):
+ return args
+
+ def get_pic_args(self):
+ return ['-Xcompiler=-fPIC']
+
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ return []
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index 2cf0fbd..f1580b6 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -14,7 +14,9 @@
import os.path, subprocess
-from ..mesonlib import EnvironmentException, version_compare, is_windows, is_osx
+from ..mesonlib import (
+ EnvironmentException, MachineChoice, version_compare, is_windows, is_osx
+)
from .compilers import (
CompilerType,
@@ -111,6 +113,19 @@ class DCompiler(Compiler):
def get_include_args(self, path, is_system):
return ['-I=' + path]
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:3] == '-I=':
+ parameter_list[idx] = i[:3] + os.path.normpath(os.path.join(build_dir, i[3:]))
+ if i[:4] == '-L-L':
+ parameter_list[idx] = i[:4] + os.path.normpath(os.path.join(build_dir, i[4:]))
+ if i[:5] == '-L=-L':
+ parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:]))
+ if i[:6] == '-Wl,-L':
+ parameter_list[idx] = i[:6] + os.path.normpath(os.path.join(build_dir, i[6:]))
+
+ return parameter_list
+
def get_warn_args(self, level):
return ['-wi']
@@ -293,12 +308,17 @@ class DCompiler(Compiler):
# Add link flags needed to find dependencies
args += d.get_link_args()
+ if env.is_cross_build() and not self.is_cross:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
+
if mode == 'compile':
# Add DFLAGS from the env
- args += env.coredata.get_external_args(self.language)
+ args += env.coredata.get_external_args(for_machine, self.language)
elif mode == 'link':
# Add LDFLAGS from the env
- args += env.coredata.get_external_link_args(self.language)
+ args += env.coredata.get_external_link_args(for_machine, self.language)
# extra_args must override all other arguments, so we add them last
args += extra_args
return args
@@ -360,7 +380,18 @@ class DCompiler(Compiler):
# translate library link flag
dcargs.append('-L=' + arg)
continue
- elif arg.startswith('-L'):
+ elif arg.startswith('-isystem'):
+ # translate -isystem system include path
+ # this flag might sometimes be added by C library Cflags via
+ # pkg-config.
+ # NOTE: -isystem and -I are not 100% equivalent, so this is just
+ # a workaround for the most common cases.
+ if arg.startswith('-isystem='):
+ dcargs.append('-I=' + arg[9:])
+ else:
+ dcargs.append('-I')
+ continue
+ elif arg.startswith('-L/') or arg.startswith('-L./'):
# we need to handle cases where -L is set by e.g. a pkg-config
# setting to select a linker search path. We can however not
# unconditionally prefix '-L' with '-L' because the user might
@@ -467,7 +498,8 @@ class GnuDCompiler(DCompiler):
DCompiler.__init__(self, exelist, version, is_cross, arch, **kwargs)
self.id = 'gcc'
default_warn_args = ['-Wall', '-Wdeprecated']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt']
@@ -511,6 +543,13 @@ class GnuDCompiler(DCompiler):
def get_buildtype_args(self, buildtype):
return d_gdc_buildtype_args[buildtype]
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
def build_rpath_args(self, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
return self.build_unix_rpath_args(build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
@@ -531,8 +570,10 @@ class LLVMDCompiler(DCompiler):
def get_warn_args(self, level):
if level == '2' or level == '3':
return ['-wi', '-dw']
- else:
+ elif level == '1':
return ['-wi']
+ else:
+ return []
def get_buildtype_args(self, buildtype):
if buildtype != 'plain':
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index 75db26d..738a5c6 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -11,6 +11,9 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import List
+import subprocess, os
+from pathlib import Path
from .c import CCompiler
from .compilers import (
@@ -22,12 +25,14 @@ from .compilers import (
clike_debug_args,
Compiler,
GnuCompiler,
+ ClangCompiler,
ElbrusCompiler,
IntelCompiler,
+ PGICompiler
)
from mesonbuild.mesonlib import EnvironmentException, is_osx
-import subprocess, os
+
class FortranCompiler(Compiler):
library_dirs_cache = CCompiler.library_dirs_cache
@@ -73,12 +78,8 @@ class FortranCompiler(Compiler):
source_name = os.path.join(work_dir, 'sanitycheckf.f90')
binary_name = os.path.join(work_dir, 'sanitycheckf')
with open(source_name, 'w') as ofile:
- ofile.write('''program prog
- print *, "Fortran compilation is working."
-end program prog
-''')
- extra_flags = self.get_cross_extra_flags(environment, link=True)
- pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
+ ofile.write('print *, "Fortran compilation is working."; end')
+ pc = subprocess.Popen(self.exelist + [source_name, '-o', binary_name])
pc.wait()
if pc.returncode != 0:
raise EnvironmentException('Compiler %s can not compile programs.' % self.name_string())
@@ -171,7 +172,14 @@ end program prog
def get_module_outdir_args(self, path):
return ['-module', path]
- def module_name_to_filename(self, module_name):
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
+ def module_name_to_filename(self, module_name: str) -> str:
return module_name.lower() + '.mod'
def get_std_shared_lib_link_args(self):
@@ -219,7 +227,7 @@ end program prog
dependencies=dependencies)
def run(self, code, env, *, extra_args=None, dependencies=None):
- return CCompiler.run(self, code, env, extra_args, dependencies)
+ return CCompiler.run(self, code, env, extra_args=extra_args, dependencies=dependencies)
def _get_patterns(self, *args, **kwargs):
return CCompiler._get_patterns(self, *args, **kwargs)
@@ -233,7 +241,7 @@ end program prog
def find_library_impl(self, *args):
return CCompiler.find_library_impl(self, *args)
- def find_library(self, libname, env, extra_dirs, libtype='default'):
+ def find_library(self, libname, env, extra_dirs, libtype='shared-static'):
code = '''program main
call exit(0)
end program main'''
@@ -254,13 +262,27 @@ end program prog
def has_multi_arguments(self, args, env):
return CCompiler.has_multi_arguments(self, args, env)
+ def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None):
+ return CCompiler.has_header(self, hname, prefix, env, extra_args=extra_args, dependencies=dependencies)
+
+ def get_define(self, dname, prefix, env, extra_args, dependencies):
+ return CCompiler.get_define(self, dname, prefix, env, extra_args, dependencies)
+
+ @classmethod
+ def _get_trials_from_pattern(cls, pattern, directory, libname):
+ return CCompiler._get_trials_from_pattern(pattern, directory, libname)
+
+ @staticmethod
+ def _get_file_from_list(env, files: List[str]) -> Path:
+ return CCompiler._get_file_from_list(env, files)
class GnuFortranCompiler(GnuCompiler, FortranCompiler):
def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs):
FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
GnuCompiler.__init__(self, compiler_type, defines)
default_warn_args = ['-Wall']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -287,7 +309,8 @@ class G95FortranCompiler(FortranCompiler):
FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags)
self.id = 'g95'
default_warn_args = ['-Wall']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-pedantic']}
@@ -332,7 +355,8 @@ class IntelFortranCompiler(IntelCompiler, FortranCompiler):
IntelCompiler.__init__(self, CompilerType.ICC_STANDARD)
self.id = 'intel'
default_warn_args = ['-warn', 'general', '-warn', 'truncated_source']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-warn', 'unused'],
'3': ['-warn', 'all']}
@@ -354,7 +378,8 @@ class PathScaleFortranCompiler(FortranCompiler):
FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags)
self.id = 'pathscale'
default_warn_args = ['-fullwarn']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args,
'3': default_warn_args}
@@ -362,31 +387,30 @@ class PathScaleFortranCompiler(FortranCompiler):
return ['-mp']
-class PGIFortranCompiler(FortranCompiler):
+class PGIFortranCompiler(PGICompiler, FortranCompiler):
def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags):
FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags)
- self.id = 'pgi'
+ PGICompiler.__init__(self, CompilerType.PGI_STANDARD)
+
+
+class FlangFortranCompiler(ClangCompiler, FortranCompiler):
+ def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags):
+ FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags)
+ ClangCompiler.__init__(self, CompilerType.CLANG_STANDARD)
+ self.id = 'flang'
default_warn_args = ['-Minform=inform']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args,
'3': default_warn_args}
- def get_module_incdir_args(self):
- return ('-module', )
-
- def get_no_warn_args(self):
- return ['-silent']
-
- def openmp_flags(self):
- return ['-fopenmp']
-
-
class Open64FortranCompiler(FortranCompiler):
def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags):
FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags)
self.id = 'open64'
default_warn_args = ['-fullwarn']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args,
'3': default_warn_args}
diff --git a/mesonbuild/compilers/java.py b/mesonbuild/compilers/java.py
index 978562c..5d7f865 100644
--- a/mesonbuild/compilers/java.py
+++ b/mesonbuild/compilers/java.py
@@ -23,6 +23,7 @@ class JavaCompiler(Compiler):
self.language = 'java'
super().__init__(exelist, version)
self.id = 'unknown'
+ self.is_cross = False
self.javarunner = 'java'
def get_soname_args(self, *args):
@@ -81,6 +82,15 @@ class JavaCompiler(Compiler):
def get_buildtype_args(self, buildtype):
return java_buildtype_args[buildtype]
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i in ['-cp', '-classpath', '-sourcepath'] and idx + 1 < len(parameter_list):
+ path_list = parameter_list[idx + 1].split(os.pathsep)
+ path_list = [os.path.normpath(os.path.join(build_dir, x)) for x in path_list]
+ parameter_list[idx + 1] = os.pathsep.join(path_list)
+
+ return parameter_list
+
def sanity_check(self, work_dir, environment):
src = 'SanityCheck.java'
obj = 'SanityCheck'
diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py
index 5b2b517..8dfd0a2 100644
--- a/mesonbuild/compilers/objc.py
+++ b/mesonbuild/compilers/objc.py
@@ -31,7 +31,7 @@ class ObjCCompiler(CCompiler):
# TODO try to use sanity_check_impl instead of duplicated code
source_name = os.path.join(work_dir, 'sanitycheckobjc.m')
binary_name = os.path.join(work_dir, 'sanitycheckobjc')
- extra_flags = self.get_cross_extra_flags(environment, link=False)
+ extra_flags = []
if self.is_cross:
extra_flags += self.get_compile_only_args()
with open(source_name, 'w') as ofile:
@@ -55,7 +55,8 @@ class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
ObjCCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
GnuCompiler.__init__(self, compiler_type, defines)
default_warn_args = ['-Wall', '-Winvalid-pch']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -65,7 +66,8 @@ class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
ObjCCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
ClangCompiler.__init__(self, compiler_type)
default_warn_args = ['-Wall', '-Winvalid-pch']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage']
diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py
index e1b7a7d..e66d730 100644
--- a/mesonbuild/compilers/objcpp.py
+++ b/mesonbuild/compilers/objcpp.py
@@ -31,14 +31,11 @@ class ObjCPPCompiler(CPPCompiler):
# TODO try to use sanity_check_impl instead of duplicated code
source_name = os.path.join(work_dir, 'sanitycheckobjcpp.mm')
binary_name = os.path.join(work_dir, 'sanitycheckobjcpp')
- extra_flags = self.get_cross_extra_flags(environment, link=False)
- if self.is_cross:
- extra_flags += self.get_compile_only_args()
with open(source_name, 'w') as ofile:
ofile.write('#import<stdio.h>\n'
'class MyClass;'
'int main(int argc, char **argv) { return 0; }\n')
- pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
+ pc = subprocess.Popen(self.exelist + [source_name, '-o', binary_name])
pc.wait()
if pc.returncode != 0:
raise EnvironmentException('ObjC++ compiler %s can not compile programs.' % self.name_string())
@@ -56,7 +53,8 @@ class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler):
ObjCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
GnuCompiler.__init__(self, compiler_type, defines)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
@@ -66,7 +64,8 @@ class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
ObjCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper)
ClangCompiler.__init__(self, compiler_type)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
- self.warn_args = {'1': default_warn_args,
+ self.warn_args = {'0': [],
+ '1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage']
diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py
index 93c2917..68da823 100644
--- a/mesonbuild/compilers/rust.py
+++ b/mesonbuild/compilers/rust.py
@@ -82,3 +82,14 @@ class RustCompiler(Compiler):
def get_optimization_args(self, optimization_level):
return rust_optimization_args[optimization_level]
+
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-L':
+ for j in ['dependency', 'crate', 'native', 'framework', 'all']:
+ combined_len = len(j) + 3
+ if i[:combined_len] == '-L{}='.format(j):
+ parameter_list[idx] = i[:combined_len] + os.path.normpath(os.path.join(build_dir, i[combined_len:]))
+ break
+
+ return parameter_list
diff --git a/mesonbuild/compilers/swift.py b/mesonbuild/compilers/swift.py
index 4d5dd0c..94e6736 100644
--- a/mesonbuild/compilers/swift.py
+++ b/mesonbuild/compilers/swift.py
@@ -91,6 +91,13 @@ class SwiftCompiler(Compiler):
def get_compile_only_args(self):
return ['-c']
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:2] == '-I' or i[:2] == '-L':
+ parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+ return parameter_list
+
def sanity_check(self, work_dir, environment):
src = 'swifttest.swift'
source_name = os.path.join(work_dir, src)
@@ -98,8 +105,7 @@ class SwiftCompiler(Compiler):
with open(source_name, 'w') as ofile:
ofile.write('''print("Swift compilation is working.")
''')
- extra_flags = self.get_cross_extra_flags(environment, link=True)
- pc = subprocess.Popen(self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
+ pc = subprocess.Popen(self.exelist + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
pc.wait()
if pc.returncode != 0:
raise EnvironmentException('Swift compiler %s can not compile programs.' % self.name_string())
diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py
index 46bb210..b463f0d 100644
--- a/mesonbuild/compilers/vala.py
+++ b/mesonbuild/compilers/vala.py
@@ -49,6 +49,12 @@ class ValaCompiler(Compiler):
def get_pic_args(self):
return []
+ def get_pie_args(self):
+ return []
+
+ def get_pie_link_args(self):
+ return []
+
def get_always_args(self):
return ['-C']
@@ -66,10 +72,22 @@ class ValaCompiler(Compiler):
return ['--color=' + colortype]
return []
+ def compute_parameters_with_absolute_paths(self, parameter_list, build_dir):
+ for idx, i in enumerate(parameter_list):
+ if i[:9] == '--girdir=':
+ parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+ if i[:10] == '--vapidir=':
+ parameter_list[idx] = i[:10] + os.path.normpath(os.path.join(build_dir, i[10:]))
+ if i[:13] == '--includedir=':
+ parameter_list[idx] = i[:13] + os.path.normpath(os.path.join(build_dir, i[13:]))
+ if i[:14] == '--metadatadir=':
+ parameter_list[idx] = i[:14] + os.path.normpath(os.path.join(build_dir, i[14:]))
+
+ return parameter_list
+
def sanity_check(self, work_dir, environment):
code = 'class MesonSanityCheck : Object { }'
- args = self.get_cross_extra_flags(environment, link=False)
- with self.compile(code, args, 'compile') as p:
+ with self.compile(code, [], 'compile') as p:
if p.returncode != 0:
msg = 'Vala compiler {!r} can not compile programs' \
''.format(self.name_string())
@@ -80,7 +98,7 @@ class ValaCompiler(Compiler):
return ['--debug']
return []
- def find_library(self, libname, env, extra_dirs):
+ def find_library(self, libname, env, extra_dirs, *args):
if extra_dirs and isinstance(extra_dirs, str):
extra_dirs = [extra_dirs]
# Valac always looks in the default vapi dir, so only search there if
@@ -88,9 +106,7 @@ class ValaCompiler(Compiler):
if not extra_dirs:
code = 'class MesonFindLibrary : Object { }'
vapi_args = ['--pkg', libname]
- args = self.get_cross_extra_flags(env, link=False)
- args += vapi_args
- with self.compile(code, args, 'compile') as p:
+ with self.compile(code, vapi_args, 'compile') as p:
if p.returncode == 0:
return vapi_args
# Not found? Try to find the vapi file itself.
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 8ada86a..fba90fa 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -1,4 +1,4 @@
-# Copyright 2012-2018 The Meson development team
+# Copyright 2012-2019 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,14 +19,15 @@ from itertools import chain
from pathlib import PurePath
from collections import OrderedDict
from .mesonlib import (
- MesonException, default_libdir, default_libexecdir, default_prefix
+ MesonException, MachineChoice, PerMachine,
+ default_libdir, default_libexecdir, default_prefix, stringlistify
)
from .wrap import WrapMode
import ast
import argparse
import configparser
-version = '0.49.999'
+version = '0.50.999'
backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'xcode']
default_yielding = False
@@ -43,6 +44,9 @@ class UserOption:
raise MesonException('Value of "yielding" must be a boolean.')
self.yielding = yielding
+ def printable_value(self):
+ return self.value
+
# Check that the input is a valid value and return the
# "cleaned" or "native" version. For example the Boolean
# option could take the string "true" and return True.
@@ -114,9 +118,14 @@ class UserUmaskOption(UserIntegerOption):
super().__init__(name, description, 0, 0o777, value, yielding)
self.choices = ['preserve', '0000-0777']
+ def printable_value(self):
+ if self.value == 'preserve':
+ return self.value
+ return format(self.value, '04o')
+
def validate_value(self, value):
if value is None or value == 'preserve':
- return None
+ return 'preserve'
return super().validate_value(value)
def toint(self, valuestring):
@@ -226,47 +235,11 @@ def load_configs(filenames):
raise MesonException('Cannot find specified native file: ' + f)
- config = configparser.SafeConfigParser()
+ config = configparser.ConfigParser()
config.read(gen())
return config
-def _get_section(config, section):
- if config.has_section(section):
- final = {}
- for k, v in config.items(section):
- # Windows paths...
- v = v.replace('\\', '\\\\')
- try:
- final[k] = ast.literal_eval(v)
- except SyntaxError:
- raise MesonException(
- 'Malformed value in native file variable: {}'.format(v))
- return final
- return {}
-
-
-class ConfigData:
-
- """Contains configuration information provided by the user for the build."""
-
- def __init__(self, config=None):
- if config:
- self.binaries = _get_section(config, 'binaries')
- # global is a keyword and globals is a builtin, rather than mangle it,
- # use a similar word
- self.universal = _get_section(config, 'globals')
- self.subprojects = {s: _get_section(config, s) for s in config.sections()
- if s not in {'binaries', 'globals'}}
- else:
- self.binaries = {}
- self.universal = {}
- self.subprojects = {}
-
- def get_binaries(self, name):
- return self.binaries.get(name, None)
-
-
# This class contains all data that must persist over multiple
# invocations of Meson. It is roughly the same thing as
# cmakecache.
@@ -289,9 +262,9 @@ class CoreData:
self.init_builtins()
self.backend_options = {}
self.user_options = {}
- self.compiler_options = {}
+ self.compiler_options = PerMachine({}, {}, {})
self.base_options = {}
- self.external_preprocess_args = {} # CPPFLAGS only
+ self.external_preprocess_args = PerMachine({}, {}, {}) # CPPFLAGS only
self.cross_file = self.__load_cross_file(options.cross_file)
self.compilers = OrderedDict()
self.cross_compilers = OrderedDict()
@@ -485,16 +458,18 @@ class CoreData:
mode = 'custom'
self.builtins['buildtype'].set_value(mode)
+ def get_all_compiler_options(self):
+ # TODO think about cross and command-line interface. (Only .build is mentioned here.)
+ yield self.compiler_options.build
+
def _get_all_nonbuiltin_options(self):
yield self.backend_options
yield self.user_options
- yield self.compiler_options
+ yield from self.get_all_compiler_options()
yield self.base_options
def get_all_options(self):
- return chain(
- iter([self.builtins]),
- self._get_all_nonbuiltin_options())
+ return chain([self.builtins], self._get_all_nonbuiltin_options())
def validate_option_value(self, option_name, override_value):
for opts in self.get_all_options():
@@ -503,14 +478,14 @@ class CoreData:
return opt.validate_value(override_value)
raise MesonException('Tried to validate unknown option %s.' % option_name)
- def get_external_args(self, lang):
- return self.compiler_options[lang + '_args'].value
+ def get_external_args(self, for_machine: MachineChoice, lang):
+ return self.compiler_options[for_machine][lang + '_args'].value
- def get_external_link_args(self, lang):
- return self.compiler_options[lang + '_link_args'].value
+ def get_external_link_args(self, for_machine: MachineChoice, lang):
+ return self.compiler_options[for_machine][lang + '_link_args'].value
- def get_external_preprocess_args(self, lang):
- return self.external_preprocess_args[lang]
+ def get_external_preprocess_args(self, for_machine: MachineChoice, lang):
+ return self.external_preprocess_args[for_machine][lang]
def merge_user_options(self, options):
for (name, value) in options.items():
@@ -521,7 +496,7 @@ class CoreData:
if type(oldval) != type(value):
self.user_options[name] = value
- def set_options(self, options, subproject=''):
+ def set_options(self, options, subproject='', warn_unknown=True):
# Set prefix first because it's needed to sanitize other options
prefix = self.builtins['prefix'].value
if 'prefix' in options:
@@ -545,13 +520,18 @@ class CoreData:
break
else:
unknown_options.append(k)
-
- if unknown_options:
+ if unknown_options and warn_unknown:
unknown_options = ', '.join(sorted(unknown_options))
sub = 'In subproject {}: '.format(subproject) if subproject else ''
mlog.warning('{}Unknown options: "{}"'.format(sub, unknown_options))
- def set_default_options(self, default_options, subproject, cmd_line_options):
+ def set_default_options(self, default_options, subproject, env):
+ # Set defaults first from conf files (cross or native), then
+ # override them as necessary.
+ for k, v in env.paths.host:
+ if v is not None:
+ env.cmd_line_options.setdefault(k, v)
+
# Set default options as if they were passed to the command line.
# Subprojects can only define default for user options.
from . import optinterpreter
@@ -560,7 +540,7 @@ class CoreData:
if optinterpreter.is_invalid_name(k):
continue
k = subproject + ':' + k
- cmd_line_options.setdefault(k, v)
+ env.cmd_line_options.setdefault(k, v)
# Create a subset of cmd_line_options, keeping only options for this
# subproject. Also take builtin options if it's the main project.
@@ -568,7 +548,7 @@ class CoreData:
# languages and setting the backend (builtin options must be set first
# to know which backend we'll use).
options = {}
- for k, v in cmd_line_options.items():
+ for k, v in env.cmd_line_options.items():
if subproject:
if not k.startswith(subproject + ':'):
continue
@@ -581,6 +561,66 @@ class CoreData:
self.set_options(options, subproject)
+ def process_new_compilers(self, lang: str, comp, cross_comp, env):
+ from . import compilers
+
+ self.compilers[lang] = comp
+ if cross_comp is not None:
+ self.cross_compilers[lang] = cross_comp
+
+ # Native compiler always exist so always add its options.
+ new_options_for_build = comp.get_and_default_options(env.properties.build)
+ preproc_flags_for_build = comp.get_preproc_flags()
+ if cross_comp is not None:
+ new_options_for_host = cross_comp.get_and_default_options(env.properties.host)
+ preproc_flags_for_host = cross_comp.get_preproc_flags()
+ else:
+ new_options_for_host = comp.get_and_default_options(env.properties.host)
+ preproc_flags_for_host = comp.get_preproc_flags()
+
+ opts_machines_list = [
+ (new_options_for_build, preproc_flags_for_build, MachineChoice.BUILD),
+ (new_options_for_host, preproc_flags_for_host, MachineChoice.HOST),
+ ]
+
+ optprefix = lang + '_'
+ for new_options, preproc_flags, for_machine in opts_machines_list:
+ for k, o in new_options.items():
+ if not k.startswith(optprefix):
+ raise MesonException('Internal error, %s has incorrect prefix.' % k)
+ if (env.machines.matches_build_machine(for_machine) and
+ k in env.cmd_line_options):
+ # TODO think about cross and command-line interface.
+ o.set_value(env.cmd_line_options[k])
+ self.compiler_options[for_machine].setdefault(k, o)
+
+ # Unlike compiler and linker flags, preprocessor flags are not in
+ # compiler_options because they are not visible to user.
+ preproc_flags = shlex.split(preproc_flags)
+ k = lang + '_args'
+ if lang in ('c', 'cpp', 'objc', 'objcpp') and k in env.properties[for_machine]:
+ # `c_args` in the cross file are used, like CPPFLAGS but *not*
+ # CFLAGS, for tests. this is weird, but how it was already
+ # implemented. Hopefully a new version of #3916 fixes it.
+ preproc_flags = stringlistify(env.properties[for_machine][k])
+ self.external_preprocess_args[for_machine].setdefault(lang, preproc_flags)
+
+ enabled_opts = []
+ for optname in comp.base_options:
+ if optname in self.base_options:
+ continue
+ oobj = compilers.base_options[optname]
+ if optname in env.cmd_line_options:
+ oobj.set_value(env.cmd_line_options[optname])
+ enabled_opts.append(optname)
+ self.base_options[optname] = oobj
+ self.emit_base_options_warnings(enabled_opts)
+
+ def emit_base_options_warnings(self, enabled_opts: list):
+ if 'b_bitcode' in enabled_opts:
+ mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.')
+ mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.')
+
class CmdLineFileParser(configparser.ConfigParser):
def __init__(self):
# We don't want ':' as key delimiter, otherwise it would break when
@@ -604,6 +644,10 @@ def read_cmd_line_file(build_dir, options):
properties = config['properties']
if options.cross_file is None:
options.cross_file = properties.get('cross_file', None)
+ if not options.native_file:
+ # This will be a string in the form: "['first', 'second', ...]", use
+ # literal_eval to get it into the list of strings.
+ options.native_file = ast.literal_eval(properties.get('native_file', '[]'))
def write_cmd_line_file(build_dir, options):
filename = get_cmd_line_file(build_dir)
@@ -612,6 +656,8 @@ def write_cmd_line_file(build_dir, options):
properties = {}
if options.cross_file is not None:
properties['cross_file'] = options.cross_file
+ if options.native_file:
+ properties['native_file'] = options.native_file
config['options'] = options.cmd_line_options
config['properties'] = properties
@@ -626,17 +672,25 @@ def update_cmd_line_file(build_dir, options):
with open(filename, 'w') as f:
config.write(f)
+def major_versions_differ(v1, v2):
+ return v1.split('.')[0:2] != v2.split('.')[0:2]
+
def load(build_dir):
filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename)
try:
with open(filename, 'rb') as f:
obj = pickle.load(f)
- except pickle.UnpicklingError:
+ except (pickle.UnpicklingError, EOFError):
raise MesonException(load_fail_msg)
+ except AttributeError:
+ raise MesonException(
+ "Coredata file {!r} references functions or classes that don't "
+ "exist. This probably means that it was generated with an old "
+ "version of meson.".format(filename))
if not isinstance(obj, CoreData):
raise MesonException(load_fail_msg)
- if obj.version != version:
+ if major_versions_differ(obj.version, version):
raise MesonException('Build directory has been generated with Meson version %s, '
'which is incompatible with current version %s.\n' %
(obj.version, version))
@@ -646,7 +700,7 @@ def save(obj, build_dir):
filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
prev_filename = filename + '.prev'
tempfilename = filename + '~'
- if obj.version != version:
+ if major_versions_differ(obj.version, version):
raise MesonException('Fatal version mismatch corruption.')
if os.path.exists(filename):
import shutil
@@ -777,7 +831,7 @@ builtin_options = {
'localstatedir': [UserStringOption, 'Localstate data directory', 'var'],
'sharedstatedir': [UserStringOption, 'Architecture-independent data directory', 'com'],
'werror': [UserBooleanOption, 'Treat warnings as errors', False],
- 'warning_level': [UserComboOption, 'Compiler warning level to use', ['1', '2', '3'], '1'],
+ 'warning_level': [UserComboOption, 'Compiler warning level to use', ['0', '1', '2', '3'], '1'],
'layout': [UserComboOption, 'Build directory layout', ['mirror', 'flat'], 'mirror'],
'default_library': [UserComboOption, 'Default library type', ['shared', 'static', 'both'], 'shared'],
'backend': [UserComboOption, 'Backend to use', backendlist, 'ninja'],
diff --git a/mesonbuild/dependencies/__init__.py b/mesonbuild/dependencies/__init__.py
index afe2a3b..53ff1c9 100644
--- a/mesonbuild/dependencies/__init__.py
+++ b/mesonbuild/dependencies/__init__.py
@@ -18,7 +18,7 @@ from .base import ( # noqa: F401
ExternalDependency, NotFoundDependency, ExternalLibrary, ExtraFrameworkDependency, InternalDependency,
PkgConfigDependency, CMakeDependency, find_external_dependency, get_dep_identifier, packages, _packages_accept_language)
from .dev import GMockDependency, GTestDependency, LLVMDependency, ValgrindDependency
-from .misc import (MPIDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency)
+from .misc import (CoarrayDependency, HDF5Dependency, MPIDependency, NetCDFDependency, OpenMPDependency, Python3Dependency, ThreadDependency, PcapDependency, CupsDependency, LibWmfDependency, LibGCryptDependency)
from .platform import AppleFrameworks
from .ui import GLDependency, GnuStepDependency, Qt4Dependency, Qt5Dependency, SDL2Dependency, WxDependency, VulkanDependency
@@ -32,7 +32,10 @@ packages.update({
# From misc:
'boost': BoostDependency,
+ 'coarray': CoarrayDependency,
'mpi': MPIDependency,
+ 'hdf5': HDF5Dependency,
+ 'netcdf': NetCDFDependency,
'openmp': OpenMPDependency,
'python3': Python3Dependency,
'threads': ThreadDependency,
@@ -54,6 +57,8 @@ packages.update({
'vulkan': VulkanDependency,
})
_packages_accept_language.update({
+ 'hdf5',
'mpi',
+ 'netcdf',
'openmp',
})
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index cd02939..2ba150b 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -14,12 +14,11 @@
# This file contains the detection logic for external dependencies.
# Custom logic for several other packages are in separate files.
-
+from typing import Dict, Any
import copy
import functools
import os
import re
-import stat
import json
import shlex
import shutil
@@ -27,14 +26,17 @@ import textwrap
import platform
import itertools
import ctypes
+from typing import List, Tuple
from enum import Enum
-from pathlib import PurePath
+from pathlib import Path, PurePath
from .. import mlog
from .. import mesonlib
from ..compilers import clib_langs
-from ..mesonlib import MesonException, OrderedSet
+from ..environment import BinaryTable, Environment, MachineInfo
+from ..mesonlib import MachineChoice, MesonException, OrderedSet, PerMachine
from ..mesonlib import Popen_safe, version_compare_many, version_compare, listify
+from ..mesonlib import Version
# These must be defined in this file to avoid cyclical references.
packages = {}
@@ -327,6 +329,10 @@ class NotFoundDependency(Dependency):
self.name = 'not-found'
self.is_found = False
+ def get_partial_dependency(self, *, compile_args=False, link_args=False,
+ links=False, includes=False, sources=False):
+ return copy.copy(self)
+
class ConfigToolDependency(ExternalDependency):
@@ -393,24 +399,21 @@ class ConfigToolDependency(ExternalDependency):
if not isinstance(versions, list) and versions is not None:
versions = listify(versions)
- if self.env.is_cross_build() and not self.native:
- cross_file = self.env.cross_info.config['binaries']
- try:
- tools = [cross_file[self.tool_name]]
- except KeyError:
+ for_machine = MachineChoice.BUILD if self.native else MachineChoice.HOST
+ tool = self.env.binaries[for_machine].lookup_entry(self.tool_name)
+ if tool is not None:
+ tools = [tool]
+ else:
+ if self.env.is_cross_build() and not self.native:
mlog.warning('No entry for {0} specified in your cross file. '
'Falling back to searching PATH. This may find a '
'native version of {0}!'.format(self.tool_name))
- tools = self.tools
- elif self.tool_name in self.env.config_info.binaries:
- tools = [self.env.config_info.binaries[self.tool_name]]
- else:
- tools = self.tools
+ tools = [[t] for t in self.tools]
best_match = (None, None)
for tool in tools:
try:
- p, out = Popen_safe([tool, '--version'])[:2]
+ p, out = Popen_safe(tool + ['--version'])[:2]
except (FileNotFoundError, PermissionError):
continue
if p.returncode != 0:
@@ -439,20 +442,24 @@ class ConfigToolDependency(ExternalDependency):
def report_config(self, version, req_version):
"""Helper method to print messages about the tool."""
+
+ found_msg = [mlog.bold(self.tool_name), 'found:']
+
if self.config is None:
- if version is not None:
- mlog.log('Found', mlog.bold(self.tool_name), repr(version),
- mlog.red('NO'), '(needed', req_version, ')')
- else:
- mlog.log('Found', mlog.bold(self.tool_name), repr(req_version),
- mlog.red('NO'))
- return False
- mlog.log('Found {}:'.format(self.tool_name), mlog.bold(shutil.which(self.config)),
- '({})'.format(version))
- return True
+ found_msg.append(mlog.red('NO'))
+ if version is not None and req_version is not None:
+ found_msg.append('found {!r} but need {!r}'.format(version, req_version))
+ elif req_version:
+ found_msg.append('need {!r}'.format(req_version))
+ else:
+ found_msg += [mlog.green('YES'), '({})'.format(shutil.which(self.config[0])), version]
+
+ mlog.log(*found_msg)
+
+ return self.config is not None
def get_config_value(self, args, stage):
- p, out, err = Popen_safe([self.config] + args)
+ p, out, err = Popen_safe(self.config + args)
# This is required to keep shlex from stripping path separators on
# Windows. Also, don't put escape sequences in config values, okay?
out = out.replace('\\', '\\\\')
@@ -469,7 +476,7 @@ class ConfigToolDependency(ExternalDependency):
return [DependencyMethods.AUTO, DependencyMethods.CONFIG_TOOL]
def get_configtool_variable(self, variable_name):
- p, out, _ = Popen_safe([self.config, '--{}'.format(variable_name)])
+ p, out, _ = Popen_safe(self.config + ['--{}'.format(variable_name)])
if p.returncode != 0:
if self.required:
raise DependencyException(
@@ -486,7 +493,7 @@ class ConfigToolDependency(ExternalDependency):
class PkgConfigDependency(ExternalDependency):
# The class's copy of the pkg-config path. Avoids having to search for it
# multiple times in the same Meson invocation.
- class_pkgbin = None
+ class_pkgbin = PerMachine(None, None, None)
# We cache all pkg-config subprocess invocations to avoid redundant calls
pkgbin_cache = {}
@@ -498,31 +505,66 @@ class PkgConfigDependency(ExternalDependency):
# stored in the pickled coredata and recovered.
self.pkgbin = None
- # When finding dependencies for cross-compiling, we don't care about
- # the 'native' pkg-config
- if self.want_cross:
- if 'pkgconfig' not in environment.cross_info.config['binaries']:
- if self.required:
- raise DependencyException('Pkg-config binary missing from cross file')
- else:
- potential_pkgbin = ExternalProgram.from_bin_list(
- environment.cross_info.config['binaries'], 'pkgconfig')
- if potential_pkgbin.found():
- self.pkgbin = potential_pkgbin
- else:
- mlog.debug('Cross pkg-config %s not found.' % potential_pkgbin.name)
- # Only search for the native pkg-config the first time and
- # store the result in the class definition
- elif PkgConfigDependency.class_pkgbin is None:
- self.pkgbin = self.check_pkgconfig()
- PkgConfigDependency.class_pkgbin = self.pkgbin
+ if not self.want_cross and environment.is_cross_build():
+ for_machine = MachineChoice.BUILD
else:
- self.pkgbin = PkgConfigDependency.class_pkgbin
-
- if not self.pkgbin:
+ for_machine = MachineChoice.HOST
+
+ # Create an iterator of options
+ def search():
+ # Lookup in cross or machine file.
+ potential_pkgpath = environment.binaries[for_machine].lookup_entry('pkgconfig')
+ if potential_pkgpath is not None:
+ mlog.debug('Pkg-config binary for {} specified from cross file, native file, '
+ 'or env var as {}'.format(for_machine, potential_pkgpath))
+ yield ExternalProgram.from_entry('pkgconfig', potential_pkgpath)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug('Pkg-config binary missing from cross or native file, or env var undefined.')
+ # Fallback on hard-coded defaults.
+ # TODO prefix this for the cross case instead of ignoring things.
+ if environment.machines.matches_build_machine(for_machine):
+ for potential_pkgpath in environment.default_pkgconfig:
+ mlog.debug('Trying a default pkg-config fallback at', potential_pkgpath)
+ yield ExternalProgram(potential_pkgpath, silent=True)
+
+ # Only search for pkg-config for each machine the first time and store
+ # the result in the class definition
+ if PkgConfigDependency.class_pkgbin[for_machine] is False:
+ mlog.debug('Pkg-config binary for %s is cached as not found.' % for_machine)
+ elif PkgConfigDependency.class_pkgbin[for_machine] is not None:
+ mlog.debug('Pkg-config binary for %s is cached.' % for_machine)
+ else:
+ assert PkgConfigDependency.class_pkgbin[for_machine] is None
+ mlog.debug('Pkg-config binary for %s is not cached.' % for_machine)
+ for potential_pkgbin in search():
+ mlog.debug('Trying pkg-config binary {} for machine {} at {}'
+ .format(potential_pkgbin.name, for_machine, potential_pkgbin.command))
+ version_if_ok = self.check_pkgconfig(potential_pkgbin)
+ if not version_if_ok:
+ continue
+ if not self.silent:
+ mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
+ '(%s)' % version_if_ok)
+ PkgConfigDependency.class_pkgbin[for_machine] = potential_pkgbin
+ break
+ else:
+ if not self.silent:
+ mlog.log('Found Pkg-config:', mlog.red('NO'))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ PkgConfigDependency.class_pkgbin[for_machine] = False
+
+ self.pkgbin = PkgConfigDependency.class_pkgbin[for_machine]
+ if self.pkgbin is False:
+ self.pkgbin = None
+ msg = 'Pkg-config binary for machine %s not found. Giving up.' % for_machine
if self.required:
- raise DependencyException('Pkg-config not found.')
- return
+ raise DependencyException(msg)
+ else:
+ mlog.debug(msg)
+ return
mlog.debug('Determining dependency {!r} with pkg-config executable '
'{!r}'.format(name, self.pkgbin.get_path()))
@@ -642,7 +684,11 @@ class PkgConfigDependency(ExternalDependency):
raw_link_args = self._convert_mingw_paths(shlex.split(out_raw))
for arg in raw_link_args:
if arg.startswith('-L') and not arg.startswith(('-L-l', '-L-L')):
- prefix_libpaths.add(arg[2:])
+ path = arg[2:]
+ if not os.path.isabs(path):
+ # Resolve the path as a compiler in the build directory would
+ path = os.path.join(self.env.get_build_dir(), path)
+ prefix_libpaths.add(path)
system_libpaths = OrderedSet()
full_args = self._convert_mingw_paths(shlex.split(out))
for arg in full_args:
@@ -657,7 +703,7 @@ class PkgConfigDependency(ExternalDependency):
libs_found = OrderedSet()
# Track not-found libraries to know whether to add library paths
libs_notfound = []
- libtype = 'static' if self.static else 'default'
+ libtype = 'static' if self.static else 'shared-static'
# Generate link arguments for this library
link_args = []
for lib in full_args:
@@ -752,10 +798,10 @@ class PkgConfigDependency(ExternalDependency):
if 'define_variable' in kwargs:
definition = kwargs.get('define_variable', [])
if not isinstance(definition, list):
- raise MesonException('define_variable takes a list')
+ raise DependencyException('define_variable takes a list')
if len(definition) != 2 or not all(isinstance(i, str) for i in definition):
- raise MesonException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')
+ raise DependencyException('define_variable must be made up of 2 strings for VARIABLENAME and VARIABLEVALUE')
options = ['--define-variable=' + '='.join(definition)] + options
@@ -785,33 +831,27 @@ class PkgConfigDependency(ExternalDependency):
def get_methods():
return [DependencyMethods.PKGCONFIG]
- def check_pkgconfig(self):
- evar = 'PKG_CONFIG'
- if evar in os.environ:
- pkgbin = os.environ[evar].strip()
- else:
- pkgbin = 'pkg-config'
- pkgbin = ExternalProgram(pkgbin, silent=True)
- if pkgbin.found():
- try:
- p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
- if p.returncode != 0:
- mlog.warning('Found pkg-config {!r} but couldn\'t run it'
- ''.format(' '.join(pkgbin.get_command())))
- # Set to False instead of None to signify that we've already
- # searched for it and not found it
- pkgbin = False
- except (FileNotFoundError, PermissionError):
- pkgbin = False
- else:
- pkgbin = False
- if not self.silent:
- if pkgbin:
- mlog.log('Found pkg-config:', mlog.bold(pkgbin.get_path()),
- '(%s)' % out.strip())
- else:
- mlog.log('Found Pkg-config:', mlog.red('NO'))
- return pkgbin
+ def check_pkgconfig(self, pkgbin):
+ if not pkgbin.found():
+ mlog.log('Did not find pkg-config by name {!r}'.format(pkgbin.name))
+ return None
+ try:
+ p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found pkg-config {!r} but it failed when run'
+ ''.format(' '.join(pkgbin.get_command())))
+ return None
+ except FileNotFoundError:
+ mlog.warning('We thought we found pkg-config {!r} but now it\'s not there. How odd!'
+ ''.format(' '.join(pkgbin.get_command())))
+ return None
+ except PermissionError:
+ msg = 'Found pkg-config {!r} but didn\'t have permissions to run it.'.format(' '.join(pkgbin.get_command()))
+ if not mesonlib.is_windows():
+ msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+ mlog.warning(msg)
+ return None
+ return out.strip()
def extract_field(self, la_file, fieldname):
with open(la_file) as f:
@@ -878,19 +918,21 @@ class CMakeTarget:
class CMakeDependency(ExternalDependency):
# The class's copy of the CMake path. Avoids having to search for it
# multiple times in the same Meson invocation.
- class_cmakebin = None
- class_cmakevers = None
+ class_cmakebin = PerMachine(None, None, None)
+ class_cmakevers = PerMachine(None, None, None)
+ class_cmakeinfo = PerMachine(None, None, None)
# We cache all pkg-config subprocess invocations to avoid redundant calls
cmake_cache = {}
# Version string for the minimum CMake version
class_cmake_version = '>=3.4'
# CMake generators to try (empty for no generator)
class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010']
+ class_working_generator = None
def _gen_exception(self, msg):
return DependencyException('Dependency {} not found: {}'.format(self.name, msg))
- def __init__(self, name, environment, kwargs, language=None):
+ def __init__(self, name: str, environment: Environment, kwargs, language=None):
super().__init__('cmake', environment, language, kwargs)
self.name = name
self.is_libtool = False
@@ -898,6 +940,7 @@ class CMakeDependency(ExternalDependency):
# stored in the pickled coredata and recovered.
self.cmakebin = None
self.cmakevers = None
+ self.cmakeinfo = None
# Dict of CMake variables: '<var_name>': ['list', 'of', 'values']
self.vars = {}
@@ -911,43 +954,256 @@ class CMakeDependency(ExternalDependency):
# When finding dependencies for cross-compiling, we don't care about
# the 'native' CMake binary
# TODO: Test if this works as expected
- if self.want_cross:
- if 'cmake' not in environment.cross_info.config['binaries']:
- if self.required:
- raise self._gen_exception('CMake binary missing from cross file')
- else:
- potential_cmake = ExternalProgram.from_cross_info(environment.cross_info, 'cmake')
- if potential_cmake.found():
- self.cmakebin = potential_cmake
- CMakeDependency.class_cmakebin = self.cmakebin
- else:
- mlog.debug('Cross CMake %s not found.' % potential_cmake.name)
- # Only search for the native CMake the first time and
- # store the result in the class definition
- elif CMakeDependency.class_cmakebin is None:
- self.cmakebin, self.cmakevers = self.check_cmake()
- CMakeDependency.class_cmakebin = self.cmakebin
- CMakeDependency.class_cmakevers = self.cmakevers
+ if environment.is_cross_build() and not self.want_cross:
+ for_machine = MachineChoice.BUILD
else:
- self.cmakebin = CMakeDependency.class_cmakebin
- self.cmakevers = CMakeDependency.class_cmakevers
-
- if not self.cmakebin:
+ for_machine = MachineChoice.HOST
+
+ # Create an iterator of options
+ def search():
+ # Lookup in cross or machine file.
+ potential_cmakepath = environment.binaries[for_machine].lookup_entry('cmake')
+ if potential_cmakepath is not None:
+ mlog.debug('CMake binary for %s specified from cross file, native file, or env var as %s.', for_machine, potential_cmakepath)
+ yield ExternalProgram.from_entry('cmake', potential_cmakepath)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug('CMake binary missing from cross or native file, or env var undefined.')
+ # Fallback on hard-coded defaults.
+ # TODO prefix this for the cross case instead of ignoring things.
+ if environment.machines.matches_build_machine(for_machine):
+ for potential_cmakepath in environment.default_cmake:
+ mlog.debug('Trying a default CMake fallback at', potential_cmakepath)
+ yield ExternalProgram(potential_cmakepath, silent=True)
+
+ # Only search for CMake the first time and store the result in the class
+ # definition
+ if CMakeDependency.class_cmakebin[for_machine] is False:
+ mlog.debug('CMake binary for %s is cached as not found' % for_machine)
+ elif CMakeDependency.class_cmakebin[for_machine] is not None:
+ mlog.debug('CMake binary for %s is cached.' % for_machine)
+ else:
+ assert CMakeDependency.class_cmakebin[for_machine] is None
+ mlog.debug('CMake binary for %s is not cached' % for_machine)
+ for potential_cmakebin in search():
+ mlog.debug('Trying CMake binary {} for machine {} at {}'
+ .format(potential_cmakebin.name, for_machine, potential_cmakebin.command))
+ version_if_ok = self.check_cmake(potential_cmakebin)
+ if not version_if_ok:
+ continue
+ if not self.silent:
+ mlog.log('Found CMake:', mlog.bold(potential_cmakebin.get_path()),
+ '(%s)' % version_if_ok)
+ CMakeDependency.class_cmakebin[for_machine] = potential_cmakebin
+ CMakeDependency.class_cmakevers[for_machine] = version_if_ok
+ break
+ else:
+ if not self.silent:
+ mlog.log('Found CMake:', mlog.red('NO'))
+ # Set to False instead of None to signify that we've already
+ # searched for it and not found it
+ CMakeDependency.class_cmakebin[for_machine] = False
+ CMakeDependency.class_cmakevers[for_machine] = None
+
+ self.cmakebin = CMakeDependency.class_cmakebin[for_machine]
+ self.cmakevers = CMakeDependency.class_cmakevers[for_machine]
+ if self.cmakebin is False:
+ self.cmakebin = None
+ msg = 'No CMake binary for machine %s not found. Giving up.' % for_machine
if self.required:
- raise self._gen_exception('CMake not found.')
+ raise DependencyException(msg)
+ mlog.debug(msg)
return
+ if CMakeDependency.class_cmakeinfo[for_machine] is None:
+ CMakeDependency.class_cmakeinfo[for_machine] = self._get_cmake_info()
+ self.cmakeinfo = CMakeDependency.class_cmakeinfo[for_machine]
+ if self.cmakeinfo is None:
+ raise self._gen_exception('Unable to obtain CMake system information')
+
modules = kwargs.get('modules', [])
+ cm_path = kwargs.get('cmake_module_path', [])
+ cm_args = kwargs.get('cmake_args', [])
if not isinstance(modules, list):
modules = [modules]
- self._detect_dep(name, modules)
+ if not isinstance(cm_path, list):
+ cm_path = [cm_path]
+ if not isinstance(cm_args, list):
+ cm_args = [cm_args]
+ cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
+ if cm_path:
+ cm_args += ['-DCMAKE_MODULE_PATH={}'.format(';'.join(cm_path))]
+ if not self._preliminary_find_check(name, cm_path, environment.machines[for_machine]):
+ return
+ self._detect_dep(name, modules, cm_args)
def __repr__(self):
s = '<{0} {1}: {2} {3}>'
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
- def _detect_dep(self, name, modules):
+ def _get_cmake_info(self):
+ mlog.debug("Extracting basic cmake information")
+ res = {}
+
+ # Try different CMake generators since specifying no generator may fail
+ # in cygwin for some reason
+ gen_list = []
+ # First try the last working generator
+ if CMakeDependency.class_working_generator is not None:
+ gen_list += [CMakeDependency.class_working_generator]
+ gen_list += CMakeDependency.class_cmake_generators
+
+ for i in gen_list:
+ mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+ # Prepare options
+ cmake_opts = ['--trace-expand', '.']
+ if len(i) > 0:
+ cmake_opts = ['-G', i] + cmake_opts
+
+ # Run CMake
+ ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakePathInfo.txt')
+
+ # Current generator was successful
+ if ret1 == 0:
+ CMakeDependency.class_working_generator = i
+ break
+
+ mlog.debug('CMake failed to gather system information for generator {} with error code {}'.format(i, ret1))
+ mlog.debug('OUT:\n{}\n\n\nERR:\n{}\n\n'.format(out1, err1))
+
+ # Check if any generator succeeded
+ if ret1 != 0:
+ return None
+
+ try:
+ # First parse the trace
+ lexer1 = self._lex_trace(err1)
+
+ # Primary pass -- parse all invocations of set
+ for l in lexer1:
+ if l.func == 'set':
+ self._cmake_set(l)
+ except Exception:
+ return None
+
+ # Extract the variables and sanity check them
+ module_paths = sorted(set(self.get_cmake_var('MESON_PATHS_LIST')))
+ module_paths = list(filter(lambda x: os.path.isdir(x), module_paths))
+ archs = self.get_cmake_var('MESON_ARCH_LIST')
+
+ common_paths = ['lib', 'lib32', 'lib64', 'libx32', 'share']
+ for i in archs:
+ common_paths += [os.path.join('lib', i)]
+
+ res = {
+ 'module_paths': module_paths,
+ 'cmake_root': self.get_cmake_var('MESON_CMAKE_ROOT')[0],
+ 'archs': archs,
+ 'common_paths': common_paths
+ }
+
+ mlog.debug(' -- Module search paths: {}'.format(res['module_paths']))
+ mlog.debug(' -- CMake root: {}'.format(res['cmake_root']))
+ mlog.debug(' -- CMake architectures: {}'.format(res['archs']))
+ mlog.debug(' -- CMake lib search paths: {}'.format(res['common_paths']))
+
+ # Reset variables
+ self.vars = {}
+ return res
+
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _cached_listdir(path: str) -> Tuple[Tuple[str, str]]:
+ try:
+ return tuple((x, str(x).lower()) for x in os.listdir(path))
+ except OSError:
+ return ()
+
+ @staticmethod
+ @functools.lru_cache(maxsize=None)
+ def _cached_isdir(path: str) -> bool:
+ try:
+ return os.path.isdir(path)
+ except OSError:
+ return False
+
+ def _preliminary_find_check(self, name: str, module_path: List[str], machine: MachineInfo) -> bool:
+ lname = str(name).lower()
+
+ # Checks <path>, <path>/cmake, <path>/CMake
+ def find_module(path: str) -> bool:
+ for i in [path, os.path.join(path, 'cmake'), os.path.join(path, 'CMake')]:
+ if not self._cached_isdir(i):
+ continue
+
+ for j in ['Find{}.cmake', '{}Config.cmake', '{}-config.cmake']:
+ if os.path.isfile(os.path.join(i, j.format(name))):
+ return True
+ return False
+
+ # Search in <path>/(lib/<arch>|lib*|share) for cmake files
+ def search_lib_dirs(path: str) -> bool:
+ for i in [os.path.join(path, x) for x in self.cmakeinfo['common_paths']]:
+ if not self._cached_isdir(i):
+ continue
+
+ # Check <path>/(lib/<arch>|lib*|share)/cmake/<name>*/
+ cm_dir = os.path.join(i, 'cmake')
+ if self._cached_isdir(cm_dir):
+ content = self._cached_listdir(cm_dir)
+ content = list(filter(lambda x: x[1].startswith(lname), content))
+ for k in content:
+ if find_module(os.path.join(cm_dir, k[0])):
+ return True
+
+ # <path>/(lib/<arch>|lib*|share)/<name>*/
+ # <path>/(lib/<arch>|lib*|share)/<name>*/(cmake|CMake)/
+ content = self._cached_listdir(i)
+ content = list(filter(lambda x: x[1].startswith(lname), content))
+ for k in content:
+ if find_module(os.path.join(i, k[0])):
+ return True
+
+ return False
+
+ # Check the user provided and system module paths
+ for i in module_path + [os.path.join(self.cmakeinfo['cmake_root'], 'Modules')]:
+ if find_module(i):
+ return True
+
+ # Check the system paths
+ for i in self.cmakeinfo['module_paths']:
+ if find_module(i):
+ return True
+
+ if search_lib_dirs(i):
+ return True
+
+ content = self._cached_listdir(i)
+ content = list(filter(lambda x: x[1].startswith(lname), content))
+ for k in content:
+ if search_lib_dirs(os.path.join(i, k[0])):
+ return True
+
+ # Mac framework support
+ if machine.is_darwin():
+ for j in ['{}.framework', '{}.app']:
+ j = j.format(lname)
+ for k in content:
+ if k[1] == j and (find_module(os.path.join(i, k[0], 'Resources')) or find_module(os.path.join(i, k[0], 'Version'))):
+ return True
+
+ # Check the environment path
+ env_path = os.environ.get('{}_DIR'.format(name))
+ if env_path and find_module(env_path):
+ return True
+
+ return False
+
+ def _detect_dep(self, name: str, modules: List[str], args: List[str]):
# Detect a dependency with CMake using the '--find-package' mode
# and the trace output (stderr)
#
@@ -959,19 +1215,26 @@ class CMakeDependency(ExternalDependency):
# Try different CMake generators since specifying no generator may fail
# in cygwin for some reason
- for i in CMakeDependency.class_cmake_generators:
+ gen_list = []
+ # First try the last working generator
+ if CMakeDependency.class_working_generator is not None:
+ gen_list += [CMakeDependency.class_working_generator]
+ gen_list += CMakeDependency.class_cmake_generators
+
+ for i in gen_list:
mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
# Prepare options
- cmake_opts = ['--trace-expand', '-DNAME={}'.format(name), '.']
+ cmake_opts = ['--trace-expand', '-DNAME={}'.format(name), '-DARCHS={}'.format(';'.join(self.cmakeinfo['archs']))] + args + ['.']
if len(i) > 0:
cmake_opts = ['-G', i] + cmake_opts
# Run CMake
- ret1, out1, err1 = self._call_cmake(cmake_opts)
+ ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakeLists.txt')
# Current generator was successful
if ret1 == 0:
+ CMakeDependency.class_working_generator = i
break
mlog.debug('CMake failed for generator {} and package {} with error code {}'.format(i, name, ret1))
@@ -1140,7 +1403,7 @@ class CMakeDependency(ExternalDependency):
def get_cmake_var(self, var):
# Return the value of the CMake variable var or an empty list if var does not exist
- for var in self.vars:
+ if var in self.vars:
return self.vars[var]
return []
@@ -1368,24 +1631,25 @@ set(CMAKE_CXX_ABI_COMPILED TRUE)
set(CMAKE_SIZEOF_VOID_P "{}")
'''.format(os.path.realpath(__file__), ctypes.sizeof(ctypes.c_voidp)))
- def _setup_cmake_dir(self):
+ def _setup_cmake_dir(self, cmake_file: str) -> str:
# Setup the CMake build environment and return the "build" directory
build_dir = '{}/cmake_{}'.format(self.cmake_root_dir, self.name)
os.makedirs(build_dir, exist_ok=True)
# Copy the CMakeLists.txt
cmake_lists = '{}/CMakeLists.txt'.format(build_dir)
- if not os.path.exists(cmake_lists):
- dir_path = os.path.dirname(os.path.realpath(__file__))
- src_cmake = '{}/data/CMakeLists.txt'.format(dir_path)
- shutil.copyfile(src_cmake, cmake_lists)
+ dir_path = os.path.dirname(os.path.realpath(__file__))
+ src_cmake = '{}/data/{}'.format(dir_path, cmake_file)
+ if os.path.exists(cmake_lists):
+ os.remove(cmake_lists)
+ shutil.copyfile(src_cmake, cmake_lists)
self._setup_compiler(build_dir)
self._reset_cmake_cache(build_dir)
return build_dir
- def _call_cmake_real(self, args, env):
- build_dir = self._setup_cmake_dir()
+ def _call_cmake_real(self, args, cmake_file: str, env):
+ build_dir = self._setup_cmake_dir(cmake_file)
cmd = self.cmakebin.get_command() + args
p, out, err = Popen_safe(cmd, env=env, cwd=build_dir)
rc = p.returncode
@@ -1394,7 +1658,7 @@ set(CMAKE_SIZEOF_VOID_P "{}")
return rc, out, err
- def _call_cmake(self, args, env=None):
+ def _call_cmake(self, args, cmake_file: str, env=None):
if env is None:
fenv = env
env = os.environ
@@ -1404,56 +1668,42 @@ set(CMAKE_SIZEOF_VOID_P "{}")
# First check if cached, if not call the real cmake function
cache = CMakeDependency.cmake_cache
- if (self.cmakebin, targs, fenv) not in cache:
- cache[(self.cmakebin, targs, fenv)] = self._call_cmake_real(args, env)
- return cache[(self.cmakebin, targs, fenv)]
+ if (self.cmakebin, targs, cmake_file, fenv) not in cache:
+ cache[(self.cmakebin, targs, cmake_file, fenv)] = self._call_cmake_real(args, cmake_file, env)
+ return cache[(self.cmakebin, targs, cmake_file, fenv)]
@staticmethod
def get_methods():
return [DependencyMethods.CMAKE]
- def check_cmake(self):
- evar = 'CMAKE'
- if evar in os.environ:
- cmakebin = os.environ[evar].strip()
- else:
- cmakebin = 'cmake'
- cmakebin = ExternalProgram(cmakebin, silent=True)
- cmvers = None
- invalid_version = False
- if cmakebin.found():
- try:
- p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2]
- if p.returncode != 0:
- mlog.warning('Found CMake {!r} but couldn\'t run it'
- ''.format(' '.join(cmakebin.get_command())))
- # Set to False instead of None to signify that we've already
- # searched for it and not found it
- cmakebin = False
- except (FileNotFoundError, PermissionError):
- cmakebin = False
-
- cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip()
- if not version_compare(cmvers, CMakeDependency.class_cmake_version):
- invalid_version = True
- else:
- cmakebin = False
- if not self.silent:
- if cmakebin and invalid_version:
- mlog.log('Found CMake:', mlog.red('NO'), '(version of', mlog.bold(cmakebin.get_path()),
- 'is', mlog.bold(cmvers), 'but version', mlog.bold(CMakeDependency.class_cmake_version),
- 'is required)')
- elif cmakebin:
- mlog.log('Found CMake:', mlog.bold(cmakebin.get_path()),
- '(%s)' % cmvers)
- else:
- mlog.log('Found CMake:', mlog.red('NO'))
-
- if invalid_version:
- cmakebin = False
- cmvers = None
-
- return cmakebin, cmvers
+ def check_cmake(self, cmakebin):
+ if not cmakebin.found():
+ mlog.log('Did not find CMake {!r}'.format(cmakebin.name))
+ return None
+ try:
+ p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2]
+ if p.returncode != 0:
+ mlog.warning('Found CMake {!r} but couldn\'t run it'
+ ''.format(' '.join(cmakebin.get_command())))
+ return None
+ except FileNotFoundError:
+ mlog.warning('We thought we found CMake {!r} but now it\'s not there. How odd!'
+ ''.format(' '.join(cmakebin.get_command())))
+ return None
+ except PermissionError:
+ msg = 'Found CMake {!r} but didn\'t have permissions to run it.'.format(' '.join(cmakebin.get_command()))
+ if not mesonlib.is_windows():
+ msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+ mlog.warning(msg)
+ return None
+ cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip()
+ if not version_compare(cmvers, CMakeDependency.class_cmake_version):
+ mlog.warning(
+ 'The version of CMake', mlog.bold(cmakebin.get_path()),
+ 'is', mlog.bold(cmvers), 'but version', mlog.bold(CMakeDependency.class_cmake_version),
+ 'is required')
+ return None
+ return cmvers
def log_tried(self):
return self.type_name
@@ -1607,9 +1857,9 @@ class DubDependency(ExternalDependency):
return ''
# Ex.: library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
- build_name = 'library-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
+ build_name = '-{}-{}-{}-{}_{}'.format(description['buildType'], '.'.join(description['platform']), '.'.join(description['architecture']), comp, d_ver)
for entry in os.listdir(module_build_path):
- if entry.startswith(build_name):
+ if build_name in entry:
for file in os.listdir(os.path.join(module_build_path, entry)):
if file == lib_file_name:
if folder_only:
@@ -1688,14 +1938,15 @@ class ExternalProgram:
'''Human friendly description of the command'''
return ' '.join(self.command)
- @staticmethod
- def from_bin_list(bins, name):
- if name not in bins:
+ @classmethod
+ def from_bin_list(cls, bt: BinaryTable, name):
+ command = bt.lookup_entry(name)
+ if command is None:
return NonExistingExternalProgram()
- command = bins[name]
- if not isinstance(command, (list, str)):
- raise MesonException('Invalid type {!r} for binary {!r} in cross file'
- ''.format(command, name))
+ return cls.from_entry(name, command)
+
+ @staticmethod
+ def from_entry(name, command):
if isinstance(command, list):
if len(command) == 1:
command = command[0]
@@ -1703,6 +1954,7 @@ class ExternalProgram:
# need to search if the path is an absolute path.
if isinstance(command, list) or os.path.isabs(command):
return ExternalProgram(name, command=command, silent=True)
+ assert isinstance(command, str)
# Search for the command using the specified string!
return ExternalProgram(command, silent=True)
@@ -1924,57 +2176,105 @@ class ExternalLibrary(ExternalDependency):
class ExtraFrameworkDependency(ExternalDependency):
- def __init__(self, name, required, path, env, lang, kwargs):
+ system_framework_paths = None
+
+ def __init__(self, name, required, paths, env, lang, kwargs):
super().__init__('extraframeworks', env, lang, kwargs)
self.name = name
self.required = required
- self.detect(name, path)
- if self.found():
- self.compile_args = ['-I' + os.path.join(self.path, self.name, 'Headers')]
- self.link_args = ['-F' + self.path, '-framework', self.name.split('.')[0]]
-
- def detect(self, name, path):
- # should use the compiler to look for frameworks, rather than peering at
- # the filesystem, so we can also find them when cross-compiling
- if self.want_cross:
+ # Full path to framework directory
+ self.framework_path = None
+ if not self.clib_compiler:
+ raise DependencyException('No C-like compilers are available')
+ if self.system_framework_paths is None:
+ self.system_framework_paths = self.clib_compiler.find_framework_paths(self.env)
+ self.detect(name, paths)
+
+ def detect(self, name, paths):
+ if not paths:
+ paths = self.system_framework_paths
+ for p in paths:
+ mlog.debug('Looking for framework {} in {}'.format(name, p))
+ # We need to know the exact framework path because it's used by the
+ # Qt5 dependency class, and for setting the include path. We also
+ # want to avoid searching in an invalid framework path which wastes
+ # time and can cause a false positive.
+ framework_path = self._get_framework_path(p, name)
+ if framework_path is None:
+ continue
+ # We want to prefer the specified paths (in order) over the system
+ # paths since these are "extra" frameworks.
+ # For example, Python2's framework is in /System/Library/Frameworks and
+ # Python3's framework is in /Library/Frameworks, but both are called
+ # Python.framework. We need to know for sure that the framework was
+ # found in the path we expect.
+ allow_system = p in self.system_framework_paths
+ args = self.clib_compiler.find_framework(name, self.env, [p], allow_system)
+ if args is None:
+ continue
+ self.link_args = args
+ self.framework_path = framework_path.as_posix()
+ self.compile_args = ['-F' + self.framework_path]
+ # We need to also add -I includes to the framework because all
+ # cross-platform projects such as OpenGL, Python, Qt, GStreamer,
+ # etc do not use "framework includes":
+ # https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Tasks/IncludingFrameworks.html
+ incdir = self._get_framework_include_path(framework_path)
+ if incdir:
+ self.compile_args += ['-I' + incdir]
+ self.is_found = True
return
+ def _get_framework_path(self, path, name):
+ p = Path(path)
lname = name.lower()
- if path is None:
- paths = ['/System/Library/Frameworks', '/Library/Frameworks']
- else:
- paths = [path]
- for p in paths:
- for d in os.listdir(p):
- fullpath = os.path.join(p, d)
- if lname != d.rsplit('.', 1)[0].lower():
- continue
- if not stat.S_ISDIR(os.stat(fullpath).st_mode):
- continue
- self.path = p
- self.name = d
- self.is_found = True
- return
+ for d in p.glob('*.framework/'):
+ if lname == d.name.rsplit('.', 1)[0].lower():
+ return d
+ return None
+
+ def _get_framework_latest_version(self, path):
+ versions = []
+ for each in path.glob('Versions/*'):
+ # macOS filesystems are usually case-insensitive
+ if each.name.lower() == 'current':
+ continue
+ versions.append(Version(each.name))
+ return 'Versions/{}/Headers'.format(sorted(versions)[-1]._s)
+
+ def _get_framework_include_path(self, path):
+ # According to the spec, 'Headers' must always be a symlink to the
+ # Headers directory inside the currently-selected version of the
+ # framework, but sometimes frameworks are broken. Look in 'Versions'
+ # for the currently-selected version or pick the latest one.
+ trials = ('Headers', 'Versions/Current/Headers',
+ self._get_framework_latest_version(path))
+ for each in trials:
+ trial = path / each
+ if trial.is_dir():
+ return trial.as_posix()
+ return None
+
+ @staticmethod
+ def get_methods():
+ return [DependencyMethods.EXTRAFRAMEWORK]
def log_info(self):
- return os.path.join(self.path, self.name)
+ return self.framework_path
def log_tried(self):
return 'framework'
def get_dep_identifier(name, kwargs, want_cross):
- # Need immutable objects since the identifier will be used as a dict key
- version_reqs = listify(kwargs.get('version', []))
- if isinstance(version_reqs, list):
- version_reqs = frozenset(version_reqs)
- identifier = (name, version_reqs, want_cross)
+ identifier = (name, want_cross)
for key, value in kwargs.items():
- # 'version' is embedded above as the second element for easy access
+ # 'version' is irrelevant for caching; the caller must check version matches
# 'native' is handled above with `want_cross`
# 'required' is irrelevant for caching; the caller handles it separately
# 'fallback' subprojects cannot be cached -- they must be initialized
- if key in ('version', 'native', 'required', 'fallback',):
+ # 'default_options' is only used in fallback case
+ if key in ('version', 'native', 'required', 'fallback', 'default_options'):
continue
# All keyword arguments are strings, ints, or lists (or lists of lists)
if isinstance(value, list):
@@ -1987,8 +2287,10 @@ display_name_map = {
'dub': 'DUB',
'gmock': 'GMock',
'gtest': 'GTest',
+ 'hdf5': 'HDF5',
'llvm': 'LLVM',
'mpi': 'MPI',
+ 'netcdf': 'NetCDF',
'openmp': 'OpenMP',
'wxwidgets': 'WxWidgets',
}
@@ -2029,7 +2331,7 @@ def find_external_dependency(name, env, kwargs):
d = c()
d._check_version()
pkgdep.append(d)
- except Exception as e:
+ except DependencyException as e:
pkg_exc.append(e)
mlog.debug(str(e))
else:
@@ -2071,7 +2373,7 @@ def find_external_dependency(name, env, kwargs):
# if an exception occurred with the first detection method, re-raise it
# (on the grounds that it came from the preferred dependency detection
# method)
- if pkg_exc[0]:
+ if pkg_exc and pkg_exc[0]:
raise pkg_exc[0]
# we have a list of failed ExternalDependency objects, so we can report
@@ -2079,15 +2381,10 @@ def find_external_dependency(name, env, kwargs):
raise DependencyException('Dependency "%s" not found' % (name) +
(', tried %s' % (tried) if tried else ''))
- # return the last failed dependency object
- if pkgdep:
- return pkgdep[-1]
+ return NotFoundDependency(env)
- # this should never happen
- raise DependencyException('Dependency "%s" not found, but no dependency object to return' % (name))
-
-def _build_external_dependency_list(name, env, kwargs):
+def _build_external_dependency_list(name, env: Environment, kwargs: Dict[str, Any]) -> list:
# First check if the method is valid
if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]:
raise DependencyException('method {!r} is invalid'.format(kwargs['method']))
@@ -2121,6 +2418,14 @@ def _build_external_dependency_list(name, env, kwargs):
candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
return candidates
+ # If it's explicitly requested, use the Extraframework detection method (only)
+ if 'extraframework' == kwargs.get('method', ''):
+ # On OSX, also try framework dependency detector
+ if mesonlib.is_osx():
+ candidates.append(functools.partial(ExtraFrameworkDependency, name,
+ False, None, env, None, kwargs))
+ return candidates
+
# Otherwise, just use the pkgconfig and cmake dependency detector
if 'auto' == kwargs.get('method', 'auto'):
candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
diff --git a/mesonbuild/dependencies/data/CMakeLists.txt b/mesonbuild/dependencies/data/CMakeLists.txt
index 144ffda..64f5b23 100644
--- a/mesonbuild/dependencies/data/CMakeLists.txt
+++ b/mesonbuild/dependencies/data/CMakeLists.txt
@@ -1,24 +1,21 @@
cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
-# Inspired by CMakeDetermineCompilerABI.cmake to set CMAKE_LIBRARY_ARCHITECTURE
-if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
- if(NOT DEFINED CMAKE_LIBRARY_ARCHITECTURE)
- file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
- foreach(dir ${implicit_dirs})
- if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
- set(CMAKE_LIBRARY_ARCHITECTURE "${dir}")
- break()
- endif()
- endforeach()
- endif()
-endif()
-
-find_package("${NAME}" QUIET)
-
set(PACKAGE_FOUND FALSE)
set(_packageName "${NAME}")
string(TOUPPER "${_packageName}" PACKAGE_NAME)
+while(TRUE)
+ find_package("${NAME}" QUIET)
+
+ # ARCHS has to be set via the CMD interface
+ if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND)
set(PACKAGE_FOUND TRUE)
diff --git a/mesonbuild/dependencies/data/CMakePathInfo.txt b/mesonbuild/dependencies/data/CMakePathInfo.txt
new file mode 100644
index 0000000..713c2da
--- /dev/null
+++ b/mesonbuild/dependencies/data/CMakePathInfo.txt
@@ -0,0 +1,29 @@
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+ file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+ foreach(dir ${implicit_dirs})
+ if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+ list(APPEND LIB_ARCH_LIST "${dir}")
+ endif()
+ endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+
+message(STATUS ${TMP_PATHS_LIST})
diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py
index 47beb4e..57a6a96 100644
--- a/mesonbuild/dependencies/dev.py
+++ b/mesonbuild/dependencies/dev.py
@@ -1,4 +1,4 @@
-# Copyright 2013-2017 The Meson development team
+# Copyright 2013-2019 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -21,7 +21,7 @@ import os
import re
from .. import mesonlib
-from ..mesonlib import version_compare, stringlistify, extract_as_list
+from ..mesonlib import version_compare, stringlistify, extract_as_list, MachineChoice
from .base import (
DependencyException, DependencyMethods, ExternalDependency, PkgConfigDependency,
strip_system_libdirs, ConfigToolDependency,
@@ -203,30 +203,40 @@ class LLVMDependency(ConfigToolDependency):
LLVM uses a special tool, llvm-config, which has arguments for getting
c args, cxx args, and ldargs as well as version.
"""
-
- # Ordered list of llvm-config binaries to try. Start with base, then try
- # newest back to oldest (3.5 is arbitrary), and finally the devel version.
- # Please note that llvm-config-6.0 is a development snapshot and it should
- # not be moved to the beginning of the list. The only difference between
- # llvm-config-8 and llvm-config-devel is that the former is used by
- # Debian and the latter is used by FreeBSD.
- tools = [
- 'llvm-config', # base
- 'llvm-config-7', 'llvm-config70',
- 'llvm-config-6.0', 'llvm-config60',
- 'llvm-config-5.0', 'llvm-config50',
- 'llvm-config-4.0', 'llvm-config40',
- 'llvm-config-3.9', 'llvm-config39',
- 'llvm-config-3.8', 'llvm-config38',
- 'llvm-config-3.7', 'llvm-config37',
- 'llvm-config-3.6', 'llvm-config36',
- 'llvm-config-3.5', 'llvm-config35',
- 'llvm-config-8', 'llvm-config-devel', # development snapshot
- ]
tool_name = 'llvm-config'
__cpp_blacklist = {'-DNDEBUG'}
def __init__(self, environment, kwargs):
+ # Ordered list of llvm-config binaries to try. Start with base, then try
+ # newest back to oldest (3.5 is arbitrary), and finally the devel version.
+ # Please note that llvm-config-6.0 is a development snapshot and it should
+ # not be moved to the beginning of the list.
+ self.tools = [
+ 'llvm-config', # base
+ 'llvm-config-8', 'llvm-config80',
+ 'llvm-config-7', 'llvm-config70',
+ 'llvm-config-6.0', 'llvm-config60',
+ 'llvm-config-5.0', 'llvm-config50',
+ 'llvm-config-4.0', 'llvm-config40',
+ 'llvm-config-3.9', 'llvm-config39',
+ 'llvm-config-3.8', 'llvm-config38',
+ 'llvm-config-3.7', 'llvm-config37',
+ 'llvm-config-3.6', 'llvm-config36',
+ 'llvm-config-3.5', 'llvm-config35',
+ 'llvm-config-9', # Debian development snapshot
+ 'llvm-config-devel', # FreeBSD development snapshot
+ ]
+
+ # Fedora starting with Fedora 30 adds a suffix of the number
+ # of bits in the isa that llvm targets, for example, on x86_64
+ # and aarch64 the name will be llvm-config-64, on x86 and arm
+ # it will be llvm-config-32.
+ m = MachineChoice.BUILD if environment.is_cross_build() and kwargs.get('native', True) else MachineChoice.HOST
+ if environment.machines[m].is_64_bit:
+ self.tools.append('llvm-config-64')
+ else:
+ self.tools.append('llvm-config-32')
+
# It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
# the C linker works fine if only using the C API.
super().__init__('LLVM', environment, 'cpp', kwargs)
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index 9e0a65a..c95acff 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -14,14 +14,13 @@
# This file contains the detection logic for miscellaneous external dependencies.
+from pathlib import Path
import functools
import os
import re
import shlex
import sysconfig
-from pathlib import Path
-
from .. import mlog
from .. import mesonlib
from ..environment import detect_cpu_family
@@ -33,6 +32,120 @@ from .base import (
)
+class CoarrayDependency(ExternalDependency):
+ """
+ Coarrays are a Fortran 2008 feature.
+
+ Coarrays are sometimes implemented via external library (GCC+OpenCoarrays),
+ while other compilers just build in support (Cray, IBM, Intel, NAG).
+ Coarrays may be thought of as a high-level language abstraction of
+ low-level MPI calls.
+ """
+ def __init__(self, environment, kwargs):
+ super().__init__('coarray', environment, 'fortran', kwargs)
+ kwargs['required'] = False
+ kwargs['silent'] = True
+ self.is_found = False
+
+ cid = self.get_compiler().get_id()
+ if cid == 'gcc':
+ """ OpenCoarrays is the most commonly used method for Fortran Coarray with GCC """
+ self.is_found = True
+ kwargs['modules'] = 'OpenCoarrays::caf_mpi'
+ cmakedep = CMakeDependency('OpenCoarrays', environment, kwargs)
+ if not cmakedep.found():
+ self.compile_args = ['-fcoarray=single']
+ self.version = 'single image'
+ return
+
+ self.compile_args = cmakedep.get_compile_args()
+ self.link_args = cmakedep.get_link_args()
+ self.version = cmakedep.get_version()
+ elif cid == 'intel':
+ """ Coarrays are built into Intel compilers, no external library needed """
+ self.is_found = True
+ self.link_args = ['-coarray=shared']
+ self.compile_args = self.link_args
+ elif cid == 'nagfor':
+ """ NAG doesn't require any special arguments for Coarray """
+ self.is_found = True
+
+
+class HDF5Dependency(ExternalDependency):
+
+ def __init__(self, environment, kwargs):
+ language = kwargs.get('language', 'c')
+ super().__init__('hdf5', environment, language, kwargs)
+ kwargs['required'] = False
+ kwargs['silent'] = True
+ self.is_found = False
+
+ pkgconfig_files = ['hdf5']
+
+ if language not in ('c', 'cpp', 'fortran'):
+ raise DependencyException('Language {} is not supported with HDF5.'.format(language))
+
+ for pkg in pkgconfig_files:
+ try:
+ pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
+ if pkgdep.found():
+ self.compile_args = pkgdep.get_compile_args()
+ # derive needed libraries by language
+ pd_link_args = pkgdep.get_link_args()
+ link_args = []
+ for larg in pd_link_args:
+ lpath = Path(larg)
+ if lpath.is_file():
+ if language == 'cpp':
+ link_args.append(str(lpath.parent / (lpath.stem + '_hl_cpp' + lpath.suffix)))
+ link_args.append(str(lpath.parent / (lpath.stem + '_cpp' + lpath.suffix)))
+ elif language == 'fortran':
+ link_args.append(str(lpath.parent / (lpath.stem + 'hl_fortran' + lpath.suffix)))
+ link_args.append(str(lpath.parent / (lpath.stem + '_fortran' + lpath.suffix)))
+
+ # HDF5 C libs are required by other HDF5 languages
+ link_args.append(str(lpath.parent / (lpath.stem + '_hl' + lpath.suffix)))
+ link_args.append(larg)
+ else:
+ link_args.append(larg)
+
+ self.link_args = link_args
+ self.version = pkgdep.get_version()
+ self.is_found = True
+ self.pcdep = pkgdep
+ break
+ except Exception:
+ pass
+
+class NetCDFDependency(ExternalDependency):
+
+ def __init__(self, environment, kwargs):
+ language = kwargs.get('language', 'c')
+ super().__init__('netcdf', environment, language, kwargs)
+ kwargs['required'] = False
+ kwargs['silent'] = True
+ self.is_found = False
+
+ pkgconfig_files = ['netcdf']
+
+ if language not in ('c', 'cpp', 'fortran'):
+ raise DependencyException('Language {} is not supported with NetCDF.'.format(language))
+
+ if language == 'fortran':
+ pkgconfig_files.append('netcdf-fortran')
+
+ self.compile_args = []
+ self.link_args = []
+ self.pcdep = []
+ for pkg in pkgconfig_files:
+ pkgdep = PkgConfigDependency(pkg, environment, kwargs, language=self.language)
+ if pkgdep.found():
+ self.compile_args.extend(pkgdep.get_compile_args())
+ self.link_args.extend(pkgdep.get_link_args())
+ self.version = pkgdep.get_version()
+ self.is_found = True
+ self.pcdep.append(pkgdep)
+
class MPIDependency(ExternalDependency):
def __init__(self, environment, kwargs):
@@ -176,7 +289,7 @@ class MPIDependency(ExternalDependency):
mlog.debug(mlog.bold('Standard output\n'), o)
mlog.debug(mlog.bold('Standard error\n'), e)
return
- version = re.search('\d+.\d+.\d+', o)
+ version = re.search(r'\d+.\d+.\d+', o)
if version:
version = version.group(0)
else:
@@ -263,7 +376,7 @@ class OpenMPDependency(ExternalDependency):
else:
mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
- def need_openmp(self):
+ def need_openmp(self) -> bool:
return True
@@ -307,7 +420,7 @@ class Python3Dependency(ExternalDependency):
# There is a python in /System/Library/Frameworks, but that's
# python 2, Python 3 will always be in /Library
candidates.append(functools.partial(
- ExtraFrameworkDependency, 'python', False, '/Library/Frameworks',
+ ExtraFrameworkDependency, 'Python', False, ['/Library/Frameworks'],
environment, kwargs.get('language', None), kwargs))
return candidates
@@ -337,10 +450,14 @@ class Python3Dependency(ExternalDependency):
if pyplat.startswith('win'):
vernum = sysconfig.get_config_var('py_version_nodot')
if self.static:
- libname = 'libpython{}.a'.format(vernum)
+ libpath = Path('libs') / 'libpython{}.a'.format(vernum)
else:
- libname = 'python{}.lib'.format(vernum)
- lib = Path(sysconfig.get_config_var('base')) / 'libs' / libname
+ comp = self.get_compiler()
+ if comp.id == "gcc":
+ libpath = 'python{}.dll'.format(vernum)
+ else:
+ libpath = Path('libs') / 'python{}.lib'.format(vernum)
+ lib = Path(sysconfig.get_config_var('base')) / libpath
elif pyplat == 'mingw':
if self.static:
libname = sysconfig.get_config_var('LIBRARY')
diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py
index c78ebed..9863fb1 100644
--- a/mesonbuild/dependencies/platform.py
+++ b/mesonbuild/dependencies/platform.py
@@ -15,8 +15,6 @@
# This file contains the detection logic for external dependencies that are
# platform-specific (generally speaking).
-from .. import mesonlib
-
from .base import ExternalDependency, DependencyException
@@ -29,11 +27,19 @@ class AppleFrameworks(ExternalDependency):
if not modules:
raise DependencyException("AppleFrameworks dependency requires at least one module.")
self.frameworks = modules
- # FIXME: Use self.clib_compiler to check if the frameworks are available
+ if not self.clib_compiler:
+ raise DependencyException('No C-like compilers are available, cannot find the framework')
+ self.is_found = True
for f in self.frameworks:
- self.link_args += ['-framework', f]
-
- self.is_found = mesonlib.for_darwin(self.want_cross, self.env)
+ args = self.clib_compiler.find_framework(f, env, [])
+ if args is not None:
+ # No compile args are needed for system frameworks
+ self.link_args += args
+ else:
+ self.is_found = False
+
+ def log_info(self):
+ return ', '.join(self.frameworks)
def log_tried(self):
return 'framework'
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 7b3d051..ce1ca68 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -91,9 +91,9 @@ class GnuStepDependency(ConfigToolDependency):
'link_args'))
def find_config(self, versions=None):
- tool = self.tools[0]
+ tool = [self.tools[0]]
try:
- p, out = Popen_safe([tool, '--help'])[:2]
+ p, out = Popen_safe(tool + ['--help'])[:2]
except (FileNotFoundError, PermissionError):
return (None, None)
if p.returncode != 0:
@@ -177,13 +177,13 @@ def _qt_get_private_includes(mod_inc_dir, module, mod_version):
os.path.join(private_dir, 'Qt' + module))
class QtExtraFrameworkDependency(ExtraFrameworkDependency):
- def __init__(self, name, required, path, env, lang, kwargs):
- super().__init__(name, required, path, env, lang, kwargs)
+ def __init__(self, name, required, paths, env, lang, kwargs):
+ super().__init__(name, required, paths, env, lang, kwargs)
self.mod_name = name[2:]
def get_compile_args(self, with_private_headers=False, qt_version="0"):
if self.found():
- mod_inc_dir = os.path.join(self.path, self.name, 'Headers')
+ mod_inc_dir = os.path.join(self.framework_path, 'Headers')
args = ['-I' + mod_inc_dir]
if with_private_headers:
args += ['-I' + dirname for dirname in _qt_get_private_includes(mod_inc_dir, self.mod_name, qt_version)]
@@ -216,9 +216,11 @@ class QtBaseDependency(ExternalDependency):
methods = []
# Prefer pkg-config, then fallback to `qmake -query`
if DependencyMethods.PKGCONFIG in self.methods:
+ mlog.debug('Trying to find qt with pkg-config')
self._pkgconfig_detect(mods, kwargs)
methods.append('pkgconfig')
if not self.is_found and DependencyMethods.QMAKE in self.methods:
+ mlog.debug('Trying to find qt with qmake')
self.from_text = self._qmake_detect(mods, kwargs)
methods.append('qmake-' + self.name)
methods.append('qmake')
@@ -333,22 +335,18 @@ class QtBaseDependency(ExternalDependency):
if prefix:
self.bindir = os.path.join(prefix, 'bin')
- def _find_qmake(self, qmake):
- # Even when cross-compiling, if a cross-info qmake is not specified, we
- # fallback to using the qmake in PATH because that's what we used to do
- if self.env.is_cross_build():
- if 'qmake' in self.env.cross_info.config['binaries']:
- return ExternalProgram.from_bin_list(self.env.cross_info.config['binaries'], 'qmake')
- elif self.env.config_info:
- # Prefer suffixed to unsuffixed version
- p = ExternalProgram.from_bin_list(self.env.config_info.binaries, 'qmake')
- if p.found():
- return p
- return ExternalProgram(qmake, silent=True)
-
def _qmake_detect(self, mods, kwargs):
for qmake in ('qmake-' + self.name, 'qmake'):
- self.qmake = self._find_qmake(qmake)
+ self.qmake = ExternalProgram.from_bin_list(
+ self.env.binaries.host, qmake)
+ if not self.qmake.found():
+ # Even when cross-compiling, if a cross-info qmake is not
+ # specified, we fallback to using the qmake in PATH because
+ # that's what we used to do
+ self.qmake = ExternalProgram.from_bin_list(
+ self.env.binaries.build, qmake)
+ if not self.qmake.found():
+ self.qmake = ExternalProgram(qmake, silent=True)
if not self.qmake.found():
continue
# Check that the qmake is for qt5
@@ -364,9 +362,9 @@ class QtBaseDependency(ExternalDependency):
# Didn't find qmake :(
self.is_found = False
return
- self.version = re.search(self.qtver + '(\.\d+)+', stdo).group(0)
+ self.version = re.search(self.qtver + r'(\.\d+)+', stdo).group(0)
# Query library path, header path, and binary path
- mlog.log("Found qmake:", mlog.bold(self.qmake.get_name()), '(%s)' % self.version)
+ mlog.log("Found qmake:", mlog.bold(self.qmake.get_path()), '(%s)' % self.version)
stdo = Popen_safe(self.qmake.get_command() + ['-query'])[1]
qvars = {}
for line in stdo.split('\n'):
@@ -375,7 +373,9 @@ class QtBaseDependency(ExternalDependency):
continue
(k, v) = tuple(line.split(':', 1))
qvars[k] = v
- if mesonlib.is_osx():
+ # Qt on macOS uses a framework, but Qt for iOS does not
+ if self.env.machines.host.is_darwin() and 'ios' not in qvars['QMAKE_XSPEC']:
+ mlog.debug("Building for macOS, looking for framework")
self._framework_detect(qvars, mods, kwargs)
return qmake
incdir = qvars['QT_INSTALL_HEADERS']
@@ -446,7 +446,8 @@ class QtBaseDependency(ExternalDependency):
for m in modules:
fname = 'Qt' + m
- fwdep = QtExtraFrameworkDependency(fname, False, libdir, self.env,
+ mlog.debug('Looking for qt framework ' + fname)
+ fwdep = QtExtraFrameworkDependency(fname, False, [libdir], self.env,
self.language, fw_kwargs)
self.compile_args.append('-F' + libdir)
if fwdep.found():
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
new file mode 100644
index 0000000..609899c
--- /dev/null
+++ b/mesonbuild/envconfig.py
@@ -0,0 +1,431 @@
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import configparser, os, shlex, subprocess
+import typing
+
+from . import mesonlib
+from .mesonlib import EnvironmentException, MachineChoice, PerMachine
+from . import mlog
+
+
+# These classes contains all the data pulled from configuration files (native
+# and cross file currently), and also assists with the reading environment
+# variables.
+#
+# At this time there isn't an ironclad difference between this and other sources
+# of state like `coredata`. But one rough guide is that much of what is in `coredata` is
+# the *output* of the configuration process: the final decisions after tests.
+# This, on the other hand has *inputs*. The config files are parsed, but
+# otherwise minimally transformed. When more complex fallbacks (environment
+# detection) exist, they are defined elsewhere as functions that construct
+# instances of these classes.
+
+
+known_cpu_families = (
+ 'aarch64',
+ 'arc',
+ 'arm',
+ 'e2k',
+ 'ia64',
+ 'mips',
+ 'mips64',
+ 'parisc',
+ 'ppc',
+ 'ppc64',
+ 'riscv32',
+ 'riscv64',
+ 'rl78',
+ 'rx',
+ 's390x',
+ 'sparc',
+ 'sparc64',
+ 'x86',
+ 'x86_64'
+)
+
+# It would feel more natural to call this "64_BIT_CPU_FAMILES", but
+# python identifiers cannot start with numbers
+CPU_FAMILES_64_BIT = [
+ 'aarch64',
+ 'ia64',
+ 'mips64',
+ 'ppc64',
+ 'riscv64',
+ 'sparc64',
+ 'x86_64',
+]
+
+class MesonConfigFile:
+ @classmethod
+ def parse_datafile(cls, filename):
+ config = configparser.ConfigParser()
+ try:
+ with open(filename, 'r') as f:
+ config.read_file(f, filename)
+ except FileNotFoundError:
+ raise EnvironmentException('File not found: %s.' % filename)
+ return cls.from_config_parser(config)
+
+ @classmethod
+ def from_config_parser(cls, parser: configparser.ConfigParser):
+ out = {}
+ # This is a bit hackish at the moment.
+ for s in parser.sections():
+ section = {}
+ for entry in parser[s]:
+ value = parser[s][entry]
+ # Windows paths...
+ value = value.replace('\\', '\\\\')
+ if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+ raise EnvironmentException('Malformed variable name %s in cross file..' % entry)
+ try:
+ res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
+ except Exception:
+ raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
+
+ for i in (res if isinstance(res, list) else [res]):
+ if not isinstance(i, (str, int, bool)):
+ raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
+
+ section[entry] = res
+
+ out[s] = section
+ return out
+
+class HasEnvVarFallback:
+ """
+ A tiny class to indicate that this class contains data that can be
+ initialized from either a config file or environment variables. The `fallback`
+ field says whether env vars should be used. Downstream logic (e.g. subclass
+ methods) can check it to decide what to do, since env vars are currently
+ lazily decoded.
+
+ Frankly, this is a pretty silly class at the moment. The hope is the way
+ that we deal with environment variables will become more structured, and
+ this can be a starting point.
+ """
+ def __init__(self, fallback = True):
+ self.fallback = fallback
+
+class Properties(HasEnvVarFallback):
+ def __init__(
+ self,
+ properties: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None,
+ fallback = True):
+ super().__init__(fallback)
+ self.properties = properties or {}
+
+ def has_stdlib(self, language):
+ return language + '_stdlib' in self.properties
+
+ def get_stdlib(self, language):
+ return self.properties[language + '_stdlib']
+
+ def get_root(self):
+ return self.properties.get('root', None)
+
+ def get_sys_root(self):
+ return self.properties.get('sys_root', None)
+
+ def __eq__(self, other):
+ if isinstance(other, type(self)):
+ return self.properties == other.properties
+ return NotImplemented
+
+ # TODO consider removing so Properties is less freeform
+ def __getitem__(self, key):
+ return self.properties[key]
+
+ # TODO consider removing so Properties is less freeform
+ def __contains__(self, item):
+ return item in self.properties
+
+ # TODO consider removing, for same reasons as above
+ def get(self, key, default=None):
+ return self.properties.get(key, default)
+
+class MachineInfo:
+ def __init__(self, system, cpu_family, cpu, endian):
+ self.system = system
+ self.cpu_family = cpu_family
+ self.cpu = cpu
+ self.endian = endian
+ self.is_64_bit = cpu_family in CPU_FAMILES_64_BIT
+
+ def __eq__(self, other):
+ if self.__class__ is not other.__class__:
+ return NotImplemented
+ return \
+ self.system == other.system and \
+ self.cpu_family == other.cpu_family and \
+ self.cpu == other.cpu and \
+ self.endian == other.endian
+
+ def __ne__(self, other):
+ if self.__class__ is not other.__class__:
+ return NotImplemented
+ return not self.__eq__(other)
+
+ def __repr__(self):
+ return '<MachineInfo: {} {} ({})>'.format(self.system, self.cpu_family, self.cpu)
+
+ @staticmethod
+ def from_literal(literal):
+ minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
+ if set(literal) < minimum_literal:
+ raise EnvironmentException(
+ 'Machine info is currently {}\n'.format(literal) +
+ 'but is missing {}.'.format(minimum_literal - set(literal)))
+
+ cpu_family = literal['cpu_family']
+ if cpu_family not in known_cpu_families:
+ mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family)
+
+ endian = literal['endian']
+ if endian not in ('little', 'big'):
+ mlog.warning('Unknown endian %s' % endian)
+
+ return MachineInfo(
+ literal['system'],
+ cpu_family,
+ literal['cpu'],
+ endian)
+
+ def is_windows(self):
+ """
+ Machine is windows?
+ """
+ return self.system == 'windows'
+
+ def is_cygwin(self):
+ """
+ Machine is cygwin?
+ """
+ return self.system == 'cygwin'
+
+ def is_linux(self):
+ """
+ Machine is linux?
+ """
+ return self.system == 'linux'
+
+ def is_darwin(self):
+ """
+ Machine is Darwin (iOS/OS X)?
+ """
+ return self.system in ('darwin', 'ios')
+
+ def is_android(self):
+ """
+ Machine is Android?
+ """
+ return self.system == 'android'
+
+ def is_haiku(self):
+ """
+ Machine is Haiku?
+ """
+ return self.system == 'haiku'
+
+ def is_openbsd(self):
+ """
+ Machine is OpenBSD?
+ """
+ return self.system == 'openbsd'
+
+ # Various prefixes and suffixes for import libraries, shared libraries,
+ # static libraries, and executables.
+ # Versioning is added to these names in the backends as-needed.
+
+ def get_exe_suffix(self):
+ if self.is_windows() or self.is_cygwin():
+ return 'exe'
+ else:
+ return ''
+
+ def get_object_suffix(self):
+ if self.is_windows():
+ return 'obj'
+ else:
+ return 'o'
+
+ def libdir_layout_is_win(self):
+ return self.is_windows() \
+ or self.is_cygwin()
+
+class PerMachineDefaultable(PerMachine):
+ """Extends `PerMachine` with the ability to default from `None`s.
+ """
+ def __init__(self):
+ super().__init__(None, None, None)
+
+ def default_missing(self):
+ """Default host to build and target to host.
+
+ This allows just specifying nothing in the native case, just host in the
+ cross non-compiler case, and just target in the native-built
+ cross-compiler case.
+ """
+ if self.host is None:
+ self.host = self.build
+ if self.target is None:
+ self.target = self.host
+
+ def miss_defaulting(self):
+ """Unset definitions that duplicate their predecessor, resetting them to None.
+
+ This is the inverse of ''default_missing''. By removing defaulted
+ machines, we can elaborate the original and then redefault them and thus
+ avoid repeating the elaboration explicitly.
+ """
+ if self.target == self.host:
+ self.target = None
+ if self.host == self.build:
+ self.host = None
+
+class MachineInfos(PerMachineDefaultable):
+ def matches_build_machine(self, machine: MachineChoice):
+ return self.build == self[machine]
+
+class BinaryTable(HasEnvVarFallback):
+ def __init__(
+ self,
+ binaries: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None,
+
+ fallback = True):
+ super().__init__(fallback)
+ self.binaries = binaries or {}
+ for name, command in self.binaries.items():
+ if not isinstance(command, (list, str)):
+ # TODO generalize message
+ raise mesonlib.MesonException(
+ 'Invalid type {!r} for binary {!r} in cross file'
+ ''.format(command, name))
+
+ # Map from language identifiers to environment variables.
+ evarMap = {
+ # Compilers
+ 'c': 'CC',
+ 'cpp': 'CXX',
+ 'cs': 'CSC',
+ 'd': 'DC',
+ 'fortran': 'FC',
+ 'objc': 'OBJC',
+ 'objcpp': 'OBJCXX',
+ 'rust': 'RUSTC',
+ 'vala': 'VALAC',
+
+ # Binutils
+ 'strip': 'STRIP',
+ 'ar': 'AR',
+ 'windres': 'WINDRES',
+
+ 'cmake': 'CMAKE',
+ 'qmake': 'QMAKE',
+ 'pkgconfig': 'PKG_CONFIG',
+ }
+
+ @classmethod
+ def detect_ccache(cls):
+ try:
+ has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except OSError:
+ has_ccache = 1
+ if has_ccache == 0:
+ cmdlist = ['ccache']
+ else:
+ cmdlist = []
+ return cmdlist
+
+ @classmethod
+ def _warn_about_lang_pointing_to_cross(cls, compiler_exe, evar):
+ evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME')
+ if evar_str == compiler_exe:
+ mlog.warning('''Env var %s seems to point to the cross compiler.
+This is probably wrong, it should always point to the native compiler.''' % evar)
+
+ @classmethod
+ def parse_entry(cls, entry):
+ compiler = mesonlib.stringlistify(entry)
+ # Ensure ccache exists and remove it if it doesn't
+ if compiler[0] == 'ccache':
+ compiler = compiler[1:]
+ ccache = cls.detect_ccache()
+ else:
+ ccache = []
+ # Return value has to be a list of compiler 'choices'
+ return compiler, ccache
+
+ def lookup_entry(self, name):
+ """Lookup binary
+
+ Returns command with args as list if found, Returns `None` if nothing is
+ found.
+
+ First tries looking in explicit map, then tries environment variable.
+ """
+ # Try explicit map, don't fall back on env var
+ command = self.binaries.get(name)
+ if command is not None:
+ command = mesonlib.stringlistify(command)
+ # Relies on there being no "" env var
+ evar = self.evarMap.get(name, "")
+ self._warn_about_lang_pointing_to_cross(command[0], evar)
+ elif self.fallback:
+ # Relies on there being no "" env var
+ evar = self.evarMap.get(name, "")
+ command = os.environ.get(evar)
+ if command is not None:
+ command = shlex.split(command)
+ return command
+
+class Directories:
+
+ """Data class that holds information about directories for native and cross
+ builds.
+ """
+
+ def __init__(self, bindir: typing.Optional[str] = None, datadir: typing.Optional[str] = None,
+ includedir: typing.Optional[str] = None, infodir: typing.Optional[str] = None,
+ libdir: typing.Optional[str] = None, libexecdir: typing.Optional[str] = None,
+ localedir: typing.Optional[str] = None, localstatedir: typing.Optional[str] = None,
+ mandir: typing.Optional[str] = None, prefix: typing.Optional[str] = None,
+ sbindir: typing.Optional[str] = None, sharedstatedir: typing.Optional[str] = None,
+ sysconfdir: typing.Optional[str] = None):
+ self.bindir = bindir
+ self.datadir = datadir
+ self.includedir = includedir
+ self.infodir = infodir
+ self.libdir = libdir
+ self.libexecdir = libexecdir
+ self.localedir = localedir
+ self.localstatedir = localstatedir
+ self.mandir = mandir
+ self.prefix = prefix
+ self.sbindir = sbindir
+ self.sharedstatedir = sharedstatedir
+ self.sysconfdir = sysconfdir
+
+ def __contains__(self, key: str) -> str:
+ return hasattr(self, key)
+
+ def __getitem__(self, key: str) -> str:
+ return getattr(self, key)
+
+ def __setitem__(self, key: str, value: typing.Optional[str]) -> None:
+ setattr(self, key, value)
+
+ def __iter__(self) -> typing.Iterator[typing.Tuple[str, str]]:
+ return iter(self.__dict__.items())
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index f0fa1da..58adb06 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -13,13 +13,20 @@
# limitations under the License.
import configparser, os, platform, re, sys, shlex, shutil, subprocess
+import typing
from . import coredata
from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker
from . import mesonlib
-from .mesonlib import MesonException, EnvironmentException, PerMachine, Popen_safe
+from .mesonlib import (
+ MesonException, EnvironmentException, MachineChoice, PerMachine, Popen_safe,
+)
from . import mlog
+from .envconfig import (
+ BinaryTable, Directories, MachineInfo, MachineInfos, MesonConfigFile,
+ PerMachineDefaultable, Properties, known_cpu_families,
+)
from . import compilers
from .compilers import (
CompilerType,
@@ -31,6 +38,7 @@ from .compilers import (
is_source,
)
from .compilers import (
+ Compiler,
ArmCCompiler,
ArmCPPCompiler,
ArmclangCCompiler,
@@ -41,6 +49,7 @@ from .compilers import (
ClangObjCPPCompiler,
ClangClCCompiler,
ClangClCPPCompiler,
+ FlangFortranCompiler,
G95FortranCompiler,
GnuCCompiler,
GnuCPPCompiler,
@@ -55,10 +64,13 @@ from .compilers import (
IntelFortranCompiler,
JavaCompiler,
MonoCompiler,
+ CudaCompiler,
VisualStudioCsCompiler,
NAGFortranCompiler,
Open64FortranCompiler,
PathScaleFortranCompiler,
+ PGICCompiler,
+ PGICPPCompiler,
PGIFortranCompiler,
RustCompiler,
CcrxCCompiler,
@@ -71,28 +83,6 @@ from .compilers import (
build_filename = 'meson.build'
-known_cpu_families = (
- 'aarch64',
- 'arc',
- 'arm',
- 'e2k',
- 'ia64',
- 'mips',
- 'mips64',
- 'parisc',
- 'ppc',
- 'ppc64',
- 'riscv32',
- 'riscv64',
- 'rl78',
- 'rx',
- 's390x',
- 'sparc',
- 'sparc64',
- 'x86',
- 'x86_64'
-)
-
def detect_gcovr(version='3.1', log=False):
gcovr_exe = 'gcovr'
try:
@@ -218,6 +208,8 @@ def detect_cpu_family(compilers):
trial = 'arm'
elif trial.startswith('ppc64'):
trial = 'ppc64'
+ elif trial == 'macppc':
+ trial = 'ppc'
elif trial == 'powerpc':
trial = 'ppc'
# FreeBSD calls both ppc and ppc64 "powerpc".
@@ -244,6 +236,10 @@ def detect_cpu_family(compilers):
trial = 'arm'
# Add more quirks here as bugs are reported. Keep in sync with detect_cpu()
# below.
+ elif trial == 'parisc64':
+ # ATM there is no 64 bit userland for PA-RISC. Thus always
+ # report it as 32 bit for simplicity.
+ trial = 'parisc'
if trial not in known_cpu_families:
mlog.warning('Unknown CPU family {!r}, please report this at '
@@ -285,6 +281,37 @@ def detect_msys2_arch():
return os.environ['MSYSTEM_CARCH']
return None
+def detect_machine_info(compilers = None) -> MachineInfo:
+ """Detect the machine we're running on
+
+ If compilers are not provided, we cannot know as much. None out those
+ fields to avoid accidentally depending on partial knowledge. The
+ underlying ''detect_*'' method can be called to explicitly use the
+ partial information.
+ """
+ return MachineInfo(
+ detect_system(),
+ detect_cpu_family(compilers) if compilers is not None else None,
+ detect_cpu(compilers) if compilers is not None else None,
+ sys.byteorder)
+
+# TODO make this compare two `MachineInfo`s purely. How important is the
+# `detect_cpu_family({})` distinction? It is the one impediment to that.
+def machine_info_can_run(machine_info: MachineInfo):
+ """Whether we can run binaries for this machine on the current machine.
+
+ Can almost always run 32-bit binaries on 64-bit natively if the host
+ and build systems are the same. We don't pass any compilers to
+ detect_cpu_family() here because we always want to know the OS
+ architecture, not what the compiler environment tells us.
+ """
+ if machine_info.system != detect_system():
+ return False
+ true_build_cpu_family = detect_cpu_family({})
+ return \
+ (machine_info.cpu_family == true_build_cpu_family) or \
+ ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86'))
+
def search_version(text):
# Usually of the type 4.1.4 but compiler output may contain
# stuff like this:
@@ -297,8 +324,26 @@ def search_version(text):
# This regex is reaching magic levels. If it ever needs
# to be updated, do not complexify but convert to something
# saner instead.
- version_regex = '(?<!(\d|\.))(\d{1,2}(\.\d+)+(-[a-zA-Z0-9]+)?)'
- match = re.search(version_regex, text)
+ # We'll demystify it a bit with a verbose definition.
+ version_regex = re.compile(r"""
+ (?<! # Zero-width negative lookbehind assertion
+ (
+ \d # One digit
+ | \. # Or one period
+ ) # One occurrence
+ )
+ # Following pattern must not follow a digit or period
+ (
+ \d{1,2} # One or two digits
+ (
+ \.\d+ # Period and one or more digits
+ )+ # One or more occurrences
+ (
+ -[a-zA-Z0-9]+ # Hyphen and one or more alphanumeric
+ )? # Zero or one occurrence
+ ) # One occurrence
+ """, re.VERBOSE)
+ match = version_regex.search(text)
if match:
return match.group(0)
return 'unknown version'
@@ -306,6 +351,7 @@ def search_version(text):
class Environment:
private_dir = 'meson-private'
log_dir = 'meson-logs'
+ info_dir = 'meson-info'
def __init__(self, source_dir, build_dir, options):
self.source_dir = source_dir
@@ -316,8 +362,10 @@ class Environment:
if build_dir is not None:
self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
self.log_dir = os.path.join(build_dir, Environment.log_dir)
+ self.info_dir = os.path.join(build_dir, Environment.info_dir)
os.makedirs(self.scratch_dir, exist_ok=True)
os.makedirs(self.log_dir, exist_ok=True)
+ os.makedirs(self.info_dir, exist_ok=True)
try:
self.coredata = coredata.load(self.get_build_dir())
self.first_invocation = False
@@ -336,42 +384,67 @@ class Environment:
else:
# Just create a fresh coredata in this case
self.create_new_coredata(options)
- self.exe_wrapper = None
self.machines = MachineInfos()
# Will be fully initialized later using compilers later.
- self.machines.detect_build()
- if self.coredata.cross_file:
- self.cross_info = CrossBuildInfo(self.coredata.cross_file)
- if 'exe_wrapper' in self.cross_info.config['binaries']:
- from .dependencies import ExternalProgram
- self.exe_wrapper = ExternalProgram.from_bin_list(
- self.cross_info.config['binaries'], 'exe_wrapper')
- if 'host_machine' in self.cross_info.config:
- self.machines.host = MachineInfo.from_literal(
- self.cross_info.config['host_machine'])
- if 'target_machine' in self.cross_info.config:
- self.machines.target = MachineInfo.from_literal(
- self.cross_info.config['target_machine'])
- else:
- self.cross_info = None
- self.machines.default_missing()
+ self.detect_build_machine()
+
+ # Similar to coredata.compilers and build.compilers, but lower level in
+ # that there is no meta data, only names/paths.
+ self.binaries = PerMachineDefaultable()
- if self.coredata.config_files:
- self.config_info = coredata.ConfigData(
+ # Misc other properties about each machine.
+ self.properties = PerMachineDefaultable()
+
+ # Just uses hard-coded defaults and environment variables. Might be
+ # overwritten by a native file.
+ self.binaries.build = BinaryTable()
+ self.properties.build = Properties()
+
+ # Store paths for native and cross build files. There is no target
+ # machine information here because nothing is installed for the target
+ # architecture, just the build and host architectures
+ self.paths = PerMachineDefaultable()
+
+ if self.coredata.config_files is not None:
+ config = MesonConfigFile.from_config_parser(
coredata.load_configs(self.coredata.config_files))
+ self.binaries.build = BinaryTable(config.get('binaries', {}))
+ self.paths.build = Directories(**config.get('paths', {}))
+
+ if self.coredata.cross_file is not None:
+ config = MesonConfigFile.parse_datafile(self.coredata.cross_file)
+ self.properties.host = Properties(config.get('properties', {}), False)
+ self.binaries.host = BinaryTable(config.get('binaries', {}), False)
+ if 'host_machine' in config:
+ self.machines.host = MachineInfo.from_literal(config['host_machine'])
+ if 'target_machine' in config:
+ self.machines.target = MachineInfo.from_literal(config['target_machine'])
+ self.paths.host = Directories(**config.get('paths', {}))
+
+ self.machines.default_missing()
+ self.binaries.default_missing()
+ self.properties.default_missing()
+ self.paths.default_missing()
+
+ exe_wrapper = self.binaries.host.lookup_entry('exe_wrapper')
+ if exe_wrapper is not None:
+ from .dependencies import ExternalProgram
+ self.exe_wrapper = ExternalProgram.from_bin_list(
+ self.binaries.host,
+ 'exe_wrapper')
else:
- self.config_info = coredata.ConfigData()
+ self.exe_wrapper = None
self.cmd_line_options = options.cmd_line_options.copy()
# List of potential compilers.
if mesonlib.is_windows():
- self.default_c = ['cl', 'cc', 'gcc', 'clang', 'clang-cl']
- self.default_cpp = ['cl', 'c++', 'g++', 'clang++', 'clang-cl']
+ self.default_c = ['cl', 'cc', 'gcc', 'clang', 'clang-cl', 'pgcc']
+ self.default_cpp = ['cl', 'c++', 'g++', 'clang++', 'clang-cl', 'pgc++']
else:
- self.default_c = ['cc', 'gcc', 'clang']
- self.default_cpp = ['c++', 'g++', 'clang++']
+ self.default_c = ['cc', 'gcc', 'clang', 'pgcc']
+ self.default_cpp = ['c++', 'g++', 'clang++', 'pgc++']
if mesonlib.is_windows():
self.default_cs = ['csc', 'mcs']
else:
@@ -379,16 +452,21 @@ class Environment:
self.default_objc = ['cc']
self.default_objcpp = ['c++']
self.default_d = ['ldc2', 'ldc', 'gdc', 'dmd']
- self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort']
+ self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort', 'pgfortran']
self.default_java = ['javac']
+ self.default_cuda = ['nvcc']
self.default_rust = ['rustc']
self.default_swift = ['swiftc']
self.default_vala = ['valac']
self.default_static_linker = ['ar']
+ self.default_strip = ['strip']
self.vs_static_linker = ['lib']
self.clang_cl_static_linker = ['llvm-lib']
+ self.cuda_static_linker = ['nvlink']
self.gcc_static_linker = ['gcc-ar']
self.clang_static_linker = ['llvm-ar']
+ self.default_cmake = ['cmake']
+ self.default_pkgconfig = ['pkg-config']
# Various prefixes and suffixes for import libraries, shared libraries,
# static libraries, and executables.
@@ -406,11 +484,6 @@ class Environment:
self.exe_suffix = ''
self.object_suffix = 'o'
self.win_libdir_layout = False
- if 'STRIP' in os.environ:
- self.native_strip_bin = shlex.split(
- os.environ[BinaryTable.evarMap['strip']])
- else:
- self.native_strip_bin = ['strip']
def create_new_coredata(self, options):
# WARNING: Don't use any values from coredata in __init__. It gets
@@ -422,7 +495,7 @@ class Environment:
self.first_invocation = True
def is_cross_build(self):
- return self.cross_info is not None
+ return not self.machines.matches_build_machine(MachineChoice.HOST)
def dump_coredata(self):
return coredata.save(self.coredata, self.get_build_dir())
@@ -521,32 +594,30 @@ class Environment:
The list of compilers is detected in the exact same way for
C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here.
'''
- is_cross = False
- exe_wrap = None
- evar = BinaryTable.evarMap[lang]
-
- if self.is_cross_build() and want_cross:
- if lang not in self.cross_info.config['binaries']:
- raise EnvironmentException('{!r} compiler binary not defined in cross file'.format(lang))
- compilers, ccache = BinaryTable.parse_entry(
- mesonlib.stringlistify(self.cross_info.config['binaries'][lang]))
- BinaryTable.warn_about_lang_pointing_to_cross(compilers[0], evar)
- # Return value has to be a list of compiler 'choices'
- compilers = [compilers]
- is_cross = True
- exe_wrap = self.get_exe_wrapper()
- elif evar in os.environ:
- compilers, ccache = BinaryTable.parse_entry(
- shlex.split(os.environ[evar]))
+
+ # This morally assumes `want_cross = !native`. It may not yet be
+ # consistently set that way in the non cross build case, but it doesn't
+ # really matter since both options are the same in that case.
+ for_machine = MachineChoice.HOST if want_cross else MachineChoice.BUILD
+
+ value = self.binaries[for_machine].lookup_entry(lang)
+ if value is not None:
+ compilers, ccache = BinaryTable.parse_entry(value)
# Return value has to be a list of compiler 'choices'
compilers = [compilers]
- elif lang in self.config_info.binaries:
- compilers, ccache = BinaryTable.parse_entry(
- mesonlib.stringlistify(self.config_info.binaries[lang]))
- compilers = [compilers]
else:
+ if not self.machines.matches_build_machine(for_machine):
+ raise EnvironmentException('{!r} compiler binary not defined in cross or native file'.format(lang))
compilers = getattr(self, 'default_' + lang)
ccache = BinaryTable.detect_ccache()
+
+ if self.machines.matches_build_machine(for_machine):
+ is_cross = False
+ exe_wrap = None
+ else:
+ is_cross = True
+ exe_wrap = self.get_exe_wrapper()
+
return compilers, ccache, is_cross, exe_wrap
def _handle_exceptions(self, exceptions, binaries, bintype='compiler'):
@@ -560,6 +631,7 @@ class Environment:
def _detect_c_or_cpp_compiler(self, lang, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, want_cross)
+
for compiler in compilers:
if isinstance(compiler, str):
compiler = [compiler]
@@ -682,6 +754,9 @@ class Environment:
target = 'x86'
cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler
return cls(compiler, version, is_cross, exe_wrap, target)
+ if 'PGI Compilers' in out:
+ cls = PGICCompiler if lang == 'c' else PGICPPCompiler
+ return cls(ccache + compiler, version, is_cross, exe_wrap)
if '(ICC)' in out:
if mesonlib.for_darwin(want_cross, self):
compiler_type = CompilerType.ICC_OSX
@@ -709,6 +784,40 @@ class Environment:
def detect_cpp_compiler(self, want_cross):
return self._detect_c_or_cpp_compiler('cpp', want_cross)
+ def detect_cuda_compiler(self, want_cross):
+ popen_exceptions = {}
+ compilers, ccache, is_cross, exe_wrap = self._get_compilers('cuda', want_cross)
+ for compiler in compilers:
+ if isinstance(compiler, str):
+ compiler = [compiler]
+ else:
+ raise EnvironmentException()
+ arg = '--version'
+ try:
+ p, out, err = Popen_safe(compiler + [arg])
+ except OSError as e:
+ popen_exceptions[' '.join(compiler + [arg])] = e
+ continue
+ # Example nvcc printout:
+ #
+ # nvcc: NVIDIA (R) Cuda compiler driver
+ # Copyright (c) 2005-2018 NVIDIA Corporation
+ # Built on Sat_Aug_25_21:08:01_CDT_2018
+ # Cuda compilation tools, release 10.0, V10.0.130
+ #
+ # search_version() first finds the "10.0" after "release",
+ # rather than the more precise "10.0.130" after "V".
+            # The patch version number is occasionally important; for
+            # instance, on Linux,
+ # - CUDA Toolkit 8.0.44 requires NVIDIA Driver 367.48
+ # - CUDA Toolkit 8.0.61 requires NVIDIA Driver 375.26
+ # Luckily, the "V" also makes it very simple to extract
+ # the full version:
+ version = out.strip().split('V')[-1]
+ cls = CudaCompiler
+ return cls(ccache + compiler, version, is_cross, exe_wrap)
+ raise EnvironmentException('Could not find suitable CUDA compiler: "' + ' '.join(compilers) + '"')
+
def detect_fortran_compiler(self, want_cross):
popen_exceptions = {}
compilers, ccache, is_cross, exe_wrap = self._get_compilers('fortran', want_cross)
@@ -761,6 +870,9 @@ class Environment:
if 'PGI Compilers' in out:
return PGIFortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
+ if 'flang' in out or 'clang' in out:
+ return FlangFortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
+
if 'Open64 Compiler Suite' in err:
return Open64FortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
@@ -794,7 +906,7 @@ class Environment:
return GnuObjCCompiler(ccache + compiler, version, compiler_type, is_cross, exe_wrap, defines)
if out.startswith('Apple LLVM'):
return ClangObjCCompiler(ccache + compiler, version, CompilerType.CLANG_OSX, is_cross, exe_wrap)
- if out.startswith('clang'):
+ if out.startswith(('clang', 'OpenBSD clang')):
return ClangObjCCompiler(ccache + compiler, version, CompilerType.CLANG_STANDARD, is_cross, exe_wrap)
self._handle_exceptions(popen_exceptions, compilers)
@@ -821,14 +933,13 @@ class Environment:
return GnuObjCPPCompiler(ccache + compiler, version, compiler_type, is_cross, exe_wrap, defines)
if out.startswith('Apple LLVM'):
return ClangObjCPPCompiler(ccache + compiler, version, CompilerType.CLANG_OSX, is_cross, exe_wrap)
- if out.startswith('clang'):
+ if out.startswith(('clang', 'OpenBSD clang')):
return ClangObjCPPCompiler(ccache + compiler, version, CompilerType.CLANG_STANDARD, is_cross, exe_wrap)
self._handle_exceptions(popen_exceptions, compilers)
def detect_java_compiler(self):
- if 'java' in self.config_info.binaries:
- exelist = mesonlib.stringlistify(self.config_info.binaries['java'])
- else:
+ exelist = self.binaries.host.lookup_entry('java')
+ if exelist is None:
# TODO support fallback
exelist = [self.default_java[0]]
@@ -838,6 +949,10 @@ class Environment:
raise EnvironmentException('Could not execute Java compiler "%s"' % ' '.join(exelist))
if 'javac' in out or 'javac' in err:
version = search_version(err if 'javac' in err else out)
+ if not version or version == 'unknown version':
+ parts = (err if 'javac' in err else out).split()
+ if len(parts) > 1:
+ version = parts[1]
return JavaCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
@@ -862,13 +977,11 @@ class Environment:
self._handle_exceptions(popen_exceptions, compilers)
def detect_vala_compiler(self):
- if 'VALAC' in os.environ:
- exelist = shlex.split(os.environ['VALAC'])
- elif 'vala' in self.config_info.binaries:
- exelist = mesonlib.stringlistify(self.config_info.binaries['vala'])
- else:
+ exelist = self.binaries.host.lookup_entry('vala')
+ if exelist is None:
# TODO support fallback
exelist = [self.default_vala[0]]
+
try:
p, out = Popen_safe(exelist + ['--version'])[0:2]
except OSError:
@@ -899,20 +1012,15 @@ class Environment:
self._handle_exceptions(popen_exceptions, compilers)
def detect_d_compiler(self, want_cross):
- is_cross = False
+ is_cross = want_cross and self.is_cross_build()
+ exelist = self.binaries.host.lookup_entry('d')
# Search for a D compiler.
# We prefer LDC over GDC unless overridden with the DC
# environment variable because LDC has a much more
# up to date language version at time (2016).
- if 'DC' in os.environ:
- exelist = shlex.split(os.environ['DC'])
+ if exelist is not None:
if os.path.basename(exelist[-1]).startswith(('ldmd', 'gdmd')):
raise EnvironmentException('Meson doesn\'t support %s as it\'s only a DMD frontend for another compiler. Please provide a valid value for DC or unset it so that Meson can resolve the compiler by itself.' % exelist[-1])
- elif self.is_cross_build() and want_cross:
- exelist = mesonlib.stringlistify(self.cross_info.config['binaries']['d'])
- is_cross = True
- elif 'd' in self.config_info.binaries:
- exelist = mesonlib.stringlistify(self.config_info.binaries['d'])
else:
for d in self.default_d:
if shutil.which(d):
@@ -947,11 +1055,11 @@ class Environment:
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
def detect_swift_compiler(self):
- if 'swift' in self.config_info.binaries:
- exelist = mesonlib.stringlistify(self.config_info.binaries['swift'])
- else:
+ exelist = self.binaries.host.lookup_entry('swift')
+ if exelist is None:
# TODO support fallback
exelist = [self.default_swift[0]]
+
try:
p, _, err = Popen_safe(exelist + ['-v'])
except OSError:
@@ -961,7 +1069,7 @@ class Environment:
return compilers.SwiftCompiler(exelist, version)
raise EnvironmentException('Unknown compiler "' + ' '.join(exelist) + '"')
- def detect_compilers(self, lang, need_cross_compiler):
+ def compilers_from_language(self, lang: str, need_cross_compiler: bool):
comp = None
cross_comp = None
if lang == 'c':
@@ -976,6 +1084,10 @@ class Environment:
comp = self.detect_objc_compiler(False)
if need_cross_compiler:
cross_comp = self.detect_objc_compiler(True)
+ elif lang == 'cuda':
+ comp = self.detect_cuda_compiler(False)
+ if need_cross_compiler:
+ cross_comp = self.detect_cuda_compiler(True)
elif lang == 'objcpp':
comp = self.detect_objcpp_compiler(False)
if need_cross_compiler:
@@ -1009,18 +1121,35 @@ class Environment:
if need_cross_compiler:
raise EnvironmentException('Cross compilation with Swift is not working yet.')
# cross_comp = self.environment.detect_fortran_compiler(True)
+ else:
+ return None, None
return comp, cross_comp
+ def check_compilers(self, lang: str, comp: Compiler, cross_comp: Compiler):
+ if comp is None:
+ raise EnvironmentException('Tried to use unknown language "%s".' % lang)
+
+ comp.sanity_check(self.get_scratch_dir(), self)
+ if cross_comp:
+ cross_comp.sanity_check(self.get_scratch_dir(), self)
+
+ def detect_compilers(self, lang: str, need_cross_compiler: bool):
+ (comp, cross_comp) = self.compilers_from_language(lang, need_cross_compiler)
+ if comp is not None:
+ self.coredata.process_new_compilers(lang, comp, cross_comp, self)
+ return comp, cross_comp
+
def detect_static_linker(self, compiler):
- if compiler.is_cross:
- linker = self.cross_info.config['binaries']['ar']
- if isinstance(linker, str):
- linker = [linker]
+ for_machine = MachineChoice.HOST if compiler.is_cross else MachineChoice.BUILD
+ linker = self.binaries[for_machine].lookup_entry('ar')
+ if linker is not None:
linkers = [linker]
else:
- evar = BinaryTable.evarMap['ar']
- if evar in os.environ:
+ evar = 'AR'
+ if isinstance(compiler, compilers.CudaCompiler):
+ linkers = [self.cuda_static_linker, self.default_static_linker]
+ elif evar in os.environ:
linkers = [shlex.split(os.environ[evar])]
elif isinstance(compiler, compilers.VisualStudioCCompiler):
linkers = [self.vs_static_linker, self.clang_cl_static_linker]
@@ -1070,6 +1199,9 @@ class Environment:
self._handle_exceptions(popen_exceptions, linkers, 'linker')
raise EnvironmentException('Unknown static linker "%s"' % ' '.join(linkers))
+ def detect_build_machine(self, compilers = None):
+ self.machines.build = detect_machine_info(compilers)
+
def get_source_dir(self):
return self.source_dir
@@ -1079,46 +1211,46 @@ class Environment:
def get_exe_suffix(self):
return self.exe_suffix
- def get_import_lib_dir(self):
+ def get_import_lib_dir(self) -> str:
"Install dir for the import library (library used for linking)"
return self.get_libdir()
- def get_shared_module_dir(self):
+ def get_shared_module_dir(self) -> str:
"Install dir for shared modules that are loaded at runtime"
return self.get_libdir()
- def get_shared_lib_dir(self):
+ def get_shared_lib_dir(self) -> str:
"Install dir for the shared library"
if self.win_libdir_layout:
return self.get_bindir()
return self.get_libdir()
- def get_static_lib_dir(self):
+ def get_static_lib_dir(self) -> str:
"Install dir for the static library"
return self.get_libdir()
def get_object_suffix(self):
return self.object_suffix
- def get_prefix(self):
+ def get_prefix(self) -> str:
return self.coredata.get_builtin_option('prefix')
- def get_libdir(self):
+ def get_libdir(self) -> str:
return self.coredata.get_builtin_option('libdir')
- def get_libexecdir(self):
+ def get_libexecdir(self) -> str:
return self.coredata.get_builtin_option('libexecdir')
- def get_bindir(self):
+ def get_bindir(self) -> str:
return self.coredata.get_builtin_option('bindir')
- def get_includedir(self):
+ def get_includedir(self) -> str:
return self.coredata.get_builtin_option('includedir')
- def get_mandir(self):
+ def get_mandir(self) -> str:
return self.coredata.get_builtin_option('mandir')
- def get_datadir(self):
+ def get_datadir(self) -> str:
return self.coredata.get_builtin_option('datadir')
def get_compiler_system_dirs(self):
@@ -1140,297 +1272,14 @@ class Environment:
out = out.split('\n')[index].lstrip('libraries: =').split(':')
return [os.path.normpath(p) for p in out]
+ def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST):
+ value = self.properties[for_machine].get('needs_exe_wrapper', None)
+ if value is not None:
+ return value
+ return not machine_info_can_run(self.machines[for_machine])
+
def get_exe_wrapper(self):
- if not self.cross_info.need_exe_wrapper():
+ if not self.need_exe_wrapper():
from .dependencies import EmptyExternalProgram
return EmptyExternalProgram()
return self.exe_wrapper
-
-
-class CrossBuildInfo:
- def __init__(self, filename):
- self.config = {'properties': {}}
- self.parse_datafile(filename)
- if 'host_machine' not in self.config and 'target_machine' not in self.config:
- raise mesonlib.MesonException('Cross info file must have either host or a target machine.')
- if 'host_machine' in self.config and 'binaries' not in self.config:
- raise mesonlib.MesonException('Cross file with "host_machine" is missing "binaries".')
-
- def ok_type(self, i):
- return isinstance(i, (str, int, bool))
-
- def parse_datafile(self, filename):
- config = configparser.ConfigParser()
- try:
- with open(filename, 'r') as f:
- config.read_file(f, filename)
- except FileNotFoundError:
- raise EnvironmentException('File not found: %s.' % filename)
- # This is a bit hackish at the moment.
- for s in config.sections():
- self.config[s] = {}
- for entry in config[s]:
- value = config[s][entry]
- if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
- raise EnvironmentException('Malformed variable name %s in cross file..' % entry)
- try:
- res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
- except Exception:
- raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
-
- if self.ok_type(res):
- self.config[s][entry] = res
- elif isinstance(res, list):
- for i in res:
- if not self.ok_type(i):
- raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
- self.config[s][entry] = res
- else:
- raise EnvironmentException('Malformed value in cross file variable %s.' % entry)
-
- def has_host(self):
- return 'host_machine' in self.config
-
- def has_target(self):
- return 'target_machine' in self.config
-
- def has_stdlib(self, language):
- return language + '_stdlib' in self.config['properties']
-
- def get_stdlib(self, language):
- return self.config['properties'][language + '_stdlib']
-
- def get_host_system(self):
- "Name of host system like 'linux', or None"
- if self.has_host():
- return self.config['host_machine']['system']
- return None
-
- def get_properties(self):
- return self.config['properties']
-
- def get_root(self):
- return self.get_properties().get('root', None)
-
- def get_sys_root(self):
- return self.get_properties().get('sys_root', None)
-
- # When compiling a cross compiler we use the native compiler for everything.
- # But not when cross compiling a cross compiler.
- def need_cross_compiler(self):
- return 'host_machine' in self.config
-
- def need_exe_wrapper(self):
- value = self.config['properties'].get('needs_exe_wrapper', None)
- if value is not None:
- return value
- # Can almost always run 32-bit binaries on 64-bit natively if the host
- # and build systems are the same. We don't pass any compilers to
- # detect_cpu_family() here because we always want to know the OS
- # architecture, not what the compiler environment tells us.
- if self.has_host() and detect_cpu_family({}) == 'x86_64' and \
- self.config['host_machine']['cpu_family'] == 'x86' and \
- self.config['host_machine']['system'] == detect_system():
- return False
- return True
-
-class MachineInfo:
- def __init__(self, system, cpu_family, cpu, endian):
- self.system = system
- self.cpu_family = cpu_family
- self.cpu = cpu
- self.endian = endian
-
- def __eq__(self, other):
- if self.__class__ is not other.__class__:
- return NotImplemented
- return \
- self.system == other.system and \
- self.cpu_family == other.cpu_family and \
- self.cpu == other.cpu and \
- self.endian == other.endian
-
- def __ne__(self, other):
- if self.__class__ is not other.__class__:
- return NotImplemented
- return not self.__eq__(other)
-
- @staticmethod
- def detect(compilers = None):
- """Detect the machine we're running on
-
- If compilers are not provided, we cannot know as much. None out those
- fields to avoid accidentally depending on partial knowledge. The
- underlying ''detect_*'' method can be called to explicitly use the
- partial information.
- """
- return MachineInfo(
- detect_system(),
- detect_cpu_family(compilers) if compilers is not None else None,
- detect_cpu(compilers) if compilers is not None else None,
- sys.byteorder)
-
- @staticmethod
- def from_literal(literal):
- minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
- if set(literal) < minimum_literal:
- raise EnvironmentException(
- 'Machine info is currently {}\n'.format(literal) +
- 'but is missing {}.'.format(minimum_literal - set(literal)))
-
- cpu_family = literal['cpu_family']
- if cpu_family not in known_cpu_families:
- mlog.warning('Unknown CPU family %s, please report this at https://github.com/mesonbuild/meson/issues/new' % cpu_family)
-
- endian = literal['endian']
- if endian not in ('little', 'big'):
- mlog.warning('Unknown endian %s' % endian)
-
- return MachineInfo(
- literal['system'],
- cpu_family,
- literal['cpu'],
- endian)
-
- def is_windows(self):
- """
- Machine is windows?
- """
- return self.system == 'windows'
-
- def is_cygwin(self):
- """
- Machine is cygwin?
- """
- return self.system == 'cygwin'
-
- def is_linux(self):
- """
- Machine is linux?
- """
- return self.system == 'linux'
-
- def is_darwin(self):
- """
- Machine is Darwin (iOS/OS X)?
- """
- return self.system in ('darwin', 'ios')
-
- def is_android(self):
- """
- Machine is Android?
- """
- return self.system == 'android'
-
- def is_haiku(self):
- """
- Machine is Haiku?
- """
- return self.system == 'haiku'
-
- def is_openbsd(self):
- """
- Machine is OpenBSD?
- """
- return self.system == 'openbsd'
-
- # Various prefixes and suffixes for import libraries, shared libraries,
- # static libraries, and executables.
- # Versioning is added to these names in the backends as-needed.
-
- def get_exe_suffix(self):
- if self.is_windows() or self.is_cygwin():
- return 'exe'
- else:
- return ''
-
- def get_object_suffix(self):
- if self.is_windows():
- return 'obj'
- else:
- return 'o'
-
- def libdir_layout_is_win(self):
- return self.is_windows() \
- or self.is_cygwin()
-
-class MachineInfos(PerMachine):
- def __init__(self):
- super().__init__(None, None, None)
-
- def default_missing(self):
- """Default host to buid and target to host.
-
- This allows just specifying nothing in the native case, just host in the
- cross non-compiler case, and just target in the native-built
- cross-compiler case.
- """
- if self.host is None:
- self.host = self.build
- if self.target is None:
- self.target = self.host
-
- def miss_defaulting(self):
- """Unset definition duplicated from their previous to None
-
- This is the inverse of ''default_missing''. By removing defaulted
- machines, we can elaborate the original and then redefault them and thus
- avoid repeating the elaboration explicitly.
- """
- if self.target == self.host:
- self.target = None
- if self.host == self.build:
- self.host = None
-
- def detect_build(self, compilers = None):
- self.build = MachineInfo.detect(compilers)
-
-class BinaryTable:
- # Map from language identifiers to environment variables.
- evarMap = {
- # Compilers
- 'c': 'CC',
- 'cpp': 'CXX',
- 'cs': 'CSC',
- 'd': 'DC',
- 'fortran': 'FC',
- 'objc': 'OBJC',
- 'objcpp': 'OBJCXX',
- 'rust': 'RUSTC',
- 'vala': 'VALAC',
-
- # Binutils
- 'strip': 'STRIP',
- 'ar': 'AR',
- }
-
- @classmethod
- def detect_ccache(cls):
- try:
- has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- except OSError:
- has_ccache = 1
- if has_ccache == 0:
- cmdlist = ['ccache']
- else:
- cmdlist = []
- return cmdlist
-
- @classmethod
- def warn_about_lang_pointing_to_cross(cls, compiler_exe, evar):
- evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME')
- if evar_str == compiler_exe:
- mlog.warning('''Env var %s seems to point to the cross compiler.
-This is probably wrong, it should always point to the native compiler.''' % evar)
-
- @classmethod
- def parse_entry(cls, entry):
- compiler = mesonlib.stringlistify(entry)
- # Ensure ccache exists and remove it if it doesn't
- if compiler[0] == 'ccache':
- compiler = compiler[1:]
- ccache = cls.detect_ccache()
- else:
- ccache = []
- # Return value has to be a list of compiler 'choices'
- return compiler, ccache
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index 61481a4..3c3cfae 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -21,7 +21,7 @@ from . import optinterpreter
from . import compilers
from .wrap import wrap, WrapMode
from . import mesonlib
-from .mesonlib import FileMode, Popen_safe, listify, extract_as_list, has_path_sep
+from .mesonlib import FileMode, MachineChoice, Popen_safe, listify, extract_as_list, has_path_sep
from .dependencies import ExternalProgram
from .dependencies import InternalDependency, Dependency, NotFoundDependency, DependencyException
from .interpreterbase import InterpreterBase
@@ -36,8 +36,8 @@ import os, shutil, uuid
import re, shlex
import subprocess
from collections import namedtuple
+from itertools import chain
from pathlib import PurePath
-import traceback
import functools
import importlib
@@ -101,7 +101,7 @@ def extract_required_kwarg(kwargs, subproject, feature_check=None, default=True)
disabled = True
elif option.is_enabled():
required = True
- elif isinstance(required, bool):
+ elif isinstance(val, bool):
required = val
else:
raise InterpreterException('required keyword argument must be boolean or a feature option')
@@ -144,31 +144,32 @@ class TryRunResultHolder(InterpreterObject):
class RunProcess(InterpreterObject):
- def __init__(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False, check=False, capture=True):
+ def __init__(self, cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False, check=False, capture=True):
super().__init__()
if not isinstance(cmd, ExternalProgram):
raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
self.capture = capture
- pc, self.stdout, self.stderr = self.run_command(cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
+ pc, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
self.returncode = pc.returncode
self.methods.update({'returncode': self.returncode_method,
'stdout': self.stdout_method,
'stderr': self.stderr_method,
})
- def run_command(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check=False):
+ def run_command(self, cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check=False):
command_array = cmd.get_command() + args
- env = {'MESON_SOURCE_ROOT': source_dir,
- 'MESON_BUILD_ROOT': build_dir,
- 'MESON_SUBDIR': subdir,
- 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
- }
+ menv = {'MESON_SOURCE_ROOT': source_dir,
+ 'MESON_BUILD_ROOT': build_dir,
+ 'MESON_SUBDIR': subdir,
+ 'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
+ }
if in_builddir:
cwd = os.path.join(build_dir, subdir)
else:
cwd = os.path.join(source_dir, subdir)
child_env = os.environ.copy()
- child_env.update(env)
+ child_env.update(menv)
+ child_env = env.get_env(child_env)
stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL
mlog.debug('Running command:', ' '.join(command_array))
try:
@@ -276,7 +277,8 @@ class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder):
if len(args) == 1 and isinstance(args[0], list) and len(args[0]) == 2:
mlog.deprecation('Passing a list as the single argument to '
'configuration_data.set is deprecated. This will '
- 'become a hard error in the future.')
+ 'become a hard error in the future.',
+ location=self.current_node)
args = args[0]
if len(args) != 2:
@@ -289,7 +291,7 @@ class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder):
msg = 'Setting a configuration data value to {!r} is invalid, ' \
'and will fail at configure_file(). If you are using it ' \
'just to store some values, please use a dict instead.'
- mlog.deprecation(msg.format(val))
+ mlog.deprecation(msg.format(val), location=self.current_node)
desc = kwargs.get('description', None)
if not isinstance(name, str):
raise InterpreterException("First argument to set must be a string.")
@@ -609,6 +611,8 @@ class Headers(InterpreterObject):
InterpreterObject.__init__(self)
self.sources = sources
self.install_subdir = kwargs.get('subdir', '')
+ if os.path.isabs(self.install_subdir):
+ raise InterpreterException('Subdir keyword must not be an absolute path.')
self.custom_install_dir = kwargs.get('install_dir', None)
self.custom_install_mode = kwargs.get('install_mode', None)
if self.custom_install_dir is not None:
@@ -744,7 +748,8 @@ class BuildTargetHolder(TargetHolder):
mlog.warning('extract_all_objects called without setting recursive '
'keyword argument. Meson currently defaults to '
'non-recursive to maintain backward compatibility but '
- 'the default will be changed in the future.')
+ 'the default will be changed in the future.',
+ location=self.current_node)
return GeneratedObjectsHolder(gobjs)
@noPosargs
@@ -847,7 +852,7 @@ class RunTargetHolder(InterpreterObject, ObjectHolder):
class Test(InterpreterObject):
def __init__(self, name, project, suite, exe, depends, is_parallel,
- cmd_args, env, should_fail, timeout, workdir):
+ cmd_args, env, should_fail, timeout, workdir, protocol):
InterpreterObject.__init__(self)
self.name = name
self.suite = suite
@@ -860,6 +865,7 @@ class Test(InterpreterObject):
self.should_fail = should_fail
self.timeout = timeout
self.workdir = workdir
+ self.protocol = protocol
def get_exe(self):
return self.exe
@@ -913,6 +919,7 @@ find_library_permitted_kwargs = set([
'has_headers',
'required',
'dirs',
+ 'static',
])
find_library_permitted_kwargs |= set(['header_' + k for k in header_permitted_kwargs])
@@ -996,8 +1003,13 @@ class CompilerHolder(InterpreterObject):
idir = os.path.join(self.environment.get_source_dir(),
i.held_object.get_curdir(), idir)
args += self.compiler.get_include_args(idir, False)
+ native = kwargs.get('native', None)
+ if native:
+ for_machine = MachineChoice.BUILD
+ else:
+ for_machine = MachineChoice.HOST
if not nobuiltins:
- opts = self.environment.coredata.compiler_options
+ opts = self.environment.coredata.compiler_options[for_machine]
args += self.compiler.get_option_compile_args(opts)
if mode == 'link':
args += self.compiler.get_option_link_args(opts)
@@ -1032,7 +1044,7 @@ class CompilerHolder(InterpreterObject):
typename = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
- raise InterpreterException('Prefix argument of sizeof must be a string.')
+ raise InterpreterException('Prefix argument of alignment must be a string.')
extra_args = mesonlib.stringlistify(kwargs.get('args', []))
deps, msg = self.determine_dependencies(kwargs)
result = self.compiler.alignment(typename, prefix, self.environment,
@@ -1452,6 +1464,7 @@ class CompilerHolder(InterpreterObject):
silent=True)
return ExternalLibraryHolder(lib, self.subproject)
+ @FeatureNewKwargs('compiler.find_library', '0.51.0', ['static'])
@FeatureNewKwargs('compiler.find_library', '0.50.0', ['has_headers'])
@FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler'])
@disablerIfNotFound
@@ -1480,9 +1493,15 @@ class CompilerHolder(InterpreterObject):
for i in search_dirs:
if not os.path.isabs(i):
raise InvalidCode('Search directory %s is not an absolute path.' % i)
- linkargs = self.compiler.find_library(libname, self.environment, search_dirs)
+ libtype = 'shared-static'
+ if 'static' in kwargs:
+ if not isinstance(kwargs['static'], bool):
+ raise InterpreterException('static must be a boolean')
+ libtype = 'static' if kwargs['static'] else 'shared'
+ linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype)
if required and not linkargs:
- raise InterpreterException('{} library {!r} not found'.format(self.compiler.get_display_language(), libname))
+ raise InterpreterException(
+ '{} library {!r} not found'.format(self.compiler.get_display_language(), libname))
lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
self.compiler.language)
return ExternalLibraryHolder(lib, self.subproject)
@@ -1598,7 +1617,7 @@ ModuleState = namedtuple('ModuleState', [
'build_to_src', 'subproject', 'subdir', 'current_lineno', 'environment',
'project_name', 'project_version', 'backend', 'compilers', 'targets',
'data', 'headers', 'man', 'global_args', 'project_args', 'build_machine',
- 'host_machine', 'target_machine'])
+ 'host_machine', 'target_machine', 'current_node'])
class ModuleHolder(InterpreterObject, ObjectHolder):
def __init__(self, modname, module, interpreter):
@@ -1641,6 +1660,7 @@ class ModuleHolder(InterpreterObject, ObjectHolder):
build_machine=self.interpreter.builtin['build_machine'].held_object,
host_machine=self.interpreter.builtin['host_machine'].held_object,
target_machine=self.interpreter.builtin['target_machine'].held_object,
+ current_node=self.current_node
)
if self.held_object.is_snippet(method_name):
value = fn(self.interpreter, state, args, kwargs)
@@ -1760,7 +1780,7 @@ class MesonMain(InterpreterObject):
@permittedKwargs({})
def has_exe_wrapper_method(self, args, kwargs):
if self.is_cross_build_method(None, None) and \
- self.build.environment.cross_info.need_exe_wrapper():
+ self.build.environment.need_exe_wrapper():
if self.build.environment.exe_wrapper is None:
return False
# We return True when exe_wrap is defined, when it's not needed, and
@@ -1866,7 +1886,7 @@ class MesonMain(InterpreterObject):
if not isinstance(propname, str):
raise InterpreterException('Property name must be string.')
try:
- props = self.interpreter.environment.cross_info.get_properties()
+ props = self.interpreter.environment.properties.host
return props[propname]
except Exception:
if len(args) == 2:
@@ -1926,6 +1946,7 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'main',
'method',
'modules',
+ 'cmake_module_path',
'optional_modules',
'native',
'not_found_message',
@@ -1933,6 +1954,7 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'static',
'version',
'private_headers',
+ 'cmake_args',
},
'declare_dependency': {'include_directories',
'link_with',
@@ -1953,7 +1975,7 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'},
'jar': build.known_jar_kwargs,
'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'},
- 'run_command': {'check', 'capture'},
+ 'run_command': {'check', 'capture', 'env'},
'run_target': {'command', 'depends'},
'shared_library': build.known_shlib_kwargs,
'shared_module': build.known_shmod_kwargs,
@@ -1962,7 +1984,8 @@ permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'library': known_library_kwargs,
'subdir': {'if_found'},
'subproject': {'version', 'default_options', 'required'},
- 'test': {'args', 'depends', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir', 'suite'},
+ 'test': {'args', 'depends', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir',
+ 'suite', 'protocol'},
'vcs_tag': {'input', 'output', 'fallback', 'command', 'replace_string'},
}
@@ -2014,7 +2037,7 @@ class Interpreter(InterpreterBase):
# have the compilers needed to gain more knowledge, so wipe out old
# inferrence and start over.
self.build.environment.machines.miss_defaulting()
- self.build.environment.machines.detect_build(self.coredata.compilers)
+ self.build.environment.detect_build_machine(self.coredata.compilers)
self.build.environment.machines.default_missing()
assert self.build.environment.machines.build.cpu is not None
assert self.build.environment.machines.host.cpu is not None
@@ -2032,9 +2055,10 @@ class Interpreter(InterpreterBase):
for def_opt_name, def_opt_value in self.project_default_options.items():
for option_type in env.coredata.get_all_options():
for cur_opt_name, cur_opt_value in option_type.items():
- if (def_opt_name == cur_opt_name and
- def_opt_value != cur_opt_value.value):
- yield (def_opt_name, def_opt_value, cur_opt_value.value)
+ if def_opt_name == cur_opt_name:
+ def_opt_value = env.coredata.validate_option_value(def_opt_name, def_opt_value)
+ if def_opt_value != cur_opt_value.value:
+ yield (def_opt_name, def_opt_value, cur_opt_value)
def build_func_dict(self):
self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
@@ -2164,10 +2188,10 @@ class Interpreter(InterpreterBase):
def check_cross_stdlibs(self):
if self.build.environment.is_cross_build():
- cross_info = self.build.environment.cross_info
+ props = self.build.environment.properties.host
for l, c in self.build.cross_compilers.items():
try:
- di = mesonlib.stringlistify(cross_info.get_stdlib(l))
+ di = mesonlib.stringlistify(props.get_stdlib(l))
if len(di) != 2:
raise InterpreterException('Stdlib definition for %s should have exactly two elements.'
% l)
@@ -2209,7 +2233,7 @@ class Interpreter(InterpreterBase):
version = kwargs.get('version', self.project_version)
if not isinstance(version, str):
raise InterpreterException('Version must be a string.')
- incs = self.entries_to_incdirs(extract_as_list(kwargs, 'include_directories', unholder=True))
+ incs = self.extract_incdirs(kwargs)
libs = extract_as_list(kwargs, 'link_with', unholder=True)
libs_whole = extract_as_list(kwargs, 'link_whole', unholder=True)
sources = extract_as_list(kwargs, 'sources')
@@ -2258,6 +2282,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if not isinstance(actual, wanted):
raise InvalidArguments('Incorrect argument type.')
+ @FeatureNewKwargs('run_command', '0.50.0', ['env'])
@FeatureNewKwargs('run_command', '0.47.0', ['check', 'capture'])
@permittedKwargs(permitted_kwargs['run_command'])
def func_run_command(self, node, args, kwargs):
@@ -2276,6 +2301,8 @@ external dependencies (including libraries) must go to "dependencies".''')
if not isinstance(check, bool):
raise InterpreterException('Check must be boolean.')
+ env = self.unpack_env_kwarg(kwargs)
+
m = 'must be a string, or the output of find_program(), files() '\
'or configure_file(), or a compiler object; not {!r}'
if isinstance(cmd, ExternalProgramHolder):
@@ -2325,7 +2352,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if not a.startswith('..'):
if a not in self.build_def_files:
self.build_def_files.append(a)
- return RunProcess(cmd, expanded_args, srcdir, builddir, self.subdir,
+ return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
self.environment.get_build_command() + ['introspect'],
in_builddir=in_builddir, check=check, capture=capture)
@@ -2352,7 +2379,7 @@ external dependencies (including libraries) must go to "dependencies".''')
def do_subproject(self, dirname, kwargs):
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
- mlog.log('\nSubproject', mlog.bold(dirname), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
+ mlog.log('Subproject', mlog.bold(dirname), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
return self.disabled_subproject(dirname)
default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
@@ -2366,46 +2393,45 @@ external dependencies (including libraries) must go to "dependencies".''')
if os.path.isabs(dirname):
raise InterpreterException('Subproject name must not be an absolute path.')
if has_path_sep(dirname):
- mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.')
+ mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
+ location=self.current_node)
if dirname in self.subproject_stack:
fullstack = self.subproject_stack + [dirname]
incpath = ' => '.join(fullstack)
raise InvalidCode('Recursive include of subprojects: %s.' % incpath)
if dirname in self.subprojects:
subproject = self.subprojects[dirname]
-
if required and not subproject.found():
raise InterpreterException('Subproject "%s/%s" required but not found.' % (
self.subproject_dir, dirname))
-
return subproject
+
subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'))
try:
resolved = r.resolve(dirname)
except wrap.WrapException as e:
subprojdir = os.path.join(self.subproject_dir, r.directory)
- if not required:
- mlog.log('\nSubproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)\n')
- return self.disabled_subproject(dirname)
-
if isinstance(e, wrap.WrapNotFoundException):
# if the reason subproject execution failed was because
# the directory doesn't exist, try to give some helpful
# advice if it's a nested subproject that needs
# promotion...
self.print_nested_info(dirname)
-
- msg = 'Failed to initialize {!r}:\n{}'
- raise InterpreterException(msg.format(subprojdir, e))
+ if not required:
+ mlog.log(e)
+ mlog.log('Subproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)')
+ return self.disabled_subproject(dirname)
+ raise e
subdir = os.path.join(self.subproject_dir, resolved)
os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
self.global_args_frozen = True
mlog.log()
with mlog.nested():
- try:
- mlog.log('\nExecuting subproject', mlog.bold(dirname), '\n')
+ mlog.log('Executing subproject', mlog.bold(dirname), '\n')
+ try:
+ with mlog.nested():
new_build = self.build.copy()
subi = Interpreter(new_build, self.backend, dirname, subdir, self.subproject_dir,
self.modules, default_options)
@@ -2414,14 +2440,21 @@ external dependencies (including libraries) must go to "dependencies".''')
subi.subproject_stack = self.subproject_stack + [dirname]
current_active = self.active_projectname
subi.run()
- mlog.log('\nSubproject', mlog.bold(dirname), 'finished.')
- except Exception as e:
- if not required:
- mlog.log(e)
- mlog.log('\nSubproject', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)')
- return self.disabled_subproject(dirname)
- else:
- raise e
+ mlog.log('Subproject', mlog.bold(dirname), 'finished.')
+ # Invalid code is always an error
+ except InvalidCode:
+ raise
+ except Exception as e:
+ if not required:
+ with mlog.nested():
+ # Suppress the 'ERROR:' prefix because this exception is not
+ # fatal and VS CI treats any logs with "ERROR:" as fatal.
+ mlog.exception(e, prefix=None)
+ mlog.log('\nSubproject', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)')
+ return self.disabled_subproject(dirname)
+ raise e
+
+ mlog.log()
if 'version' in kwargs:
pv = subi.project_version
@@ -2437,10 +2470,9 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.subprojects[dirname]
def get_option_internal(self, optname):
- # Some base options are not defined in some environments, return the
- # default value from compilers.base_options in that case.
- for d in [self.coredata.base_options, compilers.base_options,
- self.coredata.builtins, self.coredata.compiler_options]:
+ for d in chain(
+ [self.coredata.base_options, compilers.base_options, self.coredata.builtins],
+ self.coredata.get_all_compiler_options()):
try:
return d[optname]
except KeyError:
@@ -2465,7 +2497,8 @@ external dependencies (including libraries) must go to "dependencies".''')
mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
'to parent option of type {3!r}, ignoring parent value. '
'Use -D{2}:{0}=value to set the value for this option manually'
- '.'.format(raw_optname, opt_type, self.subproject, popt_type))
+ '.'.format(raw_optname, opt_type, self.subproject, popt_type),
+ location=self.current_node)
return opt
except KeyError:
pass
@@ -2553,7 +2586,7 @@ external dependencies (including libraries) must go to "dependencies".''')
default_options.update(self.default_project_options)
else:
default_options = {}
- self.coredata.set_default_options(default_options, self.subproject, self.environment.cmd_line_options)
+ self.coredata.set_default_options(default_options, self.subproject, self.environment)
self.set_backend()
if not self.is_subproject():
@@ -2653,39 +2686,9 @@ external dependencies (including libraries) must go to "dependencies".''')
self.validate_arguments(args, 0, [])
raise Exception()
- def detect_compilers(self, lang, need_cross_compiler):
- comp, cross_comp = self.environment.detect_compilers(lang, need_cross_compiler)
- if comp is None:
- raise InvalidCode('Tried to use unknown language "%s".' % lang)
-
- comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
- self.coredata.compilers[lang] = comp
- # Native compiler always exist so always add its options.
- new_options = comp.get_options()
- if cross_comp is not None:
- cross_comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
- self.coredata.cross_compilers[lang] = cross_comp
- new_options.update(cross_comp.get_options())
-
- optprefix = lang + '_'
- for k, o in new_options.items():
- if not k.startswith(optprefix):
- raise InterpreterException('Internal error, %s has incorrect prefix.' % k)
- if k in self.environment.cmd_line_options:
- o.set_value(self.environment.cmd_line_options[k])
- self.coredata.compiler_options.setdefault(k, o)
-
- # Unlike compiler and linker flags, preprocessor flags are not in
- # compiler_options because they are not visible to user.
- preproc_flags = comp.get_preproc_flags()
- preproc_flags = shlex.split(preproc_flags)
- self.coredata.external_preprocess_args.setdefault(lang, preproc_flags)
-
- return comp, cross_comp
-
def add_languages(self, args, required):
success = True
- need_cross_compiler = self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler()
+ need_cross_compiler = self.environment.is_cross_build()
for lang in sorted(args, key=compilers.sort_clink):
lang = lang.lower()
if lang in self.coredata.compilers:
@@ -2693,7 +2696,8 @@ external dependencies (including libraries) must go to "dependencies".''')
cross_comp = self.coredata.cross_compilers.get(lang, None)
else:
try:
- (comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler)
+ (comp, cross_comp) = self.environment.detect_compilers(lang, need_cross_compiler)
+ self.environment.check_compilers(lang, comp, cross_comp)
except Exception:
if not required:
mlog.log('Compiler for language', mlog.bold(lang), 'not found.')
@@ -2707,35 +2711,16 @@ external dependencies (including libraries) must go to "dependencies".''')
version_string = '(%s %s)' % (comp.id, comp.version)
mlog.log('Native', comp.get_display_language(), 'compiler:',
mlog.bold(' '.join(comp.get_exelist())), version_string)
- self.build.add_compiler(comp)
+ self.build.ensure_static_linker(comp)
if need_cross_compiler:
version_string = '(%s %s)' % (cross_comp.id, cross_comp.version)
mlog.log('Cross', cross_comp.get_display_language(), 'compiler:',
mlog.bold(' '.join(cross_comp.get_exelist())), version_string)
- self.build.add_cross_compiler(cross_comp)
- if self.environment.is_cross_build() and not need_cross_compiler:
- self.build.add_cross_compiler(comp)
- self.add_base_options(comp)
+ self.build.ensure_static_cross_linker(cross_comp)
return success
- def emit_base_options_warnings(self, enabled_opts):
- if 'b_bitcode' in enabled_opts:
- mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.')
- mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.')
-
- def add_base_options(self, compiler):
- enabled_opts = []
- for optname in compiler.base_options:
- if optname in self.coredata.base_options:
- continue
- oobj = compilers.base_options[optname]
- if optname in self.environment.cmd_line_options:
- oobj.set_value(self.environment.cmd_line_options[optname])
- enabled_opts.append(optname)
- self.coredata. base_options[optname] = oobj
- self.emit_base_options_warnings(enabled_opts)
-
- def _program_from_file(self, prognames, bins, silent):
+ def program_from_file_for(self, for_machine, prognames, silent):
+ bins = self.environment.binaries[for_machine]
for p in prognames:
if hasattr(p, 'held_object'):
p = p.held_object
@@ -2748,14 +2733,6 @@ external dependencies (including libraries) must go to "dependencies".''')
return ExternalProgramHolder(prog)
return None
- def program_from_cross_file(self, prognames, silent=False):
- bins = self.environment.cross_info.config['binaries']
- return self._program_from_file(prognames, bins, silent)
-
- def program_from_config_file(self, prognames, silent=False):
- bins = self.environment.config_info.binaries
- return self._program_from_file(prognames, bins, silent)
-
def program_from_system(self, args, silent=False):
# Search for scripts relative to current subdir.
# Do not cache found programs because find_program('foobar')
@@ -2813,13 +2790,13 @@ external dependencies (including libraries) must go to "dependencies".''')
progobj = self.program_from_overrides(args, silent=silent)
if progobj is None:
- if self.build.environment.is_cross_build() and not native:
- progobj = self.program_from_cross_file(args, silent=silent)
- else:
- progobj = self.program_from_config_file(args, silent=silent)
-
+ for_machine = MachineChoice.BUILD if native else MachineChoice.HOST
+ progobj = self.program_from_file_for(for_machine, args, silent=silent)
if progobj is None:
progobj = self.program_from_system(args, silent=silent)
+ if progobj is None and args[0].endswith('python3'):
+ prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
+ progobj = ExternalProgramHolder(prog)
if required and (progobj is None or not progobj.found()):
raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args))
if progobj is None:
@@ -2863,28 +2840,24 @@ external dependencies (including libraries) must go to "dependencies".''')
want_cross = not kwargs['native']
else:
want_cross = is_cross
+
identifier = dependencies.get_dep_identifier(name, kwargs, want_cross)
- cached_dep = None
- # Check if we've already searched for and found this dep
- if identifier in self.coredata.deps:
- cached_dep = self.coredata.deps[identifier]
- mlog.log('Dependency', mlog.bold(name),
- 'found:', mlog.green('YES'), '(cached)')
- else:
- # Check if exactly the same dep with different version requirements
- # was found already.
- wanted = identifier[1]
- for trial, trial_dep in self.coredata.deps.items():
- # trial[1], identifier[1] are the version requirements
- if trial[0] != identifier[0] or trial[2:] != identifier[2:]:
- continue
- found = trial_dep.get_version()
- if not wanted or mesonlib.version_compare_many(found, wanted)[0]:
- # We either don't care about the version, or our
- # version requirements matched the trial dep's version.
- cached_dep = trial_dep
- break
- return identifier, cached_dep
+ cached_dep = self.coredata.deps.get(identifier)
+ if cached_dep:
+ if not cached_dep.found():
+ mlog.log('Dependency', mlog.bold(name),
+ 'found:', mlog.red('NO'), '(cached)')
+ return identifier, cached_dep
+
+ # Verify the cached dep version match
+ wanted = kwargs.get('version', [])
+ found = cached_dep.get_version()
+ if not wanted or mesonlib.version_compare_many(found, wanted)[0]:
+ mlog.log('Dependency', mlog.bold(name),
+ 'found:', mlog.green('YES'), '(cached)')
+ return identifier, cached_dep
+
+ return identifier, None
@staticmethod
def check_subproject_version(wanted, found):
@@ -2894,54 +2867,50 @@ external dependencies (including libraries) must go to "dependencies".''')
return False
return True
- def get_subproject_dep(self, name, dirname, varname, required):
- dep = DependencyHolder(NotFoundDependency(self.environment), self.subproject)
+ def notfound_dependency(self):
+ return DependencyHolder(NotFoundDependency(self.environment), self.subproject)
+
+ def get_subproject_dep(self, display_name, dirname, varname, kwargs):
+ dep = self.notfound_dependency()
try:
subproject = self.subprojects[dirname]
if subproject.found():
dep = self.subprojects[dirname].get_variable_method([varname], {})
- except InvalidArguments as e:
+ except InvalidArguments:
pass
if not isinstance(dep, DependencyHolder):
raise InvalidCode('Fetched variable {!r} in the subproject {!r} is '
'not a dependency object.'.format(varname, dirname))
+ required = kwargs.get('required', True)
+ wanted = kwargs.get('version', 'undefined')
+ subproj_path = os.path.join(self.subproject_dir, dirname)
+
if not dep.found():
if required:
raise DependencyException('Could not find dependency {} in subproject {}'
''.format(varname, dirname))
# If the dependency is not required, don't raise an exception
- subproj_path = os.path.join(self.subproject_dir, dirname)
- mlog.log('Dependency', mlog.bold(name), 'from subproject',
+ mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
mlog.bold(subproj_path), 'found:', mlog.red('NO'))
+ return dep
- return dep
+ found = dep.held_object.get_version()
+ if not self.check_subproject_version(wanted, found):
+ if required:
+ raise DependencyException('Version {} of subproject dependency {} already '
+ 'cached, requested incompatible version {} for '
+ 'dep {}'.format(found, dirname, wanted, display_name))
- def _find_cached_fallback_dep(self, name, dirname, varname, wanted, required):
- if dirname not in self.subprojects:
- return False
- dep = self.get_subproject_dep(name, dirname, varname, required)
- if not dep.found():
- return dep
+ mlog.log('Subproject', mlog.bold(subproj_path), 'dependency',
+ mlog.bold(display_name), 'version is', mlog.bold(found),
+ 'but', mlog.bold(wanted), 'is required.')
+ return self.notfound_dependency()
- found = dep.version_method([], {})
- # Don't do a version check if the dependency is not found and not required
- if not dep.found_method([], {}) and not required:
- subproj_path = os.path.join(self.subproject_dir, dirname)
- mlog.log('Dependency', mlog.bold(name), 'from subproject',
- mlog.bold(subproj_path), 'found:', mlog.red('NO'), '(cached)')
- return dep
- if self.check_subproject_version(wanted, found):
- subproj_path = os.path.join(self.subproject_dir, dirname)
- mlog.log('Dependency', mlog.bold(name), 'from subproject',
- mlog.bold(subproj_path), 'found:', mlog.green('YES'), '(cached)')
- return dep
- if required:
- raise DependencyException('Version {} of subproject dependency {} already '
- 'cached, requested incompatible version {} for '
- 'dep {}'.format(found, dirname, wanted, name))
- return None
+ mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
+ mlog.bold(subproj_path), 'found:', mlog.green('YES'))
+ return dep
def _handle_featurenew_dependencies(self, name):
'Do a feature check on dependencies used by this subproject'
@@ -2956,10 +2925,10 @@ external dependencies (including libraries) must go to "dependencies".''')
elif name == 'openmp':
FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject)
+ @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
@FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
@FeatureNewKwargs('dependency', '0.40.0', ['method'])
@FeatureNewKwargs('dependency', '0.38.0', ['default_options'])
- @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message'])
@disablerIfNotFound
@permittedKwargs(permitted_kwargs['dependency'])
def func_dependency(self, node, args, kwargs):
@@ -2983,68 +2952,54 @@ external dependencies (including libraries) must go to "dependencies".''')
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
mlog.log('Dependency', mlog.bold(display_name), 'skipped: feature', mlog.bold(feature), 'disabled')
- return DependencyHolder(NotFoundDependency(self.environment), self.subproject)
- if'default_options' in kwargs and 'fallback' not in kwargs:
- mlog.warning('The "default_options" keyworg argument does nothing without a "fallback" keyword argument.')
+ return self.notfound_dependency()
+
+ has_fallback = 'fallback' in kwargs
+ if 'default_options' in kwargs and not has_fallback:
+ mlog.warning('The "default_options" keyword argument does nothing without a "fallback" keyword argument.',
+ location=self.current_node)
# writing just "dependency('')" is an error, because it can only fail
- if name == '' and required and 'fallback' not in kwargs:
+ if name == '' and required and not has_fallback:
raise InvalidArguments('Dependency is both required and not-found')
if '<' in name or '>' in name or '=' in name:
raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify'
'version\n requirements use the \'version\' keyword argument instead.')
- identifier, cached_dep = self._find_cached_dep(name, kwargs)
+ identifier, cached_dep = self._find_cached_dep(name, kwargs)
if cached_dep:
if required and not cached_dep.found():
m = 'Dependency {!r} was already checked and was not found'
raise DependencyException(m.format(display_name))
- dep = cached_dep
- else:
- # If the dependency has already been configured, possibly by
- # a higher level project, try to use it first.
- if 'fallback' in kwargs:
- dirname, varname = self.get_subproject_infos(kwargs)
- wanted = kwargs.get('version', 'undefined')
- dep = self._find_cached_fallback_dep(name, dirname, varname, wanted, required)
- if dep:
- return dep
-
- # We need to actually search for this dep
- exception = None
- dep = NotFoundDependency(self.environment)
-
- # Unless a fallback exists and is forced ...
- if self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback and 'fallback' in kwargs:
- pass
- # ... search for it outside the project
- elif name != '':
- self._handle_featurenew_dependencies(name)
- try:
- dep = dependencies.find_external_dependency(name, self.environment, kwargs)
- except DependencyException as e:
- exception = e
-
- # Search inside the projects list
- if not dep.found():
- if 'fallback' in kwargs:
- if not exception:
- exception = DependencyException("fallback for %s not found" % display_name)
- fallback_dep = self.dependency_fallback(name, kwargs)
- if fallback_dep:
- # Never add fallback deps to self.coredata.deps since we
- # cannot cache them. They must always be evaluated else
- # we won't actually read all the build files.
- return fallback_dep
- if required:
- assert(exception is not None)
- raise exception
-
- # Only store found-deps in the cache
- if dep.found():
- self.coredata.deps[identifier] = dep
- return DependencyHolder(dep, self.subproject)
+ return DependencyHolder(cached_dep, self.subproject)
+
+ # If the dependency has already been configured, possibly by
+ # a higher level project, try to use it first.
+ if has_fallback:
+ dirname, varname = self.get_subproject_infos(kwargs)
+ if dirname in self.subprojects:
+ return self.get_subproject_dep(name, dirname, varname, kwargs)
+
+ wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+ forcefallback = wrap_mode == WrapMode.forcefallback and has_fallback
+ if name != '' and not forcefallback:
+ self._handle_featurenew_dependencies(name)
+ kwargs['required'] = required and not has_fallback
+ dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+ kwargs['required'] = required
+ # Only store found-deps in the cache
+ # Never add fallback deps to self.coredata.deps since we
+ # cannot cache them. They must always be evaluated else
+ # we won't actually read all the build files.
+ if dep.found():
+ self.coredata.deps[identifier] = dep
+ return DependencyHolder(dep, self.subproject)
+
+ if has_fallback:
+ return self.dependency_fallback(display_name, kwargs)
+
+ return self.notfound_dependency()
@FeatureNew('disabler', '0.44.0')
@noKwargs
@@ -3067,7 +3022,7 @@ external dependencies (including libraries) must go to "dependencies".''')
command_templ = '\nmeson wrap promote {}'
for l in found:
message.append(mlog.bold(command_templ.format(l[len(self.source_root) + 1:])))
- mlog.warning(*message)
+ mlog.warning(*message, location=self.current_node)
def get_subproject_infos(self, kwargs):
fbinfo = kwargs['fallback']
@@ -3076,13 +3031,12 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InterpreterException('Fallback info must have exactly two items.')
return fbinfo
- def dependency_fallback(self, name, kwargs):
- display_name = name if name else '(anonymous)'
+ def dependency_fallback(self, display_name, kwargs):
if self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
mlog.log('Not looking for a fallback subproject for the dependency',
mlog.bold(display_name), 'because:\nUse of fallback'
'dependencies is disabled.')
- return None
+ return self.notfound_dependency()
elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback:
mlog.log('Looking for a fallback subproject for the dependency',
mlog.bold(display_name), 'because:\nUse of fallback dependencies is forced.')
@@ -3090,52 +3044,12 @@ external dependencies (including libraries) must go to "dependencies".''')
mlog.log('Looking for a fallback subproject for the dependency',
mlog.bold(display_name))
dirname, varname = self.get_subproject_infos(kwargs)
- # Try to execute the subproject
- try:
- sp_kwargs = {}
- try:
- sp_kwargs['default_options'] = kwargs['default_options']
- except KeyError:
- pass
- self.do_subproject(dirname, sp_kwargs)
- # Invalid code is always an error
- except InvalidCode:
- raise
- # If the subproject execution failed in a non-fatal way, don't raise an
- # exception; let the caller handle things.
- except Exception as e:
- msg = ['Couldn\'t use fallback subproject in',
- mlog.bold(os.path.join(self.subproject_dir, dirname)),
- 'for the dependency', mlog.bold(display_name), '\nReason:']
- if isinstance(e, mesonlib.MesonException):
- msg.append(e.get_msg_with_context())
- else:
- msg.append(traceback.format_exc())
- mlog.log(*msg)
- return None
- required = kwargs.get('required', True)
- dep = self.get_subproject_dep(name, dirname, varname, required)
- if not dep.found():
- return dep
- subproj_path = os.path.join(self.subproject_dir, dirname)
- # Check if the version of the declared dependency matches what we want
- if 'version' in kwargs:
- wanted = kwargs['version']
- found = dep.version_method([], {})
- # Don't do a version check if the dependency is not found and not required
- if not dep.found_method([], {}) and not required:
- subproj_path = os.path.join(self.subproject_dir, dirname)
- mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
- mlog.bold(subproj_path), 'found:', mlog.red('NO'))
- return dep
- if not self.check_subproject_version(wanted, found):
- mlog.log('Subproject', mlog.bold(subproj_path), 'dependency',
- mlog.bold(display_name), 'version is', mlog.bold(found),
- 'but', mlog.bold(wanted), 'is required.')
- return None
- mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
- mlog.bold(subproj_path), 'found:', mlog.green('YES'))
- return dep
+ sp_kwargs = {
+ 'default_options': kwargs.get('default_options', []),
+ 'required': kwargs.get('required', True),
+ }
+ self.do_subproject(dirname, sp_kwargs)
+ return self.get_subproject_dep(display_name, dirname, varname, kwargs)
@FeatureNewKwargs('executable', '0.42.0', ['implib'])
@permittedKwargs(permitted_kwargs['executable'])
@@ -3260,7 +3174,7 @@ external dependencies (including libraries) must go to "dependencies".''')
kwargs['input'] = self.source_strings_to_files(extract_as_list(kwargs, 'input'))
except mesonlib.MesonException:
mlog.warning('''Custom target input \'%s\' can\'t be converted to File object(s).
-This will become a hard error in the future.''' % kwargs['input'])
+This will become a hard error in the future.''' % kwargs['input'], location=self.current_node)
tg = CustomTargetHolder(build.CustomTarget(name, self.subdir, self.subproject, kwargs), self)
self.add_target(name, tg.held_object)
return tg
@@ -3367,6 +3281,9 @@ This will become a hard error in the future.''' % kwargs['input'])
workdir = None
if not isinstance(timeout, int):
raise InterpreterException('Timeout must be an integer.')
+ protocol = kwargs.get('protocol', 'exitcode')
+ if protocol not in ('exitcode', 'tap'):
+ raise InterpreterException('Protocol must be "exitcode" or "tap".')
suite = []
prj = self.subproject if self.is_subproject() else self.build.project_name
for s in mesonlib.stringlistify(kwargs.get('suite', '')):
@@ -3378,7 +3295,7 @@ This will become a hard error in the future.''' % kwargs['input'])
if not isinstance(dep, (build.CustomTarget, build.BuildTarget)):
raise InterpreterException('Depends items must be build targets.')
t = Test(args[0], prj, suite, exe.held_object, depends, par, cmd_args,
- env, should_fail, timeout, workdir)
+ env, should_fail, timeout, workdir, protocol)
if is_base_test:
self.build.tests.append(t)
mlog.debug('Adding test', mlog.bold(args[0], True))
@@ -3542,6 +3459,7 @@ This will become a hard error in the future.''' % kwargs['input'])
@FeatureNewKwargs('configure_file', '0.47.0', ['copy', 'output_format', 'install_mode', 'encoding'])
@FeatureNewKwargs('configure_file', '0.46.0', ['format'])
@FeatureNewKwargs('configure_file', '0.41.0', ['capture'])
+ @FeatureNewKwargs('configure_file', '0.50.0', ['install'])
@permittedKwargs(permitted_kwargs['configure_file'])
def func_configure_file(self, node, args, kwargs):
if len(args) > 0:
@@ -3701,22 +3619,34 @@ This will become a hard error in the future.''' % kwargs['input'])
# Install file if requested, we check for the empty string
# for backwards compatibility. That was the behaviour before
# 0.45.0 so preserve it.
- idir = kwargs.get('install_dir', None)
- if isinstance(idir, str) and idir:
+ idir = kwargs.get('install_dir', '')
+ if idir is False:
+ idir = ''
+ mlog.deprecation('Please use the new `install:` kwarg instead of passing '
+ '`false` to `install_dir:`', location=node)
+ if not isinstance(idir, str):
+ raise InterpreterException('"install_dir" must be a string')
+ install = kwargs.get('install', idir != '')
+ if not isinstance(install, bool):
+ raise InterpreterException('"install" must be a boolean')
+ if install:
+ if not idir:
+ raise InterpreterException('"install_dir" must be specified '
+ 'when "install" in a configure_file '
+ 'is true')
cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
install_mode = self._get_kwarg_install_mode(kwargs)
self.build.data.append(build.Data([cfile], idir, install_mode))
return mesonlib.File.from_built_file(self.subdir, output)
- def entries_to_incdirs(self, prospectives):
- if not isinstance(prospectives, list):
- return self.entries_to_incdirs([prospectives])[0]
+ def extract_incdirs(self, kwargs):
+ prospectives = listify(kwargs.get('include_directories', []), unholder=True)
result = []
for p in prospectives:
- if isinstance(p, (IncludeDirsHolder, build.IncludeDirs)):
+ if isinstance(p, build.IncludeDirs):
result.append(p)
elif isinstance(p, str):
- result.append(self.build_incdir_object([p]))
+ result.append(self.build_incdir_object([p]).held_object)
else:
raise InterpreterException('Include directory objects can only be created from strings or include directories.')
return result
@@ -3909,7 +3839,8 @@ different subdirectory.
self.coredata.base_options['b_sanitize'].value != 'none'):
mlog.warning('''Trying to use {} sanitizer on Clang with b_lundef.
This will probably not work.
-Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_sanitize'].value))
+Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_sanitize'].value),
+ location=self.current_node)
def evaluate_subproject_info(self, path_from_source_root, subproject_dirname):
depth = 0
@@ -4082,8 +4013,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s
# passed to library() when default_library == 'static'.
kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
- if 'include_directories' in kwargs:
- kwargs['include_directories'] = self.entries_to_incdirs(kwargs['include_directories'])
+ kwargs['include_directories'] = self.extract_incdirs(kwargs)
target = targetclass(name, self.subdir, self.subproject, is_cross, sources, objs, self.environment, kwargs)
if is_cross:
@@ -4104,7 +4034,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s
# path declarations.
if os.path.normpath(i).startswith(self.environment.get_source_dir()):
mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
-This will become a hard error in the future.''')
+This will become a hard error in the future.''', location=self.current_node)
i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
i = self.build_incdir_object([i])
cleaned_items.append(i)
@@ -4121,8 +4051,9 @@ This will become a hard error in the future.''')
def add_cross_stdlib_info(self, target):
for l in self.get_used_languages(target):
- if self.environment.cross_info.has_stdlib(l) \
- and self.subproject != self.environment.cross_info.get_stdlib(l)[0]:
+ props = self.environment.properties.host
+ if props.has_stdlib(l) \
+ and self.subproject != props.get_stdlib(l)[0]:
target.add_deps(self.build.cross_stdlibs[l])
def check_sources_exist(self, subdir, sources):
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index 707b8f7..650d1e0 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -18,7 +18,7 @@
from . import mparser, mesonlib, mlog
from . import environment, dependencies
-import os, copy, re, types
+import os, copy, re
from functools import wraps
class ObjectHolder:
@@ -47,14 +47,14 @@ def _get_callee_args(wrapped_args, want_subproject=False):
if want_subproject and n == 2:
if hasattr(s, 'subproject'):
# Interpreter base types have 2 args: self, node
- node_or_state = wrapped_args[1]
+ node = wrapped_args[1]
# args and kwargs are inside the node
args = None
kwargs = None
subproject = s.subproject
elif hasattr(wrapped_args[1], 'subproject'):
# Module objects have 2 args: self, interpreter
- node_or_state = wrapped_args[1]
+ node = wrapped_args[1].current_node
# args and kwargs are inside the node
args = None
kwargs = None
@@ -63,7 +63,7 @@ def _get_callee_args(wrapped_args, want_subproject=False):
raise AssertionError('Unknown args: {!r}'.format(wrapped_args))
elif n == 3:
# Methods on objects (*Holder, MesonMain, etc) have 3 args: self, args, kwargs
- node_or_state = None # FIXME
+ node = s.current_node
args = wrapped_args[1]
kwargs = wrapped_args[2]
if want_subproject:
@@ -73,30 +73,32 @@ def _get_callee_args(wrapped_args, want_subproject=False):
subproject = s.interpreter.subproject
elif n == 4:
# Meson functions have 4 args: self, node, args, kwargs
- # Module functions have 4 args: self, state, args, kwargs; except,
- # PythonInstallation methods have self, interpreter, args, kwargs
- node_or_state = wrapped_args[1]
+ # Module functions have 4 args: self, state, args, kwargs
+ if isinstance(s, InterpreterBase):
+ node = wrapped_args[1]
+ else:
+ node = wrapped_args[1].current_node
args = wrapped_args[2]
kwargs = wrapped_args[3]
if want_subproject:
if isinstance(s, InterpreterBase):
subproject = s.subproject
else:
- subproject = node_or_state.subproject
+ subproject = wrapped_args[1].subproject
elif n == 5:
# Module snippets have 5 args: self, interpreter, state, args, kwargs
- node_or_state = wrapped_args[2]
+ node = wrapped_args[2].current_node
args = wrapped_args[3]
kwargs = wrapped_args[4]
if want_subproject:
- subproject = node_or_state.subproject
+ subproject = wrapped_args[2].subproject
else:
raise AssertionError('Unknown args: {!r}'.format(wrapped_args))
# Sometimes interpreter methods are called internally with None instead of
# empty list/dict
args = args if args is not None else []
kwargs = kwargs if kwargs is not None else {}
- return s, node_or_state, args, kwargs, subproject
+ return s, node, args, kwargs, subproject
def flatten(args):
if isinstance(args, mparser.StringNode):
@@ -164,19 +166,10 @@ class permittedKwargs:
def __call__(self, f):
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
- s, node_or_state, args, kwargs, _ = _get_callee_args(wrapped_args)
- loc = types.SimpleNamespace()
- if hasattr(s, 'subdir'):
- loc.subdir = s.subdir
- loc.lineno = s.current_lineno
- elif node_or_state and hasattr(node_or_state, 'subdir'):
- loc.subdir = node_or_state.subdir
- loc.lineno = node_or_state.current_lineno
- else:
- loc = None
+ s, node, args, kwargs, _ = _get_callee_args(wrapped_args)
for k in kwargs:
if k not in self.permitted:
- mlog.warning('''Passed invalid keyword argument "{}".'''.format(k), location=loc)
+ mlog.warning('''Passed invalid keyword argument "{}".'''.format(k), location=node)
mlog.warning('This will become a hard error in the future.')
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
@@ -320,6 +313,9 @@ class BreakRequest(BaseException):
class InterpreterObject:
def __init__(self):
self.methods = {}
+ # Current node set during a method call. This can be used as location
+ # when printing a warning message during a method call.
+ self.current_node = None
def method_call(self, method_name, args, kwargs):
if method_name in self.methods:
@@ -366,6 +362,9 @@ class InterpreterBase:
self.variables = {}
self.argument_depth = 0
self.current_lineno = -1
+ # Current node set during a function call. This can be used as location
+ # when printing a warning message during a method call.
+ self.current_node = None
def load_root_meson_file(self):
mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
@@ -607,6 +606,23 @@ The result of this is undefined and will become a hard error in a future Meson r
raise InterpreterException('Argument to negation is not an integer.')
return -v
+ @FeatureNew('/ with string arguments', '0.49.0')
+ def evaluate_path_join(self, l, r):
+ if not isinstance(l, str):
+ raise InvalidCode('The division operator can only append to a string.')
+ if not isinstance(r, str):
+ raise InvalidCode('The division operator can only append a string.')
+ return self.join_path_strings((l, r))
+
+ def evaluate_division(self, l, r):
+ if isinstance(l, str) or isinstance(r, str):
+ return self.evaluate_path_join(l, r)
+ if isinstance(l, int) and isinstance(r, int):
+ if r == 0:
+ raise InvalidCode('Division by zero.')
+ return l // r
+ raise InvalidCode('Division works only with strings or integers.')
+
def evaluate_arithmeticstatement(self, cur):
l = self.evaluate_statement(cur.left)
if is_disabler(l):
@@ -631,13 +647,7 @@ The result of this is undefined and will become a hard error in a future Meson r
raise InvalidCode('Multiplication works only with integers.')
return l * r
elif cur.operation == 'div':
- if isinstance(l, str) and isinstance(r, str):
- return self.join_path_strings((l, r))
- if isinstance(l, int) and isinstance(r, int):
- if r == 0:
- raise InvalidCode('Division by zero.')
- return l // r
- raise InvalidCode('Division works only with strings or integers.')
+ return self.evaluate_division(l, r)
elif cur.operation == 'mod':
if not isinstance(l, int) or not isinstance(r, int):
raise InvalidCode('Modulo works only with integers.')
@@ -749,7 +759,6 @@ The result of this is undefined and will become a hard error in a future Meson r
except IndexError:
raise InterpreterException('Index %d out of bounds of array of size %d.' % (index, len(iobject)))
-
def function_call(self, node):
func_name = node.func_name
(posargs, kwargs) = self.reduce_arguments(node.args)
@@ -760,6 +769,7 @@ The result of this is undefined and will become a hard error in a future Meson r
if not getattr(func, 'no-args-flattening', False):
posargs = flatten(posargs)
+ self.current_node = node
return func(node, posargs, kwargs)
else:
self.unknown_function_called(func_name)
@@ -796,6 +806,7 @@ The result of this is undefined and will become a hard error in a future Meson r
return Disabler()
if method_name == 'extract_objects':
self.validate_extraction(obj.held_object)
+ obj.current_node = node
return obj.method_call(method_name, args, kwargs)
def bool_method_call(self, obj, method_name, args):
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 28589da..cd9d35a 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -13,7 +13,7 @@
# limitations under the License.
import os
-from . import (coredata, mesonlib, build)
+from . import coredata, environment, mesonlib, build, mintro, mlog
def add_arguments(parser):
coredata.register_builtin_arguments(parser)
@@ -22,17 +22,44 @@ def add_arguments(parser):
help='Clear cached state (e.g. found dependencies)')
+def make_lower_case(val):
+ if isinstance(val, bool):
+ return str(val).lower()
+ elif isinstance(val, list):
+ return [make_lower_case(i) for i in val]
+ else:
+ return str(val)
+
+
class ConfException(mesonlib.MesonException):
pass
class Conf:
def __init__(self, build_dir):
- self.build_dir = build_dir
- if not os.path.isdir(os.path.join(build_dir, 'meson-private')):
- raise ConfException('Directory %s does not seem to be a Meson build directory.' % build_dir)
- self.build = build.load(self.build_dir)
- self.coredata = coredata.load(self.build_dir)
+ self.build_dir = os.path.abspath(os.path.realpath(build_dir))
+ if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
+ self.build_dir = os.path.dirname(self.build_dir)
+ self.build = None
+ self.max_choices_line_length = 60
+
+ if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
+ self.build = build.load(self.build_dir)
+ self.source_dir = self.build.environment.get_source_dir()
+ self.coredata = coredata.load(self.build_dir)
+ self.default_values_only = False
+ elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
+ # Make sure that log entries in other parts of meson don't interfere with the JSON output
+ mlog.disable()
+ self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
+ intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja')
+ intr.analyze()
+ # Reenable logging just in case
+ mlog.enable()
+ self.coredata = intr.coredata
+ self.default_values_only = True
+ else:
+ raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))
def clear_cache(self):
self.coredata.deps = {}
@@ -41,26 +68,22 @@ class Conf:
self.coredata.set_options(options)
def save(self):
+ # Do nothing when using introspection
+ if self.default_values_only:
+ return
# Only called if something has changed so overwrite unconditionally.
coredata.save(self.coredata, self.build_dir)
# We don't write the build file because any changes to it
# are erased when Meson is executed the next time, i.e. when
# Ninja is run.
- @staticmethod
- def print_aligned(arr):
- def make_lower_case(val):
- if isinstance(val, bool):
- return str(val).lower()
- elif isinstance(val, list):
- return [make_lower_case(i) for i in val]
- else:
- return str(val)
-
+ def print_aligned(self, arr):
if not arr:
return
titles = {'name': 'Option', 'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
+ if self.default_values_only:
+ titles['value'] = 'Default Value'
name_col = [titles['name'], '-' * len(titles['name'])]
value_col = [titles['value'], '-' * len(titles['value'])]
@@ -78,7 +101,20 @@ class Conf:
if opt['choices']:
choices_found = True
if isinstance(opt['choices'], list):
- choices_col.append('[{0}]'.format(', '.join(make_lower_case(opt['choices']))))
+ choices_list = make_lower_case(opt['choices'])
+ current = '['
+ while choices_list:
+ i = choices_list.pop(0)
+ if len(current) + len(i) >= self.max_choices_line_length:
+ choices_col.append(current + ',')
+ name_col.append('')
+ value_col.append('')
+ descr_col.append('')
+ current = ' '
+ if len(current) > 1:
+ current += ', '
+ current += i
+ choices_col.append(current + ']')
else:
choices_col.append(make_lower_case(opt['choices']))
else:
@@ -100,20 +136,26 @@ class Conf:
if not options:
print(' No {}\n'.format(title.lower()))
arr = []
- for k in sorted(options):
- o = options[k]
+ for k, o in sorted(options.items()):
d = o.description
- v = o.value
+ v = o.printable_value()
c = o.choices
- if isinstance(o, coredata.UserUmaskOption):
- v = format(v, '04o')
arr.append({'name': k, 'descr': d, 'value': v, 'choices': c})
self.print_aligned(arr)
def print_conf(self):
+ def print_default_values_warning():
+ mlog.warning('The source directory instead of the build directory was specified.')
+ mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
+
+ if self.default_values_only:
+ print_default_values_warning()
+ print('')
+
print('Core properties:')
- print(' Source dir', self.build.environment.source_dir)
- print(' Build dir ', self.build.environment.build_dir)
+ print(' Source dir', self.source_dir)
+ if not self.default_values_only:
+ print(' Build dir ', self.build_dir)
dir_option_names = ['bindir',
'datadir',
@@ -139,20 +181,33 @@ class Conf:
self.print_options('Core options', core_options)
self.print_options('Backend options', self.coredata.backend_options)
self.print_options('Base options', self.coredata.base_options)
- self.print_options('Compiler options', self.coredata.compiler_options)
+ # TODO others
+ self.print_options('Compiler options', self.coredata.compiler_options.build)
self.print_options('Directories', dir_options)
self.print_options('Project options', self.coredata.user_options)
self.print_options('Testing options', test_options)
+ # Print the warning twice so that the user shouldn't be able to miss it
+ if self.default_values_only:
+ print('')
+ print_default_values_warning()
def run(options):
coredata.parse_cmd_line_options(options)
builddir = os.path.abspath(os.path.realpath(options.builddir))
+ c = None
try:
c = Conf(builddir)
+ if c.default_values_only:
+ c.print_conf()
+ return 0
+
save = False
if len(options.cmd_line_options) > 0:
c.set_options(options.cmd_line_options)
+ if not c.build.environment.is_cross_build():
+ # TODO think about cross and command-line interface.
+ c.coredata.compiler_options.host = c.coredata.compiler_options.build
coredata.update_cmd_line_file(builddir, options)
save = True
elif options.clearcache:
@@ -162,7 +217,11 @@ def run(options):
c.print_conf()
if save:
c.save()
+ mintro.update_build_options(c.coredata, c.build.environment.info_dir)
+ mintro.write_meson_info_file(c.build, [])
except ConfException as e:
print('Meson configurator encountered an error:')
+ if c is not None and c.build is not None:
+ mintro.write_meson_info_file(c.build, [e])
raise e
return 0
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 98c2366..0afc21b 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -13,7 +13,8 @@
# limitations under the License.
"""A library of random helper functionality."""
-
+from pathlib import Path
+from typing import List
import functools
import sys
import stat
@@ -68,11 +69,11 @@ def set_meson_command(mainfile):
if 'MESON_COMMAND_TESTS' in os.environ:
mlog.log('meson_command is {!r}'.format(meson_command))
-def is_ascii_string(astring):
+def is_ascii_string(astring) -> bool:
try:
if isinstance(astring, str):
astring.encode('ascii')
- if isinstance(astring, bytes):
+ elif isinstance(astring, bytes):
astring.decode('ascii')
except UnicodeDecodeError:
return False
@@ -206,17 +207,17 @@ class FileMode:
return perms
class File:
- def __init__(self, is_built, subdir, fname):
+ def __init__(self, is_built: bool, subdir: str, fname: str):
self.is_built = is_built
self.subdir = subdir
self.fname = fname
assert(isinstance(self.subdir, str))
assert(isinstance(self.fname, str))
- def __str__(self):
+ def __str__(self) -> str:
return self.relative_name()
- def __repr__(self):
+ def __repr__(self) -> str:
ret = '<File: {0}'
if not self.is_built:
ret += ' (not built)'
@@ -225,49 +226,50 @@ class File:
@staticmethod
@lru_cache(maxsize=None)
- def from_source_file(source_root, subdir, fname):
+ def from_source_file(source_root: str, subdir: str, fname: str):
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
raise MesonException('File %s does not exist.' % fname)
return File(False, subdir, fname)
@staticmethod
- def from_built_file(subdir, fname):
+ def from_built_file(subdir: str, fname: str):
return File(True, subdir, fname)
@staticmethod
- def from_absolute_file(fname):
+ def from_absolute_file(fname: str):
return File(False, '', fname)
@lru_cache(maxsize=None)
- def rel_to_builddir(self, build_to_src):
+ def rel_to_builddir(self, build_to_src: str) -> str:
if self.is_built:
return self.relative_name()
else:
return os.path.join(build_to_src, self.subdir, self.fname)
@lru_cache(maxsize=None)
- def absolute_path(self, srcdir, builddir):
+ def absolute_path(self, srcdir: str, builddir: str) -> str:
absdir = srcdir
if self.is_built:
absdir = builddir
return os.path.join(absdir, self.relative_name())
- def endswith(self, ending):
+ def endswith(self, ending: str) -> bool:
return self.fname.endswith(ending)
- def split(self, s):
+ def split(self, s: str) -> List[str]:
return self.fname.split(s)
- def __eq__(self, other):
+ def __eq__(self, other) -> bool:
return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)
- def __hash__(self):
+ def __hash__(self) -> int:
return hash((self.fname, self.subdir, self.is_built))
@lru_cache(maxsize=None)
- def relative_name(self):
+ def relative_name(self) -> str:
return os.path.join(self.subdir, self.fname)
+
def get_compiler_for_source(compilers, src):
for comp in compilers:
if comp.can_compile(src):
@@ -308,7 +310,15 @@ class OrderedEnum(Enum):
return self.value < other.value
return NotImplemented
-MachineChoice = OrderedEnum('MachineChoice', ['BUILD', 'HOST', 'TARGET'])
+class MachineChoice(OrderedEnum):
+
+ """Enum class representing one of the three possible values for binaries,
+ the build, host, and target machines.
+ """
+
+ BUILD = 0
+ HOST = 1
+ TARGET = 2
class PerMachine:
def __init__(self, build, host, target):
@@ -331,36 +341,35 @@ class PerMachine:
}[machine]
setattr(self, key, val)
-def is_osx():
+def is_osx() -> bool:
return platform.system().lower() == 'darwin'
-def is_linux():
+def is_linux() -> bool:
return platform.system().lower() == 'linux'
-def is_android():
+def is_android() -> bool:
return platform.system().lower() == 'android'
-def is_haiku():
+def is_haiku() -> bool:
return platform.system().lower() == 'haiku'
-def is_openbsd():
+def is_openbsd() -> bool:
return platform.system().lower() == 'openbsd'
-def is_windows():
+def is_windows() -> bool:
platname = platform.system().lower()
return platname == 'windows' or 'mingw' in platname
-def is_cygwin():
- platname = platform.system().lower()
- return platname.startswith('cygwin')
+def is_cygwin() -> bool:
+ return platform.system().lower().startswith('cygwin')
-def is_debianlike():
+def is_debianlike() -> bool:
return os.path.isfile('/etc/debian_version')
-def is_dragonflybsd():
+def is_dragonflybsd() -> bool:
return platform.system().lower() == 'dragonfly'
-def is_freebsd():
+def is_freebsd() -> bool:
return platform.system().lower() == 'freebsd'
def _get_machine_is_cross(env, is_cross):
@@ -451,16 +460,34 @@ def for_openbsd(is_cross, env):
"""
return _get_machine_is_cross(env, is_cross).is_openbsd()
-def exe_exists(arglist):
+def exe_exists(arglist: List[str]) -> bool:
try:
- p = subprocess.Popen(arglist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- p.communicate()
- if p.returncode == 0:
+ if subprocess.run(arglist, timeout=10).returncode == 0:
return True
- except FileNotFoundError:
+ except (FileNotFoundError, subprocess.TimeoutExpired):
pass
return False
+@lru_cache(maxsize=None)
+def darwin_get_object_archs(objpath):
+ '''
+ For a specific object (executable, static library, dylib, etc), run `lipo`
+ to fetch the list of archs supported by it. Supports both thin objects and
+ 'fat' objects.
+ '''
+ _, stdo, stderr = Popen_safe(['lipo', '-info', objpath])
+ if not stdo:
+ mlog.debug('lipo {}: {}'.format(objpath, stderr))
+ return None
+ stdo = stdo.rsplit(': ', 1)[1]
+ # Convert from lipo-style archs to meson-style CPUs
+ stdo = stdo.replace('i386', 'x86')
+ stdo = stdo.replace('arm64', 'aarch64')
+ # Add generic name for armv7 and armv7s
+ if 'armv7' in stdo:
+ stdo += ' arm'
+ return stdo.split()
+
def detect_vcs(source_dir):
vcs_systems = [
dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
@@ -468,7 +495,7 @@ def detect_vcs(source_dir):
dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info', rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
dict(name = 'bazaar', cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno', rev_regex = '(.*)', dep = '.bzr'),
]
-
+ # FIXME: this is much cleaner with pathlib.Path
segs = source_dir.replace('\\', '/').split('/')
for i in range(len(segs), -1, -1):
curdir = '/'.join(segs[:i])
@@ -496,6 +523,9 @@ class Version:
def __str__(self):
return '%s (V=%s)' % (self._s, str(self._v))
+ def __repr__(self):
+ return '<Version: {}>'.format(self._s)
+
def __lt__(self, other):
return self.__cmp__(other) == -1
@@ -608,7 +638,7 @@ def version_compare_condition_with_min(condition, minimum):
# Map versions in the constraint of the form '0.46' to '0.46.0', to embed
# this knowledge of the meson versioning scheme.
condition = condition.strip()
- if re.match('^\d+.\d+$', condition):
+ if re.match(r'^\d+.\d+$', condition):
condition += '.0'
return cmpop(Version(minimum), Version(condition))
@@ -625,6 +655,8 @@ def default_libdir():
return 'lib/' + archpath
except Exception:
pass
+ if is_freebsd():
+ return 'lib'
if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
return 'lib64'
return 'lib'
@@ -636,7 +668,7 @@ def default_libexecdir():
def default_prefix():
return 'c:/' if is_windows() else '/usr/local'
-def get_library_dirs():
+def get_library_dirs() -> List[str]:
if is_windows():
return ['C:/mingw/lib'] # Fixme
if is_osx():
@@ -647,20 +679,24 @@ def get_library_dirs():
# than /usr/lib. If you feel that this search order is
# problematic, please raise the issue on the mailing list.
unixdirs = ['/usr/local/lib', '/usr/lib', '/lib']
- plat = subprocess.check_output(['uname', '-m']).decode().strip()
- # This is a terrible hack. I admit it and I'm really sorry.
- # I just don't know what the correct solution is.
- if plat == 'i686':
+
+ if is_freebsd():
+ return unixdirs
+ # FIXME: this needs to be further genericized for aarch64 etc.
+ machine = platform.machine()
+ if machine in ('i386', 'i486', 'i586', 'i686'):
plat = 'i386'
- if plat.startswith('arm'):
+ elif machine.startswith('arm'):
plat = 'arm'
- unixdirs += glob('/usr/lib/' + plat + '*')
+
+ unixdirs += [str(x) for x in (Path('/usr/lib/') / plat).iterdir() if x.is_dir()]
if os.path.exists('/usr/lib64'):
unixdirs.append('/usr/lib64')
- unixdirs += glob('/lib/' + plat + '*')
+
+ unixdirs += [str(x) for x in (Path('/lib/') / plat).iterdir() if x.is_dir()]
if os.path.exists('/lib64'):
unixdirs.append('/lib64')
- unixdirs += glob('/lib/' + plat + '*')
+
return unixdirs
def has_path_sep(name, sep='/\\'):
@@ -729,7 +765,7 @@ def do_mesondefine(line, confdata):
def do_conf_file(src, dst, confdata, format, encoding='utf-8'):
try:
- with open(src, encoding=encoding) as f:
+ with open(src, encoding=encoding, newline='') as f:
data = f.readlines()
except Exception as e:
raise MesonException('Could not read input file %s: %s' % (src, str(e)))
@@ -763,7 +799,7 @@ def do_conf_file(src, dst, confdata, format, encoding='utf-8'):
result.append(line)
dst_tmp = dst + '~'
try:
- with open(dst_tmp, 'w', encoding=encoding) as f:
+ with open(dst_tmp, 'w', encoding=encoding, newline='') as f:
f.writelines(result)
except Exception as e:
raise MesonException('Could not write output file %s: %s' % (dst, str(e)))
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index c11d044..91a52b1 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -17,20 +17,27 @@ import os.path
import importlib
import traceback
import argparse
+import codecs
+import shutil
from . import mesonlib
from . import mlog
-from . import mconf, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects
+from . import mconf, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata
from .mesonlib import MesonException
from .environment import detect_msys2_arch
from .wrap import wraptool
+# Note: when adding arguments, please also add them to the completion
+# scripts in $MESONSRC/data/shell-completions/
class CommandLineParser:
def __init__(self):
+ self.term_width = shutil.get_terminal_size().columns
+ self.formater = lambda prog: argparse.HelpFormatter(prog, max_help_position=int(self.term_width / 2), width=self.term_width)
+
self.commands = {}
self.hidden_commands = []
- self.parser = argparse.ArgumentParser(prog='meson')
+ self.parser = argparse.ArgumentParser(prog='meson', formatter_class=self.formater)
self.subparsers = self.parser.add_subparsers(title='Commands',
description='If no command is specified it defaults to setup command.')
self.add_command('setup', msetup.add_arguments, msetup.run,
@@ -51,24 +58,27 @@ class CommandLineParser:
help='Manage subprojects')
self.add_command('help', self.add_help_arguments, self.run_help_command,
help='Print help of a subcommand')
+ self.add_command('rewrite', lambda parser: rewriter.add_arguments(parser, self.formater), rewriter.run,
+ help='Modify the project definition')
# Hidden commands
- self.add_command('rewrite', rewriter.add_arguments, rewriter.run,
- help=argparse.SUPPRESS)
self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command,
help=argparse.SUPPRESS)
+ self.add_command('unstable-coredata', munstable_coredata.add_arguments, munstable_coredata.run,
+ help=argparse.SUPPRESS)
- def add_command(self, name, add_arguments_func, run_func, help):
+ def add_command(self, name, add_arguments_func, run_func, help, aliases=[]):
# FIXME: Cannot have hidden subparser:
# https://bugs.python.org/issue22848
if help == argparse.SUPPRESS:
- p = argparse.ArgumentParser(prog='meson ' + name)
+ p = argparse.ArgumentParser(prog='meson ' + name, formatter_class=self.formater)
self.hidden_commands.append(name)
else:
- p = self.subparsers.add_parser(name, help=help)
+ p = self.subparsers.add_parser(name, help=help, aliases=aliases, formatter_class=self.formater)
add_arguments_func(p)
p.set_defaults(run_func=run_func)
- self.commands[name] = p
+ for i in [name] + aliases:
+ self.commands[i] = p
def add_runpython_arguments(self, parser):
parser.add_argument('script_file')
@@ -77,6 +87,7 @@ class CommandLineParser:
def run_runpython_command(self, options):
import runpy
sys.argv[1:] = options.script_args
+ sys.path.insert(0, os.path.dirname(options.script_file))
runpy.run_path(options.script_file, run_name='__main__')
return 0
@@ -117,7 +128,7 @@ class CommandLineParser:
if os.environ.get('MESON_FORCE_BACKTRACE'):
raise
return 1
- except Exception as e:
+ except Exception:
if os.environ.get('MESON_FORCE_BACKTRACE'):
raise
traceback.print_exc()
@@ -148,6 +159,17 @@ def run_script_command(script_name, script_args):
mlog.exception(e)
return 1
+def ensure_stdout_accepts_unicode():
+ if sys.stdout.encoding and not sys.stdout.encoding.upper().startswith('UTF-'):
+ if sys.version_info >= (3, 7):
+ sys.stdout.reconfigure(errors='surrogateescape')
+ else:
+ sys.stdout = codecs.getwriter('utf-8')(sys.stdout.detach(),
+ errors='surrogateescape')
+ sys.stdout.encoding = 'UTF-8'
+ if not hasattr(sys.stdout, 'buffer'):
+ sys.stdout.buffer = sys.stdout.raw if hasattr(sys.stdout, 'raw') else sys.stdout
+
def run(original_args, mainfile):
if sys.version_info < (3, 5):
print('Meson works correctly only with python 3.5+.')
@@ -155,6 +177,11 @@ def run(original_args, mainfile):
print('Please update your environment')
return 1
+ # Meson gets confused if stdout can't output Unicode, if the
+ # locale isn't Unicode, just force stdout to accept it. This tries
+ # to emulate enough of PEP 540 to work elsewhere.
+ ensure_stdout_accepts_unicode()
+
# https://github.com/mesonbuild/meson/issues/3653
if sys.platform.lower() == 'msys':
mlog.error('This python3 seems to be msys/python on MSYS2 Windows, which is known to have path semantics incompatible with Meson')
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index 4cdd3c6..c6b6bbf 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -101,12 +101,12 @@ def set_chown(path, user=None, group=None, dir_fd=None, follow_symlinks=True):
def set_chmod(path, mode, dir_fd=None, follow_symlinks=True):
try:
os.chmod(path, mode, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
- except (NotImplementedError, OSError, SystemError) as e:
+ except (NotImplementedError, OSError, SystemError):
if not os.path.islink(path):
os.chmod(path, mode, dir_fd=dir_fd)
def sanitize_permissions(path, umask):
- if umask is None:
+ if umask == 'preserve':
return
new_perms = 0o777 if is_executable(path, follow_symlinks=False) else 0o666
new_perms &= ~umask
@@ -157,7 +157,7 @@ def restore_selinux_contexts():
'''
try:
subprocess.check_call(['selinuxenabled'])
- except (FileNotFoundError, PermissionError, subprocess.CalledProcessError) as e:
+ except (FileNotFoundError, PermissionError, subprocess.CalledProcessError):
# If we don't have selinux or selinuxenabled returned 1, failure
# is ignored quietly.
return
@@ -332,7 +332,7 @@ class Installer:
d.destdir = os.environ.get('DESTDIR', '')
d.fullprefix = destdir_join(d.destdir, d.prefix)
- if d.install_umask is not None:
+ if d.install_umask != 'preserve':
os.umask(d.install_umask)
self.did_install_something = False
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 3896c92..243dc5d 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -20,60 +20,99 @@ Currently only works for the Ninja backend. Others use generated
project files and don't need this info."""
import json
-from . import build, mtest, coredata as cdata
-from . import environment
+from . import build, coredata as cdata
from . import mesonlib
-from . import astinterpreter
-from . import mparser
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator
from . import mlog
-from . import compilers
-from . import optinterpreter
-from .interpreterbase import InvalidArguments
-from .backend import ninjabackend, backends
+from .backend import backends
+from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
+from typing import List, Optional
import sys, os
import pathlib
+def get_meson_info_file(info_dir: str):
+ return os.path.join(info_dir, 'meson-info.json')
+
+def get_meson_introspection_version():
+ return '1.0.0'
+
+def get_meson_introspection_required_version():
+ return ['>=1.0', '<2.0']
+
+def get_meson_introspection_types(coredata: Optional[cdata.CoreData] = None,
+ builddata: Optional[build.Build] = None,
+ backend: Optional[backends.Backend] = None,
+ sourcedir: Optional[str] = None):
+ if backend and builddata:
+ benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
+ testdata = backend.create_test_serialisation(builddata.get_tests())
+ installdata = backend.create_install_data()
+ else:
+ benchmarkdata = testdata = installdata = None
+
+ return {
+ 'benchmarks': {
+ 'func': lambda: list_benchmarks(benchmarkdata),
+ 'desc': 'List all benchmarks.',
+ },
+ 'buildoptions': {
+ 'func': lambda: list_buildoptions(coredata),
+ 'no_bd': lambda intr: list_buildoptions_from_source(intr),
+ 'desc': 'List all build options.',
+ },
+ 'buildsystem_files': {
+ 'func': lambda: list_buildsystem_files(builddata),
+ 'desc': 'List files that make up the build system.',
+ 'key': 'buildsystem-files',
+ },
+ 'dependencies': {
+ 'func': lambda: list_deps(coredata),
+ 'no_bd': lambda intr: list_deps_from_source(intr),
+ 'desc': 'List external dependencies.',
+ },
+ 'scan_dependencies': {
+ 'no_bd': lambda intr: list_deps_from_source(intr),
+ 'desc': 'Scan for dependencies used in the meson.build file.',
+ 'key': 'scan-dependencies',
+ },
+ 'installed': {
+ 'func': lambda: list_installed(installdata),
+ 'desc': 'List all installed files and directories.',
+ },
+ 'projectinfo': {
+ 'func': lambda: list_projinfo(builddata),
+ 'no_bd': lambda intr: list_projinfo_from_source(sourcedir, intr),
+ 'desc': 'Information about projects.',
+ },
+ 'targets': {
+ 'func': lambda: list_targets(builddata, installdata, backend),
+ 'no_bd': lambda intr: list_targets_from_source(intr),
+ 'desc': 'List top level targets.',
+ },
+ 'tests': {
+ 'func': lambda: list_tests(testdata),
+ 'desc': 'List all unit tests.',
+ }
+ }
+
def add_arguments(parser):
- parser.add_argument('--targets', action='store_true', dest='list_targets', default=False,
- help='List top level targets.')
- parser.add_argument('--installed', action='store_true', dest='list_installed', default=False,
- help='List all installed files and directories.')
+ intro_types = get_meson_introspection_types()
+ for key, val in intro_types.items():
+ flag = '--' + val.get('key', key)
+ parser.add_argument(flag, action='store_true', dest=key, default=False, help=val['desc'])
+
parser.add_argument('--target-files', action='store', dest='target_files', default=None,
help='List source files for a given target.')
- parser.add_argument('--buildsystem-files', action='store_true', dest='buildsystem_files', default=False,
- help='List files that make up the build system.')
- parser.add_argument('--buildoptions', action='store_true', dest='buildoptions', default=False,
- help='List all build options.')
- parser.add_argument('--tests', action='store_true', dest='tests', default=False,
- help='List all unit tests.')
- parser.add_argument('--benchmarks', action='store_true', dest='benchmarks', default=False,
- help='List all benchmarks.')
- parser.add_argument('--dependencies', action='store_true', dest='dependencies', default=False,
- help='List external dependencies.')
- parser.add_argument('--projectinfo', action='store_true', dest='projectinfo', default=False,
- help='Information about projects.')
parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja',
help='The backend to use for the --buildoptions introspection.')
+ parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
+ help='Print all available information.')
+ parser.add_argument('-i', '--indent', action='store_true', dest='indent', default=False,
+ help='Enable pretty printed JSON.')
+ parser.add_argument('-f', '--force-object-output', action='store_true', dest='force_dict', default=False,
+ help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
-def determine_installed_path(target, installdata):
- install_targets = []
- for i in target.outputs:
- for j in installdata.targets:
- if os.path.basename(j.fname) == i: # FIXME, might clash due to subprojects.
- install_targets += [j]
- break
- if len(install_targets) == 0:
- raise RuntimeError('Something weird happened. File a bug.')
-
- # Normalize the path by using os.path.sep consistently, etc.
- # Does not change the effective path.
- install_targets = list(map(lambda x: os.path.join(installdata.prefix, x.outdir, os.path.basename(x.fname)), install_targets))
- install_targets = list(map(lambda x: str(pathlib.PurePath(x)), install_targets))
-
- return install_targets
-
-
def list_installed(installdata):
res = {}
if installdata is not None:
@@ -86,203 +125,107 @@ def list_installed(installdata):
res[path] = os.path.join(installdata.prefix, installdir, os.path.basename(path))
for path, installpath, unused_custom_install_mode in installdata.man:
res[path] = os.path.join(installdata.prefix, installpath)
- print(json.dumps(res))
-
+ return res
-def list_targets(coredata, builddata, installdata):
+def list_targets_from_source(intr: IntrospectionInterpreter):
+ tlist = []
+ for i in intr.targets:
+ sources = []
+ for n in i['sources']:
+ args = []
+ if isinstance(n, FunctionNode):
+ args = list(n.args.arguments)
+ if n.func_name in build_target_functions:
+ args.pop(0)
+ elif isinstance(n, ArrayNode):
+ args = n.args.arguments
+ elif isinstance(n, ArgumentNode):
+ args = n.arguments
+ for j in args:
+ if isinstance(j, StringNode):
+ sources += [j.value]
+ elif isinstance(j, str):
+ sources += [j]
+
+ tlist += [{
+ 'name': i['name'],
+ 'id': i['id'],
+ 'type': i['type'],
+ 'defined_in': i['defined_in'],
+ 'filename': [os.path.join(i['subdir'], x) for x in i['outputs']],
+ 'build_by_default': i['build_by_default'],
+ 'target_sources': [{
+ 'language': 'unknown',
+ 'compiler': [],
+ 'parameters': [],
+ 'sources': [os.path.normpath(os.path.join(os.path.abspath(intr.source_root), i['subdir'], x)) for x in sources],
+ 'generated_sources': []
+ }],
+ 'subproject': None, # Subprojects are not supported
+ 'installed': i['installed']
+ }]
+
+ return tlist
+
+def list_targets(builddata: build.Build, installdata, backend: backends.Backend):
tlist = []
+ build_dir = builddata.environment.get_build_dir()
+ src_dir = builddata.environment.get_source_dir()
+
+ # Fast lookup table for installation files
+ install_lookuptable = {}
+ for i in installdata.targets:
+ outname = os.path.join(installdata.prefix, i.outdir, os.path.basename(i.fname))
+ install_lookuptable[os.path.basename(i.fname)] = str(pathlib.PurePath(outname))
+
for (idname, target) in builddata.get_targets().items():
- t = {'name': target.get_basename(), 'id': idname}
- fname = target.get_filename()
- if isinstance(fname, list):
- fname = [os.path.join(target.subdir, x) for x in fname]
- else:
- fname = os.path.join(target.subdir, fname)
- t['filename'] = fname
- if isinstance(target, build.Executable):
- typename = 'executable'
- elif isinstance(target, build.SharedLibrary):
- typename = 'shared library'
- elif isinstance(target, build.StaticLibrary):
- typename = 'static library'
- elif isinstance(target, build.CustomTarget):
- typename = 'custom'
- elif isinstance(target, build.RunTarget):
- typename = 'run'
- else:
- typename = 'unknown'
- t['type'] = typename
+ if not isinstance(target, build.Target):
+ raise RuntimeError('The target object in `builddata.get_targets()` is not of type `build.Target`. Please file a bug with this error message.')
+
+ t = {
+ 'name': target.get_basename(),
+ 'id': idname,
+ 'type': target.get_typename(),
+ 'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, 'meson.build')),
+ 'filename': [os.path.join(build_dir, target.subdir, x) for x in target.get_outputs()],
+ 'build_by_default': target.build_by_default,
+ 'target_sources': backend.get_introspection_data(idname, target),
+ 'subproject': target.subproject or None
+ }
+
if installdata and target.should_install():
t['installed'] = True
- t['install_filename'] = determine_installed_path(target, installdata)
+ t['install_filename'] = [install_lookuptable.get(x, None) for x in target.get_outputs()]
else:
t['installed'] = False
- t['build_by_default'] = target.build_by_default
tlist.append(t)
- print(json.dumps(tlist))
-
-def list_target_files(target_name, coredata, builddata):
- try:
- t = builddata.targets[target_name]
- sources = t.sources + t.extra_files
- except KeyError:
- print("Unknown target %s." % target_name)
+ return tlist
+
+def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> List[dict]:
+ return list_buildoptions(intr.coredata)
+
+def list_target_files(target_name: str, targets: list, source_dir: str):
+ sys.stderr.write("WARNING: The --target-files introspection API is deprecated. Use --targets instead.\n")
+ result = []
+ tgt = None
+
+ for i in targets:
+ if i['id'] == target_name:
+ tgt = i
+ break
+
+ if tgt is None:
+ print('Target with the ID "{}" could not be found'.format(target_name))
sys.exit(1)
- out = []
- for i in sources:
- if isinstance(i, mesonlib.File):
- i = os.path.join(i.subdir, i.fname)
- out.append(i)
- print(json.dumps(out))
-
-class BuildoptionsOptionHelper:
- # mimic an argparse namespace
- def __init__(self, cross_file):
- self.cross_file = cross_file
- self.native_file = None
- self.cmd_line_options = {}
-
-class BuildoptionsInterperter(astinterpreter.AstInterpreter):
- # Interpreter to detect the options without a build directory
- # Most of the code is stolen from interperter.Interpreter
- def __init__(self, source_root, subdir, backend, cross_file=None, subproject='', subproject_dir='subprojects', env=None):
- super().__init__(source_root, subdir)
-
- options = BuildoptionsOptionHelper(cross_file)
- self.cross_file = cross_file
- if env is None:
- self.environment = environment.Environment(source_root, None, options)
- else:
- self.environment = env
- self.subproject = subproject
- self.subproject_dir = subproject_dir
- self.coredata = self.environment.get_coredata()
- self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
- self.backend = backend
- self.default_options = {'backend': self.backend}
-
- self.funcs.update({
- 'project': self.func_project,
- 'add_languages': self.func_add_languages
- })
-
- def detect_compilers(self, lang, need_cross_compiler):
- comp, cross_comp = self.environment.detect_compilers(lang, need_cross_compiler)
- if comp is None:
- return None, None
-
- self.coredata.compilers[lang] = comp
- # Native compiler always exist so always add its options.
- new_options = comp.get_options()
- if cross_comp is not None:
- self.coredata.cross_compilers[lang] = cross_comp
- new_options.update(cross_comp.get_options())
-
- optprefix = lang + '_'
- for k, o in new_options.items():
- if not k.startswith(optprefix):
- raise RuntimeError('Internal error, %s has incorrect prefix.' % k)
- if k in self.environment.cmd_line_options:
- o.set_value(self.environment.cmd_line_options[k])
- self.coredata.compiler_options.setdefault(k, o)
-
- return comp, cross_comp
-
- def flatten_args(self, args):
- # Resolve mparser.ArrayNode if needed
- flattend_args = []
- for i in args:
- if isinstance(i, mparser.ArrayNode):
- flattend_args += [x.value for x in i.args.arguments]
- elif isinstance(i, str):
- flattend_args += [i]
- else:
- pass
- return flattend_args
-
- def add_languages(self, args):
- need_cross_compiler = self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler()
- for lang in sorted(args, key=compilers.sort_clink):
- lang = lang.lower()
- if lang not in self.coredata.compilers:
- (comp, _) = self.detect_compilers(lang, need_cross_compiler)
- if comp is None:
- return
- for optname in comp.base_options:
- if optname in self.coredata.base_options:
- continue
- oobj = compilers.base_options[optname]
- self.coredata.base_options[optname] = oobj
-
- def func_project(self, node, args, kwargs):
- if len(args) < 1:
- raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
-
- proj_langs = self.flatten_args(args[1:])
-
- if os.path.exists(self.option_file):
- oi = optinterpreter.OptionInterpreter(self.subproject)
- oi.process(self.option_file)
- self.coredata.merge_user_options(oi.options)
-
- def_opts = kwargs.get('default_options', [])
- if isinstance(def_opts, mparser.ArrayNode):
- def_opts = [x.value for x in def_opts.args.arguments]
-
- self.project_default_options = mesonlib.stringlistify(def_opts)
- self.project_default_options = cdata.create_options_dict(self.project_default_options)
- self.default_options.update(self.project_default_options)
- self.coredata.set_default_options(self.default_options, self.subproject, self.environment.cmd_line_options)
-
- if not self.is_subproject() and 'subproject_dir' in kwargs:
- spdirname = kwargs['subproject_dir']
- if isinstance(spdirname, str):
- self.subproject_dir = spdirname
- if not self.is_subproject():
- subprojects_dir = os.path.join(self.source_root, self.subproject_dir)
- if os.path.isdir(subprojects_dir):
- for i in os.listdir(subprojects_dir):
- if os.path.isdir(os.path.join(subprojects_dir, i)):
- self.do_subproject(i)
-
- self.coredata.init_backend_options(self.backend)
- options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
-
- self.coredata.set_options(options)
- self.add_languages(proj_langs)
-
- def do_subproject(self, dirname):
- subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
- subpr = os.path.join(subproject_dir_abs, dirname)
- try:
- subi = BuildoptionsInterperter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment)
- subi.analyze()
- except:
- return
-
- def func_add_languages(self, node, args, kwargs):
- return self.add_languages(self.flatten_args(args))
-
- def is_subproject(self):
- return self.subproject != ''
-
- def analyze(self):
- self.load_root_meson_file()
- self.sanity_check_ast()
- self.parse_project()
- self.run()
-
-def list_buildoptions_from_source(sourcedir, backend):
- # Make sure that log entries in other parts of meson don't interfere with the JSON output
- mlog.disable()
- backend = backends.get_backend_from_name(backend, None)
- intr = BuildoptionsInterperter(sourcedir, '', backend.name)
- intr.analyze()
- # Reenable logging just in case
- mlog.enable()
- list_buildoptions(intr.coredata)
-
-def list_buildoptions(coredata):
+
+ for i in tgt['target_sources']:
+ result += i['sources'] + i['generated_sources']
+
+ result = list(map(lambda x: os.path.relpath(x, source_dir), result))
+
+ return result
+
+def list_buildoptions(coredata: cdata.CoreData) -> List[dict]:
optlist = []
dir_option_names = ['bindir',
@@ -309,11 +252,12 @@ def list_buildoptions(coredata):
add_keys(optlist, core_options, 'core')
add_keys(optlist, coredata.backend_options, 'backend')
add_keys(optlist, coredata.base_options, 'base')
- add_keys(optlist, coredata.compiler_options, 'compiler')
+ # TODO others
+ add_keys(optlist, coredata.compiler_options.build, 'compiler')
add_keys(optlist, dir_options, 'directory')
add_keys(optlist, coredata.user_options, 'user')
add_keys(optlist, test_options, 'test')
- print(json.dumps(optlist))
+ return optlist
def add_keys(optlist, options, section):
keys = list(options.keys())
@@ -347,21 +291,28 @@ def find_buildsystem_files_list(src_dir):
filelist.append(os.path.relpath(os.path.join(root, f), src_dir))
return filelist
-def list_buildsystem_files(builddata):
+def list_buildsystem_files(builddata: build.Build):
src_dir = builddata.environment.get_source_dir()
filelist = find_buildsystem_files_list(src_dir)
- print(json.dumps(filelist))
+ filelist = [os.path.join(src_dir, x) for x in filelist]
+ return filelist
-def list_deps(coredata):
+def list_deps_from_source(intr: IntrospectionInterpreter):
+ result = []
+ for i in intr.dependencies:
+ result += [{k: v for k, v in i.items() if k in ['name', 'required', 'has_fallback', 'conditional']}]
+ return result
+
+def list_deps(coredata: cdata.CoreData):
result = []
for d in coredata.deps.values():
if d.found():
result += [{'name': d.name,
'compile_args': d.get_compile_args(),
'link_args': d.get_link_args()}]
- print(json.dumps(result))
+ return result
-def list_tests(testdata):
+def get_test_list(testdata):
result = []
for t in testdata:
to = {}
@@ -380,11 +331,18 @@ def list_tests(testdata):
to['suite'] = t.suite
to['is_parallel'] = t.is_parallel
result.append(to)
- print(json.dumps(result))
+ return result
+
+def list_tests(testdata):
+ return get_test_list(testdata)
+
+def list_benchmarks(benchdata):
+ return get_test_list(benchdata)
-def list_projinfo(builddata):
+def list_projinfo(builddata: build.Build):
result = {'version': builddata.project_version,
- 'descriptive_name': builddata.project_name}
+ 'descriptive_name': builddata.project_name,
+ 'subproject_dir': builddata.subproject_dir}
subprojects = []
for k, v in builddata.subprojects.items():
c = {'name': k,
@@ -392,110 +350,184 @@ def list_projinfo(builddata):
'descriptive_name': builddata.projects.get(k)}
subprojects.append(c)
result['subprojects'] = subprojects
- print(json.dumps(result))
-
-class ProjectInfoInterperter(astinterpreter.AstInterpreter):
- def __init__(self, source_root, subdir):
- super().__init__(source_root, subdir)
- self.funcs.update({'project': self.func_project})
- self.project_name = None
- self.project_version = None
-
- def func_project(self, node, args, kwargs):
- if len(args) < 1:
- raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
- self.project_name = args[0]
- self.project_version = kwargs.get('version', 'undefined')
- if isinstance(self.project_version, mparser.ElementaryNode):
- self.project_version = self.project_version.value
-
- def set_variable(self, varname, variable):
- pass
-
- def analyze(self):
- self.load_root_meson_file()
- self.sanity_check_ast()
- self.parse_project()
- self.run()
-
-def list_projinfo_from_source(sourcedir):
+ return result
+
+def list_projinfo_from_source(sourcedir: str, intr: IntrospectionInterpreter):
files = find_buildsystem_files_list(sourcedir)
+ files = [os.path.normpath(x) for x in files]
- result = {'buildsystem_files': []}
- subprojects = {}
-
- for f in files:
- f = f.replace('\\', '/')
- if f == 'meson.build':
- interpreter = ProjectInfoInterperter(sourcedir, '')
- interpreter.analyze()
- version = None
- if interpreter.project_version is str:
- version = interpreter.project_version
- result.update({'version': version, 'descriptive_name': interpreter.project_name})
- result['buildsystem_files'].append(f)
- elif f.startswith('subprojects/'):
- subproject_id = f.split('/')[1]
- subproject = subprojects.setdefault(subproject_id, {'buildsystem_files': []})
- subproject['buildsystem_files'].append(f)
- if f.count('/') == 2 and f.endswith('meson.build'):
- interpreter = ProjectInfoInterperter(os.path.join(sourcedir, 'subprojects', subproject_id), '')
- interpreter.analyze()
- subproject.update({'name': subproject_id, 'version': interpreter.project_version, 'descriptive_name': interpreter.project_name})
- else:
- result['buildsystem_files'].append(f)
+ for i in intr.project_data['subprojects']:
+ basedir = os.path.join(intr.subproject_dir, i['name'])
+ i['buildsystem_files'] = [x for x in files if x.startswith(basedir)]
+ files = [x for x in files if not x.startswith(basedir)]
- subprojects = [obj for name, obj in subprojects.items()]
- result['subprojects'] = subprojects
- print(json.dumps(result))
+ intr.project_data['buildsystem_files'] = files
+ intr.project_data['subproject_dir'] = intr.subproject_dir
+ return intr.project_data
+
+def print_results(options, results, indent):
+ if len(results) == 0 and not options.force_dict:
+ print('No command specified')
+ return 1
+ elif len(results) == 1 and not options.force_dict:
+ # Make sure to keep the existing output format for a single option
+ print(json.dumps(results[0][1], indent=indent))
+ else:
+ out = {}
+ for i in results:
+ out[i[0]] = i[1]
+ print(json.dumps(out, indent=indent))
+ return 0
def run(options):
datadir = 'meson-private'
+ infodir = 'meson-info'
if options.builddir is not None:
datadir = os.path.join(options.builddir, datadir)
- if options.builddir.endswith('/meson.build') or options.builddir.endswith('\\meson.build') or options.builddir == 'meson.build':
- sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
- if options.projectinfo:
- list_projinfo_from_source(sourcedir)
- return 0
- if options.buildoptions:
- list_buildoptions_from_source(sourcedir, options.backend)
- return 0
- if not os.path.isdir(datadir):
- print('Current directory is not a build dir. Please specify it or '
- 'change the working directory to it.')
+ infodir = os.path.join(options.builddir, infodir)
+ indent = 4 if options.indent else None
+ results = []
+ sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
+ intro_types = get_meson_introspection_types(sourcedir=sourcedir)
+
+ if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
+ # Make sure that log entries in other parts of meson don't interfere with the JSON output
+ mlog.disable()
+ backend = backends.get_backend_from_name(options.backend, None)
+ intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+ intr.analyze()
+ # Reenable logging just in case
+ mlog.enable()
+ for key, val in intro_types.items():
+ if (not options.all and not getattr(options, key, False)) or 'no_bd' not in val:
+ continue
+ results += [(key, val['no_bd'](intr))]
+ return print_results(options, results, indent)
+
+ infofile = get_meson_info_file(infodir)
+ if not os.path.isdir(datadir) or not os.path.isdir(infodir) or not os.path.isfile(infofile):
+ print('Current directory is not a meson build directory.'
+ 'Please specify a valid build dir or change the working directory to it.'
+ 'It is also possible that the build directory was generated with an old'
+ 'meson version. Please regenerate it in this case.')
return 1
- coredata = cdata.load(options.builddir)
- builddata = build.load(options.builddir)
- testdata = mtest.load_tests(options.builddir)
- benchmarkdata = mtest.load_benchmarks(options.builddir)
-
- # Install data is only available with the Ninja backend
- try:
- installdata = ninjabackend.load(options.builddir)
- except FileNotFoundError:
- installdata = None
-
- if options.list_targets:
- list_targets(coredata, builddata, installdata)
- elif options.list_installed:
- list_installed(installdata)
- elif options.target_files is not None:
- list_target_files(options.target_files, coredata, builddata)
- elif options.buildsystem_files:
- list_buildsystem_files(builddata)
- elif options.buildoptions:
- list_buildoptions(coredata)
- elif options.tests:
- list_tests(testdata)
- elif options.benchmarks:
- list_tests(benchmarkdata)
- elif options.dependencies:
- list_deps(coredata)
- elif options.projectinfo:
- list_projinfo(builddata)
+ intro_vers = '0.0.0'
+ source_dir = None
+ with open(infofile, 'r') as fp:
+ raw = json.load(fp)
+ intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0')
+ source_dir = raw.get('directories', {}).get('source', None)
+
+ vers_to_check = get_meson_introspection_required_version()
+ for i in vers_to_check:
+ if not mesonlib.version_compare(intro_vers, i):
+ print('Introspection version {} is not supported. '
+ 'The required version is: {}'
+ .format(intro_vers, ' and '.join(vers_to_check)))
+ return 1
+
+ # Handle the one option that does not have its own JSON file (maybe deprecate / remove this?)
+ if options.target_files is not None:
+ targets_file = os.path.join(infodir, 'intro-targets.json')
+ with open(targets_file, 'r') as fp:
+ targets = json.load(fp)
+ results += [('target_files', list_target_files(options.target_files, targets, source_dir))]
+
+ # Extract introspection information from JSON
+ for i in intro_types.keys():
+ if 'func' not in intro_types[i]:
+ continue
+ if not options.all and not getattr(options, i, False):
+ continue
+ curr = os.path.join(infodir, 'intro-{}.json'.format(i))
+ if not os.path.isfile(curr):
+ print('Introspection file {} does not exist.'.format(curr))
+ return 1
+ with open(curr, 'r') as fp:
+ results += [(i, json.load(fp))]
+
+ return print_results(options, results, indent)
+
+updated_introspection_files = []
+
+def write_intro_info(intro_info, info_dir):
+ global updated_introspection_files
+ for i in intro_info:
+ out_file = os.path.join(info_dir, 'intro-{}.json'.format(i[0]))
+ tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+ with open(tmp_file, 'w') as fp:
+ json.dump(i[1], fp)
+ fp.flush() # Not sure if this is needed
+ os.replace(tmp_file, out_file)
+ updated_introspection_files += [i[0]]
+
+def generate_introspection_file(builddata: build.Build, backend: backends.Backend):
+ coredata = builddata.environment.get_coredata()
+ intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
+ intro_info = []
+
+ for key, val in intro_types.items():
+ if 'func' not in val:
+ continue
+ intro_info += [(key, val['func']())]
+
+ write_intro_info(intro_info, builddata.environment.info_dir)
+
+def update_build_options(coredata: cdata.CoreData, info_dir):
+ intro_info = [
+ ('buildoptions', list_buildoptions(coredata))
+ ]
+
+ write_intro_info(intro_info, info_dir)
+
+def split_version_string(version: str):
+ vers_list = version.split('.')
+ return {
+ 'full': version,
+ 'major': int(vers_list[0] if len(vers_list) > 0 else 0),
+ 'minor': int(vers_list[1] if len(vers_list) > 1 else 0),
+ 'patch': int(vers_list[2] if len(vers_list) > 2 else 0)
+ }
+
+def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False):
+ global updated_introspection_files
+ info_dir = builddata.environment.info_dir
+ info_file = get_meson_info_file(info_dir)
+ intro_types = get_meson_introspection_types()
+ intro_info = {}
+
+ for i in intro_types.keys():
+ if 'func' not in intro_types[i]:
+ continue
+ intro_info[i] = {
+ 'file': 'intro-{}.json'.format(i),
+ 'updated': i in updated_introspection_files
+ }
+
+ info_data = {
+ 'meson_version': split_version_string(cdata.version),
+ 'directories': {
+ 'source': builddata.environment.get_source_dir(),
+ 'build': builddata.environment.get_build_dir(),
+ 'info': info_dir,
+ },
+ 'introspection': {
+ 'version': split_version_string(get_meson_introspection_version()),
+ 'information': intro_info,
+ },
+ 'build_files_updated': build_files_updated,
+ }
+
+ if len(errors) > 0:
+ info_data['error'] = True
+ info_data['error_list'] = [x if isinstance(x, str) else str(x) for x in errors]
else:
- print('No command specified')
- return 1
- return 0
+ info_data['error'] = False
+
+ # Write the data to disc
+ tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+ with open(tmp_file, 'w') as fp:
+ json.dump(info_data, fp)
+ fp.flush()
+ os.replace(tmp_file, info_file)
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index 57debb0..0434274 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -48,6 +48,7 @@ log_depth = 0
log_timestamp_start = None
log_fatal_warnings = False
log_disable_stdout = False
+log_errors_only = False
def disable():
global log_disable_stdout
@@ -57,6 +58,14 @@ def enable():
global log_disable_stdout
log_disable_stdout = False
+def set_quiet():
+ global log_errors_only
+ log_errors_only = True
+
+def set_verbose():
+ global log_errors_only
+ log_errors_only = False
+
def initialize(logdir, fatal_warnings=False):
global log_dir, log_file, log_fatal_warnings
log_dir = logdir
@@ -152,14 +161,16 @@ def debug(*args, **kwargs):
print(*arr, file=log_file, **kwargs) # Log file never gets ANSI codes.
log_file.flush()
-def log(*args, **kwargs):
+def log(*args, is_error=False, **kwargs):
+ global log_errors_only
arr = process_markup(args, False)
if log_file is not None:
print(*arr, file=log_file, **kwargs) # Log file never gets ANSI codes.
log_file.flush()
if colorize_console:
arr = process_markup(args, True)
- force_print(*arr, **kwargs)
+ if not log_errors_only or is_error:
+ force_print(*arr, **kwargs)
def _log_error(severity, *args, **kwargs):
from .mesonlib import get_error_location_string
@@ -187,20 +198,23 @@ def _log_error(severity, *args, **kwargs):
raise MesonException("Fatal warnings enabled, aborting")
def error(*args, **kwargs):
- return _log_error('error', *args, **kwargs)
+ return _log_error('error', *args, **kwargs, is_error=True)
def warning(*args, **kwargs):
- return _log_error('warning', *args, **kwargs)
+ return _log_error('warning', *args, **kwargs, is_error=True)
def deprecation(*args, **kwargs):
- return _log_error('deprecation', *args, **kwargs)
+ return _log_error('deprecation', *args, **kwargs, is_error=True)
-def exception(e):
+def exception(e, prefix=red('ERROR:')):
log()
+ args = []
if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
- log('%s:%d:%d:' % (e.file, e.lineno, e.colno), red('ERROR: '), e)
- else:
- log(red('ERROR:'), e)
+ args.append('%s:%d:%d:' % (e.file, e.lineno, e.colno))
+ if prefix:
+ args.append(prefix)
+ args.append(e)
+ log(*args)
# Format a list for logging purposes as a string. It separates
# all but the last item with commas, and the last with 'and'.
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index 6b6aa8b..2df4d7c 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -58,6 +58,10 @@ class GResourceHeaderTarget(build.CustomTarget):
def __init__(self, name, subdir, subproject, kwargs):
super().__init__(name, subdir, subproject, kwargs)
+class GResourceObjectTarget(build.CustomTarget):
+ def __init__(self, name, subdir, subproject, kwargs):
+ super().__init__(name, subdir, subproject, kwargs)
+
class GirTarget(build.CustomTarget):
def __init__(self, name, subdir, subproject, kwargs):
super().__init__(name, subdir, subproject, kwargs)
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
new file mode 100644
index 0000000..d98213d
--- /dev/null
+++ b/mesonbuild/modules/cmake.py
@@ -0,0 +1,221 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import re
+import os, os.path, pathlib
+import shutil
+
+from . import ExtensionModule, ModuleReturnValue
+
+from .. import build, dependencies, mesonlib, mlog
+from ..interpreterbase import permittedKwargs
+from ..interpreter import ConfigurationDataHolder
+
+
+COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
+
+# Taken from https://github.com/Kitware/CMake/blob/master/Modules/CMakePackageConfigHelpers.cmake
+PACKAGE_INIT_BASE = '''
+####### Expanded from \\@PACKAGE_INIT\\@ by configure_package_config_file() #######
+####### Any changes to this file will be overwritten by the next CMake run ####
+####### The input file was @inputFileName@ ########
+get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/@PACKAGE_RELATIVE_PATH@" ABSOLUTE)
+'''
+PACKAGE_INIT_EXT = '''
+# Use original install prefix when loaded through a "/usr move"
+# cross-prefix symbolic link such as /lib -> /usr/lib.
+get_filename_component(_realCurr "${CMAKE_CURRENT_LIST_DIR}" REALPATH)
+get_filename_component(_realOrig "@absInstallDir@" REALPATH)
+if(_realCurr STREQUAL _realOrig)
+ set(PACKAGE_PREFIX_DIR "@installPrefix@")
+endif()
+unset(_realOrig)
+unset(_realCurr)
+'''
+
+
+class CmakeModule(ExtensionModule):
+ cmake_detected = False
+ cmake_root = None
+
+ def __init__(self, interpreter):
+ super().__init__(interpreter)
+ self.snippets.add('configure_package_config_file')
+
+ def detect_voidp_size(self, compilers, env):
+ compiler = compilers.get('c', None)
+ if not compiler:
+ compiler = compilers.get('cpp', None)
+
+ if not compiler:
+ raise mesonlib.MesonException('Requires a C or C++ compiler to compute sizeof(void *).')
+
+ return compiler.sizeof('void *', '', env)
+
+ def detect_cmake(self):
+ if self.cmake_detected:
+ return True
+
+ cmakebin = dependencies.ExternalProgram('cmake', silent=False)
+ p, stdout, stderr = mesonlib.Popen_safe(cmakebin.get_command() + ['--system-information', '-G', 'Ninja'])[0:3]
+ if p.returncode != 0:
+ mlog.log('error retrieving cmake information: returnCode={0} stdout={1} stderr={2}'.format(p.returncode, stdout, stderr))
+ return False
+
+ match = re.search('\n_INCLUDED_FILE \\"([^"]+)"\n', stdout.strip())
+ if not match:
+ mlog.log('unable to determine cmake root')
+ return False
+
+ # compilerpath is something like '/usr/share/cmake-3.5/Modules/Platform/Linux-GNU-CXX.cmake'
+ # or 'C:/Program Files (x86)/CMake 2.8/share/cmake-2.8/Modules/Platform/Windows-MSVC-CXX.cmake' under windows
+ compilerpath = match.group(1)
+ pos = compilerpath.find('/Modules/Platform/')
+ if pos < 0:
+ mlog.log('unknown _INCLUDED_FILE path scheme')
+ return False
+
+ cmakePath = pathlib.PurePath(compilerpath[0:pos])
+ self.cmake_root = os.path.join(*cmakePath.parts)
+ self.cmake_detected = True
+ return True
+
+ @permittedKwargs({'version', 'name', 'compatibility', 'install_dir'})
+ def write_basic_package_version_file(self, state, _args, kwargs):
+ version = kwargs.get('version', None)
+ if not isinstance(version, str):
+ raise mesonlib.MesonException('Version must be specified.')
+
+ name = kwargs.get('name', None)
+ if not isinstance(name, str):
+ raise mesonlib.MesonException('Name not specified.')
+
+ compatibility = kwargs.get('compatibility', 'AnyNewerVersion')
+ if not isinstance(compatibility, str):
+ raise mesonlib.MesonException('compatibility is not string.')
+ if compatibility not in COMPATIBILITIES:
+ raise mesonlib.MesonException('compatibility must be either AnyNewerVersion, SameMajorVersion, SameMinorVersion or ExactVersion.')
+
+ if not self.detect_cmake():
+ raise mesonlib.MesonException('Unable to find cmake')
+
+ pkgroot = kwargs.get('install_dir', None)
+ if pkgroot is None:
+ pkgroot = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name)
+ if not isinstance(pkgroot, str):
+ raise mesonlib.MesonException('Install_dir must be a string.')
+
+ template_file = os.path.join(self.cmake_root, 'Modules', 'BasicConfigVersion-{}.cmake.in'.format(compatibility))
+ if not os.path.exists(template_file):
+ raise mesonlib.MesonException('your cmake installation doesn\'t support the {} compatibility'.format(compatibility))
+
+ version_file = os.path.join(state.environment.scratch_dir, '{}ConfigVersion.cmake'.format(name))
+
+ conf = {
+ 'CVF_VERSION': (version, ''),
+ 'CMAKE_SIZEOF_VOID_P': (str(self.detect_voidp_size(state.compilers, state.environment)), '')
+ }
+ mesonlib.do_conf_file(template_file, version_file, conf, 'meson')
+
+ res = build.Data(mesonlib.File(True, state.environment.get_scratch_dir(), version_file), pkgroot)
+ return ModuleReturnValue(res, [res])
+
+ def create_package_file(self, infile, outfile, PACKAGE_RELATIVE_PATH, extra, confdata):
+ package_init = PACKAGE_INIT_BASE.replace('@PACKAGE_RELATIVE_PATH@', PACKAGE_RELATIVE_PATH)
+ package_init = package_init.replace('@inputFileName@', infile)
+ package_init += extra
+
+ try:
+ with open(infile, "r") as fin:
+ data = fin.readlines()
+ except Exception as e:
+ raise mesonlib.MesonException('Could not read input file %s: %s' % (infile, str(e)))
+
+ result = []
+ regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+ for line in data:
+ line = line.replace('@PACKAGE_INIT@', package_init)
+ line, _missing = mesonlib.do_replacement(regex, line, 'meson', confdata)
+
+ result.append(line)
+
+ outfile_tmp = outfile + "~"
+ with open(outfile_tmp, "w", encoding='utf-8') as fout:
+ fout.writelines(result)
+
+ shutil.copymode(infile, outfile_tmp)
+ mesonlib.replace_if_different(outfile, outfile_tmp)
+
+ @permittedKwargs({'input', 'name', 'install_dir', 'configuration'})
+ def configure_package_config_file(self, interpreter, state, args, kwargs):
+ if len(args) > 0:
+ raise mesonlib.MesonException('configure_package_config_file takes only keyword arguments.')
+
+ if 'input' not in kwargs:
+ raise mesonlib.MesonException('configure_package_config_file requires "input" keyword.')
+ inputfile = kwargs['input']
+ if isinstance(inputfile, list):
+ if len(inputfile) != 1:
+ m = "Keyword argument 'input' requires exactly one file"
+ raise mesonlib.MesonException(m)
+ inputfile = inputfile[0]
+ if not isinstance(inputfile, (str, mesonlib.File)):
+ raise mesonlib.MesonException("input must be a string or a file")
+ if isinstance(inputfile, str):
+ inputfile = mesonlib.File.from_source_file(state.environment.source_dir, state.subdir, inputfile)
+
+ ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir)
+
+ if 'name' not in kwargs:
+ raise mesonlib.MesonException('"name" not specified.')
+ name = kwargs['name']
+
+ (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, '{}Config.cmake'.format(name)))
+ ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
+
+ install_dir = kwargs.get('install_dir', os.path.join(
+ state.environment.coredata.get_builtin_option('libdir'), 'cmake', name))
+ if not isinstance(install_dir, str):
+ raise mesonlib.MesonException('"install_dir" must be a string.')
+
+ if 'configuration' not in kwargs:
+ raise mesonlib.MesonException('"configuration" not specified.')
+ conf = kwargs['configuration']
+ if not isinstance(conf, ConfigurationDataHolder):
+ raise mesonlib.MesonException('Argument "configuration" is not of type configuration_data')
+
+ prefix = state.environment.coredata.get_builtin_option('prefix')
+ abs_install_dir = install_dir
+ if not os.path.isabs(abs_install_dir):
+ abs_install_dir = os.path.join(prefix, install_dir)
+
+ PACKAGE_RELATIVE_PATH = os.path.relpath(prefix, abs_install_dir)
+ extra = ''
+ if re.match('^(/usr)?/lib(64)?/.+', abs_install_dir):
+ extra = PACKAGE_INIT_EXT.replace('@absInstallDir@', abs_install_dir)
+ extra = extra.replace('@installPrefix@', prefix)
+
+ self.create_package_file(ifile_abs, ofile_abs, PACKAGE_RELATIVE_PATH, extra, conf.held_object)
+ conf.mark_used()
+
+ conffile = os.path.normpath(inputfile.relative_name())
+ if conffile not in interpreter.build_def_files:
+ interpreter.build_def_files.append(conffile)
+
+ res = build.Data(mesonlib.File(True, ofile_path, ofile_fname), install_dir)
+ interpreter.build.data.append(res)
+
+ return res
+
+def initialize(*args, **kwargs):
+ return CmakeModule(*args, **kwargs)
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index be99059..8833a21 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -16,6 +16,8 @@
functionality such as gobject-introspection, gresources and gtk-doc'''
import os
+import re
+import sys
import copy
import shlex
import subprocess
@@ -25,11 +27,13 @@ from .. import mlog
from .. import mesonlib
from .. import compilers
from .. import interpreter
-from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
+from . import GResourceTarget, GResourceHeaderTarget, GResourceObjectTarget, GirTarget, TypelibTarget, VapiTarget
from . import get_include_args
from . import ExtensionModule
from . import ModuleReturnValue
-from ..mesonlib import MesonException, OrderedSet, Popen_safe, extract_as_list
+from ..mesonlib import (
+ MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list
+)
from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs
@@ -40,6 +44,8 @@ from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewK
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>= 2.51.1'
+gresource_ld_binary_needed_version = '>= 2.60'
+
native_glib_version = None
girwarning_printed = False
gdbuswarning_printed = False
@@ -164,7 +170,10 @@ class GnomeModule(ExtensionModule):
cmd += ['--sourcedir', source_dir]
if 'c_name' in kwargs:
- cmd += ['--c-name', kwargs.pop('c_name')]
+ c_name = kwargs.pop('c_name')
+ cmd += ['--c-name', c_name]
+ else:
+ c_name = None
export = kwargs.pop('export', False)
if not export:
cmd += ['--internal']
@@ -173,13 +182,23 @@ class GnomeModule(ExtensionModule):
cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))
+ gresource_ld_binary = False
+ if mesonlib.is_linux() and mesonlib.version_compare(glib_version, gresource_ld_binary_needed_version) and not state.environment.is_cross_build():
+ ld_obj = self.interpreter.find_program_impl('ld', required=False)
+ if ld_obj.found():
+ gresource_ld_binary = True
+
gresource = kwargs.pop('gresource_bundle', False)
+
if gresource:
- output = args[0] + '.gresource'
- name = args[0] + '_gresource'
- else:
- output = args[0] + '.c'
- name = args[0] + '_c'
+ g_output = args[0] + '.gresource'
+ g_name = args[0] + '_gresource'
+ elif gresource_ld_binary:
+ g_output = args[0] + '_ld_binary.gresource'
+ g_name = args[0] + '_ld_binary_gresource'
+
+ output = args[0] + '.c'
+ name = args[0] + '_c'
if kwargs.get('install', False) and not gresource:
raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
@@ -193,18 +212,41 @@ class GnomeModule(ExtensionModule):
kwargs['input'] = args[1]
kwargs['output'] = output
kwargs['depends'] = depends
+ if gresource or gresource_ld_binary:
+ g_kwargs = copy.deepcopy(kwargs)
+ g_kwargs['input'] = args[1]
+ g_kwargs['output'] = g_output
+ g_kwargs['depends'] = depends
if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
# This will eventually go out of sync if dependencies are added
kwargs['depend_files'] = depend_files
- kwargs['command'] = cmd
+ if gresource_ld_binary:
+ kwargs['command'] = copy.copy(cmd) + ['--external-data']
+ else:
+ kwargs['command'] = cmd
+ if gresource or gresource_ld_binary:
+ # This will eventually go out of sync if dependencies are added
+ g_kwargs['depend_files'] = depend_files
+ g_kwargs['command'] = cmd
else:
depfile = kwargs['output'] + '.d'
- kwargs['depfile'] = depfile
- kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
- target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)
+ if gresource_ld_binary:
+ depfile2 = kwargs['output'] + '.2.d'
+ kwargs['depfile'] = depfile2
+ kwargs['command'] = copy.copy(cmd) + ['--external-data', '--dependency-file', '@DEPFILE@']
+ else:
+ kwargs['depfile'] = depfile
+ kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
+ if gresource or gresource_ld_binary:
+ g_kwargs['depfile'] = depfile
+ g_kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
+
+ if gresource or gresource_ld_binary:
+ target_g = GResourceTarget(g_name, state.subdir, state.subproject, g_kwargs)
+ if gresource: # Only one target for .gresource files
+ return ModuleReturnValue(target_g, [target_g])
- if gresource: # Only one target for .gresource files
- return ModuleReturnValue(target_c, [target_c])
+ target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)
h_kwargs = {
'command': cmd,
@@ -220,9 +262,99 @@ class GnomeModule(ExtensionModule):
h_kwargs['install_dir'] = kwargs.get('install_dir',
state.environment.coredata.get_builtin_option('includedir'))
target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, state.subproject, h_kwargs)
- rv = [target_c, target_h]
+
+ if gresource_ld_binary:
+ return self._create_gresource_ld_binary_targets(args, state, ifile, ld_obj, c_name, target_g, g_output, target_c, target_h)
+ else:
+ rv = [target_c, target_h]
+
return ModuleReturnValue(rv, rv)
+ def _create_gresource_ld_binary_targets(self, args, state, ifile, ld_obj, c_name, target_g, g_output, target_c, target_h):
+ if c_name is None:
+ # Create proper c identifier from filename in the way glib-compile-resources does
+ c_name = os.path.basename(ifile).partition('.')[0]
+ c_name = c_name.replace('-', '_')
+ c_name = re.sub(r'^([^(_a-zA-Z)])+', '', c_name)
+ c_name = re.sub(r'([^(_a-zA-Z0-9)])', '', c_name)
+
+ c_name_no_underscores = re.sub(r'^_+', '', c_name)
+
+ ld = ld_obj.get_command()
+ objcopy_object = self.interpreter.find_program_impl('objcopy', required=False)
+ if objcopy_object.found():
+ objcopy = objcopy_object.get_command()
+ else:
+ objcopy = None
+
+ o_kwargs = {
+ 'command': [ld, '-r', '-b', 'binary', '@INPUT@', '-o', '@OUTPUT@'],
+ 'input': target_g,
+ 'output': args[0] + '1.o'
+ }
+
+ target_o = GResourceObjectTarget(args[0] + '1_o', state.subdir, state.subproject, o_kwargs)
+
+ builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
+ linkerscript_name = args[0] + '_map.ld'
+ linkerscript_path = os.path.join(builddir, linkerscript_name)
+ linkerscript_file = open(linkerscript_path, 'w')
+
+ # Create symbol name the way bfd does
+ binary_name = os.path.join(state.subdir, g_output)
+ encoding = sys.getfilesystemencoding()
+ symbol_name = re.sub(rb'([^(_a-zA-Z0-9)])', b'_', binary_name.encode(encoding)).decode(encoding)
+
+ linkerscript_string = '''SECTIONS
+{{
+ .gresource.{} : ALIGN(8)
+ {{
+ {}_resource_data = _binary_{}_start;
+ }}
+ .data :
+ {{
+ *(.data)
+ }}
+}}'''.format(c_name_no_underscores, c_name, symbol_name)
+
+ linkerscript_file.write(linkerscript_string)
+
+ o2_kwargs = {
+ 'command': [ld, '-r', '-T', os.path.join(state.subdir, linkerscript_name), '@INPUT@', '-o', '@OUTPUT@'],
+ 'input': target_o,
+ 'output': args[0] + '2.o',
+ }
+ target_o2 = GResourceObjectTarget(args[0] + '2_o', state.subdir, state.subproject, o2_kwargs)
+
+ if objcopy is not None:
+ objcopy_cmd = [objcopy, '--set-section-flags', '.gresource.' + c_name + '=readonly,alloc,load,data']
+ objcopy_cmd += ['-N', '_binary_' + symbol_name + '_start']
+ objcopy_cmd += ['-N', '_binary_' + symbol_name + '_end']
+ objcopy_cmd += ['-N', '_binary_' + symbol_name + '_size']
+ objcopy_cmd += ['@INPUT@', '@OUTPUT@']
+
+ o3_kwargs = {
+ 'command': objcopy_cmd,
+ 'input': target_o2,
+ 'output': args[0] + '3.o'
+ }
+
+ target_o3 = GResourceObjectTarget(args[0] + '3_o', state.subdir, state.subproject, o3_kwargs)
+
+ rv1 = [target_c, target_h, target_o3]
+ if target_g.get_id() not in self.interpreter.build.targets:
+ rv2 = rv1 + [target_g, target_o, target_o2]
+ else:
+ rv2 = rv1 + [target_o, target_o2]
+ else:
+ rv1 = [target_c, target_h, target_o2]
+ if target_g.get_id() not in self.interpreter.build.targets:
+ rv2 = rv1 + [target_g, target_o]
+ else:
+ rv2 = rv1 + [target_o]
+
+ return ModuleReturnValue(rv1, rv2)
+
def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
cmd = ['glib-compile-resources',
@@ -531,11 +663,7 @@ class GnomeModule(ExtensionModule):
ret = []
for lang in langs:
- if state.environment.is_cross_build():
- link_args = state.environment.cross_info.config["properties"].get(lang + '_link_args', "")
- else:
- link_args = state.environment.coredata.get_external_link_args(lang)
-
+ link_args = state.environment.coredata.get_external_link_args(MachineChoice.HOST, lang)
for link_arg in link_args:
if link_arg.startswith('-L'):
ret.append(link_arg)
@@ -607,9 +735,15 @@ class GnomeModule(ExtensionModule):
if 'b_sanitize' in compiler.base_options:
sanitize = state.environment.coredata.base_options['b_sanitize'].value
cflags += compilers.sanitizer_compile_args(sanitize)
- if 'address' in sanitize.split(','):
- internal_ldflags += ['-lasan'] # This must be first in ldflags
- # FIXME: Linking directly to libasan is not recommended but g-ir-scanner
+ sanitize = sanitize.split(',')
+ # These must be first in ldflags
+ if 'address' in sanitize:
+ internal_ldflags += ['-lasan']
+ if 'thread' in sanitize:
+ internal_ldflags += ['-ltsan']
+ if 'undefined' in sanitize:
+ internal_ldflags += ['-lubsan']
+ # FIXME: Linking directly to lib*san is not recommended but g-ir-scanner
# does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
# ldflags += compilers.sanitizer_link_args(sanitize)
@@ -714,10 +848,7 @@ class GnomeModule(ExtensionModule):
def _get_external_args_for_langs(self, state, langs):
ret = []
for lang in langs:
- if state.environment.is_cross_build():
- ret += state.environment.cross_info.config["properties"].get(lang + '_args', "")
- else:
- ret += state.environment.coredata.get_external_args(lang)
+ ret += state.environment.coredata.get_external_args(MachineChoice.HOST, lang)
return ret
@staticmethod
@@ -1042,13 +1173,11 @@ This will become a hard error in the future.''')
ldflags.update(internal_ldflags)
ldflags.update(external_ldflags)
+ cflags.update(state.environment.coredata.get_external_args(MachineChoice.HOST, 'c'))
+ ldflags.update(state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c'))
if state.environment.is_cross_build():
- cflags.update(state.environment.cross_info.config["properties"].get('c_args', ""))
- ldflags.update(state.environment.cross_info.config["properties"].get('c_link_args', ""))
compiler = state.environment.coredata.cross_compilers.get('c')
else:
- cflags.update(state.environment.coredata.get_external_args('c'))
- ldflags.update(state.environment.coredata.get_external_link_args('c'))
compiler = state.environment.coredata.compilers.get('c')
compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)])
diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py
index c07391e..5064803 100644
--- a/mesonbuild/modules/hotdoc.py
+++ b/mesonbuild/modules/hotdoc.py
@@ -155,6 +155,19 @@ class HotdocTargetBuilder:
def replace_dirs_in_string(self, string):
return string.replace("@SOURCE_ROOT@", self.sourcedir).replace("@BUILD_ROOT@", self.builddir)
+ def process_gi_c_source_roots(self):
+ if self.hotdoc.run_hotdoc(['--has-extension=gi-extension']) != 0:
+ return
+
+ value, _ = self.get_value([list, str], 'gi_c_source_roots', default=[], force_list=True)
+ value.extend([
+ os.path.join(self.state.environment.get_source_dir(),
+ self.interpreter.subproject_dir, self.state.subproject),
+ os.path.join(self.state.environment.get_build_dir(), self.interpreter.subproject_dir, self.state.subproject)
+ ])
+
+ self.cmd += ['--gi-c-source-roots'] + value
+
def process_dependencies(self, deps):
cflags = set()
for dep in mesonlib.listify(ensure_list(deps)):
@@ -271,6 +284,7 @@ class HotdocTargetBuilder:
self.process_known_arg('--c-include-directories',
[Dependency, build.StaticLibrary, build.SharedLibrary, list], argname="dependencies",
force_list=True, value_processor=self.process_dependencies)
+ self.process_gi_c_source_roots()
self.process_extra_assets()
self.process_extra_extension_paths()
self.process_subprojects()
diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py
index aeab813..4b37069 100644
--- a/mesonbuild/modules/i18n.py
+++ b/mesonbuild/modules/i18n.py
@@ -44,6 +44,14 @@ PRESET_ARGS = {
'--flag=g_string_append_printf:2:c-format',
'--flag=g_error_new:3:c-format',
'--flag=g_set_error:4:c-format',
+ '--flag=g_markup_printf_escaped:1:c-format',
+ '--flag=g_log:3:c-format',
+ '--flag=g_print:1:c-format',
+ '--flag=g_printerr:1:c-format',
+ '--flag=g_printf:1:c-format',
+ '--flag=g_fprintf:2:c-format',
+ '--flag=g_sprintf:2:c-format',
+ '--flag=g_snprintf:3:c-format',
]
}
@@ -56,8 +64,7 @@ class I18nModule(ExtensionModule):
return [path.join(src_dir, d) for d in dirs]
@FeatureNew('i18n.merge_file', '0.37.0')
- @permittedKwargs({'languages', 'data_dirs', 'preset', 'args', 'po_dir', 'type',
- 'input', 'output', 'install', 'install_dir'})
+ @permittedKwargs(build.CustomTarget.known_kwargs | {'data_dirs', 'po_dir', 'type'})
def merge_file(self, state, args, kwargs):
podir = kwargs.pop('po_dir', None)
if not podir:
@@ -102,7 +109,8 @@ class I18nModule(ExtensionModule):
return ModuleReturnValue(ct, [ct])
@FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset'])
- @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install'})
+ @FeatureNewKwargs('i18n.gettext', '0.50.0', ['install_dir'])
+ @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install', 'install_dir'})
def gettext(self, state, args, kwargs):
if len(args) != 1:
raise coredata.MesonException('Gettext requires one positional argument (package name).')
@@ -151,10 +159,11 @@ class I18nModule(ExtensionModule):
install = kwargs.get('install', True)
if install:
+ install_dir = kwargs.get('install_dir', state.environment.coredata.get_builtin_option('localedir'))
script = state.environment.get_build_command()
args = ['--internal', 'gettext', 'install',
'--subdir=' + state.subdir,
- '--localedir=' + state.environment.coredata.get_builtin_option('localedir'),
+ '--localedir=' + install_dir,
pkg_arg]
if lang_arg:
args.append(lang_arg)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index 47edeee..2f8b533 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -23,6 +23,8 @@ from . import ModuleReturnValue
from . import ExtensionModule
from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
+already_warned_objs = set()
+
class DependenciesHelper:
def __init__(self, name):
self.name = name
@@ -51,16 +53,21 @@ class DependenciesHelper:
self.priv_reqs += self._process_reqs(reqs)
def _check_generated_pc_deprecation(self, obj):
- if hasattr(obj, 'generated_pc_warn'):
- mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
- '"libraries" keyword argument of a previous call '
- 'to generate() method instead of first positional '
- 'argument.', 'Adding', mlog.bold(obj.generated_pc),
- 'to "Requires" field, but this is a deprecated '
- 'behaviour that will change in a future version '
- 'of Meson. Please report the issue if this '
- 'warning cannot be avoided in your case.',
- location=obj.generated_pc_warn)
+ if not hasattr(obj, 'generated_pc_warn'):
+ return
+ name = obj.generated_pc_warn[0]
+ if (name, obj.name) in already_warned_objs:
+ return
+ mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
+ '"libraries" keyword argument of a previous call '
+ 'to generate() method instead of first positional '
+ 'argument.', 'Adding', mlog.bold(obj.generated_pc),
+ 'to "Requires" field, but this is a deprecated '
+ 'behaviour that will change in a future version '
+ 'of Meson. Please report the issue if this '
+ 'warning cannot be avoided in your case.',
+ location=obj.generated_pc_warn[1])
+ already_warned_objs.add((name, obj.name))
def _process_reqs(self, reqs):
'''Returns string names of requirements'''
@@ -192,7 +199,11 @@ class DependenciesHelper:
for x in xs:
# Don't de-dup unknown strings to avoid messing up arguments like:
# ['-framework', 'CoreAudio', '-framework', 'CoreMedia']
- if x not in result or (libs and (isinstance(x, str) and not x.endswith(('-l', '-L')))):
+ known_flags = ['-pthread']
+ cannot_dedup = libs and isinstance(x, str) and \
+ not x.startswith(('-l', '-L')) and \
+ x not in known_flags
+ if x not in result or cannot_dedup:
result.append(x)
return result
self.pub_libs = _fn(self.pub_libs, True)
@@ -235,7 +246,7 @@ class PkgConfigModule(ExtensionModule):
# https://bugs.freedesktop.org/show_bug.cgi?id=103203
if isinstance(value, PurePath):
value = value.as_posix()
- return value.replace(' ', '\ ')
+ return value.replace(' ', r'\ ')
def _make_relative(self, prefix, subdir):
if isinstance(prefix, PurePath):
@@ -342,7 +353,9 @@ class PkgConfigModule(ExtensionModule):
default_description = None
default_name = None
mainlib = None
- if len(args) == 1:
+ if not args and 'version' not in kwargs:
+ FeatureNew('pkgconfig.generate implicit version keyword', '0.46.0').use(state.subproject)
+ elif len(args) == 1:
FeatureNew('pkgconfig.generate optional positional argument', '0.46.0').use(state.subproject)
mainlib = getattr(args[0], 'held_object', args[0])
if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
@@ -434,11 +447,13 @@ class PkgConfigModule(ExtensionModule):
mainlib.generated_pc = filebase
else:
mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name))
- for lib in deps.pub_libs:
- if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
- lib.generated_pc = filebase
- lib.generated_pc_warn = types.SimpleNamespace(subdir=state.subdir,
- lineno=state.current_lineno)
+ else:
+ for lib in deps.pub_libs:
+ if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'):
+ lib.generated_pc = filebase
+ location = types.SimpleNamespace(subdir=state.subdir,
+ lineno=state.current_lineno)
+ lib.generated_pc_warn = [name, location]
return ModuleReturnValue(res, [res])
def initialize(*args, **kwargs):
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 9643ebc..34fe5a5 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -23,11 +23,10 @@ from . import ExtensionModule
from mesonbuild.modules import ModuleReturnValue
from ..interpreterbase import (
noPosargs, noKwargs, permittedKwargs,
- InterpreterObject, InvalidArguments,
+ InvalidArguments,
FeatureNew, FeatureNewKwargs, disablerIfNotFound
)
from ..interpreter import ExternalProgramHolder, extract_required_kwarg
-from ..interpreterbase import flatten
from ..build import known_shmod_kwargs
from .. import mlog
from ..environment import detect_cpu_family
@@ -61,6 +60,7 @@ class PythonDependency(ExternalDependency):
self.pkgdep = None
self.variables = python_holder.variables
self.paths = python_holder.paths
+ self.link_libpython = python_holder.link_libpython
if mesonlib.version_compare(self.version, '>= 3.0'):
self.major_version = 3
else:
@@ -150,11 +150,11 @@ class PythonDependency(ExternalDependency):
libdirs = []
largs = self.clib_compiler.find_library(libname, environment, libdirs)
-
- self.is_found = largs is not None
- if self.is_found:
+ if largs is not None:
self.link_args = largs
+ self.is_found = largs is not None or not self.link_libpython
+
inc_paths = mesonlib.OrderedSet([
self.variables.get('INCLUDEPY'),
self.paths.get('include'),
@@ -184,10 +184,14 @@ class PythonDependency(ExternalDependency):
if self.platform.startswith('win'):
vernum = self.variables.get('py_version_nodot')
if self.static:
- libname = 'libpython{}.a'.format(vernum)
+ libpath = Path('libs') / 'libpython{}.a'.format(vernum)
else:
- libname = 'python{}.lib'.format(vernum)
- lib = Path(self.variables.get('base')) / 'libs' / libname
+ comp = self.get_compiler()
+ if comp.id == "gcc":
+ libpath = 'python{}.dll'.format(vernum)
+ else:
+ libpath = Path('libs') / 'python{}.lib'.format(vernum)
+ lib = Path(self.variables.get('base')) / libpath
elif self.platform == 'mingw':
if self.static:
libname = self.variables.get('LIBRARY')
@@ -501,20 +505,17 @@ class PythonModule(ExtensionModule):
if len(args) > 1:
raise InvalidArguments('find_installation takes zero or one positional argument.')
- if 'python' in state.environment.config_info.binaries:
- name_or_path = state.environment.config_info.binaries['python']
- elif args:
+ name_or_path = state.environment.binaries.host.lookup_entry('python')
+ if name_or_path is None and args:
name_or_path = args[0]
if not isinstance(name_or_path, str):
raise InvalidArguments('find_installation argument must be a string.')
- else:
- name_or_path = None
if not name_or_path:
mlog.log("Using meson's python {}".format(mesonlib.python_command))
python = ExternalProgram('python3', mesonlib.python_command, silent=True)
else:
- python = ExternalProgram(name_or_path, silent = True)
+ python = ExternalProgram.from_entry('python3', name_or_path)
if not python.found() and mesonlib.is_windows():
pythonpath = self._get_win_pythonpath(name_or_path)
diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py
index f664632..46f15f0 100644
--- a/mesonbuild/modules/python3.py
+++ b/mesonbuild/modules/python3.py
@@ -48,10 +48,11 @@ class Python3Module(ExtensionModule):
@noKwargs
def find_python(self, state, args, kwargs):
- options = [state.environment.config_info.binaries.get('python3')]
- if not options[0]: # because this would be [None]
- options = ['python3', mesonlib.python_command]
- py3 = dependencies.ExternalProgram(*options, silent=True)
+ command = state.environment.binaries.host.lookup_entry('python3')
+ if command is not None:
+ py3 = dependencies.ExternalProgram.from_entry('python3', command)
+ else:
+ py3 = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
return ModuleReturnValue(py3, [py3])
@noKwargs
diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py
index 28fb98c..0b252ac 100644
--- a/mesonbuild/modules/qt.py
+++ b/mesonbuild/modules/qt.py
@@ -19,7 +19,7 @@ from ..mesonlib import MesonException, Popen_safe, extract_as_list, File
from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency
import xml.etree.ElementTree as ET
from . import ModuleReturnValue, get_include_args, ExtensionModule
-from ..interpreterbase import permittedKwargs, FeatureNewKwargs
+from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
_QT_DEPS_LUT = {
4: Qt4Dependency,
@@ -199,7 +199,7 @@ class QtBaseModule(ExtensionModule):
sources.append(moc_output)
return ModuleReturnValue(sources, sources)
- @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+ @FeatureNew('qt.compile_translations', '0.44.0')
@permittedKwargs({'ts_files', 'install', 'install_dir', 'build_by_default', 'method'})
def compile_translations(self, state, args, kwargs):
ts_files, install_dir = extract_as_list(kwargs, 'ts_files', 'install_dir', pop=True)
diff --git a/mesonbuild/modules/rpm.py b/mesonbuild/modules/rpm.py
index ba5bcaa..9774286 100644
--- a/mesonbuild/modules/rpm.py
+++ b/mesonbuild/modules/rpm.py
@@ -29,39 +29,16 @@ import os
class RPMModule(ExtensionModule):
@noKwargs
- def generate_spec_template(self, state, args, kwargs):
- compiler_deps = set()
- for compiler in state.compilers.values():
- # Elbrus has one 'lcc' package for every compiler
- if isinstance(compiler, compilers.GnuCCompiler):
- compiler_deps.add('gcc')
- elif isinstance(compiler, compilers.GnuCPPCompiler):
- compiler_deps.add('gcc-c++')
- elif isinstance(compiler, compilers.ElbrusCCompiler):
- compiler_deps.add('lcc')
- elif isinstance(compiler, compilers.ElbrusCPPCompiler):
- compiler_deps.add('lcc')
- elif isinstance(compiler, compilers.ElbrusFortranCompiler):
- compiler_deps.add('lcc')
- elif isinstance(compiler, compilers.ValaCompiler):
- compiler_deps.add('vala')
- elif isinstance(compiler, compilers.GnuFortranCompiler):
- compiler_deps.add('gcc-gfortran')
- elif isinstance(compiler, compilers.GnuObjCCompiler):
- compiler_deps.add('gcc-objc')
- elif compiler == compilers.GnuObjCPPCompiler:
- compiler_deps.add('gcc-objc++')
- else:
- mlog.log('RPM spec file will not created, generating not allowed for:',
- mlog.bold(compiler.get_id()))
- return
- proj = state.project_name.replace(' ', '_').replace('\t', '_')
+ def generate_spec_template(self, coredata, args, kwargs):
+ self.coredata = coredata
+ required_compilers = self.__get_required_compilers()
+ proj = coredata.project_name.replace(' ', '_').replace('\t', '_')
so_installed = False
devel_subpkg = False
files = set()
files_devel = set()
to_delete = set()
- for target in state.targets.values():
+ for target in coredata.targets.values():
if isinstance(target, build.Executable) and target.need_install:
files.add('%%{_bindir}/%s' % target.get_filename())
elif isinstance(target, build.SharedLibrary) and target.need_install:
@@ -80,18 +57,19 @@ class RPMModule(ExtensionModule):
files_devel.add('%%{_datadir}/gir-1.0/%s' % target.get_filename()[0])
elif isinstance(target, TypelibTarget) and target.should_install():
files.add('%%{_libdir}/girepository-1.0/%s' % target.get_filename()[0])
- for header in state.headers:
+ for header in coredata.headers:
if len(header.get_install_subdir()) > 0:
files_devel.add('%%{_includedir}/%s/' % header.get_install_subdir())
else:
for hdr_src in header.get_sources():
files_devel.add('%%{_includedir}/%s' % hdr_src)
- for man in state.man:
+ for man in coredata.man:
for man_file in man.get_sources():
files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file))
if len(files_devel) > 0:
devel_subpkg = True
- filename = os.path.join(state.environment.get_build_dir(),
+
+ filename = os.path.join(coredata.environment.get_build_dir(),
'%s.spec' % proj)
with open(filename, 'w+') as fn:
fn.write('Name: %s\n' % proj)
@@ -102,24 +80,28 @@ class RPMModule(ExtensionModule):
fn.write('\n')
fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
fn.write('\n')
- for compiler in compiler_deps:
+ fn.write('BuildRequires: meson\n')
+ for compiler in required_compilers:
fn.write('BuildRequires: %s\n' % compiler)
- for dep in state.environment.coredata.deps:
+ for dep in coredata.environment.coredata.deps:
fn.write('BuildRequires: pkgconfig(%s)\n' % dep[0])
- for lib in state.environment.coredata.ext_libs.values():
- name = lib.get_name()
- fn.write('BuildRequires: {} # FIXME\n'.format(name))
- mlog.warning('replace', mlog.bold(name), 'with the real package.',
- 'You can use following command to find package which '
- 'contains this lib:',
- mlog.bold("dnf provides '*/lib{}.so'".format(name)))
- for prog in state.environment.coredata.ext_progs.values():
- if not prog.found():
- fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' %
- prog.get_name())
- else:
- fn.write('BuildRequires: {}\n'.format(prog.get_path()))
- fn.write('BuildRequires: meson\n')
+# ext_libs and ext_progs have been removed from coredata, so the following
+# code no longer works. It is kept as a reminder of the idea, in case anyone
+# wishes to re-implement it.
+#
+# for lib in state.environment.coredata.ext_libs.values():
+# name = lib.get_name()
+# fn.write('BuildRequires: {} # FIXME\n'.format(name))
+# mlog.warning('replace', mlog.bold(name), 'with the real package.',
+# 'You can use following command to find package which '
+# 'contains this lib:',
+# mlog.bold("dnf provides '*/lib{}.so'".format(name)))
+# for prog in state.environment.coredata.ext_progs.values():
+# if not prog.found():
+# fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' %
+# prog.get_name())
+# else:
+# fn.write('BuildRequires: {}\n'.format(prog.get_path()))
fn.write('\n')
fn.write('%description\n')
fn.write('\n')
@@ -167,5 +149,33 @@ class RPMModule(ExtensionModule):
mlog.log('RPM spec template written to %s.spec.\n' % proj)
return ModuleReturnValue(None, [])
+ def __get_required_compilers(self):
+ required_compilers = set()
+ for compiler in self.coredata.compilers.values():
+ # Elbrus has one 'lcc' package for every compiler
+ if isinstance(compiler, compilers.GnuCCompiler):
+ required_compilers.add('gcc')
+ elif isinstance(compiler, compilers.GnuCPPCompiler):
+ required_compilers.add('gcc-c++')
+ elif isinstance(compiler, compilers.ElbrusCCompiler):
+ required_compilers.add('lcc')
+ elif isinstance(compiler, compilers.ElbrusCPPCompiler):
+ required_compilers.add('lcc')
+ elif isinstance(compiler, compilers.ElbrusFortranCompiler):
+ required_compilers.add('lcc')
+ elif isinstance(compiler, compilers.ValaCompiler):
+ required_compilers.add('vala')
+ elif isinstance(compiler, compilers.GnuFortranCompiler):
+ required_compilers.add('gcc-gfortran')
+ elif isinstance(compiler, compilers.GnuObjCCompiler):
+ required_compilers.add('gcc-objc')
+ elif compiler == compilers.GnuObjCPPCompiler:
+ required_compilers.add('gcc-objc++')
+ else:
+ mlog.log('RPM spec file not created, generation not allowed for:',
+ mlog.bold(compiler.get_id()))
+ return required_compilers
+
+
def initialize(*args, **kwargs):
return RPMModule(*args, **kwargs)
diff --git a/mesonbuild/modules/unstable_cuda.py b/mesonbuild/modules/unstable_cuda.py
new file mode 100644
index 0000000..cd116cc
--- /dev/null
+++ b/mesonbuild/modules/unstable_cuda.py
@@ -0,0 +1,270 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+from ..mesonlib import version_compare
+from ..interpreter import CompilerHolder
+from ..compilers import CudaCompiler
+
+from . import ExtensionModule, ModuleReturnValue
+
+from ..interpreterbase import (
+ flatten, permittedKwargs, noKwargs,
+ InvalidArguments, FeatureNew
+)
+
+class CudaModule(ExtensionModule):
+
+ @FeatureNew('CUDA module', '0.50.0')
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ @noKwargs
+ def min_driver_version(self, state, args, kwargs):
+ argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' +
+ 'an NVCC compiler object, or its version string.')
+
+ if len(args) != 1:
+ raise argerror
+ else:
+ cuda_version = self._version_from_compiler(args[0])
+ if cuda_version == 'unknown':
+ raise argerror
+
+ driver_version_table = [
+ {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'},
+ {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'},
+ {'cuda_version': '>=9.2.148', 'windows': '398.26', 'linux': '396.37'},
+ {'cuda_version': '>=9.2.88', 'windows': '397.44', 'linux': '396.26'},
+ {'cuda_version': '>=9.1.85', 'windows': '391.29', 'linux': '390.46'},
+ {'cuda_version': '>=9.0.76', 'windows': '385.54', 'linux': '384.81'},
+ {'cuda_version': '>=8.0.61', 'windows': '376.51', 'linux': '375.26'},
+ {'cuda_version': '>=8.0.44', 'windows': '369.30', 'linux': '367.48'},
+ {'cuda_version': '>=7.5.16', 'windows': '353.66', 'linux': '352.31'},
+ {'cuda_version': '>=7.0.28', 'windows': '347.62', 'linux': '346.46'},
+ ]
+
+ driver_version = 'unknown'
+ for d in driver_version_table:
+ if version_compare(cuda_version, d['cuda_version']):
+ driver_version = d.get(state.host_machine.system, d['linux'])
+ break
+
+ return ModuleReturnValue(driver_version, [driver_version])
+
+ @permittedKwargs(['detected'])
+ def nvcc_arch_flags(self, state, args, kwargs):
+ nvcc_arch_args = self._validate_nvcc_arch_args(state, args, kwargs)
+ ret = self._nvcc_arch_flags(*nvcc_arch_args)[0]
+ return ModuleReturnValue(ret, [ret])
+
+ @permittedKwargs(['detected'])
+ def nvcc_arch_readable(self, state, args, kwargs):
+ nvcc_arch_args = self._validate_nvcc_arch_args(state, args, kwargs)
+ ret = self._nvcc_arch_flags(*nvcc_arch_args)[1]
+ return ModuleReturnValue(ret, [ret])
+
+ @staticmethod
+ def _break_arch_string(s):
+ s = re.sub('[ \t\r\n,;]+', ';', s)
+ s = s.strip(';').split(';')
+ return s
+
+ @staticmethod
+ def _detected_cc_from_compiler(c):
+ if isinstance(c, CompilerHolder):
+ c = c.compiler
+ if isinstance(c, CudaCompiler):
+ return c.detected_cc
+ return ''
+
+ @staticmethod
+ def _version_from_compiler(c):
+ if isinstance(c, CompilerHolder):
+ c = c.compiler
+ if isinstance(c, CudaCompiler):
+ return c.version
+ if isinstance(c, str):
+ return c
+ return 'unknown'
+
+ def _validate_nvcc_arch_args(self, state, args, kwargs):
+ argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!')
+
+ if len(args) < 1:
+ raise argerror
+ else:
+ compiler = args[0]
+ cuda_version = self._version_from_compiler(compiler)
+ if cuda_version == 'unknown':
+ raise argerror
+
+ arch_list = [] if len(args) <= 1 else flatten(args[1:])
+ arch_list = [self._break_arch_string(a) for a in arch_list]
+ arch_list = flatten(arch_list)
+ if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}):
+ raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+ arch_list = arch_list[0] if len(arch_list) == 1 else arch_list
+
+ detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler))
+ detected = flatten([detected])
+ detected = [self._break_arch_string(a) for a in detected]
+ detected = flatten(detected)
+ if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
+ raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+
+ return cuda_version, arch_list, detected
+
+ def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''):
+ """
+ Using the CUDA Toolkit version (the NVCC version) and the target
+ architectures, compute the NVCC architecture flags.
+ """
+
+ cuda_known_gpu_architectures = ['Fermi', 'Kepler', 'Maxwell'] # noqa: E221
+ cuda_common_gpu_architectures = ['3.0', '3.5', '5.0'] # noqa: E221
+ cuda_limit_gpu_architecture = None # noqa: E221
+ cuda_all_gpu_architectures = ['3.0', '3.2', '3.5', '5.0'] # noqa: E221
+
+ if version_compare(cuda_version, '<7.0'):
+ cuda_limit_gpu_architecture = '5.2'
+
+ if version_compare(cuda_version, '>=7.0'):
+ cuda_known_gpu_architectures += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra'] # noqa: E221
+ cuda_common_gpu_architectures += ['5.2'] # noqa: E221
+
+ if version_compare(cuda_version, '<8.0'):
+ cuda_common_gpu_architectures += ['5.2+PTX'] # noqa: E221
+ cuda_limit_gpu_architecture = '6.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=8.0'):
+ cuda_known_gpu_architectures += ['Pascal', 'Pascal+Tegra'] # noqa: E221
+ cuda_common_gpu_architectures += ['6.0', '6.1'] # noqa: E221
+ cuda_all_gpu_architectures += ['6.0', '6.1', '6.2'] # noqa: E221
+
+ if version_compare(cuda_version, '<9.0'):
+ cuda_common_gpu_architectures += ['6.1+PTX'] # noqa: E221
+ cuda_limit_gpu_architecture = '7.0' # noqa: E221
+
+ if version_compare(cuda_version, '>=9.0'):
+ cuda_known_gpu_architectures += ['Volta', 'Xavier'] # noqa: E221
+ cuda_common_gpu_architectures += ['7.0', '7.0+PTX'] # noqa: E221
+ cuda_all_gpu_architectures += ['7.0', '7.0+PTX', '7.2', '7.2+PTX'] # noqa: E221
+
+ if version_compare(cuda_version, '<10.0'):
+ cuda_limit_gpu_architecture = '7.5'
+
+ if version_compare(cuda_version, '>=10.0'):
+ cuda_known_gpu_architectures += ['Turing'] # noqa: E221
+ cuda_common_gpu_architectures += ['7.5', '7.5+PTX'] # noqa: E221
+ cuda_all_gpu_architectures += ['7.5', '7.5+PTX'] # noqa: E221
+
+ if version_compare(cuda_version, '<11.0'):
+ cuda_limit_gpu_architecture = '8.0'
+
+ if not cuda_arch_list:
+ cuda_arch_list = 'Auto'
+
+ if cuda_arch_list == 'All': # noqa: E271
+ cuda_arch_list = cuda_known_gpu_architectures
+ elif cuda_arch_list == 'Common': # noqa: E271
+ cuda_arch_list = cuda_common_gpu_architectures
+ elif cuda_arch_list == 'Auto': # noqa: E271
+ if detected:
+ if isinstance(detected, list):
+ cuda_arch_list = detected
+ else:
+ cuda_arch_list = self._break_arch_string(detected)
+
+ if cuda_limit_gpu_architecture:
+ filtered_cuda_arch_list = []
+ for arch in cuda_arch_list:
+ if arch:
+ if version_compare(arch, '>=' + cuda_limit_gpu_architecture):
+ arch = cuda_common_gpu_architectures[-1]
+ if arch not in filtered_cuda_arch_list:
+ filtered_cuda_arch_list.append(arch)
+ cuda_arch_list = filtered_cuda_arch_list
+ else:
+ cuda_arch_list = cuda_common_gpu_architectures
+ elif isinstance(cuda_arch_list, str):
+ cuda_arch_list = self._break_arch_string(cuda_arch_list)
+
+ cuda_arch_list = sorted([x for x in set(cuda_arch_list) if x])
+
+ cuda_arch_bin = []
+ cuda_arch_ptx = []
+ for arch_name in cuda_arch_list:
+ arch_bin = []
+ arch_ptx = []
+ add_ptx = arch_name.endswith('+PTX')
+ if add_ptx:
+ arch_name = arch_name[:-len('+PTX')]
+
+ if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name):
+ arch_bin, arch_ptx = [arch_name], [arch_name]
+ else:
+ arch_bin, arch_ptx = {
+ 'Fermi': (['2.0', '2.1(2.0)'], []),
+ 'Kepler+Tegra': (['3.2'], []),
+ 'Kepler+Tesla': (['3.7'], []),
+ 'Kepler': (['3.0', '3.5'], ['3.5']),
+ 'Maxwell+Tegra': (['5.3'], []),
+ 'Maxwell': (['5.0', '5.2'], ['5.2']),
+ 'Pascal': (['6.0', '6.1'], ['6.1']),
+ 'Pascal+Tegra': (['6.2'], []),
+ 'Volta': (['7.0'], ['7.0']),
+ 'Xavier': (['7.2'], []),
+ 'Turing': (['7.5'], ['7.5']),
+ }.get(arch_name, (None, None))
+
+ if arch_bin is None:
+ raise InvalidArguments('Unknown CUDA Architecture Name {}!'
+ .format(arch_name))
+
+ cuda_arch_bin += arch_bin
+
+ if add_ptx:
+ if not arch_ptx:
+ arch_ptx = arch_bin
+ cuda_arch_ptx += arch_ptx
+
+ cuda_arch_bin = re.sub('\\.', '', ' '.join(cuda_arch_bin))
+ cuda_arch_ptx = re.sub('\\.', '', ' '.join(cuda_arch_ptx))
+ cuda_arch_bin = re.findall('[0-9()]+', cuda_arch_bin)
+ cuda_arch_ptx = re.findall('[0-9]+', cuda_arch_ptx)
+ cuda_arch_bin = sorted(list(set(cuda_arch_bin)))
+ cuda_arch_ptx = sorted(list(set(cuda_arch_ptx)))
+
+ nvcc_flags = []
+ nvcc_archs_readable = []
+
+ for arch in cuda_arch_bin:
+ m = re.match('([0-9]+)\\(([0-9]+)\\)', arch)
+ if m:
+ nvcc_flags += ['-gencode', 'arch=compute_' + m[2] + ',code=sm_' + m[1]]
+ nvcc_archs_readable += ['sm_' + m[1]]
+ else:
+ nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch]
+ nvcc_archs_readable += ['sm_' + arch]
+
+ for arch in cuda_arch_ptx:
+ nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch]
+ nvcc_archs_readable += ['compute_' + arch]
+
+ return nvcc_flags, nvcc_archs_readable
+
+def initialize(*args, **kwargs):
+ return CudaModule(*args, **kwargs)
diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py
index d185d89..e8d266e 100644
--- a/mesonbuild/modules/windows.py
+++ b/mesonbuild/modules/windows.py
@@ -41,29 +41,13 @@ class WindowsModule(ExtensionModule):
def _find_resource_compiler(self, state):
# FIXME: Does not handle `native: true` executables, see
# See https://github.com/mesonbuild/meson/issues/1531
+ # But given a machine, we can un-hardcode `binaries.host` below.
if hasattr(self, '_rescomp'):
return self._rescomp
- rescomp = None
- if state.environment.is_cross_build():
- # If cross compiling see if windres has been specified in the
- # cross file before trying to find it another way.
- bins = state.environment.cross_info.config['binaries']
- rescomp = ExternalProgram.from_bin_list(bins, 'windres')
-
- if not rescomp or not rescomp.found():
- if 'WINDRES' in os.environ:
- # Pick-up env var WINDRES if set. This is often used for
- # specifying an arch-specific windres.
- rescomp = ExternalProgram('windres', command=os.environ.get('WINDRES'), silent=True)
-
- if not rescomp or not rescomp.found():
- # Take windres from the config file after the environment, which is
- # in keeping with the expectations on unix-like OSes that
- # environment variables trump config files.
- bins = state.environment.config_info.binaries
- rescomp = ExternalProgram.from_bin_list(bins, 'windres')
+ # Will try cross / native file and then env var
+ rescomp = ExternalProgram.from_bin_list(state.environment.binaries.host, 'windres')
if not rescomp or not rescomp.found():
comp = self.detect_compiler(state.compilers)
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index be5c807..17783ce 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -37,7 +37,7 @@ class MesonUnicodeDecodeError(MesonException):
def decode_match(match):
try:
return codecs.decode(match.group(0), 'unicode_escape')
- except UnicodeDecodeError as err:
+ except UnicodeDecodeError:
raise MesonUnicodeDecodeError(match.group(0))
class ParseException(MesonException):
@@ -190,7 +190,11 @@ This will become a hard error in a future Meson release.""", self.getline(line_s
line_start = mo.end() - len(lines[-1])
elif tid == 'number':
value = int(match_text, base=0)
- elif tid == 'eol' or tid == 'eol_cont':
+ elif tid == 'eol_cont':
+ lineno += 1
+ line_start = loc
+ break
+ elif tid == 'eol':
lineno += 1
line_start = loc
if par_count > 0 or bracket_count > 0 or curl_count > 0:
@@ -208,7 +212,15 @@ This will become a hard error in a future Meson release.""", self.getline(line_s
if not matched:
raise ParseException('lexer', self.getline(line_start), lineno, col)
-class ElementaryNode:
+class BaseNode:
+ def accept(self, visitor):
+ fname = 'visit_{}'.format(type(self).__name__)
+ if hasattr(visitor, fname):
+ func = getattr(visitor, fname)
+ if hasattr(func, '__call__'):
+ func(self)
+
+class ElementaryNode(BaseNode):
def __init__(self, token):
self.lineno = token.lineno
self.subdir = token.subdir
@@ -249,28 +261,32 @@ class ContinueNode(ElementaryNode):
class BreakNode(ElementaryNode):
pass
-class ArrayNode:
- def __init__(self, args):
+class ArrayNode(BaseNode):
+ def __init__(self, args, lineno, colno, end_lineno, end_colno):
self.subdir = args.subdir
- self.lineno = args.lineno
- self.colno = args.colno
+ self.lineno = lineno
+ self.colno = colno
+ self.end_lineno = end_lineno
+ self.end_colno = end_colno
self.args = args
-class DictNode:
- def __init__(self, args):
+class DictNode(BaseNode):
+ def __init__(self, args, lineno, colno, end_lineno, end_colno):
self.subdir = args.subdir
- self.lineno = args.lineno
- self.colno = args.colno
+ self.lineno = lineno
+ self.colno = colno
+ self.end_lineno = end_lineno
+ self.end_colno = end_colno
self.args = args
-class EmptyNode:
+class EmptyNode(BaseNode):
def __init__(self, lineno, colno):
self.subdir = ''
self.lineno = lineno
self.colno = colno
self.value = None
-class OrNode:
+class OrNode(BaseNode):
def __init__(self, left, right):
self.subdir = left.subdir
self.lineno = left.lineno
@@ -278,7 +294,7 @@ class OrNode:
self.left = left
self.right = right
-class AndNode:
+class AndNode(BaseNode):
def __init__(self, left, right):
self.subdir = left.subdir
self.lineno = left.lineno
@@ -286,7 +302,7 @@ class AndNode:
self.left = left
self.right = right
-class ComparisonNode:
+class ComparisonNode(BaseNode):
def __init__(self, ctype, left, right):
self.lineno = left.lineno
self.colno = left.colno
@@ -295,7 +311,7 @@ class ComparisonNode:
self.right = right
self.ctype = ctype
-class ArithmeticNode:
+class ArithmeticNode(BaseNode):
def __init__(self, operation, left, right):
self.subdir = left.subdir
self.lineno = left.lineno
@@ -304,21 +320,21 @@ class ArithmeticNode:
self.right = right
self.operation = operation
-class NotNode:
+class NotNode(BaseNode):
def __init__(self, location_node, value):
self.subdir = location_node.subdir
self.lineno = location_node.lineno
self.colno = location_node.colno
self.value = value
-class CodeBlockNode:
+class CodeBlockNode(BaseNode):
def __init__(self, location_node):
self.subdir = location_node.subdir
self.lineno = location_node.lineno
self.colno = location_node.colno
self.lines = []
-class IndexNode:
+class IndexNode(BaseNode):
def __init__(self, iobject, index):
self.iobject = iobject
self.index = index
@@ -326,7 +342,7 @@ class IndexNode:
self.lineno = iobject.lineno
self.colno = iobject.colno
-class MethodNode:
+class MethodNode(BaseNode):
def __init__(self, subdir, lineno, colno, source_object, name, args):
self.subdir = subdir
self.lineno = lineno
@@ -336,32 +352,36 @@ class MethodNode:
assert(isinstance(self.name, str))
self.args = args
-class FunctionNode:
- def __init__(self, subdir, lineno, colno, func_name, args):
+class FunctionNode(BaseNode):
+ def __init__(self, subdir, lineno, colno, end_lineno, end_colno, func_name, args):
self.subdir = subdir
self.lineno = lineno
self.colno = colno
+ self.end_lineno = end_lineno
+ self.end_colno = end_colno
self.func_name = func_name
assert(isinstance(func_name, str))
self.args = args
-class AssignmentNode:
- def __init__(self, lineno, colno, var_name, value):
+class AssignmentNode(BaseNode):
+ def __init__(self, subdir, lineno, colno, var_name, value):
+ self.subdir = subdir
self.lineno = lineno
self.colno = colno
self.var_name = var_name
assert(isinstance(var_name, str))
self.value = value
-class PlusAssignmentNode:
- def __init__(self, lineno, colno, var_name, value):
+class PlusAssignmentNode(BaseNode):
+ def __init__(self, subdir, lineno, colno, var_name, value):
+ self.subdir = subdir
self.lineno = lineno
self.colno = colno
self.var_name = var_name
assert(isinstance(var_name, str))
self.value = value
-class ForeachClauseNode:
+class ForeachClauseNode(BaseNode):
def __init__(self, lineno, colno, varnames, items, block):
self.lineno = lineno
self.colno = colno
@@ -369,36 +389,37 @@ class ForeachClauseNode:
self.items = items
self.block = block
-class IfClauseNode:
+class IfClauseNode(BaseNode):
def __init__(self, lineno, colno):
self.lineno = lineno
self.colno = colno
self.ifs = []
self.elseblock = EmptyNode(lineno, colno)
-class UMinusNode:
+class UMinusNode(BaseNode):
def __init__(self, current_location, value):
self.subdir = current_location.subdir
self.lineno = current_location.lineno
self.colno = current_location.colno
self.value = value
-class IfNode:
+class IfNode(BaseNode):
def __init__(self, lineno, colno, condition, block):
self.lineno = lineno
self.colno = colno
self.condition = condition
self.block = block
-class TernaryNode:
- def __init__(self, lineno, colno, condition, trueblock, falseblock):
+class TernaryNode(BaseNode):
+ def __init__(self, subdir, lineno, colno, condition, trueblock, falseblock):
+ self.subdir = subdir
self.lineno = lineno
self.colno = colno
self.condition = condition
self.trueblock = trueblock
self.falseblock = falseblock
-class ArgumentNode:
+class ArgumentNode(BaseNode):
def __init__(self, token):
self.lineno = token.lineno
self.colno = token.colno
@@ -510,13 +531,13 @@ class Parser:
value = self.e1()
if not isinstance(left, IdNode):
raise ParseException('Plusassignment target must be an id.', self.getline(), left.lineno, left.colno)
- return PlusAssignmentNode(left.lineno, left.colno, left.value, value)
+ return PlusAssignmentNode(left.subdir, left.lineno, left.colno, left.value, value)
elif self.accept('assign'):
value = self.e1()
if not isinstance(left, IdNode):
raise ParseException('Assignment target must be an id.',
self.getline(), left.lineno, left.colno)
- return AssignmentNode(left.lineno, left.colno, left.value, value)
+ return AssignmentNode(left.subdir, left.lineno, left.colno, left.value, value)
elif self.accept('questionmark'):
if self.in_ternary:
raise ParseException('Nested ternary operators are not allowed.',
@@ -526,7 +547,7 @@ class Parser:
self.expect('colon')
falseblock = self.e1()
self.in_ternary = False
- return TernaryNode(left.lineno, left.colno, left, trueblock, falseblock)
+ return TernaryNode(left.subdir, left.lineno, left.colno, left, trueblock, falseblock)
return left
def e2(self):
@@ -605,7 +626,7 @@ class Parser:
if not isinstance(left, IdNode):
raise ParseException('Function call must be applied to plain id',
self.getline(), left.lineno, left.colno)
- left = FunctionNode(left.subdir, left.lineno, left.colno, left.value, args)
+ left = FunctionNode(left.subdir, left.lineno, left.colno, self.current.lineno, self.current.colno, left.value, args)
go_again = True
while go_again:
go_again = False
@@ -626,11 +647,11 @@ class Parser:
elif self.accept('lbracket'):
args = self.args()
self.block_expect('rbracket', block_start)
- return ArrayNode(args)
+ return ArrayNode(args, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
elif self.accept('lcurl'):
key_values = self.key_values()
self.block_expect('rcurl', block_start)
- return DictNode(key_values)
+ return DictNode(key_values, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
else:
return self.e9()
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index 56a0e9a..023afdb 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -23,6 +23,8 @@ import argparse
from . import environment, interpreter, mesonlib
from . import build
from . import mlog, coredata
+from . import mintro
+from .mconf import make_lower_case
from .mesonlib import MesonException
def add_arguments(parser):
@@ -183,16 +185,20 @@ class MesonApp:
mlog.log('Target machine cpu:', mlog.bold(intr.builtin['target_machine'].cpu_method([], {})))
mlog.log('Build machine cpu family:', mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {})))
mlog.log('Build machine cpu:', mlog.bold(intr.builtin['build_machine'].cpu_method([], {})))
- if self.options.profile:
- fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log')
- profile.runctx('intr.run()', globals(), locals(), filename=fname)
- else:
- intr.run()
+ try:
+ if self.options.profile:
+ fname = os.path.join(self.build_dir, 'meson-private', 'profile-interpreter.log')
+ profile.runctx('intr.run()', globals(), locals(), filename=fname)
+ else:
+ intr.run()
+ except Exception as e:
+ mintro.write_meson_info_file(b, [e])
+ raise
# Print all default option values that don't match the current value
for def_opt_name, def_opt_value, cur_opt_value in intr.get_non_matching_default_options():
mlog.log('Option', mlog.bold(def_opt_name), 'is:',
- mlog.bold(str(cur_opt_value)),
- '[default: {}]'.format(str(def_opt_value)))
+ mlog.bold(make_lower_case(cur_opt_value.printable_value())),
+ '[default: {}]'.format(make_lower_case(def_opt_value)))
try:
dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
# We would like to write coredata as late as possible since we use the existence of
@@ -215,7 +221,16 @@ class MesonApp:
coredata.write_cmd_line_file(self.build_dir, self.options)
else:
coredata.update_cmd_line_file(self.build_dir, self.options)
- except:
+
+ # Generate an IDE introspection file with the same syntax as the already existing API
+ if self.options.profile:
+ fname = os.path.join(self.build_dir, 'meson-private', 'profile-introspector.log')
+ profile.runctx('mintro.generate_introspection_file(b, intr.backend)', globals(), locals(), filename=fname)
+ else:
+ mintro.generate_introspection_file(b, intr.backend)
+ mintro.write_meson_info_file(b, [], True)
+ except Exception as e:
+ mintro.write_meson_info_file(b, [e])
if 'cdf' in locals():
old_cdf = cdf + '.prev'
if os.path.exists(old_cdf):
diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py
index 1536d96..2c1bf8b 100644
--- a/mesonbuild/msubprojects.py
+++ b/mesonbuild/msubprojects.py
@@ -92,7 +92,7 @@ def update_git(wrap, repo_dir, options):
' To rebase your branch on top of', mlog.bold(revision), 'use', mlog.bold('--rebase'), 'option.')
return
- git(['submodule', 'update'], repo_dir)
+ git(['submodule', 'update', '--checkout', '--recursive'], repo_dir)
git_show(repo_dir)
def update_hg(wrap, repo_dir, options):
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 8ce9538..77a0f82 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -23,6 +23,9 @@ from mesonbuild.dependencies import ExternalProgram
from mesonbuild.mesonlib import substring_is_in_list, MesonException
from mesonbuild import mlog
+from collections import namedtuple
+import io
+import re
import tempfile
import time, datetime, multiprocessing, json
import concurrent.futures as conc
@@ -36,6 +39,10 @@ import enum
# mean that the test should be skipped.
GNU_SKIP_RETURNCODE = 77
+# GNU autotools interprets a return code of 99 from tests it executes to
+# mean that the test failed even before testing what it is supposed to test.
+GNU_ERROR_RETURNCODE = 99
+
def is_windows():
platname = platform.system().lower()
return platname == 'windows' or 'mingw' in platname
@@ -146,11 +153,202 @@ class TestResult(enum.Enum):
FAIL = 'FAIL'
EXPECTEDFAIL = 'EXPECTEDFAIL'
UNEXPECTEDPASS = 'UNEXPECTEDPASS'
+ ERROR = 'ERROR'
+
+
+class TAPParser(object):
+ Plan = namedtuple('Plan', ['count', 'late', 'skipped', 'explanation'])
+ Bailout = namedtuple('Bailout', ['message'])
+ Test = namedtuple('Test', ['number', 'name', 'result', 'explanation'])
+ Error = namedtuple('Error', ['message'])
+ Version = namedtuple('Version', ['version'])
+
+ _MAIN = 1
+ _AFTER_TEST = 2
+ _YAML = 3
+
+ _RE_BAILOUT = r'Bail out!\s*(.*)'
+ _RE_DIRECTIVE = r'(?:\s*\#\s*([Ss][Kk][Ii][Pp]\S*|[Tt][Oo][Dd][Oo])\b\s*(.*))?'
+ _RE_PLAN = r'1\.\.([0-9]+)' + _RE_DIRECTIVE
+ _RE_TEST = r'((?:not )?ok)\s*(?:([0-9]+)\s*)?([^#]*)' + _RE_DIRECTIVE
+ _RE_VERSION = r'TAP version ([0-9]+)'
+ _RE_YAML_START = r'(\s+)---.*'
+ _RE_YAML_END = r'\s+\.\.\.\s*'
+
+ def __init__(self, io):
+ self.io = io
+
+ def parse_test(self, ok, num, name, directive, explanation):
+ name = name.strip()
+ explanation = explanation.strip() if explanation else None
+ if directive is not None:
+ directive = directive.upper()
+ if directive == 'SKIP':
+ if ok:
+ yield self.Test(num, name, TestResult.SKIP, explanation)
+ return
+ elif directive == 'TODO':
+ yield self.Test(num, name, TestResult.UNEXPECTEDPASS if ok else TestResult.EXPECTEDFAIL, explanation)
+ return
+ else:
+ yield self.Error('invalid directive "%s"' % (directive,))
+
+ yield self.Test(num, name, TestResult.OK if ok else TestResult.FAIL, explanation)
+
+ def parse(self):
+ found_late_test = False
+ bailed_out = False
+ plan = None
+ lineno = 0
+ num_tests = 0
+ yaml_lineno = None
+ yaml_indent = None
+ state = self._MAIN
+ version = 12
+ while True:
+ lineno += 1
+ try:
+ line = next(self.io).rstrip()
+ except StopIteration:
+ break
+
+ # YAML blocks are only accepted after a test
+ if state == self._AFTER_TEST:
+ if version >= 13:
+ m = re.match(self._RE_YAML_START, line)
+ if m:
+ state = self._YAML
+ yaml_lineno = lineno
+ yaml_indent = m.group(1)
+ continue
+ state = self._MAIN
+
+ elif state == self._YAML:
+ if re.match(self._RE_YAML_END, line):
+ state = self._MAIN
+ continue
+ if line.startswith(yaml_indent):
+ continue
+ yield self.Error('YAML block not terminated (started on line %d)' % (yaml_lineno,))
+ state = self._MAIN
+
+ assert state == self._MAIN
+ if line.startswith('#'):
+ continue
+
+ m = re.match(self._RE_TEST, line)
+ if m:
+ if plan and plan.late and not found_late_test:
+ yield self.Error('unexpected test after late plan')
+ found_late_test = True
+ num_tests += 1
+ num = num_tests if m.group(2) is None else int(m.group(2))
+ if num != num_tests:
+ yield self.Error('out of order test numbers')
+ yield from self.parse_test(m.group(1) == 'ok', num,
+ m.group(3), m.group(4), m.group(5))
+ state = self._AFTER_TEST
+ continue
+
+ m = re.match(self._RE_PLAN, line)
+ if m:
+ if plan:
+ yield self.Error('more than one plan found')
+ else:
+ count = int(m.group(1))
+ skipped = (count == 0)
+ if m.group(2):
+ if m.group(2).upper().startswith('SKIP'):
+ if count > 0:
+ yield self.Error('invalid SKIP directive for plan')
+ skipped = True
+ else:
+ yield self.Error('invalid directive for plan')
+ plan = self.Plan(count=count, late=(num_tests > 0),
+ skipped=skipped, explanation=m.group(3))
+ yield plan
+ continue
+
+ m = re.match(self._RE_BAILOUT, line)
+ if m:
+ yield self.Bailout(m.group(1))
+ bailed_out = True
+ continue
+
+ m = re.match(self._RE_VERSION, line)
+ if m:
+                # The TAP version is only accepted on the first line
+ if lineno != 1:
+ yield self.Error('version number must be on the first line')
+ continue
+ version = int(m.group(1))
+ if version < 13:
+ yield self.Error('version number should be at least 13')
+ else:
+ yield self.Version(version=version)
+ continue
+
+ yield self.Error('unexpected input at line %d' % (lineno,))
+
+ if state == self._YAML:
+ yield self.Error('YAML block not terminated (started on line %d)' % (yaml_lineno,))
+
+ if not bailed_out and plan and num_tests != plan.count:
+ if num_tests < plan.count:
+ yield self.Error('Too few tests run (expected %d, got %d)' % (plan.count, num_tests))
+ else:
+ yield self.Error('Too many tests run (expected %d, got %d)' % (plan.count, num_tests))
class TestRun:
- def __init__(self, res, returncode, should_fail, duration, stdo, stde, cmd,
- env):
+ @staticmethod
+ def make_exitcode(test, returncode, duration, stdo, stde, cmd):
+ if returncode == GNU_SKIP_RETURNCODE:
+ res = TestResult.SKIP
+ elif returncode == GNU_ERROR_RETURNCODE:
+ res = TestResult.ERROR
+ elif test.should_fail:
+ res = TestResult.EXPECTEDFAIL if bool(returncode) else TestResult.UNEXPECTEDPASS
+ else:
+ res = TestResult.FAIL if bool(returncode) else TestResult.OK
+ return TestRun(test, res, returncode, duration, stdo, stde, cmd)
+
+ def make_tap(test, returncode, duration, stdo, stde, cmd):
+ res = None
+ num_tests = 0
+ failed = False
+ num_skipped = 0
+
+ for i in TAPParser(io.StringIO(stdo)).parse():
+ if isinstance(i, TAPParser.Bailout):
+ res = TestResult.ERROR
+ elif isinstance(i, TAPParser.Test):
+ if i.result == TestResult.SKIP:
+ num_skipped += 1
+ elif i.result in (TestResult.FAIL, TestResult.UNEXPECTEDPASS):
+ failed = True
+ num_tests += 1
+ elif isinstance(i, TAPParser.Error):
+ res = TestResult.ERROR
+ stde += '\nTAP parsing error: ' + i.message
+
+ if returncode != 0:
+ res = TestResult.ERROR
+ stde += '\n(test program exited with status code %d)' % (returncode,)
+
+ if res is None:
+ # Now determine the overall result of the test based on the outcome of the subcases
+ if num_skipped == num_tests:
+ # This includes the case where num_tests is zero
+ res = TestResult.SKIP
+ elif test.should_fail:
+ res = TestResult.EXPECTEDFAIL if failed else TestResult.UNEXPECTEDPASS
+ else:
+ res = TestResult.FAIL if failed else TestResult.OK
+
+ return TestRun(test, res, returncode, duration, stdo, stde, cmd)
+
+ def __init__(self, test, res, returncode, duration, stdo, stde, cmd):
assert isinstance(res, TestResult)
self.res = res
self.returncode = returncode
@@ -158,8 +356,8 @@ class TestRun:
self.stdo = stdo
self.stde = stde
self.cmd = cmd
- self.env = env
- self.should_fail = should_fail
+ self.env = test.env
+ self.should_fail = test.should_fail
def get_log(self):
res = '--- command ---\n'
@@ -257,9 +455,8 @@ class SingleTestRunner:
cmd = self._get_cmd()
if cmd is None:
skip_stdout = 'Not run because can not execute cross compiled binaries.'
- return TestRun(res=TestResult.SKIP, returncode=GNU_SKIP_RETURNCODE,
- should_fail=self.test.should_fail, duration=0.0,
- stdo=skip_stdout, stde=None, cmd=None, env=self.test.env)
+ return TestRun(test=self.test, res=TestResult.SKIP, returncode=GNU_SKIP_RETURNCODE,
+ duration=0.0, stdo=skip_stdout, stde=None, cmd=None)
else:
wrap = TestHarness.get_wrapper(self.options)
if self.options.gdb:
@@ -388,14 +585,12 @@ class SingleTestRunner:
stdo = ""
stde = additional_error
if timed_out:
- res = TestResult.TIMEOUT
- elif p.returncode == GNU_SKIP_RETURNCODE:
- res = TestResult.SKIP
- elif self.test.should_fail:
- res = TestResult.EXPECTEDFAIL if bool(p.returncode) else TestResult.UNEXPECTEDPASS
+ return TestRun(self.test, TestResult.TIMEOUT, p.returncode, duration, stdo, stde, cmd)
else:
- res = TestResult.FAIL if bool(p.returncode) else TestResult.OK
- return TestRun(res, p.returncode, self.test.should_fail, duration, stdo, stde, cmd, self.test.env)
+ if self.test.protocol == 'exitcode':
+ return TestRun.make_exitcode(self.test, p.returncode, duration, stdo, stde, cmd)
+ else:
+ return TestRun.make_tap(self.test, p.returncode, duration, stdo, stde, cmd)
class TestHarness:
@@ -438,6 +633,8 @@ class TestHarness:
current = self.build_data.test_setups[full_name]
if not options.gdb:
options.gdb = current.gdb
+ if options.gdb:
+ options.verbose = True
if options.timeout_multiplier is None:
options.timeout_multiplier = current.timeout_multiplier
# if options.env is None:
@@ -469,7 +666,7 @@ class TestHarness:
self.skip_count += 1
elif result.res is TestResult.OK:
self.success_count += 1
- elif result.res is TestResult.FAIL:
+ elif result.res is TestResult.FAIL or result.res is TestResult.ERROR:
self.fail_count += 1
elif result.res is TestResult.EXPECTEDFAIL:
self.expectedfail_count += 1
@@ -491,9 +688,11 @@ class TestHarness:
(num, name, padding1, result.res.value, padding2, result.duration,
status)
ok_statuses = (TestResult.OK, TestResult.EXPECTEDFAIL)
+ bad_statuses = (TestResult.FAIL, TestResult.TIMEOUT, TestResult.UNEXPECTEDPASS,
+ TestResult.ERROR)
if not self.options.quiet or result.res not in ok_statuses:
if result.res not in ok_statuses and mlog.colorize_console:
- if result.res in (TestResult.FAIL, TestResult.TIMEOUT, TestResult.UNEXPECTEDPASS):
+ if result.res in bad_statuses:
decorator = mlog.red
elif result.res is TestResult.SKIP:
decorator = mlog.yellow
@@ -503,8 +702,7 @@ class TestHarness:
else:
print(result_str)
result_str += "\n\n" + result.get_log()
- if (result.returncode != GNU_SKIP_RETURNCODE) \
- and (result.returncode != 0) != result.should_fail:
+ if result.res in bad_statuses:
if self.options.print_errorlogs:
self.collected_logs.append(result_str)
if self.logfile:
@@ -647,8 +845,8 @@ Timeout: %4d
self.logfilename = logfile_base + '.txt'
self.jsonlogfilename = logfile_base + '.json'
- self.jsonlogfile = open(self.jsonlogfilename, 'w', encoding='utf-8')
- self.logfile = open(self.logfilename, 'w', encoding='utf-8')
+ self.jsonlogfile = open(self.jsonlogfilename, 'w', encoding='utf-8', errors='replace')
+ self.logfile = open(self.logfilename, 'w', encoding='utf-8', errors='surrogateescape')
self.logfile.write('Log of Meson test suite run on %s\n\n'
% datetime.datetime.now().isoformat())
@@ -693,18 +891,17 @@ Timeout: %4d
for _ in range(self.options.repeat):
for i, test in enumerate(tests):
visible_name = self.get_pretty_suite(test)
+ single_test = self.get_test_runner(test)
- if not test.is_parallel or self.options.gdb:
+ if not test.is_parallel or single_test.options.gdb:
self.drain_futures(futures)
futures = []
- single_test = self.get_test_runner(test)
res = single_test.run()
self.process_test_result(res)
self.print_stats(numlen, tests, visible_name, res, i)
else:
if not executor:
executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes)
- single_test = self.get_test_runner(test)
f = executor.submit(single_test.run)
futures.append((f, numlen, tests, visible_name, i))
if self.options.repeat > 1 and self.fail_count:
@@ -746,6 +943,7 @@ def list_tests(th):
tests = th.get_tests()
for t in tests:
print(th.get_pretty_suite(t))
+ return not tests
def rebuild_all(wd):
if not os.path.isfile(os.path.join(wd, 'build.ninja')):
@@ -799,8 +997,7 @@ def run(options):
try:
th = TestHarness(options)
if options.list:
- list_tests(th)
- return 0
+ return list_tests(th)
if not options.args:
return th.doit()
return th.run_special()
diff --git a/mesonbuild/munstable_coredata.py b/mesonbuild/munstable_coredata.py
new file mode 100644
index 0000000..78f3f34
--- /dev/null
+++ b/mesonbuild/munstable_coredata.py
@@ -0,0 +1,126 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from . import coredata as cdata
+
+import os.path
+import pprint
+import textwrap
+
+def add_arguments(parser):
+ parser.add_argument('--all', action='store_true', dest='all', default=False,
+ help='Show data not used by current backend.')
+
+ parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+
+def dump_compilers(compilers):
+ for lang, compiler in compilers.items():
+ print(' ' + lang + ':')
+ print(' Id: ' + compiler.id)
+ print(' Command: ' + ' '.join(compiler.exelist))
+ print(' Full version: ' + compiler.full_version)
+ print(' Detected version: ' + compiler.version)
+ print(' Detected type: ' + repr(compiler.compiler_type))
+ #pprint.pprint(compiler.__dict__)
+
+
+def dump_guids(d):
+ for name, value in d.items():
+ print(' ' + name + ': ' + value)
+
+
+def run(options):
+ datadir = 'meson-private'
+ if options.builddir is not None:
+ datadir = os.path.join(options.builddir, datadir)
+ if not os.path.isdir(datadir):
+ print('Current directory is not a build dir. Please specify it or '
+ 'change the working directory to it.')
+ return 1
+
+ all = options.all
+
+ print('This is a dump of the internal unstable cache of meson. This is for debugging only.')
+ print('Do NOT parse, this will change from version to version in incompatible ways')
+ print('')
+
+ coredata = cdata.load(options.builddir)
+ backend = coredata.get_builtin_option('backend')
+ for k, v in sorted(coredata.__dict__.items()):
+ if k in ('backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'):
+ # use `meson configure` to view these
+ pass
+ elif k in ['install_guid', 'test_guid', 'regen_guid']:
+ if all or backend.startswith('vs'):
+ print(k + ': ' + v)
+ elif k == 'target_guids':
+ if all or backend.startswith('vs'):
+ print(k + ':')
+ dump_guids(v)
+ elif k in ['lang_guids']:
+ if all or backend.startswith('vs') or backend == 'xcode':
+ print(k + ':')
+ dump_guids(v)
+ elif k == 'meson_command':
+ if all or backend.startswith('vs'):
+ print('Meson command used in build file regeneration: ' + ' '.join(v))
+ elif k == 'pkgconf_envvar':
+ print('Last seen PKGCONFIG environment variable value: ' + v)
+ elif k == 'version':
+ print('Meson version: ' + v)
+ elif k == 'cross_file':
+ print('Cross File: ' + (v or 'None'))
+ elif k == 'config_files':
+ if v:
+ print('Native File: ' + ' '.join(v))
+ elif k == 'compilers':
+ print('Cached native compilers:')
+ dump_compilers(v)
+ elif k == 'cross_compilers':
+ print('Cached cross compilers:')
+ dump_compilers(v)
+ elif k == 'deps':
+ native = []
+ cross = []
+ for dep_key, dep in sorted(v.items()):
+ if dep_key[2]:
+ cross.append((dep_key, dep))
+ else:
+ native.append((dep_key, dep))
+
+ def print_dep(dep_key, dep):
+ print(' ' + dep_key[0] + ": ")
+ print(' compile args: ' + repr(dep.get_compile_args()))
+ print(' link args: ' + repr(dep.get_link_args()))
+ if dep.get_sources():
+ print(' sources: ' + repr(dep.get_sources()))
+ print(' version: ' + repr(dep.get_version()))
+
+ if native:
+ print('Cached native dependencies:')
+ for dep_key, dep in native:
+ print_dep(dep_key, dep)
+ if cross:
+ print('Cached dependencies:')
+ for dep_key, dep in cross:
+ print_dep(dep_key, dep)
+ elif k == 'external_preprocess_args':
+ for lang, opts in v.items():
+ if opts:
+ print('Preprocessor args for ' + lang + ': ' + ' '.join(opts))
+ else:
+ print(k + ':')
+ print(textwrap.indent(pprint.pformat(v), ' '))
diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py
index 8967a53..85f6897 100644
--- a/mesonbuild/optinterpreter.py
+++ b/mesonbuild/optinterpreter.py
@@ -22,6 +22,7 @@ from . import compilers
forbidden_option_names = coredata.get_builtin_options()
forbidden_prefixes = [lang + '_' for lang in compilers.all_languages] + ['b_', 'backend_']
+reserved_prefixes = ['cross_']
def is_invalid_name(name):
if name in forbidden_option_names:
@@ -29,6 +30,9 @@ def is_invalid_name(name):
pref = name.split('_')[0] + '_'
if pref in forbidden_prefixes:
return True
+ if pref in reserved_prefixes:
+ from . import mlog
+ mlog.deprecation('Option uses prefix "%s", which is reserved for Meson. This will become an error in the future.' % pref)
return False
class OptionException(mesonlib.MesonException):
diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py
index 37ed7ef..975655c 100644
--- a/mesonbuild/rewriter.py
+++ b/mesonbuild/rewriter.py
@@ -23,36 +23,934 @@
# - move targets
# - reindent?
-import mesonbuild.astinterpreter
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstPrinter
from mesonbuild.mesonlib import MesonException
-from mesonbuild import mlog
-import sys, traceback
-
-def add_arguments(parser):
- parser.add_argument('--sourcedir', default='.',
- help='Path to source directory.')
- parser.add_argument('--target', default=None,
- help='Name of target to edit.')
- parser.add_argument('--filename', default=None,
- help='Name of source file to add or remove to target.')
- parser.add_argument('commands', nargs='+')
+from . import mlog, environment
+from functools import wraps
+from typing import List, Dict, Optional
+from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, BaseNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, StringNode
+import json, os, re, sys
+
+class RewriterException(MesonException):
+ pass
+
+def add_arguments(parser, formater=None):
+ parser.add_argument('-s', '--sourcedir', type=str, default='.', metavar='SRCDIR', help='Path to source directory.')
+ parser.add_argument('-V', '--verbose', action='store_true', default=False, help='Enable verbose output')
+ parser.add_argument('-S', '--skip-errors', dest='skip', action='store_true', default=False, help='Skip errors instead of aborting')
+ subparsers = parser.add_subparsers(dest='type', title='Rewriter commands', description='Rewrite command to execute')
+
+ # Target
+ tgt_parser = subparsers.add_parser('target', help='Modify a target', formatter_class=formater)
+ tgt_parser.add_argument('-s', '--subdir', default='', dest='subdir', help='Subdirectory of the new target (only for the "add_target" action)')
+ tgt_parser.add_argument('--type', dest='tgt_type', choices=rewriter_keys['target']['target_type'][2], default='executable',
+ help='Type of the target to add (only for the "add_target" action)')
+ tgt_parser.add_argument('target', help='Name or ID of the target')
+ tgt_parser.add_argument('operation', choices=['add', 'rm', 'add_target', 'rm_target', 'info'],
+ help='Action to execute')
+ tgt_parser.add_argument('sources', nargs='*', help='Sources to add/remove')
+
+ # KWARGS
+ kw_parser = subparsers.add_parser('kwargs', help='Modify keyword arguments', formatter_class=formater)
+ kw_parser.add_argument('operation', choices=rewriter_keys['kwargs']['operation'][2],
+ help='Action to execute')
+ kw_parser.add_argument('function', choices=list(rewriter_func_kwargs.keys()),
+ help='Function type to modify')
+ kw_parser.add_argument('id', help='ID of the function to modify (can be anything for "project")')
+ kw_parser.add_argument('kwargs', nargs='*', help='Pairs of keyword and value')
+
+ # Default options
+ def_parser = subparsers.add_parser('default-options', help='Modify the project default options', formatter_class=formater)
+ def_parser.add_argument('operation', choices=rewriter_keys['default_options']['operation'][2],
+ help='Action to execute')
+ def_parser.add_argument('options', nargs='*', help='Key, value pairs of configuration option')
+
+ # JSON file/command
+ cmd_parser = subparsers.add_parser('command', help='Execute a JSON array of commands', formatter_class=formater)
+ cmd_parser.add_argument('json', help='JSON string or file to execute')
+
+class RequiredKeys:
+ def __init__(self, keys):
+ self.keys = keys
+
+ def __call__(self, f):
+ @wraps(f)
+ def wrapped(*wrapped_args, **wrapped_kwargs):
+ assert(len(wrapped_args) >= 2)
+ cmd = wrapped_args[1]
+ for key, val in self.keys.items():
+ typ = val[0] # The type of the value
+ default = val[1] # The default value -- None is required
+ choices = val[2] # Valid choices -- None is for everything
+ if key not in cmd:
+ if default is not None:
+ cmd[key] = default
+ else:
+ raise RewriterException('Key "{}" is missing in object for {}'
+ .format(key, f.__name__))
+ if not isinstance(cmd[key], typ):
+ raise RewriterException('Invalid type of "{}". Required is {} but provided was {}'
+ .format(key, typ.__name__, type(cmd[key]).__name__))
+ if choices is not None:
+ assert(isinstance(choices, list))
+ if cmd[key] not in choices:
+ raise RewriterException('Invalid value of "{}": Possible values are {} but provided was "{}"'
+ .format(key, choices, cmd[key]))
+ return f(*wrapped_args, **wrapped_kwargs)
+
+ return wrapped
+
+class MTypeBase:
+ def __init__(self, node: Optional[BaseNode] = None):
+ if node is None:
+ self.node = self._new_node()
+ else:
+ self.node = node
+ self.node_type = None
+ for i in self.supported_nodes():
+ if isinstance(self.node, i):
+ self.node_type = i
+
+ def _new_node(self):
+ # Override in derived class
+ return BaseNode()
+
+ def can_modify(self):
+ return self.node_type is not None
+
+ def get_node(self):
+ return self.node
+
+ def supported_nodes(self):
+ # Override in derived class
+ return []
+
+ def set_value(self, value):
+ # Override in derived class
+ mlog.warning('Cannot set the value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+ def add_value(self, value):
+ # Override in derived class
+ mlog.warning('Cannot add a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+ def remove_value(self, value):
+ # Override in derived class
+ mlog.warning('Cannot remove a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+ def remove_regex(self, value):
+ # Override in derived class
+ mlog.warning('Cannot remove a regex in type', mlog.bold(type(self).__name__), '--> skipping')
+
+class MTypeStr(MTypeBase):
+ def __init__(self, node: Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return StringNode(Token('', '', 0, 0, 0, None, ''))
+
+ def supported_nodes(self):
+ return [StringNode]
+
+ def set_value(self, value):
+ self.node.value = str(value)
+
+class MTypeBool(MTypeBase):
+ def __init__(self, node: Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return StringNode(Token('', '', 0, 0, 0, None, False))
+
+ def supported_nodes(self):
+ return [BooleanNode]
+
+ def set_value(self, value):
+ self.node.value = bool(value)
+
+class MTypeID(MTypeBase):
+ def __init__(self, node: Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return StringNode(Token('', '', 0, 0, 0, None, ''))
+
+ def supported_nodes(self):
+ return [IdNode]
+
+ def set_value(self, value):
+ self.node.value = str(value)
+
+class MTypeList(MTypeBase):
+ def __init__(self, node: Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_node(self):
+ return ArrayNode(ArgumentNode(Token('', '', 0, 0, 0, None, '')), 0, 0, 0, 0)
+
+ def _new_element_node(self, value):
+ # Override in derived class
+ return BaseNode()
+
+ def _ensure_array_node(self):
+ if not isinstance(self.node, ArrayNode):
+ tmp = self.node
+ self.node = self._new_node()
+ self.node.args.arguments += [tmp]
+
+ def _check_is_equal(self, node, value) -> bool:
+ # Override in derived class
+ return False
+
+ def _check_regex_matches(self, node, regex: str) -> bool:
+ # Override in derived class
+ return False
+
+ def get_node(self):
+ if isinstance(self.node, ArrayNode):
+ if len(self.node.args.arguments) == 1:
+ return self.node.args.arguments[0]
+ return self.node
+
+ def supported_element_nodes(self):
+ # Override in derived class
+ return []
+
+ def supported_nodes(self):
+ return [ArrayNode] + self.supported_element_nodes()
+
+ def set_value(self, value):
+ if not isinstance(value, list):
+ value = [value]
+ self._ensure_array_node()
+ self.node.args.arguments = [] # Remove all current nodes
+ for i in value:
+ self.node.args.arguments += [self._new_element_node(i)]
+
+ def add_value(self, value):
+ if not isinstance(value, list):
+ value = [value]
+ self._ensure_array_node()
+ for i in value:
+ self.node.args.arguments += [self._new_element_node(i)]
+
+ def _remove_helper(self, value, equal_func):
+ def check_remove_node(node):
+ for j in value:
+ if equal_func(i, j):
+ return True
+ return False
+
+ if not isinstance(value, list):
+ value = [value]
+ self._ensure_array_node()
+ removed_list = []
+ for i in self.node.args.arguments:
+ if not check_remove_node(i):
+ removed_list += [i]
+ self.node.args.arguments = removed_list
+
+ def remove_value(self, value):
+ self._remove_helper(value, self._check_is_equal)
+
+ def remove_regex(self, regex: str):
+ self._remove_helper(regex, self._check_regex_matches)
+
+class MTypeStrList(MTypeList):
+ def __init__(self, node: Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_element_node(self, value):
+ return StringNode(Token('', '', 0, 0, 0, None, str(value)))
+
+ def _check_is_equal(self, node, value) -> bool:
+ if isinstance(node, StringNode):
+ return node.value == value
+ return False
+
+ def _check_regex_matches(self, node, regex: str) -> bool:
+ if isinstance(node, StringNode):
+ return re.match(regex, node.value) is not None
+ return False
+
+ def supported_element_nodes(self):
+ return [StringNode]
+
+class MTypeIDList(MTypeList):
+ def __init__(self, node: Optional[BaseNode] = None):
+ super().__init__(node)
+
+ def _new_element_node(self, value):
+ return IdNode(Token('', '', 0, 0, 0, None, str(value)))
+
+ def _check_is_equal(self, node, value) -> bool:
+ if isinstance(node, IdNode):
+ return node.value == value
+ return False
+
+ def _check_regex_matches(self, node, regex: str) -> bool:
+ if isinstance(node, StringNode):
+ return re.match(regex, node.value) is not None
+ return False
+
+ def supported_element_nodes(self):
+ return [IdNode]
+
+rewriter_keys = {
+ 'default_options': {
+ 'operation': (str, None, ['set', 'delete']),
+ 'options': (dict, {}, None)
+ },
+ 'kwargs': {
+ 'function': (str, None, None),
+ 'id': (str, None, None),
+ 'operation': (str, None, ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']),
+ 'kwargs': (dict, {}, None)
+ },
+ 'target': {
+ 'target': (str, None, None),
+ 'operation': (str, None, ['src_add', 'src_rm', 'target_rm', 'target_add', 'info']),
+ 'sources': (list, [], None),
+ 'subdir': (str, '', None),
+ 'target_type': (str, 'executable', ['both_libraries', 'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library']),
+ }
+}
+
+rewriter_func_kwargs = {
+ 'dependency': {
+ 'language': MTypeStr,
+ 'method': MTypeStr,
+ 'native': MTypeBool,
+ 'not_found_message': MTypeStr,
+ 'required': MTypeBool,
+ 'static': MTypeBool,
+ 'version': MTypeStrList,
+ 'modules': MTypeStrList
+ },
+ 'target': {
+ 'build_by_default': MTypeBool,
+ 'build_rpath': MTypeStr,
+ 'dependencies': MTypeIDList,
+ 'gui_app': MTypeBool,
+ 'link_with': MTypeIDList,
+ 'export_dynamic': MTypeBool,
+ 'implib': MTypeBool,
+ 'install': MTypeBool,
+ 'install_dir': MTypeStr,
+ 'install_rpath': MTypeStr,
+ 'pie': MTypeBool
+ },
+ 'project': {
+ 'default_options': MTypeStrList,
+ 'meson_version': MTypeStr,
+ 'license': MTypeStrList,
+ 'subproject_dir': MTypeStr,
+ 'version': MTypeStr
+ }
+}
+
+class Rewriter:
+ def __init__(self, sourcedir: str, generator: str = 'ninja', skip_errors: bool = False):
+ self.sourcedir = sourcedir
+ self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+ self.skip_errors = skip_errors
+ self.modefied_nodes = []
+ self.to_remove_nodes = []
+ self.to_add_nodes = []
+ self.functions = {
+ 'default_options': self.process_default_options,
+ 'kwargs': self.process_kwargs,
+ 'target': self.process_target,
+ }
+ self.info_dump = None
+
+ def analyze_meson(self):
+ mlog.log('Analyzing meson file:', mlog.bold(os.path.join(self.sourcedir, environment.build_filename)))
+ self.interpreter.analyze()
+ mlog.log(' -- Project:', mlog.bold(self.interpreter.project_data['descriptive_name']))
+ mlog.log(' -- Version:', mlog.cyan(self.interpreter.project_data['version']))
+
+ def add_info(self, cmd_type: str, cmd_id: str, data: dict):
+ if self.info_dump is None:
+ self.info_dump = {}
+ if cmd_type not in self.info_dump:
+ self.info_dump[cmd_type] = {}
+ self.info_dump[cmd_type][cmd_id] = data
+
+ def print_info(self):
+ if self.info_dump is None:
+ return
+ sys.stderr.write(json.dumps(self.info_dump, indent=2))
+
+ def on_error(self):
+ if self.skip_errors:
+ return mlog.cyan('-->'), mlog.yellow('skipping')
+ return mlog.cyan('-->'), mlog.red('aborting')
+
+ def handle_error(self):
+ if self.skip_errors:
+ return None
+ raise MesonException('Rewriting the meson.build failed')
+
+ def find_target(self, target: str):
+ def check_list(name: str) -> List[BaseNode]:
+ result = []
+ for i in self.interpreter.targets:
+ if name == i['name'] or name == i['id']:
+ result += [i]
+ return result
+
+ targets = check_list(target)
+ if targets:
+ if len(targets) == 1:
+ return targets[0]
+ else:
+ mlog.error('There are multiple targets matching', mlog.bold(target))
+ for i in targets:
+ mlog.error(' -- Target name', mlog.bold(i['name']), 'with ID', mlog.bold(i['id']))
+ mlog.error('Please try again with the unique ID of the target', *self.on_error())
+ self.handle_error()
+ return None
+
+ # Check the assignments
+ tgt = None
+ if target in self.interpreter.assignments:
+ node = self.interpreter.assignments[target][0]
+ if isinstance(node, FunctionNode):
+ if node.func_name in ['executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries']:
+ tgt = self.interpreter.assign_vals[target][0]
+
+ return tgt
+
+ def find_dependency(self, dependency: str):
+ def check_list(name: str):
+ for i in self.interpreter.dependencies:
+ if name == i['name']:
+ return i
+ return None
+
+ dep = check_list(dependency)
+ if dep is not None:
+ return dep
+
+ # Check the assignments
+ if dependency in self.interpreter.assignments:
+ node = self.interpreter.assignments[dependency][0]
+ if isinstance(node, FunctionNode):
+ if node.func_name in ['dependency']:
+ name = self.interpreter.flatten_args(node.args)[0]
+ dep = check_list(name)
+
+ return dep
+
+ @RequiredKeys(rewriter_keys['default_options'])
+ def process_default_options(self, cmd):
+ # First, remove the old values
+ kwargs_cmd = {
+ 'function': 'project',
+ 'id': "",
+ 'operation': 'remove_regex',
+ 'kwargs': {
+ 'default_options': ['{}=.*'.format(x) for x in cmd['options'].keys()]
+ }
+ }
+ self.process_kwargs(kwargs_cmd)
+
+ # Then add the new values
+ if cmd['operation'] != 'set':
+ return
+
+ kwargs_cmd['operation'] = 'add'
+ kwargs_cmd['kwargs']['default_options'] = []
+
+ cdata = self.interpreter.coredata
+ options = {
+ **cdata.builtins,
+ **cdata.backend_options,
+ **cdata.base_options,
+ **cdata.compiler_options.build,
+ **cdata.user_options
+ }
+
+ for key, val in sorted(cmd['options'].items()):
+ if key not in options:
+ mlog.error('Unknown options', mlog.bold(key), *self.on_error())
+ self.handle_error()
+ continue
+
+ try:
+ val = options[key].validate_value(val)
+ except MesonException as e:
+ mlog.error('Unable to set', mlog.bold(key), mlog.red(str(e)), *self.on_error())
+ self.handle_error()
+ continue
+
+ kwargs_cmd['kwargs']['default_options'] += ['{}={}'.format(key, val)]
+
+ self.process_kwargs(kwargs_cmd)
+
+ @RequiredKeys(rewriter_keys['kwargs'])
+ def process_kwargs(self, cmd):
+ mlog.log('Processing function type', mlog.bold(cmd['function']), 'with id', mlog.cyan("'" + cmd['id'] + "'"))
+ if cmd['function'] not in rewriter_func_kwargs:
+ mlog.error('Unknown function type', cmd['function'], *self.on_error())
+ return self.handle_error()
+ kwargs_def = rewriter_func_kwargs[cmd['function']]
+
+ # Find the function node to modify
+ node = None
+ arg_node = None
+ if cmd['function'] == 'project':
+ if cmd['id'] != '/':
+ mlog.error('The ID for the function type project must be an empty string', *self.on_error())
+ self.handle_error()
+ node = self.interpreter.project_node
+ arg_node = node.args
+ elif cmd['function'] == 'target':
+ tmp = self.find_target(cmd['id'])
+ if tmp:
+ node = tmp['node']
+ arg_node = node.args
+ elif cmd['function'] == 'dependency':
+ tmp = self.find_dependency(cmd['id'])
+ if tmp:
+ node = tmp['node']
+ arg_node = node.args
+ if not node:
+ mlog.error('Unable to find the function node')
+ assert(isinstance(node, FunctionNode))
+ assert(isinstance(arg_node, ArgumentNode))
+
+ # Print kwargs info
+ if cmd['operation'] == 'info':
+ info_data = {}
+ for key, val in sorted(arg_node.kwargs.items()):
+ info_data[key] = None
+ if isinstance(val, ElementaryNode):
+ info_data[key] = val.value
+ elif isinstance(val, ArrayNode):
+ data_list = []
+ for i in val.args.arguments:
+ element = None
+ if isinstance(i, ElementaryNode):
+ element = i.value
+ data_list += [element]
+ info_data[key] = data_list
+
+ self.add_info('kwargs', '{}#{}'.format(cmd['function'], cmd['id']), info_data)
+ return # Nothing else to do
+
+ # Modify the kwargs
+ num_changed = 0
+ for key, val in sorted(cmd['kwargs'].items()):
+ if key not in kwargs_def:
+ mlog.error('Cannot modify unknown kwarg', mlog.bold(key), *self.on_error())
+ self.handle_error()
+ continue
+
+ # Remove the key from the kwargs
+ if cmd['operation'] == 'delete':
+ if key in arg_node.kwargs:
+ mlog.log(' -- Deleting', mlog.bold(key), 'from the kwargs')
+ del arg_node.kwargs[key]
+ num_changed += 1
+ else:
+ mlog.log(' -- Key', mlog.bold(key), 'is already deleted')
+ continue
+
+ if key not in arg_node.kwargs:
+ arg_node.kwargs[key] = None
+ modifyer = kwargs_def[key](arg_node.kwargs[key])
+ if not modifyer.can_modify():
+ mlog.log(' -- Skipping', mlog.bold(key), 'because it is too complex to modify')
+
+ # Apply the operation
+ val_str = str(val)
+ if cmd['operation'] == 'set':
+ mlog.log(' -- Setting', mlog.bold(key), 'to', mlog.yellow(val_str))
+ modifyer.set_value(val)
+ elif cmd['operation'] == 'add':
+ mlog.log(' -- Adding', mlog.yellow(val_str), 'to', mlog.bold(key))
+ modifyer.add_value(val)
+ elif cmd['operation'] == 'remove':
+ mlog.log(' -- Removing', mlog.yellow(val_str), 'from', mlog.bold(key))
+ modifyer.remove_value(val)
+ elif cmd['operation'] == 'remove_regex':
+ mlog.log(' -- Removing all values matching', mlog.yellow(val_str), 'from', mlog.bold(key))
+ modifyer.remove_regex(val)
+
+ # Write back the result
+ arg_node.kwargs[key] = modifyer.get_node()
+ num_changed += 1
+
+ if num_changed > 0 and node not in self.modefied_nodes:
+ self.modefied_nodes += [node]
+
+ def find_assignment_node(self, node: BaseNode) -> AssignmentNode:
+ if hasattr(node, 'ast_id') and node.ast_id in self.interpreter.reverse_assignment:
+ return self.interpreter.reverse_assignment[node.ast_id]
+ return None
+
+ @RequiredKeys(rewriter_keys['target'])
+ def process_target(self, cmd):
+ mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation']))
+ target = self.find_target(cmd['target'])
+ if target is None and cmd['operation'] != 'target_add':
+ mlog.error('Unknown target', mlog.bold(cmd['target']), *self.on_error())
+ return self.handle_error()
+
+ # Make source paths relative to the current subdir
+ def rel_source(src: str) -> str:
+ subdir = os.path.abspath(os.path.join(self.sourcedir, target['subdir']))
+ if os.path.isabs(src):
+ return os.path.relpath(src, subdir)
+ elif not os.path.exists(src):
+ return src # Trust the user when the source doesn't exist
+ # Make sure that the path is relative to the subdir
+ return os.path.relpath(os.path.abspath(src), subdir)
+
+ if target is not None:
+ cmd['sources'] = [rel_source(x) for x in cmd['sources']]
+
+ # Utility function to get a list of the sources from a node
+ def arg_list_from_node(n):
+ args = []
+ if isinstance(n, FunctionNode):
+ args = list(n.args.arguments)
+ if n.func_name in build_target_functions:
+ args.pop(0)
+ elif isinstance(n, ArrayNode):
+ args = n.args.arguments
+ elif isinstance(n, ArgumentNode):
+ args = n.arguments
+ return args
+
+ to_sort_nodes = []
+
+ if cmd['operation'] == 'src_add':
+ node = None
+ if target['sources']:
+ node = target['sources'][0]
+ else:
+ node = target['node']
+ assert(node is not None)
+
+ # Generate the current source list
+ src_list = []
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ src_list += [j.value]
+
+ # Generate the new String nodes
+ to_append = []
+ for i in sorted(set(cmd['sources'])):
+ if i in src_list:
+ mlog.log(' -- Source', mlog.green(i), 'is already defined for the target --> skipping')
+ continue
+ mlog.log(' -- Adding source', mlog.green(i), 'at',
+ mlog.yellow('{}:{}'.format(os.path.join(node.subdir, environment.build_filename), node.lineno)))
+ token = Token('string', node.subdir, 0, 0, 0, None, i)
+ to_append += [StringNode(token)]
+
+ # Append to the AST at the right place
+ arg_node = None
+ if isinstance(node, (FunctionNode, ArrayNode)):
+ arg_node = node.args
+ elif isinstance(node, ArgumentNode):
+ arg_node = node
+ assert(arg_node is not None)
+ arg_node.arguments += to_append
+
+ # Mark the node as modified
+ if arg_node not in to_sort_nodes and not isinstance(node, FunctionNode):
+ to_sort_nodes += [arg_node]
+ if node not in self.modefied_nodes:
+ self.modefied_nodes += [node]
+
+ elif cmd['operation'] == 'src_rm':
+ # Helper to find the exact string node and its parent
+ def find_node(src):
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ if j.value == src:
+ return i, j
+ return None, None
+
+ for i in cmd['sources']:
+ # Try to find the node with the source string
+ root, string_node = find_node(i)
+ if root is None:
+ mlog.warning(' -- Unable to find source', mlog.green(i), 'in the target')
+ continue
+
+ # Remove the found string node from the argument list
+ arg_node = None
+ if isinstance(root, (FunctionNode, ArrayNode)):
+ arg_node = root.args
+ elif isinstance(root, ArgumentNode):
+ arg_node = root
+ assert(arg_node is not None)
+ mlog.log(' -- Removing source', mlog.green(i), 'from',
+ mlog.yellow('{}:{}'.format(os.path.join(string_node.subdir, environment.build_filename), string_node.lineno)))
+ arg_node.arguments.remove(string_node)
+
+ # Mark the node as modified
+ if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
+ to_sort_nodes += [arg_node]
+ if root not in self.modefied_nodes:
+ self.modefied_nodes += [root]
+
+ elif cmd['operation'] == 'target_add':
+ if target is not None:
+ mlog.error('Can not add target', mlog.bold(cmd['target']), 'because it already exists', *self.on_error())
+ return self.handle_error()
+
+ id_base = re.sub(r'[- ]', '_', cmd['target'])
+ target_id = id_base + ('_exe' if cmd['target_type'] == 'executable' else '_lib')
+ source_id = id_base + '_sources'
+
+ # Build src list
+ src_arg_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
+ src_arr_node = ArrayNode(src_arg_node, 0, 0, 0, 0)
+ src_far_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
+ src_fun_node = FunctionNode(cmd['subdir'], 0, 0, 0, 0, 'files', src_far_node)
+ src_ass_node = AssignmentNode(cmd['subdir'], 0, 0, source_id, src_fun_node)
+ src_arg_node.arguments = [StringNode(Token('string', cmd['subdir'], 0, 0, 0, None, x)) for x in cmd['sources']]
+ src_far_node.arguments = [src_arr_node]
+
+ # Build target
+ tgt_arg_node = ArgumentNode(Token('string', cmd['subdir'], 0, 0, 0, None, ''))
+ tgt_fun_node = FunctionNode(cmd['subdir'], 0, 0, 0, 0, cmd['target_type'], tgt_arg_node)
+ tgt_ass_node = AssignmentNode(cmd['subdir'], 0, 0, target_id, tgt_fun_node)
+ tgt_arg_node.arguments = [
+ StringNode(Token('string', cmd['subdir'], 0, 0, 0, None, cmd['target'])),
+ IdNode(Token('string', cmd['subdir'], 0, 0, 0, None, source_id))
+ ]
+
+ src_ass_node.accept(AstIndentationGenerator())
+ tgt_ass_node.accept(AstIndentationGenerator())
+ self.to_add_nodes += [src_ass_node, tgt_ass_node]
+
+ elif cmd['operation'] == 'target_rm':
+ to_remove = self.find_assignment_node(target['node'])
+ if to_remove is None:
+ to_remove = target['node']
+ self.to_remove_nodes += [to_remove]
+ mlog.log(' -- Removing target', mlog.green(cmd['target']), 'at',
+ mlog.yellow('{}:{}'.format(os.path.join(to_remove.subdir, environment.build_filename), to_remove.lineno)))
+
+ elif cmd['operation'] == 'info':
+ # List all sources in the target
+ src_list = []
+ for i in target['sources']:
+ for j in arg_list_from_node(i):
+ if isinstance(j, StringNode):
+ src_list += [j.value]
+ test_data = {
+ 'name': target['name'],
+ 'sources': src_list
+ }
+ self.add_info('target', target['id'], test_data)
+
+ # Sort files
+ for i in to_sort_nodes:
+ convert = lambda text: int(text) if text.isdigit() else text.lower()
+ alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
+ path_sorter = lambda key: ([(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))])
+
+ unknown = [x for x in i.arguments if not isinstance(x, StringNode)]
+ sources = [x for x in i.arguments if isinstance(x, StringNode)]
+ sources = sorted(sources, key=lambda x: path_sorter(x.value))
+ i.arguments = unknown + sources
+
+ def process(self, cmd):
+ if 'type' not in cmd:
+ raise RewriterException('Command has no key "type"')
+ if cmd['type'] not in self.functions:
+ raise RewriterException('Unknown command "{}". Supported commands are: {}'
+ .format(cmd['type'], list(self.functions.keys())))
+ self.functions[cmd['type']](cmd)
+
+ def apply_changes(self):
+ assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'subdir') for x in self.modefied_nodes))
+ assert(all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'subdir') for x in self.to_remove_nodes))
+ assert(all(isinstance(x, (ArrayNode, FunctionNode)) for x in self.modefied_nodes))
+ assert(all(isinstance(x, (ArrayNode, AssignmentNode, FunctionNode)) for x in self.to_remove_nodes))
+ # Sort based on line and column in reversed order
+ work_nodes = [{'node': x, 'action': 'modify'} for x in self.modefied_nodes]
+ work_nodes += [{'node': x, 'action': 'rm'} for x in self.to_remove_nodes]
+ work_nodes = list(sorted(work_nodes, key=lambda x: (x['node'].lineno, x['node'].colno), reverse=True))
+ work_nodes += [{'node': x, 'action': 'add'} for x in self.to_add_nodes]
+
+ # Generating the new replacement string
+ str_list = []
+ for i in work_nodes:
+ new_data = ''
+ if i['action'] == 'modify' or i['action'] == 'add':
+ printer = AstPrinter()
+ i['node'].accept(printer)
+ printer.post_process()
+ new_data = printer.result.strip()
+ data = {
+ 'file': os.path.join(i['node'].subdir, environment.build_filename),
+ 'str': new_data,
+ 'node': i['node'],
+ 'action': i['action']
+ }
+ str_list += [data]
+
+ # Load build files
+ files = {}
+ for i in str_list:
+ if i['file'] in files:
+ continue
+ fpath = os.path.realpath(os.path.join(self.sourcedir, i['file']))
+ fdata = ''
+ # Create an empty file if it does not exist
+ if not os.path.exists(fpath):
+ with open(fpath, 'w'):
+ pass
+ with open(fpath, 'r') as fp:
+ fdata = fp.read()
+
+ # Generate line offsets numbers
+ m_lines = fdata.splitlines(True)
+ offset = 0
+ line_offsets = []
+ for j in m_lines:
+ line_offsets += [offset]
+ offset += len(j)
+
+ files[i['file']] = {
+ 'path': fpath,
+ 'raw': fdata,
+ 'offsets': line_offsets
+ }
+
+ # Replace in source code
+ def remove_node(i):
+ offsets = files[i['file']]['offsets']
+ raw = files[i['file']]['raw']
+ node = i['node']
+ line = node.lineno - 1
+ col = node.colno
+ start = offsets[line] + col
+ end = start
+ if isinstance(node, (ArrayNode, FunctionNode)):
+ end = offsets[node.end_lineno - 1] + node.end_colno
+
+ # Only removal is supported for assignments
+ elif isinstance(node, AssignmentNode) and i['action'] == 'rm':
+ if isinstance(node.value, (ArrayNode, FunctionNode)):
+ remove_node({'file': i['file'], 'str': '', 'node': node.value, 'action': 'rm'})
+ raw = files[i['file']]['raw']
+ while raw[end] != '=':
+ end += 1
+ end += 1 # Handle the '='
+ while raw[end] in [' ', '\n', '\t']:
+ end += 1
+
+ raw = files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:]
+
+ for i in str_list:
+ if i['action'] in ['modify', 'rm']:
+ remove_node(i)
+ elif i['action'] in ['add']:
+ files[i['file']]['raw'] += i['str'] + '\n'
+
+ # Write the files back
+ for key, val in files.items():
+ mlog.log('Rewriting', mlog.yellow(key))
+ with open(val['path'], 'w') as fp:
+ fp.write(val['raw'])
+
+target_operation_map = {
+ 'add': 'src_add',
+ 'rm': 'src_rm',
+ 'add_target': 'target_add',
+ 'rm_target': 'target_rm',
+ 'info': 'info',
+}
+
+def list_to_dict(in_list: List[str]) -> Dict[str, str]:
+ if len(in_list) % 2 != 0:
+ raise TypeError('An even amount of arguments are required')
+ result = {}
+ for i in range(0, len(in_list), 2):
+ result[in_list[i]] = in_list[i + 1]
+ return result
+
+def generate_target(options) -> List[dict]:
+ return [{
+ 'type': 'target',
+ 'target': options.target,
+ 'operation': target_operation_map[options.operation],
+ 'sources': options.sources,
+ 'subdir': options.subdir,
+ 'target_type': options.tgt_type,
+ }]
+
+def generate_kwargs(options) -> List[dict]:
+ return [{
+ 'type': 'kwargs',
+ 'function': options.function,
+ 'id': options.id,
+ 'operation': options.operation,
+ 'kwargs': list_to_dict(options.kwargs),
+ }]
+
+def generate_def_opts(options) -> List[dict]:
+ return [{
+ 'type': 'default_options',
+ 'operation': options.operation,
+ 'options': list_to_dict(options.options),
+ }]
+
+def generate_cmd(options) -> List[dict]:
+ if os.path.exists(options.json):
+ with open(options.json, 'r') as fp:
+ return json.load(fp)
+ else:
+ return json.loads(options.json)
+
+# Map options.type to the actual type name
+cli_type_map = {
+ 'target': generate_target,
+ 'tgt': generate_target,
+ 'kwargs': generate_kwargs,
+ 'default-options': generate_def_opts,
+ 'def': generate_def_opts,
+ 'command': generate_cmd,
+ 'cmd': generate_cmd,
+}
def run(options):
- if options.target is None or options.filename is None:
- sys.exit("Must specify both target and filename.")
- print('This tool is highly experimental, use with care.')
- rewriter = mesonbuild.astinterpreter.RewriterInterpreter(options.sourcedir, '')
+ if not options.verbose:
+ mlog.set_quiet()
+
try:
- if options.commands[0] == 'add':
- rewriter.add_source(options.target, options.filename)
- elif options.commands[0] == 'remove':
- rewriter.remove_source(options.target, options.filename)
- else:
- sys.exit('Unknown command: ' + options.commands[0])
+ rewriter = Rewriter(options.sourcedir, skip_errors=options.skip)
+ rewriter.analyze_meson()
+
+ if options.type is None:
+ mlog.error('No command specified')
+ return 1
+
+ commands = cli_type_map[options.type](options)
+
+ if not isinstance(commands, list):
+ raise TypeError('Command is not a list')
+
+ for i in commands:
+ if not isinstance(i, dict):
+ raise TypeError('Command is not an object')
+ rewriter.process(i)
+
+ rewriter.apply_changes()
+ rewriter.print_info()
+ return 0
except Exception as e:
- if isinstance(e, MesonException):
- mlog.exception(e)
- else:
- traceback.print_exc()
- return 1
- return 0
+ raise e
+ finally:
+ mlog.set_verbose()
diff --git a/mesonbuild/scripts/dist.py b/mesonbuild/scripts/dist.py
index f49492c..a8d9674 100644
--- a/mesonbuild/scripts/dist.py
+++ b/mesonbuild/scripts/dist.py
@@ -188,7 +188,8 @@ def run(args):
dist_name = build.project_name + '-' + build.project_version
- if os.path.isdir(os.path.join(src_root, '.git')):
+ _git = os.path.join(src_root, '.git')
+ if os.path.isdir(_git) or os.path.isfile(_git):
names = create_dist_git(dist_name, src_root, bld_root, dist_sub, build.dist_scripts)
elif os.path.isdir(os.path.join(src_root, '.hg')):
names = create_dist_hg(dist_name, src_root, bld_root, dist_sub, build.dist_scripts)
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index f4134d3..4d9d032 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -218,17 +218,16 @@ class Resolver:
def get_git(self):
revno = self.wrap.get('revision')
- if self.wrap.values.get('clone-recursive', '').lower() == 'true':
- subprocess.check_call(['git', 'clone', '--recursive', self.wrap.get('url'),
- self.directory], cwd=self.subdir_root)
- else:
- subprocess.check_call(['git', 'clone', self.wrap.get('url'),
- self.directory], cwd=self.subdir_root)
+ subprocess.check_call(['git', 'clone', self.wrap.get('url'),
+ self.directory], cwd=self.subdir_root)
if revno.lower() != 'head':
if subprocess.call(['git', 'checkout', revno], cwd=self.dirname) != 0:
subprocess.check_call(['git', 'fetch', self.wrap.get('url'), revno], cwd=self.dirname)
- subprocess.check_call(['git', 'checkout', revno],
- cwd=self.dirname)
+ subprocess.check_call(['git', 'checkout', revno], cwd=self.dirname)
+ if self.wrap.values.get('clone-recursive', '').lower() == 'true':
+ subprocess.check_call(['git', 'submodule', 'update',
+ '--init', '--checkout', '--recursive'],
+ cwd=self.dirname)
push_url = self.wrap.values.get('push-url')
if push_url:
subprocess.check_call(['git', 'remote', 'set-url',
@@ -349,7 +348,7 @@ class Resolver:
if os.path.exists(dst_file):
try:
os.remove(dst_file)
- except PermissionError as exc:
+ except PermissionError:
os.chmod(dst_file, stat.S_IWUSR)
os.remove(dst_file)
shutil.copy2(src_file, dst_dir)
diff --git a/run_project_tests.py b/run_project_tests.py
index 02897ce..96fe99a 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -345,6 +345,12 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen
if pass_libdir_to_test(testdir):
gen_args += ['--libdir', 'lib']
gen_args += [testdir, test_build_dir] + flags + test_args + extra_args
+ nativefile = os.path.join(testdir, 'nativefile.ini')
+ if os.path.exists(nativefile):
+ gen_args.extend(['--native-file', nativefile])
+ crossfile = os.path.join(testdir, 'crossfile.ini')
+ if os.path.exists(crossfile):
+ gen_args.extend(['--cross-file', crossfile])
(returncode, stdo, stde) = run_configure(gen_args)
try:
logfile = Path(test_build_dir, 'meson-logs', 'meson-log.txt')
@@ -434,6 +440,14 @@ def have_d_compiler():
elif shutil.which("gdc"):
return True
elif shutil.which("dmd"):
+ # The Windows installer sometimes produces a DMD install
+ # that exists but segfaults every time the compiler is run.
+ # Don't know why. Don't know how to fix. Skip in this case.
+ cp = subprocess.run(['dmd', '--version'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ if cp.stdout == b'':
+ return False
return True
return False
@@ -484,6 +498,10 @@ def skippable(suite, test):
if test.endswith('10 gtk-doc'):
return True
+ # NetCDF is not in the CI image
+ if test.endswith('netcdf'):
+ return True
+
# No frameworks test should be skipped on linux CI, as we expect all
# prerequisites to be installed
if mesonlib.is_linux():
@@ -496,6 +514,10 @@ def skippable(suite, test):
return 'BOOST_ROOT' not in os.environ
return False
+ # Qt is provided on macOS by Homebrew
+ if test.endswith('4 qt') and mesonlib.is_osx():
+ return False
+
# Other framework tests are allowed to be skipped on other platforms
return True
@@ -525,6 +547,7 @@ def detect_tests_to_run():
# Name, subdirectory, skip condition.
all_tests = [
('common', 'common', False),
+ ('warning-meson', 'warning', False),
('failing-meson', 'failing', False),
('failing-build', 'failing build', False),
('failing-test', 'failing test', False),
@@ -537,11 +560,12 @@ def detect_tests_to_run():
('C#', 'csharp', skip_csharp(backend)),
('vala', 'vala', backend is not Backend.ninja or not shutil.which('valac')),
('rust', 'rust', backend is not Backend.ninja or not shutil.which('rustc')),
- ('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
+ ('d', 'd', backend is not Backend.ninja or not have_d_compiler() or mesonlib.is_windows()),
('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or mesonlib.is_windows() or not have_objc_compiler()),
('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or mesonlib.is_windows() or not have_objcpp_compiler()),
('fortran', 'fortran', backend is not Backend.ninja or not shutil.which('gfortran')),
('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
+ ('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')),
('python3', 'python3', backend is not Backend.ninja),
('python', 'python', backend is not Backend.ninja),
('fpga', 'fpga', shutil.which('yosys') is None),
@@ -600,9 +624,14 @@ def _run_tests(all_tests, log_name_base, failfast, extra_args):
(testnum, testbase) = t.name.split(' ', 1)
testname = '%.3d %s' % (int(testnum), testbase)
should_fail = False
+ suite_args = []
if name.startswith('failing'):
should_fail = name.split('failing-')[1]
- result = executor.submit(run_test, skipped, t.as_posix(), extra_args, system_compiler, backend, backend_flags, commands, should_fail)
+ if name.startswith('warning'):
+ suite_args = ['--fatal-meson-warnings']
+ should_fail = name.split('warning-')[1]
+ result = executor.submit(run_test, skipped, t.as_posix(), extra_args + suite_args,
+ system_compiler, backend, backend_flags, commands, should_fail)
futures.append((testname, t, result))
for (testname, t, result) in futures:
sys.stdout.flush()
diff --git a/run_tests.py b/run_tests.py
index aa8a589..d72546b 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -66,7 +66,7 @@ class FakeCompilerOptions:
def __init__(self):
self.value = []
-def get_fake_options(prefix):
+def get_fake_options(prefix=''):
import argparse
opts = argparse.Namespace()
opts.cross_file = None
@@ -76,9 +76,12 @@ def get_fake_options(prefix):
opts.native_file = []
return opts
-def get_fake_env(sdir, bdir, prefix):
- env = Environment(sdir, bdir, get_fake_options(prefix))
- env.coredata.compiler_options['c_args'] = FakeCompilerOptions()
+def get_fake_env(sdir='', bdir=None, prefix='', opts=None):
+ if opts is None:
+ opts = get_fake_options(prefix)
+ env = Environment(sdir, bdir, opts)
+ env.coredata.compiler_options.host['c_args'] = FakeCompilerOptions()
+ env.machines.host.cpu_family = 'x86_64' # Used on macOS inside find_library
return env
@@ -211,6 +214,14 @@ def run_mtest_inprocess(commandlist):
sys.stderr = old_stderr
return returncode, mystdout.getvalue(), mystderr.getvalue()
+def clear_meson_configure_class_caches():
+ mesonbuild.compilers.CCompiler.library_dirs_cache = {}
+ mesonbuild.compilers.CCompiler.program_dirs_cache = {}
+ mesonbuild.compilers.CCompiler.find_library_cache = {}
+ mesonbuild.compilers.CCompiler.find_framework_cache = {}
+ mesonbuild.dependencies.PkgConfigDependency.pkgbin_cache = {}
+ mesonbuild.dependencies.PkgConfigDependency.class_pkgbin = mesonlib.PerMachine(None, None, None)
+
def run_configure_inprocess(commandlist):
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
@@ -221,6 +232,7 @@ def run_configure_inprocess(commandlist):
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
+ clear_meson_configure_class_caches()
return returncode, mystdout.getvalue(), mystderr.getvalue()
def run_configure_external(full_command):
diff --git a/run_unittests.py b/run_unittests.py
index 07e6bf9..0b2164f 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -27,11 +27,14 @@ import unittest
import platform
import pickle
import functools
+import io
from itertools import chain
from unittest import mock
from configparser import ConfigParser
+from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
+from distutils.dir_util import copy_tree
import mesonbuild.mlog
import mesonbuild.compilers
@@ -40,10 +43,11 @@ import mesonbuild.mesonlib
import mesonbuild.coredata
import mesonbuild.modules.gnome
from mesonbuild.interpreter import Interpreter, ObjectHolder
+from mesonbuild.ast import AstInterpreter
from mesonbuild.mesonlib import (
is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku,
windows_proof_rmtree, python_command, version_compare,
- BuildDirLock, Version
+ BuildDirLock, Version, PerMachine
)
from mesonbuild.environment import detect_ninja
from mesonbuild.mesonlib import MesonException, EnvironmentException
@@ -51,16 +55,18 @@ from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram
from mesonbuild.build import Target
import mesonbuild.modules.pkgconfig
+from mesonbuild.mtest import TAPParser, TestResult
+
from run_tests import (
Backend, FakeBuild, FakeCompilerOptions,
ensure_backend_detects_changes, exe_suffix, get_backend_commands,
- get_builddir_target_args, get_fake_env, get_meson_script,
+ get_builddir_target_args, get_fake_env, get_fake_options, get_meson_script,
run_configure_inprocess, run_mtest_inprocess
)
def get_dynamic_section_entry(fname, entry):
if is_cygwin() or is_osx():
- raise unittest.SkipTest('Test only applicable to ELF platforms')
+ raise unittest.SkipTest('Test only applicable to ELF platforms')
try:
raw_out = subprocess.check_output(['readelf', '-d', fname],
@@ -102,6 +108,16 @@ def _git_init(project_dir):
subprocess.check_call(['git', 'commit', '-a', '-m', 'I am a project'], cwd=project_dir,
stdout=subprocess.DEVNULL)
+@functools.lru_cache()
+def is_real_gnu_compiler(path):
+ '''
+ Check if the gcc we have is a real gcc and not a macOS wrapper around clang
+ '''
+ if not path:
+ return False
+ out = subprocess.check_output([path, '--version'], universal_newlines=True, stderr=subprocess.STDOUT)
+ return 'Free Software Foundation' in out
+
def skipIfNoExecutable(exename):
'''
Skip this test if the given executable is not found.
@@ -151,7 +167,7 @@ def skip_if_not_language(lang):
@functools.wraps(func)
def wrapped(*args, **kwargs):
try:
- env = get_fake_env('', '', '')
+ env = get_fake_env()
f = getattr(env, 'detect_{}_compiler'.format(lang))
if lang in ['cs', 'vala', 'java', 'swift']:
f()
@@ -163,14 +179,23 @@ def skip_if_not_language(lang):
return wrapped
return wrapper
-def skip_if_env_value(value):
+def skip_if_env_set(key):
+ '''
+ Skip a test if a particular env is set, except when running under CI
+ '''
def wrapper(func):
@functools.wraps(func)
def wrapped(*args, **kwargs):
- if value in os.environ:
- raise unittest.SkipTest(
- 'Environment variable "{}" set, skipping.'.format(value))
- return func(*args, **kwargs)
+ old = None
+ if key in os.environ:
+ if not is_ci():
+ raise unittest.SkipTest('Env var {!r} set, skipping'.format(key))
+ old = os.environ.pop(key)
+ try:
+ return func(*args, **kwargs)
+ finally:
+ if old is not None:
+ os.environ[key] = old
return wrapped
return wrapper
@@ -182,7 +207,7 @@ def skip_if_not_base_option(feature):
def actual(f):
@functools.wraps(f)
def wrapped(*args, **kwargs):
- env = get_fake_env('', '', '')
+ env = get_fake_env()
cc = env.detect_c_compiler(False)
if feature not in cc.base_options:
raise unittest.SkipTest(
@@ -192,6 +217,50 @@ def skip_if_not_base_option(feature):
return actual
+@contextmanager
+def temp_filename():
+ '''A context manager which provides a filename to an empty temporary file.
+
+ On exit the file will be deleted.
+ '''
+
+ fd, filename = tempfile.mkstemp()
+ os.close(fd)
+ try:
+ yield filename
+ finally:
+ try:
+ os.remove(filename)
+ except OSError:
+ pass
+
+@contextmanager
+def no_pkgconfig():
+ '''
+ A context manager that overrides shutil.which and ExternalProgram to force
+ them to return None for pkg-config to simulate it not existing.
+ '''
+ old_which = shutil.which
+ old_search = ExternalProgram._search
+
+ def new_search(self, name, search_dir):
+ if name == 'pkg-config':
+ return [None]
+ return old_search(self, name, search_dir)
+
+ def new_which(cmd, *kwargs):
+ if cmd == 'pkg-config':
+ return None
+ return old_which(cmd, *kwargs)
+
+ shutil.which = new_which
+ ExternalProgram._search = new_search
+ try:
+ yield
+ finally:
+ shutil.which = old_which
+ ExternalProgram._search = old_search
+
class PatchModule:
'''
Fancy monkey-patching! Whee! Can't use mock.patch because it only
@@ -542,7 +611,10 @@ class InternalTests(unittest.TestCase):
config.write(configfile)
configfile.flush()
configfile.close()
- detected_value = mesonbuild.environment.CrossBuildInfo(configfile.name).need_exe_wrapper()
+ opts = get_fake_options()
+ opts.cross_file = configfilename
+ env = get_fake_env(opts=opts)
+ detected_value = env.need_exe_wrapper()
os.unlink(configfilename)
desired_value = not detected_value
@@ -554,7 +626,10 @@ class InternalTests(unittest.TestCase):
configfilename = configfile.name
config.write(configfile)
configfile.close()
- forced_value = mesonbuild.environment.CrossBuildInfo(configfile.name).need_exe_wrapper()
+ opts = get_fake_options()
+ opts.cross_file = configfilename
+ env = get_fake_env(opts=opts)
+ forced_value = env.need_exe_wrapper()
os.unlink(configfilename)
self.assertEqual(forced_value, desired_value)
@@ -625,16 +700,16 @@ class InternalTests(unittest.TestCase):
def _test_all_naming(self, cc, env, patterns, platform):
shr = patterns[platform]['shared']
stc = patterns[platform]['static']
+ shrstc = shr + tuple([x for x in stc if x not in shr])
+ stcshr = stc + tuple([x for x in shr if x not in stc])
p = cc.get_library_naming(env, 'shared')
self.assertEqual(p, shr)
p = cc.get_library_naming(env, 'static')
self.assertEqual(p, stc)
p = cc.get_library_naming(env, 'static-shared')
- self.assertEqual(p, stc + shr)
+ self.assertEqual(p, stcshr)
p = cc.get_library_naming(env, 'shared-static')
- self.assertEqual(p, shr + stc)
- p = cc.get_library_naming(env, 'default')
- self.assertEqual(p, shr + stc)
+ self.assertEqual(p, shrstc)
# Test find library by mocking up openbsd
if platform != 'openbsd':
return
@@ -649,7 +724,7 @@ class InternalTests(unittest.TestCase):
f.write('')
with open(os.path.join(tmpdir, 'libfoo.so.70.0.so.1'), 'w') as f:
f.write('')
- found = cc.find_library_real('foo', env, [tmpdir], '', 'default')
+ found = cc.find_library_real('foo', env, [tmpdir], '', 'shared-static')
self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0')
def test_find_library_patterns(self):
@@ -659,7 +734,7 @@ class InternalTests(unittest.TestCase):
unix_static = ('lib{}.a', '{}.a')
msvc_static = ('lib{}.a', 'lib{}.lib', '{}.a', '{}.lib')
# This is the priority list of pattern matching for library searching
- patterns = {'openbsd': {'shared': ('lib{}.so', '{}.so', 'lib{}.so.[0-9]*.[0-9]*'),
+ patterns = {'openbsd': {'shared': ('lib{}.so', '{}.so', 'lib{}.so.[0-9]*.[0-9]*', '{}.so.[0-9]*.[0-9]*'),
'static': unix_static},
'linux': {'shared': ('lib{}.so', '{}.so'),
'static': unix_static},
@@ -673,7 +748,7 @@ class InternalTests(unittest.TestCase):
'windows-mingw': {'shared': ('lib{}.dll.a', 'lib{}.lib', 'lib{}.dll',
'{}.dll.a', '{}.lib', '{}.dll'),
'static': msvc_static}}
- env = get_fake_env('', '', '')
+ env = get_fake_env()
cc = env.detect_c_compiler(False)
if is_osx():
self._test_all_naming(cc, env, patterns, 'darwin')
@@ -684,25 +759,18 @@ class InternalTests(unittest.TestCase):
self._test_all_naming(cc, env, patterns, 'windows-msvc')
else:
self._test_all_naming(cc, env, patterns, 'windows-mingw')
+ elif is_openbsd():
+ self._test_all_naming(cc, env, patterns, 'openbsd')
else:
self._test_all_naming(cc, env, patterns, 'linux')
- # Mock OpenBSD since we don't have tests for it
- true = lambda x, y: True
- if not is_openbsd():
- with PatchModule(mesonbuild.compilers.c.for_openbsd,
- 'mesonbuild.compilers.c.for_openbsd', true):
- self._test_all_naming(cc, env, patterns, 'openbsd')
- else:
- self._test_all_naming(cc, env, patterns, 'openbsd')
- with PatchModule(mesonbuild.compilers.c.for_darwin,
- 'mesonbuild.compilers.c.for_darwin', true):
- self._test_all_naming(cc, env, patterns, 'darwin')
- with PatchModule(mesonbuild.compilers.c.for_cygwin,
- 'mesonbuild.compilers.c.for_cygwin', true):
- self._test_all_naming(cc, env, patterns, 'cygwin')
- with PatchModule(mesonbuild.compilers.c.for_windows,
- 'mesonbuild.compilers.c.for_windows', true):
- self._test_all_naming(cc, env, patterns, 'windows-mingw')
+ env.machines.host.system = 'openbsd'
+ self._test_all_naming(cc, env, patterns, 'openbsd')
+ env.machines.host.system = 'darwin'
+ self._test_all_naming(cc, env, patterns, 'darwin')
+ env.machines.host.system = 'cygwin'
+ self._test_all_naming(cc, env, patterns, 'cygwin')
+ env.machines.host.system = 'windows'
+ self._test_all_naming(cc, env, patterns, 'windows-mingw')
def test_pkgconfig_parse_libs(self):
'''
@@ -710,27 +778,38 @@ class InternalTests(unittest.TestCase):
https://github.com/mesonbuild/meson/issues/3951
'''
+ def create_static_lib(name):
+ if not is_osx():
+ name.open('w').close()
+ return
+ src = name.with_suffix('.c')
+ out = name.with_suffix('.o')
+ with src.open('w') as f:
+ f.write('int meson_foobar (void) { return 0; }')
+ subprocess.check_call(['clang', '-c', str(src), '-o', str(out)])
+ subprocess.check_call(['ar', 'csr', str(name), str(out)])
+
with tempfile.TemporaryDirectory() as tmpdir:
pkgbin = ExternalProgram('pkg-config', command=['pkg-config'], silent=True)
- env = get_fake_env('', '', '')
+ env = get_fake_env()
compiler = env.detect_c_compiler(False)
env.coredata.compilers = {'c': compiler}
- env.coredata.compiler_options['c_link_args'] = FakeCompilerOptions()
+ env.coredata.compiler_options.host['c_link_args'] = FakeCompilerOptions()
p1 = Path(tmpdir) / '1'
p2 = Path(tmpdir) / '2'
p1.mkdir()
p2.mkdir()
# libfoo.a is in one prefix
- (p1 / 'libfoo.a').open('w').close()
+ create_static_lib(p1 / 'libfoo.a')
# libbar.a is in both prefixes
- (p1 / 'libbar.a').open('w').close()
- (p2 / 'libbar.a').open('w').close()
+ create_static_lib(p1 / 'libbar.a')
+ create_static_lib(p2 / 'libbar.a')
# Ensure that we never statically link to these
- (p1 / 'libpthread.a').open('w').close()
- (p1 / 'libm.a').open('w').close()
- (p1 / 'libc.a').open('w').close()
- (p1 / 'libdl.a').open('w').close()
- (p1 / 'librt.a').open('w').close()
+ create_static_lib(p1 / 'libpthread.a')
+ create_static_lib(p1 / 'libm.a')
+ create_static_lib(p1 / 'libc.a')
+ create_static_lib(p1 / 'libdl.a')
+ create_static_lib(p1 / 'librt.a')
def fake_call_pkgbin(self, args, env=None):
if '--libs' not in args:
@@ -745,28 +824,30 @@ class InternalTests(unittest.TestCase):
old_call = PkgConfigDependency._call_pkgbin
old_check = PkgConfigDependency.check_pkgconfig
PkgConfigDependency._call_pkgbin = fake_call_pkgbin
- PkgConfigDependency.check_pkgconfig = lambda x: pkgbin
+ PkgConfigDependency.check_pkgconfig = lambda x, _: pkgbin
# Test begins
- kwargs = {'required': True, 'silent': True}
- foo_dep = PkgConfigDependency('foo', env, kwargs)
- self.assertEqual(foo_dep.get_link_args(),
- [(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()])
- bar_dep = PkgConfigDependency('bar', env, kwargs)
- self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()])
- internal_dep = PkgConfigDependency('internal', env, kwargs)
- if compiler.get_argument_syntax() == 'msvc':
- self.assertEqual(internal_dep.get_link_args(), [])
- else:
- link_args = internal_dep.get_link_args()
- for link_arg in link_args:
- for lib in ('pthread', 'm', 'c', 'dl', 'rt'):
- self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args)
- # Test ends
- PkgConfigDependency._call_pkgbin = old_call
- PkgConfigDependency.check_pkgconfig = old_check
- # Reset dependency class to ensure that in-process configure doesn't mess up
- PkgConfigDependency.pkgbin_cache = {}
- PkgConfigDependency.class_pkgbin = None
+ try:
+ kwargs = {'required': True, 'silent': True}
+ foo_dep = PkgConfigDependency('foo', env, kwargs)
+ self.assertEqual(foo_dep.get_link_args(),
+ [(p1 / 'libfoo.a').as_posix(), (p2 / 'libbar.a').as_posix()])
+ bar_dep = PkgConfigDependency('bar', env, kwargs)
+ self.assertEqual(bar_dep.get_link_args(), [(p2 / 'libbar.a').as_posix()])
+ internal_dep = PkgConfigDependency('internal', env, kwargs)
+ if compiler.get_argument_syntax() == 'msvc':
+ self.assertEqual(internal_dep.get_link_args(), [])
+ else:
+ link_args = internal_dep.get_link_args()
+ for link_arg in link_args:
+ for lib in ('pthread', 'm', 'c', 'dl', 'rt'):
+ self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args)
+ finally:
+ # Test ends
+ PkgConfigDependency._call_pkgbin = old_call
+ PkgConfigDependency.check_pkgconfig = old_check
+ # Reset dependency class to ensure that in-process configure doesn't mess up
+ PkgConfigDependency.pkgbin_cache = {}
+ PkgConfigDependency.class_pkgbin = PerMachine(None, None, None)
def test_version_compare(self):
comparefunc = mesonbuild.mesonlib.version_compare_many
@@ -916,7 +997,7 @@ class DataTests(unittest.TestCase):
with open('docs/markdown/Builtin-options.md') as f:
md = f.read()
self.assertIsNotNone(md)
- env = get_fake_env('', '', '')
+ env = get_fake_env()
# FIXME: Support other compilers
cc = env.detect_c_compiler(False)
cpp = env.detect_cpp_compiler(False)
@@ -962,13 +1043,23 @@ class DataTests(unittest.TestCase):
Ensure that syntax highlighting files were updated for new functions in
the global namespace in build files.
'''
- env = get_fake_env('', '', '')
+ env = get_fake_env()
interp = Interpreter(FakeBuild(env), mock=True)
with open('data/syntax-highlighting/vim/syntax/meson.vim') as f:
res = re.search(r'syn keyword mesonBuiltin(\s+\\\s\w+)+', f.read(), re.MULTILINE)
defined = set([a.strip() for a in res.group().split('\\')][1:])
self.assertEqual(defined, set(chain(interp.funcs.keys(), interp.builtin.keys())))
+ def test_all_functions_defined_in_ast_interpreter(self):
+ '''
+ Ensure that the all functions defined in the Interpreter are also defined
+ in the AstInterpreter (and vice versa).
+ '''
+ env = get_fake_env()
+ interp = Interpreter(FakeBuild(env), mock=True)
+ astint = AstInterpreter('.', '')
+ self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys()))
+
class BasePlatformTests(unittest.TestCase):
def setUp(self):
@@ -988,6 +1079,7 @@ class BasePlatformTests(unittest.TestCase):
self.mconf_command = self.meson_command + ['configure']
self.mintro_command = self.meson_command + ['introspect']
self.wrap_command = self.meson_command + ['wrap']
+ self.rewrite_command = self.meson_command + ['rewrite']
# Backend-specific build commands
self.build_command, self.clean_command, self.test_command, self.install_command, \
self.uninstall_command = get_backend_commands(self.backend)
@@ -996,6 +1088,7 @@ class BasePlatformTests(unittest.TestCase):
self.vala_test_dir = os.path.join(src_root, 'test cases/vala')
self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks')
self.unit_test_dir = os.path.join(src_root, 'test cases/unit')
+ self.rewrite_test_dir = os.path.join(src_root, 'test cases/rewrite')
# Misc stuff
self.orig_env = os.environ.copy()
if self.backend is Backend.ninja:
@@ -1214,10 +1307,13 @@ class BasePlatformTests(unittest.TestCase):
self.assertEqual(PurePath(path1), PurePath(path2))
def assertPathListEqual(self, pathlist1, pathlist2):
- self.assertEquals(len(pathlist1), len(pathlist2))
+ self.assertEqual(len(pathlist1), len(pathlist2))
worklist = list(zip(pathlist1, pathlist2))
for i in worklist:
- self.assertPathEqual(i[0], i[1])
+ if i[0] is None:
+ self.assertEqual(i[0], i[1])
+ else:
+ self.assertPathEqual(i[0], i[1])
def assertPathBasenameEqual(self, path, basename):
msg = '{!r} does not end with {!r}'.format(path, basename)
@@ -1286,6 +1382,21 @@ class AllPlatformTests(BasePlatformTests):
prefix = opt['value']
self.assertEqual(prefix, '/absoluteprefix')
+ def test_do_conf_file_preserve_newlines(self):
+
+ def conf_file(in_data, confdata):
+ with temp_filename() as fin:
+ with open(fin, 'wb') as fobj:
+ fobj.write(in_data.encode('utf-8'))
+ with temp_filename() as fout:
+ mesonbuild.mesonlib.do_conf_file(fin, fout, confdata, 'meson')
+ with open(fout, 'rb') as fobj:
+ return fobj.read().decode('utf-8')
+
+ confdata = {'VAR': ('foo', 'bar')}
+ self.assertEqual(conf_file('@VAR@\n@VAR@\n', confdata), 'foo\nfoo\n')
+ self.assertEqual(conf_file('@VAR@\r\n@VAR@\r\n', confdata), 'foo\r\nfoo\r\n')
+
def test_absolute_prefix_libdir(self):
'''
Tests that setting absolute paths for --prefix and --libdir work. Can't
@@ -1429,7 +1540,7 @@ class AllPlatformTests(BasePlatformTests):
# Get name of static library
targets = self.introspect('--targets')
self.assertEqual(len(targets), 1)
- libname = targets[0]['filename']
+ libname = targets[0]['filename'][0]
# Build and get contents of static library
self.build()
before = self._run(['ar', 't', os.path.join(self.builddir, libname)]).split()
@@ -1493,6 +1604,9 @@ class AllPlatformTests(BasePlatformTests):
'''
Tests that the Meson introspection API exposes multiple install filenames correctly without crashing
https://github.com/mesonbuild/meson/pull/4555
+
+ Reverted to the first file only because of https://github.com/mesonbuild/meson/pull/4547#discussion_r244173438
+    TODO Change the format to a list officially in a follow-up PR
'''
if self.backend is not Backend.ninja:
raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name))
@@ -1503,8 +1617,8 @@ class AllPlatformTests(BasePlatformTests):
intro = intro[::-1]
self.assertPathListEqual(intro[0]['install_filename'], ['/usr/include/diff.h', '/usr/bin/diff.sh'])
self.assertPathListEqual(intro[1]['install_filename'], ['/opt/same.h', '/opt/same.sh'])
- self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h'])
- self.assertPathListEqual(intro[3]['install_filename'], ['/usr/bin/second.sh'])
+ self.assertPathListEqual(intro[2]['install_filename'], ['/usr/include/first.h', None])
+ self.assertPathListEqual(intro[3]['install_filename'], [None, '/usr/bin/second.sh'])
def test_uninstall(self):
exename = os.path.join(self.installdir, 'usr/bin/prog' + exe_suffix)
@@ -1524,7 +1638,7 @@ class AllPlatformTests(BasePlatformTests):
def test_testsetups(self):
if not shutil.which('valgrind'):
- raise unittest.SkipTest('Valgrind not installed.')
+ raise unittest.SkipTest('Valgrind not installed.')
testdir = os.path.join(self.unit_test_dir, '2 testsetups')
self.init(testdir)
self.build()
@@ -1578,7 +1692,7 @@ class AllPlatformTests(BasePlatformTests):
self.mtest_command + ['--setup=main:onlyinbar'])
def test_testsetup_default(self):
- testdir = os.path.join(self.unit_test_dir, '47 testsetup default')
+ testdir = os.path.join(self.unit_test_dir, '49 testsetup default')
self.init(testdir)
self.build()
@@ -2132,7 +2246,7 @@ int main(int argc, char **argv) {
self.assertPathExists(os.path.join(testdir, i))
def detect_prebuild_env(self):
- env = get_fake_env('', self.builddir, self.prefix)
+ env = get_fake_env()
cc = env.detect_c_compiler(False)
stlinker = env.detect_static_linker(cc)
if mesonbuild.mesonlib.is_windows():
@@ -2559,7 +2673,7 @@ int main(int argc, char **argv) {
self.init(testdir)
expected = {
'bindat@cus': ['data_source.txt'],
- 'depfile@cus': [],
+ 'a685fbc@@depfile@cus': [],
}
t_intro = self.introspect('--targets')
self.assertCountEqual([t['id'] for t in t_intro], expected)
@@ -2819,10 +2933,10 @@ recommended as it is not supported on some platforms''')
# c_args value should be parsed with shlex
self.init(testdir, extra_args=['-Dc_args=foo bar "one two"'])
obj = mesonbuild.coredata.load(self.builddir)
- self.assertEqual(obj.compiler_options['c_args'].value, ['foo', 'bar', 'one two'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo', 'bar', 'one two'])
self.setconf('-Dc_args="foo bar" one two')
obj = mesonbuild.coredata.load(self.builddir)
- self.assertEqual(obj.compiler_options['c_args'].value, ['foo bar', 'one', 'two'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo bar', 'one', 'two'])
self.wipe()
# Setting a 2nd time the same option should override the first value
@@ -2835,7 +2949,7 @@ recommended as it is not supported on some platforms''')
self.assertEqual(obj.builtins['bindir'].value, 'bar')
self.assertEqual(obj.builtins['buildtype'].value, 'release')
self.assertEqual(obj.base_options['b_sanitize'].value, 'thread')
- self.assertEqual(obj.compiler_options['c_args'].value, ['bar'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['bar'])
self.setconf(['--bindir=bar', '--bindir=foo',
'-Dbuildtype=release', '-Dbuildtype=plain',
'-Db_sanitize=thread', '-Db_sanitize=address',
@@ -2844,7 +2958,7 @@ recommended as it is not supported on some platforms''')
self.assertEqual(obj.builtins['bindir'].value, 'foo')
self.assertEqual(obj.builtins['buildtype'].value, 'plain')
self.assertEqual(obj.base_options['b_sanitize'].value, 'address')
- self.assertEqual(obj.compiler_options['c_args'].value, ['foo'])
+ self.assertEqual(obj.compiler_options.host['c_args'].value, ['foo'])
self.wipe()
except KeyError:
# Ignore KeyError, it happens on CI for compilers that does not
@@ -2852,6 +2966,33 @@ recommended as it is not supported on some platforms''')
# they used to fail this test with Meson 0.46 an earlier versions.
pass
+ def test_warning_level_0(self):
+ testdir = os.path.join(self.common_test_dir, '215 warning level 0')
+
+ # Verify default values when passing no args
+ self.init(testdir)
+ obj = mesonbuild.coredata.load(self.builddir)
+ self.assertEqual(obj.builtins['warning_level'].value, '0')
+ self.wipe()
+
+ # verify we can override w/ --warnlevel
+ self.init(testdir, extra_args=['--warnlevel=1'])
+ obj = mesonbuild.coredata.load(self.builddir)
+ self.assertEqual(obj.builtins['warning_level'].value, '1')
+ self.setconf('--warnlevel=0')
+ obj = mesonbuild.coredata.load(self.builddir)
+ self.assertEqual(obj.builtins['warning_level'].value, '0')
+ self.wipe()
+
+ # verify we can override w/ -Dwarning_level
+ self.init(testdir, extra_args=['-Dwarning_level=1'])
+ obj = mesonbuild.coredata.load(self.builddir)
+ self.assertEqual(obj.builtins['warning_level'].value, '1')
+ self.setconf('-Dwarning_level=0')
+ obj = mesonbuild.coredata.load(self.builddir)
+ self.assertEqual(obj.builtins['warning_level'].value, '0')
+ self.wipe()
+
def test_feature_check_usage_subprojects(self):
testdir = os.path.join(self.unit_test_dir, '41 featurenew subprojects')
out = self.init(testdir)
@@ -2970,17 +3111,21 @@ recommended as it is not supported on some platforms''')
self.wipe()
self.init(testdir, extra_args=['-Dstart_native=true'])
- def __reconfigure(self):
+ def __reconfigure(self, change_minor=False):
# Set an older version to force a reconfigure from scratch
filename = os.path.join(self.privatedir, 'coredata.dat')
with open(filename, 'rb') as f:
obj = pickle.load(f)
- obj.version = '0.47.0'
+ if change_minor:
+ v = mesonbuild.coredata.version.split('.')
+ obj.version = '.'.join(v[0:2] + [str(int(v[2]) + 1)])
+ else:
+ obj.version = '0.47.0'
with open(filename, 'wb') as f:
pickle.dump(obj, f)
def test_reconfigure(self):
- testdir = os.path.join(self.unit_test_dir, '46 reconfigure')
+ testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
self.init(testdir, extra_args=['-Dopt1=val1'])
self.setconf('-Dopt2=val2')
@@ -3016,6 +3161,22 @@ recommended as it is not supported on some platforms''')
with Path(self.builddir):
self.init(testdir, extra_args=['--wipe'])
+ def test_minor_version_does_not_reconfigure_wipe(self):
+ testdir = os.path.join(self.unit_test_dir, '48 reconfigure')
+ self.init(testdir, extra_args=['-Dopt1=val1'])
+ self.setconf('-Dopt2=val2')
+
+ self.__reconfigure(change_minor=True)
+
+ out = self.init(testdir, extra_args=['--reconfigure', '-Dopt3=val3'])
+ self.assertNotRegex(out, 'WARNING:.*Regenerating configuration from scratch')
+ self.assertRegex(out, 'opt1 val1')
+ self.assertRegex(out, 'opt2 val2')
+ self.assertRegex(out, 'opt3 val3')
+ self.assertRegex(out, 'opt4 default4')
+ self.build()
+ self.run_tests()
+
def test_target_construct_id_from_path(self):
# This id is stable but not guessable.
# The test is supposed to prevent unintentional
@@ -3031,21 +3192,21 @@ recommended as it is not supported on some platforms''')
testfile = os.path.join(self.common_test_dir, '36 run program', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(set(res['buildsystem_files']), set(['meson.build']))
- self.assertEqual(res['version'], None)
+ self.assertEqual(res['version'], 'undefined')
self.assertEqual(res['descriptive_name'], 'run command')
self.assertEqual(res['subprojects'], [])
testfile = os.path.join(self.common_test_dir, '44 options', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
- self.assertEqual(res['version'], None)
+ self.assertEqual(res['version'], 'undefined')
self.assertEqual(res['descriptive_name'], 'options')
self.assertEqual(res['subprojects'], [])
testfile = os.path.join(self.common_test_dir, '47 subproject options', 'meson.build')
res = self.introspect_directory(testfile, '--projectinfo')
self.assertEqual(set(res['buildsystem_files']), set(['meson_options.txt', 'meson.build']))
- self.assertEqual(res['version'], None)
+ self.assertEqual(res['version'], 'undefined')
self.assertEqual(res['descriptive_name'], 'suboptions')
self.assertEqual(len(res['subprojects']), 1)
subproject_files = set(f.replace('\\', '/') for f in res['subprojects'][0]['buildsystem_files'])
@@ -3061,6 +3222,7 @@ recommended as it is not supported on some platforms''')
expected = {
'descriptive_name': 'proj',
'version': 'undefined',
+ 'subproject_dir': 'subprojects',
'subprojects': [
{
'descriptive_name': 'sub',
@@ -3071,11 +3233,39 @@ recommended as it is not supported on some platforms''')
}
self.assertDictEqual(res, expected)
+ def test_introspection_target_subproject(self):
+ testdir = os.path.join(self.common_test_dir, '46 subproject')
+ self.init(testdir)
+ res = self.introspect('--targets')
+
+ expected = {
+ 'sublib': 'sublib',
+ 'simpletest': 'sublib',
+ 'user': None
+ }
+
+ for entry in res:
+ name = entry['name']
+ self.assertEquals(entry['subproject'], expected[name])
+
+ def test_introspect_projectinfo_subproject_dir(self):
+ testdir = os.path.join(self.common_test_dir, '79 custom subproject dir')
+ self.init(testdir)
+ res = self.introspect('--projectinfo')
+
+ self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
+
+ def test_introspect_projectinfo_subproject_dir_from_source(self):
+ testfile = os.path.join(self.common_test_dir, '79 custom subproject dir', 'meson.build')
+ res = self.introspect_directory(testfile, '--projectinfo')
+
+ self.assertEqual(res['subproject_dir'], 'custom_subproject_dir')
+
@skipIfNoExecutable('clang-format')
def test_clang_format(self):
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Clang-format is for now only supported on Ninja, not {}'.format(self.backend.name))
- testdir = os.path.join(self.unit_test_dir, '51 clang-format')
+ testdir = os.path.join(self.unit_test_dir, '53 clang-format')
testfile = os.path.join(testdir, 'prog.c')
badfile = os.path.join(testdir, 'prog_orig_c')
goodfile = os.path.join(testdir, 'prog_expected_c')
@@ -3095,7 +3285,7 @@ recommended as it is not supported on some platforms''')
Path(goodfile).read_text())
def test_introspect_buildoptions_without_configured_build(self):
- testdir = os.path.join(self.unit_test_dir, '51 introspect buildoptions')
+ testdir = os.path.join(self.unit_test_dir, '56 introspect buildoptions')
testfile = os.path.join(testdir, 'meson.build')
res_nb = self.introspect_directory(testfile, ['--buildoptions'] + self.meson_args)
self.init(testdir, default_args=False)
@@ -3103,6 +3293,280 @@ recommended as it is not supported on some platforms''')
self.maxDiff = None
self.assertListEqual(res_nb, res_wb)
+ def test_introspect_json_dump(self):
+ testdir = os.path.join(self.unit_test_dir, '55 introspection')
+ self.init(testdir)
+ infodir = os.path.join(self.builddir, 'meson-info')
+ self.assertPathExists(infodir)
+
+ def assertKeyTypes(key_type_list, obj):
+ for i in key_type_list:
+ self.assertIn(i[0], obj)
+ self.assertIsInstance(obj[i[0]], i[1])
+
+ root_keylist = [
+ ('benchmarks', list),
+ ('buildoptions', list),
+ ('buildsystem_files', list),
+ ('dependencies', list),
+ ('installed', dict),
+ ('projectinfo', dict),
+ ('targets', list),
+ ('tests', list),
+ ]
+
+ test_keylist = [
+ ('cmd', list),
+ ('env', dict),
+ ('name', str),
+ ('timeout', int),
+ ('suite', list),
+ ('is_parallel', bool),
+ ]
+
+ buildoptions_keylist = [
+ ('name', str),
+ ('section', str),
+ ('type', str),
+ ('description', str),
+ ]
+
+ buildoptions_typelist = [
+ ('combo', str, [('choices', list)]),
+ ('string', str, []),
+ ('boolean', bool, []),
+ ('integer', int, []),
+ ('array', list, []),
+ ]
+
+ dependencies_typelist = [
+ ('name', str),
+ ('compile_args', list),
+ ('link_args', list),
+ ]
+
+ targets_typelist = [
+ ('name', str),
+ ('id', str),
+ ('type', str),
+ ('defined_in', str),
+ ('filename', list),
+ ('build_by_default', bool),
+ ('target_sources', list),
+ ('installed', bool),
+ ]
+
+ targets_sources_typelist = [
+ ('language', str),
+ ('compiler', list),
+ ('parameters', list),
+ ('sources', list),
+ ('generated_sources', list),
+ ]
+
+ # First load all files
+ res = {}
+ for i in root_keylist:
+ curr = os.path.join(infodir, 'intro-{}.json'.format(i[0]))
+ self.assertPathExists(curr)
+ with open(curr, 'r') as fp:
+ res[i[0]] = json.load(fp)
+
+ assertKeyTypes(root_keylist, res)
+
+ # Check Tests and benchmarks
+ tests_to_find = ['test case 1', 'test case 2', 'benchmark 1']
+ for i in res['benchmarks'] + res['tests']:
+ assertKeyTypes(test_keylist, i)
+ if i['name'] in tests_to_find:
+ tests_to_find.remove(i['name'])
+ self.assertListEqual(tests_to_find, [])
+
+ # Check buildoptions
+ buildopts_to_find = {'cpp_std': 'c++11'}
+ for i in res['buildoptions']:
+ assertKeyTypes(buildoptions_keylist, i)
+ valid_type = False
+ for j in buildoptions_typelist:
+ if i['type'] == j[0]:
+ self.assertIsInstance(i['value'], j[1])
+ assertKeyTypes(j[2], i)
+ valid_type = True
+ break
+
+ self.assertTrue(valid_type)
+ if i['name'] in buildopts_to_find:
+ self.assertEqual(i['value'], buildopts_to_find[i['name']])
+ buildopts_to_find.pop(i['name'], None)
+ self.assertDictEqual(buildopts_to_find, {})
+
+ # Check buildsystem_files
+ bs_files = ['meson.build', 'sharedlib/meson.build', 'staticlib/meson.build']
+ bs_files = [os.path.join(testdir, x) for x in bs_files]
+ self.assertPathListEqual(list(sorted(res['buildsystem_files'])), list(sorted(bs_files)))
+
+ # Check dependencies
+ dependencies_to_find = ['threads']
+ for i in res['dependencies']:
+ assertKeyTypes(dependencies_typelist, i)
+ if i['name'] in dependencies_to_find:
+ dependencies_to_find.remove(i['name'])
+ self.assertListEqual(dependencies_to_find, [])
+
+ # Check projectinfo
+ self.assertDictEqual(res['projectinfo'], {'version': '1.2.3', 'descriptive_name': 'introspection', 'subproject_dir': 'subprojects', 'subprojects': []})
+
+ # Check targets
+ targets_to_find = {
+ 'sharedTestLib': ('shared library', True, False, 'sharedlib/meson.build'),
+ 'staticTestLib': ('static library', True, False, 'staticlib/meson.build'),
+ 'test1': ('executable', True, True, 'meson.build'),
+ 'test2': ('executable', True, False, 'meson.build'),
+ 'test3': ('executable', True, False, 'meson.build'),
+ }
+ for i in res['targets']:
+ assertKeyTypes(targets_typelist, i)
+ if i['name'] in targets_to_find:
+ tgt = targets_to_find[i['name']]
+ self.assertEqual(i['type'], tgt[0])
+ self.assertEqual(i['build_by_default'], tgt[1])
+ self.assertEqual(i['installed'], tgt[2])
+ self.assertPathEqual(i['defined_in'], os.path.join(testdir, tgt[3]))
+ targets_to_find.pop(i['name'], None)
+ for j in i['target_sources']:
+ assertKeyTypes(targets_sources_typelist, j)
+ self.assertDictEqual(targets_to_find, {})
+
+ def test_introspect_file_dump_equals_all(self):
+ testdir = os.path.join(self.unit_test_dir, '55 introspection')
+ self.init(testdir)
+ res_all = self.introspect('--all')
+ res_file = {}
+
+ root_keylist = [
+ 'benchmarks',
+ 'buildoptions',
+ 'buildsystem_files',
+ 'dependencies',
+ 'installed',
+ 'projectinfo',
+ 'targets',
+ 'tests',
+ ]
+
+ infodir = os.path.join(self.builddir, 'meson-info')
+ self.assertPathExists(infodir)
+ for i in root_keylist:
+ curr = os.path.join(infodir, 'intro-{}.json'.format(i))
+ self.assertPathExists(curr)
+ with open(curr, 'r') as fp:
+ res_file[i] = json.load(fp)
+
+ self.assertEqual(res_all, res_file)
+
+ def test_introspect_meson_info(self):
+ testdir = os.path.join(self.unit_test_dir, '55 introspection')
+ introfile = os.path.join(self.builddir, 'meson-info', 'meson-info.json')
+ self.init(testdir)
+ self.assertPathExists(introfile)
+ with open(introfile, 'r') as fp:
+ res1 = json.load(fp)
+
+ for i in ['meson_version', 'directories', 'introspection', 'build_files_updated', 'error']:
+ self.assertIn(i, res1)
+
+ self.assertEqual(res1['error'], False)
+ self.assertEqual(res1['build_files_updated'], True)
+
+ def test_introspect_config_update(self):
+ testdir = os.path.join(self.unit_test_dir, '55 introspection')
+ introfile = os.path.join(self.builddir, 'meson-info', 'intro-buildoptions.json')
+ self.init(testdir)
+ self.assertPathExists(introfile)
+ with open(introfile, 'r') as fp:
+ res1 = json.load(fp)
+
+ self.setconf('-Dcpp_std=c++14')
+ self.setconf('-Dbuildtype=release')
+
+ for idx, i in enumerate(res1):
+ if i['name'] == 'cpp_std':
+ res1[idx]['value'] = 'c++14'
+ if i['name'] == 'buildtype':
+ res1[idx]['value'] = 'release'
+ if i['name'] == 'optimization':
+ res1[idx]['value'] = '3'
+ if i['name'] == 'debug':
+ res1[idx]['value'] = False
+
+ with open(introfile, 'r') as fp:
+ res2 = json.load(fp)
+
+ self.assertListEqual(res1, res2)
+
+ def test_introspect_targets_from_source(self):
+ testdir = os.path.join(self.unit_test_dir, '55 introspection')
+ testfile = os.path.join(testdir, 'meson.build')
+ introfile = os.path.join(self.builddir, 'meson-info', 'intro-targets.json')
+ self.init(testdir)
+ self.assertPathExists(introfile)
+ with open(introfile, 'r') as fp:
+ res_wb = json.load(fp)
+
+ res_nb = self.introspect_directory(testfile, ['--targets'] + self.meson_args)
+
+ # Account for differences in output
+ for i in res_wb:
+ i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']]
+ if 'install_filename' in i:
+ del i['install_filename']
+
+ sources = []
+ for j in i['target_sources']:
+ sources += j['sources']
+ i['target_sources'] = [{
+ 'language': 'unknown',
+ 'compiler': [],
+ 'parameters': [],
+ 'sources': sources,
+ 'generated_sources': []
+ }]
+
+ self.maxDiff = None
+ self.assertListEqual(res_nb, res_wb)
+
+ def test_introspect_dependencies_from_source(self):
+ testdir = os.path.join(self.unit_test_dir, '55 introspection')
+ testfile = os.path.join(testdir, 'meson.build')
+ res_nb = self.introspect_directory(testfile, ['--scan-dependencies'] + self.meson_args)
+ expected = [
+ {
+ 'name': 'threads',
+ 'required': True,
+ 'has_fallback': False,
+ 'conditional': False
+ },
+ {
+ 'name': 'zlib',
+ 'required': False,
+ 'has_fallback': False,
+ 'conditional': False
+ },
+ {
+ 'name': 'somethingthatdoesnotexist',
+ 'required': True,
+ 'has_fallback': False,
+ 'conditional': True
+ },
+ {
+ 'name': 'look_i_have_a_fallback',
+ 'required': True,
+ 'has_fallback': True,
+ 'conditional': True
+ }
+ ]
+ self.maxDiff = None
+ self.assertListEqual(res_nb, expected)
class FailureTests(BasePlatformTests):
'''
@@ -3113,7 +3577,7 @@ class FailureTests(BasePlatformTests):
and slows down testing.
'''
dnf = "[Dd]ependency.*not found(:.*)?"
- nopkg = '[Pp]kg-config not found'
+ nopkg = '[Pp]kg-config.*not found'
def setUp(self):
super().setUp()
@@ -3194,16 +3658,29 @@ class FailureTests(BasePlatformTests):
self.assertMesonRaises("dependency('appleframeworks')",
"requires at least one module")
+ def test_extraframework_dependency_method(self):
+ code = "dependency('python', method : 'extraframework')"
+ if not is_osx():
+ self.assertMesonRaises(code, self.dnf)
+ else:
+ # Python2 framework is always available on macOS
+ self.assertMesonOutputs(code, '[Dd]ependency.*python.*found.*YES')
+
def test_sdl2_notfound_dependency(self):
# Want to test failure, so skip if available
if shutil.which('sdl2-config'):
raise unittest.SkipTest('sdl2-config found')
self.assertMesonRaises("dependency('sdl2', method : 'sdlconfig')", self.dnf)
if shutil.which('pkg-config'):
- errmsg = self.dnf
- else:
- errmsg = self.nopkg
- self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", errmsg)
+ self.assertMesonRaises("dependency('sdl2', method : 'pkg-config')", self.dnf)
+ with no_pkgconfig():
+ # Look for pkg-config, cache it, then
+ # Use cached pkg-config without erroring out, then
+ # Use cached pkg-config to error out
+ code = "dependency('foobarrr', method : 'pkg-config', required : false)\n" \
+ "dependency('foobarrr2', method : 'pkg-config', required : false)\n" \
+ "dependency('sdl2', method : 'pkg-config')"
+ self.assertMesonRaises(code, self.nopkg)
def test_gnustep_notfound_dependency(self):
# Want to test failure, so skip if available
@@ -3248,7 +3725,7 @@ class FailureTests(BasePlatformTests):
code = '''zlib_dep = dependency('zlib', required : false)
zlib_dep.get_configtool_variable('foo')
'''
- self.assertMesonRaises(code, "'zlib' is not a config-tool dependency")
+ self.assertMesonRaises(code, ".* is not a config-tool dependency")
code = '''zlib_dep = dependency('zlib', required : false)
dep = declare_dependency(dependencies : zlib_dep)
dep.get_pkgconfig_variable('foo')
@@ -3264,7 +3741,7 @@ class FailureTests(BasePlatformTests):
'''
Test that when we can't detect objc or objcpp, we fail gracefully.
'''
- env = get_fake_env('', self.builddir, self.prefix)
+ env = get_fake_env()
try:
env.detect_objc_compiler(False)
env.detect_objcpp_compiler(False)
@@ -3279,23 +3756,22 @@ class FailureTests(BasePlatformTests):
Test that:
1. The correct message is outputted when a not-required dep is not
found and the fallback subproject is also not found.
- 2. A not-found not-required dep with a fallback subproject outputs the
+ 2. A not-required fallback dependency is not found because the
+ subproject failed to parse.
+ 3. A not-found not-required dep with a fallback subproject outputs the
correct message when the fallback subproject is found but the
variable inside it is not.
- 3. A fallback dependency is found from the subproject parsed in (2)
- 4. A not-required fallback dependency is not found because the
- subproject failed to parse.
+ 4. A fallback dependency is found from the subproject parsed in (3)
+ 5. The correct message is outputted when the .wrap file is missing for
+ a sub-subproject.
'''
tdir = os.path.join(self.unit_test_dir, '20 subproj dep variables')
out = self.init(tdir, inprocess=True)
- self.assertRegex(out, r"Couldn't use fallback subproject "
- "in.*subprojects.*nosubproj.*for the dependency.*somedep")
- self.assertRegex(out, r'Dependency.*somenotfounddep.*from subproject.*'
- 'subprojects.*somesubproj.*found:.*NO')
- self.assertRegex(out, r'Dependency.*zlibproxy.*from subproject.*'
- 'subprojects.*somesubproj.*found:.*YES.*(cached)')
- self.assertRegex(out, r'Couldn\'t use fallback subproject in '
- '.*subprojects.*failingsubproj.*for the dependency.*somedep')
+ self.assertRegex(out, r"Subproject directory not found and .*nosubproj.wrap.* file not found")
+ self.assertRegex(out, r'Function does not take positional arguments.')
+ self.assertRegex(out, r'WARNING:.* Dependency .*subsubproject.* not found but it is available in a sub-subproject.')
+ self.assertRegex(out, r'Subproject directory not found and .*subsubproject.wrap.* file not found')
+ self.assertRegex(out, r'Dependency .*zlibproxy.* from subproject .*subprojects.*somesubproj.* found: .*YES.*')
def test_exception_exit_status(self):
'''
@@ -3360,6 +3836,7 @@ class FailureTests(BasePlatformTests):
"""Subproject "subprojects/not-found-subproject" disabled can't get_variable on it.""")
+@unittest.skipUnless(is_windows() or is_cygwin(), "requires Windows (or Windows via Cygwin)")
class WindowsTests(BasePlatformTests):
'''
Tests that should run on Cygwin, MinGW, and MSVC
@@ -3416,7 +3893,7 @@ class WindowsTests(BasePlatformTests):
if cc.get_argument_syntax() != 'msvc':
raise unittest.SkipTest('Not using MSVC')
# To force people to update this test, and also test
- self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt'})
+ self.assertEqual(set(cc.ignore_libs), {'c', 'm', 'pthread', 'dl', 'rt', 'execinfo'})
for l in cc.ignore_libs:
self.assertEqual(cc.find_library(l, env, []), [])
@@ -3470,6 +3947,7 @@ class WindowsTests(BasePlatformTests):
return
self.build()
+@unittest.skipUnless(is_osx(), "requires Darwin")
class DarwinTests(BasePlatformTests):
'''
Tests that should run on macOS
@@ -3532,6 +4010,7 @@ class DarwinTests(BasePlatformTests):
self.assertIsNotNone(m, msg=out)
return m.groups()
+ @skipIfNoPkgconfig
def test_library_versioning(self):
'''
Ensure that compatibility_version and current_version are set correctly
@@ -3541,7 +4020,7 @@ class DarwinTests(BasePlatformTests):
self.build()
targets = {}
for t in self.introspect('--targets'):
- targets[t['name']] = t['filename']
+ targets[t['name']] = t['filename'][0] if isinstance(t['filename'], list) else t['filename']
self.assertEqual(self._get_darwin_versions(targets['some']), ('7.0.0', '7.0.0'))
self.assertEqual(self._get_darwin_versions(targets['noversion']), ('0.0.0', '0.0.0'))
self.assertEqual(self._get_darwin_versions(targets['onlyversion']), ('1.0.0', '1.0.0'))
@@ -3565,6 +4044,7 @@ class DarwinTests(BasePlatformTests):
del os.environ["LDFLAGS"]
+@unittest.skipUnless(not is_windows(), "requires something Unix-like")
class LinuxlikeTests(BasePlatformTests):
'''
Tests that should run on Linux, macOS, and *BSD
@@ -3650,26 +4130,30 @@ class LinuxlikeTests(BasePlatformTests):
privatedir2 = self.privatedir
os.environ['PKG_CONFIG_LIBDIR'] = os.pathsep.join([privatedir1, privatedir2])
- cmd = ['pkg-config', 'dependency-test']
-
- out = self._run(cmd + ['--print-requires']).strip().split('\n')
- self.assertEqual(sorted(out), sorted(['libexposed']))
+ self._run(['pkg-config', 'dependency-test', '--validate'])
- out = self._run(cmd + ['--print-requires-private']).strip().split('\n')
- self.assertEqual(sorted(out), sorted(['libfoo >= 1.0']))
-
- out = self._run(cmd + ['--cflags-only-other']).strip().split()
- self.check_pkg_flags_are_same(out, ['-pthread', '-DCUSTOM'])
-
- out = self._run(cmd + ['--libs-only-l', '--libs-only-other']).strip().split()
- self.check_pkg_flags_are_same(out, ['-pthread', '-lcustom',
- '-llibmain', '-llibexposed'])
-
- out = self._run(cmd + ['--libs-only-l', '--libs-only-other', '--static']).strip().split()
- self.check_pkg_flags_are_same(out, ['-pthread', '-lcustom',
- '-llibmain', '-llibexposed',
- '-llibinternal', '-lcustom2',
- '-lfoo'])
+ # pkg-config strips some duplicated flags so we have to parse the
+ # generated file ourself.
+ expected = {
+ 'Requires': 'libexposed',
+ 'Requires.private': 'libfoo >= 1.0',
+ 'Libs': '-L${libdir} -llibmain -pthread -lcustom',
+ 'Libs.private': '-lcustom2 -L${libdir} -llibinternal',
+ 'Cflags': '-I${includedir} -pthread -DCUSTOM',
+ }
+ if is_osx() or is_haiku():
+ expected['Cflags'] = expected['Cflags'].replace('-pthread ', '')
+ with open(os.path.join(privatedir2, 'dependency-test.pc')) as f:
+ matched_lines = 0
+ for line in f:
+ parts = line.split(':', 1)
+ if parts[0] in expected:
+ key = parts[0]
+ val = parts[1].strip()
+ expected_val = expected[key]
+ self.assertEqual(expected_val, val)
+ matched_lines += 1
+ self.assertEqual(len(expected), matched_lines)
cmd = ['pkg-config', 'requires-test']
out = self._run(cmd + ['--print-requires']).strip().split('\n')
@@ -3679,11 +4163,6 @@ class LinuxlikeTests(BasePlatformTests):
out = self._run(cmd + ['--print-requires-private']).strip().split('\n')
self.assertEqual(sorted(out), sorted(['libexposed', 'libfoo >= 1.0', 'libhello']))
- def check_pkg_flags_are_same(self, output, expected):
- if is_osx() or is_haiku():
- expected = [x for x in expected if x != '-pthread']
- self.assertEqual(sorted(output), sorted(expected))
-
def test_pkg_unfound(self):
testdir = os.path.join(self.unit_test_dir, '23 unfound pkgconfig')
self.init(testdir)
@@ -3751,6 +4230,8 @@ class LinuxlikeTests(BasePlatformTests):
def test_generate_gir_with_address_sanitizer(self):
if is_cygwin():
raise unittest.SkipTest('asan not available on Cygwin')
+ if is_openbsd():
+ raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
testdir = os.path.join(self.framework_test_dir, '7 gnome')
self.init(testdir, ['-Db_sanitize=address', '-Db_lundef=false'])
@@ -4190,8 +4671,8 @@ class LinuxlikeTests(BasePlatformTests):
docbook_target = t
break
self.assertIsInstance(docbook_target, dict)
- ifile = self.introspect(['--target-files', 'generated-gdbus-docbook@cus'])[0]
- self.assertEqual(t['filename'], 'gdbus/generated-gdbus-doc-' + os.path.basename(ifile))
+ ifile = self.introspect(['--target-files', '8d60afc@@generated-gdbus-docbook@cus'])[0]
+ self.assertListEqual(t['filename'], [os.path.join(self.builddir, 'gdbus/generated-gdbus-doc-' + os.path.basename(ifile))])
def test_build_rpath(self):
if is_cygwin():
@@ -4216,6 +4697,8 @@ class LinuxlikeTests(BasePlatformTests):
def test_pch_with_address_sanitizer(self):
if is_cygwin():
raise unittest.SkipTest('asan not available on Cygwin')
+ if is_openbsd():
+ raise unittest.SkipTest('-fsanitize=address is not supported on OpenBSD')
testdir = os.path.join(self.common_test_dir, '13 pch')
self.init(testdir, ['-Db_sanitize=address'])
@@ -4338,6 +4821,23 @@ endian = 'little'
subprocess.check_call(test_exe, env=myenv)
@skipIfNoPkgconfig
+ def test_pkgconfig_relative_paths(self):
+ testdir = os.path.join(self.unit_test_dir, '58 pkgconfig relative paths')
+ pkg_dir = os.path.join(testdir, 'pkgconfig')
+ self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'librelativepath.pc')))
+ os.environ['PKG_CONFIG_PATH'] = pkg_dir
+
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ kwargs = {'required': True, 'silent': True}
+ relative_path_dep = PkgConfigDependency('librelativepath', env, kwargs)
+ self.assertTrue(relative_path_dep.found())
+
+ # Ensure link_args are properly quoted
+ libpath = Path(self.builddir) / '../relativepath/lib'
+ link_args = ['-L' + libpath.as_posix(), '-lrelativepath']
+ self.assertEqual(relative_path_dep.get_link_args(), link_args)
+
+ @skipIfNoPkgconfig
def test_pkgconfig_internal_libraries(self):
'''
'''
@@ -4374,7 +4874,7 @@ endian = 'little'
@skipIfNoPkgconfig
@skip_if_not_language('cs')
def test_pkgconfig_csharp_library(self):
- testdir = os.path.join(self.unit_test_dir, '48 pkgconfig csharp library')
+ testdir = os.path.join(self.unit_test_dir, '50 pkgconfig csharp library')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
@@ -4387,7 +4887,7 @@ endian = 'little'
'''
Test that libraries are listed before their dependencies.
'''
- testdir = os.path.join(self.unit_test_dir, '50 pkgconfig static link order')
+ testdir = os.path.join(self.unit_test_dir, '52 pkgconfig static link order')
self.init(testdir)
myenv = os.environ.copy()
myenv['PKG_CONFIG_PATH'] = self.privatedir
@@ -4417,9 +4917,9 @@ endian = 'little'
testdir = os.path.join(self.unit_test_dir, '42 rpath order')
self.init(testdir)
if is_osx():
- rpathre = re.compile('-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2')
+ rpathre = re.compile(r'-rpath,.*/subprojects/sub1.*-rpath,.*/subprojects/sub2')
else:
- rpathre = re.compile('-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2')
+ rpathre = re.compile(r'-rpath,\$\$ORIGIN/subprojects/sub1:\$\$ORIGIN/subprojects/sub2')
with open(os.path.join(self.builddir, 'build.ninja')) as bfile:
for line in bfile:
if '-rpath' in line:
@@ -4531,7 +5031,7 @@ endian = 'little'
@skipIfNoPkgconfigDep('gmodule-2.0')
def test_ldflag_dedup(self):
- testdir = os.path.join(self.unit_test_dir, '49 ldflagdedup')
+ testdir = os.path.join(self.unit_test_dir, '51 ldflagdedup')
if is_cygwin() or is_osx():
raise unittest.SkipTest('Not applicable on Cygwin or OSX.')
self.init(testdir)
@@ -4543,7 +5043,24 @@ endian = 'little'
max_count = max(max_count, line.count(search_term))
self.assertEqual(max_count, 1, 'Export dynamic incorrectly deduplicated.')
+ def test_compiler_libs_static_dedup(self):
+ testdir = os.path.join(self.unit_test_dir, '55 dedup compiler libs')
+ self.init(testdir)
+ build_ninja = os.path.join(self.builddir, 'build.ninja')
+ with open(build_ninja, 'r', encoding='utf-8') as f:
+ lines = f.readlines()
+ for lib in ('-ldl', '-lm', '-lc', '-lrt'):
+ for line in lines:
+ if lib not in line:
+ continue
+ # Assert that the library flag appears exactly once on the line
+ self.assertEqual(len(line.split(lib)), 2, msg=(lib, line))
+
+def should_run_cross_arm_tests():
+ return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
+
+@unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM")
class LinuxCrossArmTests(BasePlatformTests):
'''
Tests that cross-compilation to Linux/ARM works
@@ -4592,6 +5109,10 @@ class LinuxCrossArmTests(BasePlatformTests):
self.assertTrue(False, 'Option libdir not in introspect data.')
+def should_run_cross_mingw_tests():
+ return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())
+
+@unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW")
class LinuxCrossMingwTests(BasePlatformTests):
'''
Tests that cross-compilation to Windows/MinGW works
@@ -4692,74 +5213,337 @@ class PythonTests(BasePlatformTests):
self.wipe()
-class RewriterTests(unittest.TestCase):
-
+class RewriterTests(BasePlatformTests):
def setUp(self):
super().setUp()
- src_root = os.path.dirname(__file__)
- self.testroot = os.path.realpath(tempfile.mkdtemp())
- self.rewrite_command = python_command + [os.path.join(src_root, 'mesonrewriter.py')]
- self.tmpdir = os.path.realpath(tempfile.mkdtemp())
- self.workdir = os.path.join(self.tmpdir, 'foo')
- self.test_dir = os.path.join(src_root, 'test cases/rewrite')
+ self.maxDiff = None
- def tearDown(self):
- windows_proof_rmtree(self.tmpdir)
+ def prime(self, dirname):
+ copy_tree(os.path.join(self.rewrite_test_dir, dirname), self.builddir)
- def read_contents(self, fname):
- with open(os.path.join(self.workdir, fname)) as f:
- return f.read()
+ def rewrite_raw(self, directory, args):
+ if isinstance(args, str):
+ args = [args]
+ command = self.rewrite_command + ['--verbose', '--skip', '--sourcedir', directory] + args
+ p = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ universal_newlines=True, timeout=60)
+ print('STDOUT:')
+ print(p.stdout)
+ print('STDERR:')
+ print(p.stderr)
+ if p.returncode != 0:
+ if 'MESON_SKIP_TEST' in p.stdout:
+ raise unittest.SkipTest('Project requested skipping.')
+ raise subprocess.CalledProcessError(p.returncode, command, output=p.stdout)
+ if not p.stderr:
+ return {}
+ return json.loads(p.stderr)
- def check_effectively_same(self, mainfile, truth):
- mf = self.read_contents(mainfile)
- t = self.read_contents(truth)
- # Rewriting is not guaranteed to do a perfect job of
- # maintaining whitespace.
- self.assertEqual(mf.replace(' ', ''), t.replace(' ', ''))
+ def rewrite(self, directory, args):
+ if isinstance(args, str):
+ args = [args]
+ return self.rewrite_raw(directory, ['command'] + args)
- def prime(self, dirname):
- shutil.copytree(os.path.join(self.test_dir, dirname), self.workdir)
+ def test_target_source_list(self):
+ self.prime('1 basic')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_add_sources(self):
+ self.prime('1 basic')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
+ expected = {
+ 'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
- def test_basic(self):
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ self.assertDictEqual(out, expected)
+
+ def test_target_add_sources_abs(self):
+ self.prime('1 basic')
+ abs_src = [os.path.join(self.builddir, x) for x in ['a1.cpp', 'a2.cpp', 'a6.cpp']]
+ add = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "src_add", "sources": abs_src}])
+ inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}])
+ self.rewrite(self.builddir, add)
+ out = self.rewrite(self.builddir, inf)
+ expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp']}}}
+ self.assertDictEqual(out, expected)
+
+ def test_target_remove_sources(self):
self.prime('1 basic')
- subprocess.check_call(self.rewrite_command + ['remove',
- '--target=trivialprog',
- '--filename=notthere.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('meson.build', 'removed.txt')
- subprocess.check_call(self.rewrite_command + ['add',
- '--target=trivialprog',
- '--filename=notthere.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('meson.build', 'added.txt')
- subprocess.check_call(self.rewrite_command + ['remove',
- '--target=trivialprog',
- '--filename=notthere.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('meson.build', 'removed.txt')
-
- def test_subdir(self):
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json'))
+ expected = {
+ 'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ self.assertDictEqual(out, expected)
+
+ def test_target_subdir(self):
self.prime('2 subdirs')
- top = self.read_contents('meson.build')
- s2 = self.read_contents('sub2/meson.build')
- subprocess.check_call(self.rewrite_command + ['remove',
- '--target=something',
- '--filename=second.c',
- '--sourcedir', self.workdir],
- universal_newlines=True)
- self.check_effectively_same('sub1/meson.build', 'sub1/after.txt')
- self.assertEqual(top, self.read_contents('meson.build'))
- self.assertEqual(s2, self.read_contents('sub2/meson.build'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
+ expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c']}
+ self.assertDictEqual(list(out['target'].values())[0], expected)
+ # Check the written file
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ self.assertDictEqual(list(out['target'].values())[0], expected)
+
+ def test_target_remove(self):
+ self.prime('1 basic')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+
+ expected = {
+ 'target': {
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_add(self):
+ self.prime('1 basic')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+
+ expected = {
+ 'target': {
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp']},
+ 'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileB.cpp', 'fileC.cpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp']},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp']},
+ 'trivialprog10@sha': {'name': 'trivialprog10', 'sources': ['new1.cpp', 'new2.cpp']},
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_remove_subdir(self):
+ self.prime('2 subdirs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'rmTgt.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ self.assertDictEqual(out, {})
+
+ def test_target_add_subdir(self):
+ self.prime('2 subdirs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {'name': 'something', 'sources': ['first.c', 'second.c']}
+ self.assertDictEqual(out['target']['94b671c@@something@exe'], expected)
+
+ def test_target_source_sorting(self):
+ self.prime('5 sorting')
+ add_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'src_add', 'sources': ['a666.c']}])
+ inf_json = json.dumps([{'type': 'target', 'target': 'exe1', 'operation': 'info'}])
+ out = self.rewrite(self.builddir, add_json)
+ out = self.rewrite(self.builddir, inf_json)
+ expected = {
+ 'target': {
+ 'exe1@exe': {
+ 'name': 'exe1',
+ 'sources': [
+ 'aaa/a/a1.c',
+ 'aaa/b/b1.c',
+ 'aaa/b/b2.c',
+ 'aaa/f1.c',
+ 'aaa/f2.c',
+ 'aaa/f3.c',
+ 'bbb/a/b1.c',
+ 'bbb/b/b2.c',
+ 'bbb/c1/b5.c',
+ 'bbb/c2/b7.c',
+ 'bbb/c10/b6.c',
+ 'bbb/a4.c',
+ 'bbb/b3.c',
+ 'bbb/b4.c',
+ 'bbb/b5.c',
+ 'a1.c',
+ 'a2.c',
+ 'a3.c',
+ 'a10.c',
+ 'a20.c',
+ 'a30.c',
+ 'a100.c',
+ 'a101.c',
+ 'a110.c',
+ 'a210.c',
+ 'a666.c',
+ 'b1.c',
+ 'c2.c'
+ ]
+ }
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_target_same_name_skip(self):
+ self.prime('4 same name targets')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {'name': 'myExe', 'sources': ['main.cpp']}
+ self.assertEqual(len(out['target']), 2)
+ for _, val in out['target'].items():
+ self.assertDictEqual(expected, val)
+
+ def test_kwargs_info(self):
+ self.prime('3 kwargs')
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1'},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_kwargs_set(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'set.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.2', 'meson_version': '0.50.0', 'license': ['GPL', 'MIT']},
+ 'target#tgt1': {'build_by_default': False, 'build_rpath': '/usr/local', 'dependencies': 'dep1'},
+ 'dependency#dep1': {'required': True, 'method': 'cmake'}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_kwargs_add(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'add.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'license': ['GPL', 'MIT', 'BSD']},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_kwargs_remove(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'remove.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'license': 'GPL'},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_kwargs_remove_regex(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'remove_regex.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=true']},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_kwargs_delete(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'delete.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {},
+ 'target#tgt1': {},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_default_options_set(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_set.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'default_options': ['buildtype=release', 'debug=True', 'cpp_std=c++11']},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
+
+ def test_default_options_delete(self):
+ self.prime('3 kwargs')
+ self.rewrite(self.builddir, os.path.join(self.builddir, 'defopts_delete.json'))
+ out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
+ expected = {
+ 'kwargs': {
+ 'project#/': {'version': '0.0.1', 'default_options': ['cpp_std=c++14', 'debug=true']},
+ 'target#tgt1': {'build_by_default': True},
+ 'dependency#dep1': {'required': False}
+ }
+ }
+ self.assertDictEqual(out, expected)
class NativeFileTests(BasePlatformTests):
def setUp(self):
super().setUp()
- self.testcase = os.path.join(self.unit_test_dir, '46 native file binary')
+ self.testcase = os.path.join(self.unit_test_dir, '47 native file binary')
self.current_config = 0
self.current_wrapper = 0
@@ -4808,10 +5592,9 @@ class NativeFileTests(BasePlatformTests):
ret = subprocess.run(
["{}"] + extra_args,
stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- encoding='utf-8')
- print(ret.stdout)
- print(ret.stderr, file=sys.stderr)
+ stderr=subprocess.PIPE)
+ print(ret.stdout.decode('utf-8'))
+ print(ret.stderr.decode('utf-8'), file=sys.stderr)
sys.exit(ret.returncode)
if __name__ == '__main__':
@@ -4837,13 +5620,13 @@ class NativeFileTests(BasePlatformTests):
"""Helper for generating tests for overriding compilers for langaugages
with more than one implementation, such as C, C++, ObjC, ObjC++, and D.
"""
- env = get_fake_env('', '', '')
+ env = get_fake_env()
getter = getattr(env, 'detect_{}_compiler'.format(lang))
if lang not in ['cs']:
getter = functools.partial(getter, False)
cc = getter()
binary, newid = cb(cc)
- env.config_info.binaries = {lang: binary}
+ env.binaries.host.binaries[lang] = binary
compiler = getter()
self.assertEqual(compiler.id, newid)
@@ -4893,59 +5676,59 @@ class NativeFileTests(BasePlatformTests):
self._simple_test('python', 'python')
@unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
- @skip_if_env_value('CC')
+ @skip_if_env_set('CC')
def test_c_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang', 'clang'
- if not shutil.which('gcc'):
+ if not is_real_gnu_compiler(shutil.which('gcc')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'gcc', 'gcc'
self.helper_for_compiler('c', cb)
@unittest.skipIf(is_windows(), 'Setting up multiple compilers on windows is hard')
- @skip_if_env_value('CXX')
+ @skip_if_env_set('CXX')
def test_cpp_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang++'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang++', 'clang'
- if not shutil.which('g++'):
+ if not is_real_gnu_compiler(shutil.which('g++')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'g++', 'gcc'
self.helper_for_compiler('cpp', cb)
@skip_if_not_language('objc')
- @skip_if_env_value('OBJC')
+ @skip_if_env_set('OBJC')
def test_objc_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang', 'clang'
- if not shutil.which('gcc'):
+ if not is_real_gnu_compiler(shutil.which('gcc')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'gcc', 'gcc'
self.helper_for_compiler('objc', cb)
@skip_if_not_language('objcpp')
- @skip_if_env_value('OBJCXX')
+ @skip_if_env_set('OBJCXX')
def test_objcpp_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if not shutil.which('clang++'):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'clang++', 'clang'
- if not shutil.which('g++'):
+ if not is_real_gnu_compiler(shutil.which('g++')):
raise unittest.SkipTest('Only one compiler found, cannot test.')
return 'g++', 'gcc'
self.helper_for_compiler('objcpp', cb)
@skip_if_not_language('d')
- @skip_if_env_value('DC')
+ @skip_if_env_set('DC')
def test_d_compiler(self):
def cb(comp):
if comp.id == 'dmd':
@@ -4961,7 +5744,7 @@ class NativeFileTests(BasePlatformTests):
self.helper_for_compiler('d', cb)
@skip_if_not_language('cs')
- @skip_if_env_value('CSC')
+ @skip_if_env_set('CSC')
def test_cs_compiler(self):
def cb(comp):
if comp.id == 'csc':
@@ -4974,17 +5757,21 @@ class NativeFileTests(BasePlatformTests):
self.helper_for_compiler('cs', cb)
@skip_if_not_language('fortran')
- @skip_if_env_value('FC')
+ @skip_if_env_set('FC')
def test_fortran_compiler(self):
def cb(comp):
if comp.id == 'gcc':
if shutil.which('ifort'):
return 'ifort', 'intel'
+ elif shutil.which('flang'):
+ return 'flang', 'flang'
+ elif shutil.which('pgfortran'):
+ return 'pgfortran', 'pgi'
# XXX: there are several other fortran compilers meson
# supports, but I don't have any of them to test with
raise unittest.SkipTest('No alternate Fortran implementation.')
if not shutil.which('gfortran'):
- raise unittest.SkipTest('No alternate C# implementation.')
+ raise unittest.SkipTest('No alternate Fortran implementation.')
return 'gfortran', 'gcc'
self.helper_for_compiler('fortran', cb)
@@ -4994,22 +5781,22 @@ class NativeFileTests(BasePlatformTests):
Builds a wrapper around the compiler to override the version.
"""
wrapper = self.helper_create_binary_wrapper(binary, version=version_str)
- env = get_fake_env('', '', '')
+ env = get_fake_env()
getter = getattr(env, 'detect_{}_compiler'.format(lang))
if lang in ['rust']:
getter = functools.partial(getter, False)
- env.config_info.binaries = {lang: wrapper}
+ env.binaries.host.binaries[lang] = wrapper
compiler = getter()
self.assertEqual(compiler.version, version)
@skip_if_not_language('vala')
- @skip_if_env_value('VALAC')
+ @skip_if_env_set('VALAC')
def test_vala_compiler(self):
self._single_implementation_compiler(
'vala', 'valac', 'Vala 1.2345', '1.2345')
@skip_if_not_language('rust')
- @skip_if_env_value('RUSTC')
+ @skip_if_env_set('RUSTC')
def test_rust_compiler(self):
self._single_implementation_compiler(
'rust', 'rustc', 'rustc 1.2345', '1.2345')
@@ -5023,11 +5810,333 @@ class NativeFileTests(BasePlatformTests):
def test_swift_compiler(self):
wrapper = self.helper_create_binary_wrapper(
'swiftc', version='Swift 1.2345', outfile='stderr')
- env = get_fake_env('', '', '')
- env.config_info.binaries = {'swift': wrapper}
+ env = get_fake_env()
+ env.binaries.host.binaries['swift'] = wrapper
compiler = env.detect_swift_compiler()
self.assertEqual(compiler.version, '1.2345')
+ def test_native_file_dirs(self):
+ testcase = os.path.join(self.unit_test_dir, '57 native file override')
+ self.init(testcase, default_args=False,
+ extra_args=['--native-file', os.path.join(testcase, 'nativefile')])
+
+ def test_native_file_dirs_overriden(self):
+ testcase = os.path.join(self.unit_test_dir, '57 native file override')
+ self.init(testcase, default_args=False,
+ extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
+ '-Ddef_libdir=liblib', '-Dlibdir=liblib'])
+
+
+class CrossFileTests(BasePlatformTests):
+
+ """Tests for cross file functionality not directly related to
+ cross compiling.
+
+ This is mainly aimed at testing overrides from cross files.
+ """
+
+ def test_cross_file_dirs(self):
+ testcase = os.path.join(self.unit_test_dir, '57 native file override')
+ self.init(testcase, default_args=False,
+ extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
+ '--cross-file', os.path.join(testcase, 'crossfile'),
+ '-Ddef_bindir=binbar',
+ '-Ddef_datadir=databar',
+ '-Ddef_includedir=includebar',
+ '-Ddef_infodir=infobar',
+ '-Ddef_libdir=libbar',
+ '-Ddef_libexecdir=libexecbar',
+ '-Ddef_localedir=localebar',
+ '-Ddef_localstatedir=localstatebar',
+ '-Ddef_mandir=manbar',
+ '-Ddef_sbindir=sbinbar',
+ '-Ddef_sharedstatedir=sharedstatebar',
+ '-Ddef_sysconfdir=sysconfbar'])
+
+ def test_cross_file_dirs_overriden(self):
+ testcase = os.path.join(self.unit_test_dir, '57 native file override')
+ self.init(testcase, default_args=False,
+ extra_args=['--native-file', os.path.join(testcase, 'nativefile'),
+ '--cross-file', os.path.join(testcase, 'crossfile'),
+ '-Ddef_libdir=liblib', '-Dlibdir=liblib',
+ '-Ddef_bindir=binbar',
+ '-Ddef_datadir=databar',
+ '-Ddef_includedir=includebar',
+ '-Ddef_infodir=infobar',
+ '-Ddef_libexecdir=libexecbar',
+ '-Ddef_localedir=localebar',
+ '-Ddef_localstatedir=localstatebar',
+ '-Ddef_mandir=manbar',
+ '-Ddef_sbindir=sbinbar',
+ '-Ddef_sharedstatedir=sharedstatebar',
+ '-Ddef_sysconfdir=sysconfbar'])
+
+
+class TAPParserTests(unittest.TestCase):
+ def assert_test(self, events, **kwargs):
+ if 'explanation' not in kwargs:
+ kwargs['explanation'] = None
+ self.assertEqual(next(events), TAPParser.Test(**kwargs))
+
+ def assert_plan(self, events, **kwargs):
+ if 'skipped' not in kwargs:
+ kwargs['skipped'] = False
+ if 'explanation' not in kwargs:
+ kwargs['explanation'] = None
+ self.assertEqual(next(events), TAPParser.Plan(**kwargs))
+
+ def assert_version(self, events, **kwargs):
+ self.assertEqual(next(events), TAPParser.Version(**kwargs))
+
+ def assert_error(self, events):
+ self.assertEqual(type(next(events)), TAPParser.Error)
+
+ def assert_bailout(self, events, **kwargs):
+ self.assertEqual(next(events), TAPParser.Bailout(**kwargs))
+
+ def assert_last(self, events):
+ with self.assertRaises(StopIteration):
+ next(events)
+
+ def parse_tap(self, s):
+ parser = TAPParser(io.StringIO(s))
+ return iter(parser.parse())
+
+ def parse_tap_v13(self, s):
+ events = self.parse_tap('TAP version 13\n' + s)
+ self.assert_version(events, version=13)
+ return events
+
+ def test_empty(self):
+ events = self.parse_tap('')
+ self.assert_last(events)
+
+ def test_empty_plan(self):
+ events = self.parse_tap('1..0')
+ self.assert_plan(events, count=0, late=False, skipped=True)
+ self.assert_last(events)
+
+ def test_plan_directive(self):
+ events = self.parse_tap('1..0 # skipped for some reason')
+ self.assert_plan(events, count=0, late=False, skipped=True,
+ explanation='for some reason')
+ self.assert_last(events)
+
+ events = self.parse_tap('1..1 # skipped for some reason\nok 1')
+ self.assert_error(events)
+ self.assert_plan(events, count=1, late=False, skipped=True,
+ explanation='for some reason')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..1 # todo not supported here\nok 1')
+ self.assert_error(events)
+ self.assert_plan(events, count=1, late=False, skipped=False,
+ explanation='not supported here')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_ok(self):
+ events = self.parse_tap('ok')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_with_number(self):
+ events = self.parse_tap('ok 1')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_with_name(self):
+ events = self.parse_tap('ok 1 abc')
+ self.assert_test(events, number=1, name='abc', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_not_ok(self):
+ events = self.parse_tap('not ok')
+ self.assert_test(events, number=1, name='', result=TestResult.FAIL)
+ self.assert_last(events)
+
+ def test_one_test_todo(self):
+ events = self.parse_tap('not ok 1 abc # TODO')
+ self.assert_test(events, number=1, name='abc', result=TestResult.EXPECTEDFAIL)
+ self.assert_last(events)
+
+ events = self.parse_tap('ok 1 abc # TODO')
+ self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
+ self.assert_last(events)
+
+ def test_one_test_skip(self):
+ events = self.parse_tap('ok 1 abc # SKIP')
+ self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
+ self.assert_last(events)
+
+ def test_one_test_skip_failure(self):
+ events = self.parse_tap('not ok 1 abc # SKIP')
+ self.assert_test(events, number=1, name='abc', result=TestResult.FAIL)
+ self.assert_last(events)
+
+ def test_many_early_plan(self):
+ events = self.parse_tap('1..4\nok 1\nnot ok 2\nok 3\nnot ok 4')
+ self.assert_plan(events, count=4, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_test(events, number=3, name='', result=TestResult.OK)
+ self.assert_test(events, number=4, name='', result=TestResult.FAIL)
+ self.assert_last(events)
+
+ def test_many_late_plan(self):
+ events = self.parse_tap('ok 1\nnot ok 2\nok 3\nnot ok 4\n1..4')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_test(events, number=3, name='', result=TestResult.OK)
+ self.assert_test(events, number=4, name='', result=TestResult.FAIL)
+ self.assert_plan(events, count=4, late=True)
+ self.assert_last(events)
+
+ def test_directive_case(self):
+ events = self.parse_tap('ok 1 abc # skip')
+ self.assert_test(events, number=1, name='abc', result=TestResult.SKIP)
+ self.assert_last(events)
+
+ events = self.parse_tap('ok 1 abc # ToDo')
+ self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS)
+ self.assert_last(events)
+
+ def test_directive_explanation(self):
+ events = self.parse_tap('ok 1 abc # skip why')
+ self.assert_test(events, number=1, name='abc', result=TestResult.SKIP,
+ explanation='why')
+ self.assert_last(events)
+
+ events = self.parse_tap('ok 1 abc # ToDo Because')
+ self.assert_test(events, number=1, name='abc', result=TestResult.UNEXPECTEDPASS,
+ explanation='Because')
+ self.assert_last(events)
+
+ def test_one_test_early_plan(self):
+ events = self.parse_tap('1..1\nok')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_one_test_late_plan(self):
+ events = self.parse_tap('ok\n1..1')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_plan(events, count=1, late=True)
+ self.assert_last(events)
+
+ def test_out_of_order(self):
+ events = self.parse_tap('ok 2')
+ self.assert_error(events)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_middle_plan(self):
+ events = self.parse_tap('ok 1\n1..2\nok 2')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_plan(events, count=2, late=True)
+ self.assert_error(events)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_too_many_plans(self):
+ events = self.parse_tap('1..1\n1..2\nok 1')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_error(events)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_too_many(self):
+ events = self.parse_tap('ok 1\nnot ok 2\n1..1')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_plan(events, count=1, late=True)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..1\nok 1\nnot ok 2')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ def test_too_few(self):
+ events = self.parse_tap('ok 1\nnot ok 2\n1..3')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_plan(events, count=3, late=True)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..3\nok 1\nnot ok 2')
+ self.assert_plan(events, count=3, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ def test_too_few_bailout(self):
+ events = self.parse_tap('1..3\nok 1\nnot ok 2\nBail out! no third test')
+ self.assert_plan(events, count=3, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_bailout(events, message='no third test')
+ self.assert_last(events)
+
+ def test_diagnostics(self):
+ events = self.parse_tap('1..1\n# ignored\nok 1')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap('# ignored\n1..1\nok 1\n# ignored too')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap('# ignored\nok 1\n1..1\n# ignored too')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_plan(events, count=1, late=True)
+ self.assert_last(events)
+
+ def test_unexpected(self):
+ events = self.parse_tap('1..1\ninvalid\nok 1')
+ self.assert_plan(events, count=1, late=False)
+ self.assert_error(events)
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ def test_version(self):
+ events = self.parse_tap('TAP version 13\n')
+ self.assert_version(events, version=13)
+ self.assert_last(events)
+
+ events = self.parse_tap('TAP version 12\n')
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap('1..0\nTAP version 13\n')
+ self.assert_plan(events, count=0, late=False, skipped=True)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ def test_yaml(self):
+ events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def\n ...\nok 2')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_test(events, number=2, name='', result=TestResult.OK)
+ self.assert_last(events)
+
+ events = self.parse_tap_v13('ok\n ---\n foo: abc\n bar: def')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_error(events)
+ self.assert_last(events)
+
+ events = self.parse_tap_v13('ok 1\n ---\n foo: abc\n bar: def\nnot ok 2')
+ self.assert_test(events, number=1, name='', result=TestResult.OK)
+ self.assert_error(events)
+ self.assert_test(events, number=2, name='', result=TestResult.FAIL)
+ self.assert_last(events)
def unset_envs():
# For unit tests we must fully control all command lines
@@ -5038,26 +6147,14 @@ def unset_envs():
if v in os.environ:
del os.environ[v]
-def should_run_cross_arm_tests():
- return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
-
-def should_run_cross_mingw_tests():
- return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())
-
def main():
unset_envs()
cases = ['InternalTests', 'DataTests', 'AllPlatformTests', 'FailureTests',
- 'PythonTests', 'NativeFileTests']
- if not is_windows():
- cases += ['LinuxlikeTests']
- if should_run_cross_arm_tests():
- cases += ['LinuxCrossArmTests']
- if should_run_cross_mingw_tests():
- cases += ['LinuxCrossMingwTests']
- if is_windows() or is_cygwin():
- cases += ['WindowsTests']
- if is_osx():
- cases += ['DarwinTests']
+ 'PythonTests', 'NativeFileTests', 'RewriterTests', 'CrossFileTests',
+ 'TAPParserTests',
+
+ 'LinuxlikeTests', 'LinuxCrossArmTests', 'LinuxCrossMingwTests',
+ 'WindowsTests', 'DarwinTests']
return unittest.main(defaultTest=cases, buffer=True)
diff --git a/setup.py b/setup.py
index f1f2e81..07bd3dd 100644
--- a/setup.py
+++ b/setup.py
@@ -28,13 +28,14 @@ from setuptools import setup
# Other platforms will create bin/meson
entries = {'console_scripts': ['meson=mesonbuild.mesonmain:main']}
packages = ['mesonbuild',
+ 'mesonbuild.ast',
'mesonbuild.backend',
'mesonbuild.compilers',
'mesonbuild.dependencies',
'mesonbuild.modules',
'mesonbuild.scripts',
'mesonbuild.wrap']
-package_data = {'mesonbuild.dependencies': ['data/CMakeLists.txt']}
+package_data = {'mesonbuild.dependencies': ['data/CMakeLists.txt', 'data/CMakePathInfo.txt']}
data_files = []
if sys.platform != 'win32':
# Only useful on UNIX-like systems
diff --git a/test cases/common/113 ternary/meson.build b/test cases/common/113 ternary/meson.build
index 3e65046..7539d56 100644
--- a/test cases/common/113 ternary/meson.build
+++ b/test cases/common/113 ternary/meson.build
@@ -1,7 +1,12 @@
project('ternary operator', 'c')
+x = true
one = true ? 1 : error('False branch should not be evaluated')
two = false ? error('True branch should not be evaluated.') : 2
+three = '@0@'.format(x ? 'yes' : 'no')
+four = [x ? '0' : '1']
assert(one == 1, 'Return value from ternary true is wrong.')
assert(two == 2, 'Return value from ternary false is wrong.')
+assert(three == 'yes', 'Return value for ternary inside method call is wrong.')
+assert(four == ['0'], 'Return value for ternary inside of list is wrong.')
diff --git a/test cases/common/13 pch/c/meson.build b/test cases/common/13 pch/c/meson.build
index cb8349d..fe4ac68 100644
--- a/test cases/common/13 pch/c/meson.build
+++ b/test cases/common/13 pch/c/meson.build
@@ -5,4 +5,4 @@ if cc_id == 'lcc'
endif
exe = executable('prog', 'prog.c',
-c_pch : ['pch/prog_pch.c', 'pch/prog.h'])
+c_pch : 'pch/prog.h')
diff --git a/test cases/common/13 pch/c/pch/prog.h b/test cases/common/13 pch/c/pch/prog.h
index 354499a..c89890a 100644
--- a/test cases/common/13 pch/c/pch/prog.h
+++ b/test cases/common/13 pch/c/pch/prog.h
@@ -1 +1,6 @@
+#ifndef PROG_H
+// Header guards for PCH confuse msvc in some situations.
+// Using them here makes sure we handle this correctly.
+#define PROG_H
#include<stdio.h>
+#endif
diff --git a/test cases/common/13 pch/c/pch/prog_pch.c b/test cases/common/13 pch/c/pch/prog_pch.c
deleted file mode 100644
index 4960505..0000000
--- a/test cases/common/13 pch/c/pch/prog_pch.c
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !defined(_MSC_VER)
-#error "This file is only for use with MSVC."
-#endif
-
-#include "prog.h"
diff --git a/test cases/common/13 pch/cpp/meson.build b/test cases/common/13 pch/cpp/meson.build
index 802c3e1..b01cd58 100644
--- a/test cases/common/13 pch/cpp/meson.build
+++ b/test cases/common/13 pch/cpp/meson.build
@@ -1 +1 @@
-exe = executable('prog', 'prog.cc', cpp_pch : ['pch/prog.hh', 'pch/prog_pch.cc'])
+exe = executable('prog', 'prog.cc', cpp_pch : 'pch/prog.hh')
diff --git a/test cases/common/13 pch/cpp/pch/prog_pch.cc b/test cases/common/13 pch/cpp/pch/prog_pch.cc
deleted file mode 100644
index aff1225..0000000
--- a/test cases/common/13 pch/cpp/pch/prog_pch.cc
+++ /dev/null
@@ -1,5 +0,0 @@
-#if !defined(_MSC_VER)
-#error "This file is only for use with MSVC."
-#endif
-
-#include "prog.hh"
diff --git a/test cases/common/13 pch/generated/gen_custom.py b/test cases/common/13 pch/generated/gen_custom.py
new file mode 100644
index 0000000..650e03c
--- /dev/null
+++ b/test cases/common/13 pch/generated/gen_custom.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+import sys
+
+with open(sys.argv[1], 'w') as f:
+ f.write("#define FOO 0")
diff --git a/test cases/common/13 pch/generated/gen_generator.py b/test cases/common/13 pch/generated/gen_generator.py
new file mode 100644
index 0000000..a245e7a
--- /dev/null
+++ b/test cases/common/13 pch/generated/gen_generator.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+import sys
+
+with open(sys.argv[1]) as f:
+ content = f.read()
+with open(sys.argv[2], 'w') as f:
+ f.write(content)
diff --git a/test cases/common/13 pch/generated/generated_generator.in b/test cases/common/13 pch/generated/generated_generator.in
new file mode 100644
index 0000000..1a00ebd
--- /dev/null
+++ b/test cases/common/13 pch/generated/generated_generator.in
@@ -0,0 +1 @@
+#define BAR 0
diff --git a/test cases/common/13 pch/generated/meson.build b/test cases/common/13 pch/generated/meson.build
new file mode 100644
index 0000000..1ef771b
--- /dev/null
+++ b/test cases/common/13 pch/generated/meson.build
@@ -0,0 +1,16 @@
+cc = meson.get_compiler('c')
+cc_id = cc.get_id()
+if cc_id == 'lcc'
+ error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.')
+endif
+
+generated_customTarget = custom_target('makeheader',
+ output: 'generated_customTarget.h',
+ command : [find_program('gen_custom.py'), '@OUTPUT0@'])
+
+generated_generator = generator(find_program('gen_generator.py'),
+ output: '@BASENAME@.h',
+ arguments: ['@INPUT@', '@OUTPUT@'])
+
+exe = executable('prog', 'prog.c', generated_customTarget, generated_generator.process('generated_generator.in'),
+ c_pch: 'pch/prog.h')
diff --git a/test cases/common/13 pch/generated/pch/prog.h b/test cases/common/13 pch/generated/pch/prog.h
new file mode 100644
index 0000000..15fec38
--- /dev/null
+++ b/test cases/common/13 pch/generated/pch/prog.h
@@ -0,0 +1,2 @@
+#include "generated_customTarget.h"
+#include "generated_generator.h"
diff --git a/test cases/common/13 pch/generated/prog.c b/test cases/common/13 pch/generated/prog.c
new file mode 100644
index 0000000..9b2e2ef
--- /dev/null
+++ b/test cases/common/13 pch/generated/prog.c
@@ -0,0 +1,6 @@
+// No includes here, they need to come from the PCH
+
+int main(int argc, char **argv) {
+ return FOO + BAR;
+}
+
diff --git a/test cases/common/13 pch/meson.build b/test cases/common/13 pch/meson.build
index d39527b..4438c9e 100644
--- a/test cases/common/13 pch/meson.build
+++ b/test cases/common/13 pch/meson.build
@@ -2,6 +2,9 @@ project('pch test', 'c', 'cpp')
subdir('c')
subdir('cpp')
+subdir('generated')
+subdir('userDefined')
+subdir('withIncludeDirectories')
if meson.backend() == 'xcode'
warning('Xcode backend only supports one precompiled header per target. Skipping "mixed" which has various precompiled headers.')
diff --git a/test cases/common/13 pch/mixed/meson.build b/test cases/common/13 pch/mixed/meson.build
index f0c3eca..cbb7bac 100644
--- a/test cases/common/13 pch/mixed/meson.build
+++ b/test cases/common/13 pch/mixed/meson.build
@@ -1,17 +1,6 @@
exe = executable(
'prog',
files('main.cc', 'func.c'),
- c_pch : ['pch/func.h', 'pch/func_pch.c'],
- cpp_pch : ['pch/main_pch.cc', 'pch/main.h'],
+ c_pch : ['pch/func.h'],
+ cpp_pch : ['pch/main.h'],
)
-
-# test pch when only a header is given (not supported by msvc)
-cc = meson.get_compiler('c')
-if not ['msvc', 'clang-cl'].contains(cc.get_id())
- exe2 = executable(
- 'prog2',
- files('main.cc', 'func.c'),
- c_pch : 'pch/func.h',
- cpp_pch : 'pch/main.h',
- )
-endif
diff --git a/test cases/common/13 pch/mixed/pch/func_pch.c b/test cases/common/13 pch/mixed/pch/func_pch.c
deleted file mode 100644
index 5566739..0000000
--- a/test cases/common/13 pch/mixed/pch/func_pch.c
+++ /dev/null
@@ -1 +0,0 @@
-#include"func.h"
diff --git a/test cases/common/13 pch/mixed/pch/main_pch.cc b/test cases/common/13 pch/mixed/pch/main_pch.cc
deleted file mode 100644
index acd3f57..0000000
--- a/test cases/common/13 pch/mixed/pch/main_pch.cc
+++ /dev/null
@@ -1 +0,0 @@
-#include"main.h"
diff --git a/test cases/common/13 pch/userDefined/meson.build b/test cases/common/13 pch/userDefined/meson.build
new file mode 100644
index 0000000..9b60572
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/meson.build
@@ -0,0 +1,10 @@
+cc = meson.get_compiler('c')
+cc_id = cc.get_id()
+
+# User supplied PCH implementation should override the auto
+# generated one. PCH implementations are only supported for
+# msvc and generally should not be used at all. Support for
+# them is only kept for backwards compatibility.
+if cc_id == 'msvc'
+ exe = executable('prog', 'prog.c', c_pch : ['pch/pch.h', 'pch/pch.c'])
+endif
diff --git a/test cases/common/13 pch/userDefined/pch/pch.c b/test cases/common/13 pch/userDefined/pch/pch.c
new file mode 100644
index 0000000..c107b1a
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/pch/pch.c
@@ -0,0 +1,5 @@
+#include "pch.h"
+
+int foo() {
+ return 0;
+}
diff --git a/test cases/common/13 pch/userDefined/pch/pch.h b/test cases/common/13 pch/userDefined/pch/pch.h
new file mode 100644
index 0000000..5d5f8f0
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/pch/pch.h
@@ -0,0 +1 @@
+int foo();
diff --git a/test cases/common/13 pch/userDefined/prog.c b/test cases/common/13 pch/userDefined/prog.c
new file mode 100644
index 0000000..eb068d9
--- /dev/null
+++ b/test cases/common/13 pch/userDefined/prog.c
@@ -0,0 +1,8 @@
+// No includes here, they need to come from the PCH
+
+int main(int argc, char **argv) {
+ // Method is implemented in pch.c.
+ // This makes sure that we can properly handle user defined
+ // pch implementation files and not only auto-generated ones.
+ return foo();
+}
diff --git a/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h b/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h
new file mode 100644
index 0000000..53c5fdf
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/include/lib/lib.h
@@ -0,0 +1 @@
+#include <stdio.h>
diff --git a/test cases/common/13 pch/withIncludeDirectories/meson.build b/test cases/common/13 pch/withIncludeDirectories/meson.build
new file mode 100644
index 0000000..68e544b
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/meson.build
@@ -0,0 +1,9 @@
+cc = meson.get_compiler('c')
+cc_id = cc.get_id()
+if cc_id == 'lcc'
+ error('MESON_SKIP_TEST: Elbrus compiler does not support PCH.')
+endif
+
+exe = executable('prog', 'prog.c',
+ include_directories: 'include',
+ c_pch : 'pch/prog.h')
diff --git a/test cases/common/13 pch/withIncludeDirectories/pch/prog.h b/test cases/common/13 pch/withIncludeDirectories/pch/prog.h
new file mode 100644
index 0000000..383b2c5
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/pch/prog.h
@@ -0,0 +1 @@
+#include<lib/lib.h>
diff --git a/test cases/common/13 pch/withIncludeDirectories/prog.c b/test cases/common/13 pch/withIncludeDirectories/prog.c
new file mode 100644
index 0000000..0ce3d0a
--- /dev/null
+++ b/test cases/common/13 pch/withIncludeDirectories/prog.c
@@ -0,0 +1,10 @@
+// No includes here, they need to come from the PCH
+
+void func() {
+ fprintf(stdout, "This is a function that fails if stdio is not #included.\n");
+}
+
+int main(int argc, char **argv) {
+ return 0;
+}
+
diff --git a/test cases/common/137 get define/meson.build b/test cases/common/137 get define/meson.build
index 1647e22..c3364b9 100644
--- a/test cases/common/137 get define/meson.build
+++ b/test cases/common/137 get define/meson.build
@@ -32,6 +32,9 @@ foreach lang : ['c', 'cpp']
elif host_system == 'netbsd'
d = cc.get_define('__NetBSD__')
assert(d == '1', '__NetBSD__ value is @0@ instead of 1'.format(d))
+ elif host_system == 'openbsd'
+ d = cc.get_define('__OpenBSD__')
+ assert(d == '1', '__OpenBSD__ value is @0@ instead of 1'.format(d))
elif host_system == 'gnu'
d = cc.get_define('__GNU__')
assert(d == '1', '__GNU__ value is @0@ instead of 1'.format(d))
@@ -44,10 +47,10 @@ foreach lang : ['c', 'cpp']
# found in the compiler's default search path, GCC inserts an extra comment
# between the delimiter and the define which causes a parsing error.
# https://github.com/mesonbuild/meson/issues/1726
- if host_machine.system() == 'netbsd'
- # NetBSD's zlib doesn't is version 1.2.3 and doesn't have a
- # ZLIB_VER_MAJOR, but it does have a ZLIB_VERSION (which is a string), so
- # check the first non-quote character of that.
+ if host_machine.system() == 'netbsd' or host_machine.system() == 'openbsd'
+ # NetBSD and OpenBSD's zlib don't have a ZLIB_VER_MAJOR, but they do have
+ # a ZLIB_VERSION (which is a string), so check the first non-quote
+ # character of that.
ver = cc.get_define('ZLIB_VERSION', prefix : '#include <zlib.h>')[1]
assert(ver == '1', 'ZLIB_VERSION (major) value is "@0@" instead of "1"'.format(ver))
else
diff --git a/test cases/common/14 configure file/meson.build b/test cases/common/14 configure file/meson.build
index 53b06f3..50393e9 100644
--- a/test cases/common/14 configure file/meson.build
+++ b/test cases/common/14 configure file/meson.build
@@ -141,6 +141,19 @@ cfile = configure_file(input : 'config.h.in',
install_dir : '',
configuration : conf)
+# test install_dir : false (deprecated)
+cfile = configure_file(input : 'config.h.in',
+ output : 'do_not_get_installed_please.h',
+ install_dir : false,
+ configuration : conf)
+
+# test install_dir with install: false
+cfile = configure_file(input : 'config.h.in',
+ output : 'do_not_get_installed_in_install_dir.h',
+ install : false,
+ install_dir : 'share/appdir',
+ configuration : conf)
+
# Test escaping with cmake format
conf7 = configuration_data()
conf7.set('var1', 'foo')
diff --git a/test cases/common/170 dependency factory/meson.build b/test cases/common/170 dependency factory/meson.build
index 1b8ed17..2de0f0e 100644
--- a/test cases/common/170 dependency factory/meson.build
+++ b/test cases/common/170 dependency factory/meson.build
@@ -1,4 +1,4 @@
-project('dependency factory', meson_version : '>=0.40')
+project('dependency factory', 'c', meson_version : '>=0.40')
dep = dependency('gl', method: 'pkg-config', required: false)
if dep.found() and dep.type_name() == 'pkgconfig'
diff --git a/test cases/common/19 includedir/src/meson.build b/test cases/common/19 includedir/src/meson.build
index 20d9db1..30d2e0c 100644
--- a/test cases/common/19 includedir/src/meson.build
+++ b/test cases/common/19 includedir/src/meson.build
@@ -1,5 +1,5 @@
exe = executable('prog', 'prog.c', 'func.c', include_directories : inc)
test('inc test', exe)
-exe2 = executable('prog2', 'prog.c', 'func.c', include_directories : '../include')
+exe2 = executable('prog2', 'prog.c', 'func.c', include_directories : [['../include']])
test('inc test 2', exe2)
diff --git a/test cases/common/190 openmp/main.f90 b/test cases/common/190 openmp/main.f90
index c062d86..d80f90f 100644
--- a/test cases/common/190 openmp/main.f90
+++ b/test cases/common/190 openmp/main.f90
@@ -1,8 +1,9 @@
-program main
- if (omp_get_max_threads() .eq. 2) then
- stop 0
- else
- print *, 'Max threads is', omp_get_max_threads(), 'not 2.'
- stop 1
- endif
-end program main
+use, intrinsic :: iso_fortran_env, only: stderr=>error_unit
+use omp_lib
+
+if (omp_get_max_threads() /= 2) then
+ write(stderr, *) 'Max Fortran threads is', omp_get_max_threads(), 'not 2.'
+ stop 1
+endif
+
+end program
diff --git a/test cases/common/190 openmp/meson.build b/test cases/common/190 openmp/meson.build
index f4652db..71bf697 100644
--- a/test cases/common/190 openmp/meson.build
+++ b/test cases/common/190 openmp/meson.build
@@ -38,11 +38,19 @@ test('OpenMP C++', execpp, env : env)
if add_languages('fortran', required : false)
- exef = executable('exef',
- 'main.f90',
- dependencies : [openmp])
-
- test('OpenMP Fortran', execpp, env : env)
+ # Mixing compilers (msvc/clang with gfortran) does not seem to work on Windows.
+ if build_machine.system() != 'windows' or cc.get_id() == 'gnu'
+ exef = executable('exef',
+ 'main.f90',
+ dependencies : [openmp])
+ test('OpenMP Fortran', exef, env : env)
+
+ openmp_f = dependency('openmp', language : 'fortran')
+ exe_f = executable('exe_f',
+ 'main.f90',
+ dependencies : [openmp_f])
+ test('OpenMP Fortran-specific', exe_f, env : env)
+ endif
endif
# Check we can apply a version constraint
diff --git a/test cases/common/209 custom target build by default/docgen.py b/test cases/common/209 custom target build by default/docgen.py
new file mode 100644
index 0000000..f343f21
--- /dev/null
+++ b/test cases/common/209 custom target build by default/docgen.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import os
+import sys
+
+out = sys.argv[1]
+
+os.mkdir(out)
+
+for name in ('a', 'b', 'c'):
+ with open(os.path.join(out, name + '.txt'), 'w') as f:
+ f.write(name)
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/evilFile.txt b/test cases/common/209 custom target build by default/installed_files.txt
index e69de29..e69de29 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/evilFile.txt
+++ b/test cases/common/209 custom target build by default/installed_files.txt
diff --git a/test cases/common/209 custom target build by default/meson.build b/test cases/common/209 custom target build by default/meson.build
new file mode 100644
index 0000000..7c81aa2
--- /dev/null
+++ b/test cases/common/209 custom target build by default/meson.build
@@ -0,0 +1,10 @@
+project('custom-target-dir-install', 'c')
+
+docgen = find_program('docgen.py')
+
+custom_target('docgen',
+ output : 'html',
+ command : [docgen, '@OUTPUT@'],
+ install : true,
+ build_by_default : false,
+ install_dir : join_paths(get_option('datadir'), 'doc/testpkgname'))
diff --git a/test cases/common/209 find_library and headers/foo.h b/test cases/common/210 find_library and headers/foo.h
index 014e06e..014e06e 100644
--- a/test cases/common/209 find_library and headers/foo.h
+++ b/test cases/common/210 find_library and headers/foo.h
diff --git a/test cases/common/209 find_library and headers/meson.build b/test cases/common/210 find_library and headers/meson.build
index bcd71f1..bcd71f1 100644
--- a/test cases/common/209 find_library and headers/meson.build
+++ b/test cases/common/210 find_library and headers/meson.build
diff --git a/test cases/common/211 line continuation/meson.build b/test cases/common/211 line continuation/meson.build
new file mode 100644
index 0000000..16c72f9
--- /dev/null
+++ b/test cases/common/211 line continuation/meson.build
@@ -0,0 +1,17 @@
+project('line continuation')
+
+a = 1
+b = 2
+
+c = a \
++b
+assert(c == 3, 'Line continuation is not working')
+
+d = a + \
+ b
+assert(d == 3, 'Line continuation is not working')
+
+if a == 1 and \
+ b == 3
+ error('Line continuation in "if" condition is not working')
+endif
diff --git a/test cases/common/212 cmake module/cmake_project/CMakeLists.txt b/test cases/common/212 cmake module/cmake_project/CMakeLists.txt
new file mode 100644
index 0000000..cd91584
--- /dev/null
+++ b/test cases/common/212 cmake module/cmake_project/CMakeLists.txt
@@ -0,0 +1,4 @@
+cmake_minimum_required(VERSION 2.8)
+project(cmakeMeson C)
+
+find_package(cmakeModule REQUIRED) \ No newline at end of file
diff --git a/test cases/common/212 cmake module/installed_files.txt b/test cases/common/212 cmake module/installed_files.txt
new file mode 100644
index 0000000..f8b11f0
--- /dev/null
+++ b/test cases/common/212 cmake module/installed_files.txt
@@ -0,0 +1,2 @@
+usr/lib/cmake/cmakeModule/cmakeModuleConfig.cmake
+usr/lib/cmake/cmakeModule/cmakeModuleConfigVersion.cmake \ No newline at end of file
diff --git a/test cases/common/212 cmake module/meson.build b/test cases/common/212 cmake module/meson.build
new file mode 100644
index 0000000..68f9993
--- /dev/null
+++ b/test cases/common/212 cmake module/meson.build
@@ -0,0 +1,31 @@
+project('cmakeModule', 'c', version: '1.0.0')
+
+if build_machine.system() == 'cygwin'
+ error('MESON_SKIP_TEST CMake is broken on Cygwin.')
+endif
+
+cmake_bin = find_program('cmake', required: false)
+if not cmake_bin.found()
+ error('MESON_SKIP_TEST CMake not installed.')
+endif
+
+cc = meson.get_compiler('c')
+if cc.get_id() == 'clang-cl' and meson.backend() == 'ninja' and build_machine.system() == 'windows'
+ error('MESON_SKIP_TEST CMake installation nor operational for vs2017 clangclx64ninja')
+endif
+
+cmake = import('cmake')
+
+cmake.write_basic_package_version_file(version: '0.0.1',
+ name: 'cmakeModule',
+)
+
+conf = configuration_data()
+conf.set('MYVAR', 'my variable value')
+conf.set_quoted('MYQUOTEDVAR', 'my quoted variable value')
+
+cmake.configure_package_config_file(
+ input: 'projectConfig.cmake.in',
+ name: 'cmakeModule',
+ configuration: conf,
+)
diff --git a/test cases/common/212 cmake module/projectConfig.cmake.in b/test cases/common/212 cmake module/projectConfig.cmake.in
new file mode 100644
index 0000000..fa3dfca
--- /dev/null
+++ b/test cases/common/212 cmake module/projectConfig.cmake.in
@@ -0,0 +1,4 @@
+@PACKAGE_INIT@
+
+set(MYVAR "@MYVAR@")
+set(MYQUOTEDVAR @MYQUOTEDVAR@)
diff --git a/test cases/common/213 native file path override/installed_files.txt b/test cases/common/213 native file path override/installed_files.txt
new file mode 100644
index 0000000..0044d40
--- /dev/null
+++ b/test cases/common/213 native file path override/installed_files.txt
@@ -0,0 +1,2 @@
+usr/custom_bindir/main?exe
+?msvc:usr/custom_bindir/main.pdb
diff --git a/test cases/common/213 native file path override/main.cpp b/test cases/common/213 native file path override/main.cpp
new file mode 100644
index 0000000..d65cab2
--- /dev/null
+++ b/test cases/common/213 native file path override/main.cpp
@@ -0,0 +1,5 @@
+#include <iostream>
+
+int main() {
+ std::cout << "Hello world!" << std::endl;
+}
diff --git a/test cases/common/213 native file path override/meson.build b/test cases/common/213 native file path override/meson.build
new file mode 100644
index 0000000..142ca1c
--- /dev/null
+++ b/test cases/common/213 native file path override/meson.build
@@ -0,0 +1,7 @@
+project('native file install dir override', 'cpp')
+
+if meson.is_cross_build()
+ error('MESON_SKIP_TEST cannot test native build rules in cross build')
+endif
+
+executable('main', 'main.cpp', install : true)
diff --git a/test cases/common/213 native file path override/nativefile.ini b/test cases/common/213 native file path override/nativefile.ini
new file mode 100644
index 0000000..1c295c7
--- /dev/null
+++ b/test cases/common/213 native file path override/nativefile.ini
@@ -0,0 +1,2 @@
+[paths]
+bindir = 'custom_bindir'
diff --git a/test cases/common/214 tap tests/meson.build b/test cases/common/214 tap tests/meson.build
new file mode 100644
index 0000000..58529a7
--- /dev/null
+++ b/test cases/common/214 tap tests/meson.build
@@ -0,0 +1,10 @@
+project('test features', 'c')
+
+tester = executable('tester', 'tester.c')
+test('pass', tester, args : ['ok'], protocol: 'tap')
+test('fail', tester, args : ['not ok'], should_fail: true, protocol: 'tap')
+test('xfail', tester, args : ['not ok # todo'], protocol: 'tap')
+test('xpass', tester, args : ['ok # todo'], should_fail: true, protocol: 'tap')
+test('skip', tester, args : ['ok # skip'], protocol: 'tap')
+test('skip failure', tester, args : ['not ok # skip'], should_fail: true, protocol: 'tap')
+test('no tests', tester, args : ['1..0 # skip'], protocol: 'tap')
diff --git a/test cases/common/214 tap tests/tester.c b/test cases/common/214 tap tests/tester.c
new file mode 100644
index 0000000..ac582e7
--- /dev/null
+++ b/test cases/common/214 tap tests/tester.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ if (argc != 2) {
+ fprintf(stderr, "Incorrect number of arguments, got %i\n", argc);
+ return 1;
+ }
+ puts(argv[1]);
+ return 0;
+}
diff --git a/test cases/common/215 warning level 0/main.cpp b/test cases/common/215 warning level 0/main.cpp
new file mode 100644
index 0000000..954d9ce
--- /dev/null
+++ b/test cases/common/215 warning level 0/main.cpp
@@ -0,0 +1,12 @@
+#include <iostream>
+
+#define PROJECT_NAME "demo"
+
+int main(int argc, char **argv) {
+ if(argc != 1) {
+ std::cout << argv[0] << "takes no arguments.\n";
+ return 1;
+ }
+ std::cout << "This is project " << PROJECT_NAME << ".\n";
+ return 0;
+}
diff --git a/test cases/common/215 warning level 0/meson.build b/test cases/common/215 warning level 0/meson.build
new file mode 100644
index 0000000..f2bd339
--- /dev/null
+++ b/test cases/common/215 warning level 0/meson.build
@@ -0,0 +1,3 @@
+project('warning_level', 'cpp', default_options : ['warning_level=0'])
+
+exe = executable('main', 'main.cpp', install : false)
diff --git a/test cases/common/23 object extraction/meson.build b/test cases/common/23 object extraction/meson.build
index d99ec84..6776b14 100644
--- a/test cases/common/23 object extraction/meson.build
+++ b/test cases/common/23 object extraction/meson.build
@@ -8,10 +8,13 @@ else
obj1 = lib1.extract_objects('src/lib.c')
obj2 = lib2.extract_objects(['lib.c'])
+ obj3 = lib2.extract_objects(files('lib.c'))
e1 = executable('main1', 'main.c', objects : obj1)
e2 = executable('main2', 'main.c', objects : obj2)
+ e3 = executable('main3', 'main.c', objects : obj3)
test('extraction test 1', e1)
test('extraction test 2', e2)
+ test('extraction test 3', e3)
endif
diff --git a/test cases/common/36 run program/meson.build b/test cases/common/36 run program/meson.build
index a05cea3..93897e3 100644
--- a/test cases/common/36 run program/meson.build
+++ b/test cases/common/36 run program/meson.build
@@ -65,6 +65,12 @@ ret = run_command(py3, '-c', 'print("some output")', capture : false)
assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr())
assert(ret.stdout() == '', 'stdout is "@0@" instead of empty'.format(ret.stdout()))
+c_env = environment()
+c_env.append('CUSTOM_ENV_VAR', 'FOOBAR')
+ret = run_command(py3, '-c', 'import os; print(os.environ.get("CUSTOM_ENV_VAR"))', env : c_env)
+assert(ret.returncode() == 0, 'failed to run python3: ' + ret.stderr())
+assert(ret.stdout() == 'FOOBAR\n', 'stdout is "@0@" instead of FOOBAR'.format(ret.stdout()))
+
dd = find_program('dd', required : false)
if dd.found()
ret = run_command(dd, 'if=/dev/urandom', 'bs=10', 'count=1', capture: false)
diff --git a/test cases/common/48 pkgconfig-gen/dependencies/meson.build b/test cases/common/48 pkgconfig-gen/dependencies/meson.build
index 2dad393..c72f96b 100644
--- a/test cases/common/48 pkgconfig-gen/dependencies/meson.build
+++ b/test cases/common/48 pkgconfig-gen/dependencies/meson.build
@@ -28,8 +28,8 @@ custom2_dep = declare_dependency(link_args : ['-lcustom2'], compile_args : ['-DC
# - Having pc_dep in libraries_private should add it in Requires.private
# - pc_dep_dup is the same library and same version, should be ignored
# - notfound_dep is not required so it shouldn't appear in the pc file.
-pkgg.generate(libraries : [main_lib, exposed_lib, threads_dep , custom_dep],
- libraries_private : [custom_dep, custom2_dep, pc_dep, pc_dep_dup, notfound_dep],
+pkgg.generate(libraries : [main_lib, exposed_lib, threads_dep, threads_dep, custom_dep, custom_dep, '-pthread'],
+ libraries_private : [custom_dep, custom2_dep, custom2_dep, pc_dep, pc_dep_dup, notfound_dep],
version : '1.0',
name : 'dependency-test',
filebase : 'dependency-test',
diff --git a/test cases/common/53 custom target/depfile/dep.py b/test cases/common/53 custom target/depfile/dep.py
index aff325b..476e88b 100755
--- a/test cases/common/53 custom target/depfile/dep.py
+++ b/test cases/common/53 custom target/depfile/dep.py
@@ -7,7 +7,7 @@ _, srcdir, depfile, output = sys.argv
depfiles = glob(os.path.join(srcdir, '*'))
-quoted_depfiles = [x.replace(' ', '\ ') for x in depfiles]
+quoted_depfiles = [x.replace(' ', r'\ ') for x in depfiles]
with open(output, 'w') as f:
f.write('I am the result of globbing.')
diff --git a/test cases/common/84 declare dep/entity/meson.build b/test cases/common/84 declare dep/entity/meson.build
index 5feb1b9..469ecd3 100644
--- a/test cases/common/84 declare dep/entity/meson.build
+++ b/test cases/common/84 declare dep/entity/meson.build
@@ -1,7 +1,7 @@
entity_lib = static_library('entity', 'entity1.c')
entity_dep = declare_dependency(link_with : [[entity_lib]],
- include_directories : '.',
+ include_directories : [['.']],
sources : 'entity2.c',
compile_args : ['-DUSING_ENT=1'],
version : '1.2.3',
diff --git a/test cases/common/87 identical target name in subproject/meson.build b/test cases/common/87 identical target name in subproject/meson.build
index e804d3c..c5be5b7 100644
--- a/test cases/common/87 identical target name in subproject/meson.build
+++ b/test cases/common/87 identical target name in subproject/meson.build
@@ -4,3 +4,4 @@ subproject('foo')
executable('bar', 'bar.c')
run_target('nop', command : ['true'])
+custom_target('cus', output: ['cus.c'], command : ['true'])
diff --git a/test cases/common/87 identical target name in subproject/subprojects/foo/meson.build b/test cases/common/87 identical target name in subproject/subprojects/foo/meson.build
index 3f22337..06addb4 100644
--- a/test cases/common/87 identical target name in subproject/subprojects/foo/meson.build
+++ b/test cases/common/87 identical target name in subproject/subprojects/foo/meson.build
@@ -2,3 +2,4 @@ project('subfoo', 'c')
executable('bar', 'bar.c')
run_target('nop', command : ['true'])
+custom_target('cus', output: ['cus.c'], command : ['true'])
diff --git a/test cases/common/97 test workdir/meson.build b/test cases/common/97 test workdir/meson.build
index 1323a17..a8290f7 100644
--- a/test cases/common/97 test workdir/meson.build
+++ b/test cases/common/97 test workdir/meson.build
@@ -4,3 +4,5 @@ exe = executable('opener', 'opener.c')
test('basic', exe, workdir : meson.source_root())
test('shouldfail', exe, should_fail : true)
+
+subdir('subdir')
diff --git a/test cases/common/97 test workdir/subdir/checker.py b/test cases/common/97 test workdir/subdir/checker.py
new file mode 100755
index 0000000..66e287d
--- /dev/null
+++ b/test cases/common/97 test workdir/subdir/checker.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python3
+
+import sys
+
+data = open(sys.argv[1], 'rb').read()
diff --git a/test cases/common/97 test workdir/subdir/meson.build b/test cases/common/97 test workdir/subdir/meson.build
new file mode 100644
index 0000000..687a1cf
--- /dev/null
+++ b/test cases/common/97 test workdir/subdir/meson.build
@@ -0,0 +1,4 @@
+exe2 = executable('dummy', '../opener.c')
+test('subdir', find_program('checker.py'),
+ workdir : meson.source_root(),
+ args: [exe2])
diff --git a/test cases/cuda/1 simple/meson.build b/test cases/cuda/1 simple/meson.build
new file mode 100644
index 0000000..19af734
--- /dev/null
+++ b/test cases/cuda/1 simple/meson.build
@@ -0,0 +1,5 @@
+project('simple', 'cuda', version : '1.0.0')
+
+exe = executable('prog', 'prog.cu')
+test('cudatest', exe)
+
diff --git a/test cases/cuda/1 simple/prog.cu b/test cases/cuda/1 simple/prog.cu
new file mode 100644
index 0000000..7eab673
--- /dev/null
+++ b/test cases/cuda/1 simple/prog.cu
@@ -0,0 +1,30 @@
+#include <iostream>
+
+int main(int argc, char **argv) {
+ int cuda_devices = 0;
+ std::cout << "CUDA version: " << CUDART_VERSION << "\n";
+ cudaGetDeviceCount(&cuda_devices);
+ if(cuda_devices == 0) {
+ std::cout << "No Cuda hardware found. Exiting.\n";
+ return 0;
+ }
+ std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n";
+ cudaDeviceProp props;
+ cudaGetDeviceProperties(&props, 0);
+ std::cout << "Properties of device 0.\n\n";
+
+ std::cout << " Name: " << props.name << "\n";
+ std::cout << " Global memory: " << props.totalGlobalMem << "\n";
+ std::cout << " Shared memory: " << props.sharedMemPerBlock << "\n";
+ std::cout << " Constant memory: " << props.totalConstMem << "\n";
+ std::cout << " Block registers: " << props.regsPerBlock << "\n";
+
+ std::cout << " Warp size: " << props.warpSize << "\n";
+ std::cout << " Threads per block: " << props.maxThreadsPerBlock << "\n";
+ std::cout << " Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1] << ", " << props.maxThreadsDim[2] << " ]" << "\n";
+ std::cout << " Max grid dimensions: [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1] << ", " << props.maxGridSize[2] << " ]" << "\n";
+ std::cout << "\n";
+
+ return 0;
+}
+
diff --git a/test cases/cuda/2 split/lib.cu b/test cases/cuda/2 split/lib.cu
new file mode 100644
index 0000000..c0471d0
--- /dev/null
+++ b/test cases/cuda/2 split/lib.cu
@@ -0,0 +1,13 @@
+#include <stdio.h>
+#include <iostream>
+
+__global__ void kernel (void){
+}
+
+int do_cuda_stuff() {
+ kernel<<<1,1>>>();
+
+ printf("Hello, World!\n");
+ return 0;
+}
+
diff --git a/test cases/cuda/2 split/main.cpp b/test cases/cuda/2 split/main.cpp
new file mode 100644
index 0000000..e5e6bda
--- /dev/null
+++ b/test cases/cuda/2 split/main.cpp
@@ -0,0 +1,7 @@
+#include<iostream>
+
+int do_cuda_stuff();
+
+int main(int argc, char **argv) {
+ return do_cuda_stuff();
+}
diff --git a/test cases/cuda/2 split/meson.build b/test cases/cuda/2 split/meson.build
new file mode 100644
index 0000000..51bf6ce
--- /dev/null
+++ b/test cases/cuda/2 split/meson.build
@@ -0,0 +1,7 @@
+project('simple', 'cuda', 'cpp')
+
+exe = executable('prog', 'main.cpp', 'lib.cu')
+test('cudatest', exe)
+
+subdir('static')
+
diff --git a/test cases/cuda/2 split/static/lib.cu b/test cases/cuda/2 split/static/lib.cu
new file mode 100644
index 0000000..c0471d0
--- /dev/null
+++ b/test cases/cuda/2 split/static/lib.cu
@@ -0,0 +1,13 @@
+#include <stdio.h>
+#include <iostream>
+
+__global__ void kernel (void){
+}
+
+int do_cuda_stuff() {
+ kernel<<<1,1>>>();
+
+ printf("Hello, World!\n");
+ return 0;
+}
+
diff --git a/test cases/cuda/2 split/static/libsta.cu b/test cases/cuda/2 split/static/libsta.cu
new file mode 100644
index 0000000..c0471d0
--- /dev/null
+++ b/test cases/cuda/2 split/static/libsta.cu
@@ -0,0 +1,13 @@
+#include <stdio.h>
+#include <iostream>
+
+__global__ void kernel (void){
+}
+
+int do_cuda_stuff() {
+ kernel<<<1,1>>>();
+
+ printf("Hello, World!\n");
+ return 0;
+}
+
diff --git a/test cases/cuda/2 split/static/main_static.cpp b/test cases/cuda/2 split/static/main_static.cpp
new file mode 100644
index 0000000..e5e6bda
--- /dev/null
+++ b/test cases/cuda/2 split/static/main_static.cpp
@@ -0,0 +1,7 @@
+#include<iostream>
+
+int do_cuda_stuff();
+
+int main(int argc, char **argv) {
+ return do_cuda_stuff();
+}
diff --git a/test cases/cuda/2 split/static/meson.build b/test cases/cuda/2 split/static/meson.build
new file mode 100644
index 0000000..9078198
--- /dev/null
+++ b/test cases/cuda/2 split/static/meson.build
@@ -0,0 +1,4 @@
+l = static_library('clib', 'lib.cu')
+exe = executable('staexe', 'main_static.cpp',
+ link_with : l)
+test('static Cuda test', exe)
diff --git a/test cases/cuda/3 cudamodule/meson.build b/test cases/cuda/3 cudamodule/meson.build
new file mode 100644
index 0000000..0dc9489
--- /dev/null
+++ b/test cases/cuda/3 cudamodule/meson.build
@@ -0,0 +1,16 @@
+project('cudamodule', 'cuda', version : '1.0.0')
+
+nvcc = meson.get_compiler('cuda')
+cuda = import('unstable-cuda')
+
+arch_flags = cuda.nvcc_arch_flags(nvcc, 'Auto', detected: ['3.0'])
+arch_readable = cuda.nvcc_arch_readable(nvcc, 'Auto', detected: ['3.0'])
+driver_version = cuda.min_driver_version(nvcc)
+
+message('NVCC version: ' + nvcc.version())
+message('NVCC flags: ' + ' '.join(arch_flags))
+message('NVCC readable: ' + ' '.join(arch_readable))
+message('Driver version: >=' + driver_version)
+
+exe = executable('prog', 'prog.cu', cuda_args: arch_flags)
+test('cudatest', exe)
diff --git a/test cases/cuda/3 cudamodule/prog.cu b/test cases/cuda/3 cudamodule/prog.cu
new file mode 100644
index 0000000..7eab673
--- /dev/null
+++ b/test cases/cuda/3 cudamodule/prog.cu
@@ -0,0 +1,30 @@
+#include <iostream>
+
+int main(int argc, char **argv) {
+ int cuda_devices = 0;
+ std::cout << "CUDA version: " << CUDART_VERSION << "\n";
+ cudaGetDeviceCount(&cuda_devices);
+ if(cuda_devices == 0) {
+ std::cout << "No Cuda hardware found. Exiting.\n";
+ return 0;
+ }
+ std::cout << "This computer has " << cuda_devices << " Cuda device(s).\n";
+ cudaDeviceProp props;
+ cudaGetDeviceProperties(&props, 0);
+ std::cout << "Properties of device 0.\n\n";
+
+ std::cout << " Name: " << props.name << "\n";
+ std::cout << " Global memory: " << props.totalGlobalMem << "\n";
+ std::cout << " Shared memory: " << props.sharedMemPerBlock << "\n";
+ std::cout << " Constant memory: " << props.totalConstMem << "\n";
+ std::cout << " Block registers: " << props.regsPerBlock << "\n";
+
+ std::cout << " Warp size: " << props.warpSize << "\n";
+ std::cout << " Threads per block: " << props.maxThreadsPerBlock << "\n";
+ std::cout << " Max block dimensions: [ " << props.maxThreadsDim[0] << ", " << props.maxThreadsDim[1] << ", " << props.maxThreadsDim[2] << " ]" << "\n";
+ std::cout << " Max grid dimensions: [ " << props.maxGridSize[0] << ", " << props.maxGridSize[1] << ", " << props.maxGridSize[2] << " ]" << "\n";
+ std::cout << "\n";
+
+ return 0;
+}
+
diff --git a/test cases/cuda/4 shared/main.cu b/test cases/cuda/4 shared/main.cu
new file mode 100644
index 0000000..d251167
--- /dev/null
+++ b/test cases/cuda/4 shared/main.cu
@@ -0,0 +1,20 @@
+#include <stdio.h>
+#include <cuda_runtime.h>
+#include "shared/kernels.h"
+
+
+int main(int argc, char **argv) {
+ int cuda_devices = 0;
+ cudaGetDeviceCount(&cuda_devices);
+ if(cuda_devices == 0) {
+ printf("No Cuda hardware found. Exiting.\n");
+ return 0;
+ }
+
+ if(run_tests() != 0){
+ printf("CUDA tests failed! Exiting.\n");
+ return 0;
+ }
+
+ return 0;
+}
diff --git a/test cases/cuda/4 shared/meson.build b/test cases/cuda/4 shared/meson.build
new file mode 100644
index 0000000..532aaeb
--- /dev/null
+++ b/test cases/cuda/4 shared/meson.build
@@ -0,0 +1,6 @@
+project('simple', 'cuda', version : '1.0.0')
+
+subdir('shared')
+
+exe = executable('prog', 'main.cu', dependencies: libkernels)
+test('cudatest', exe)
diff --git a/test cases/cuda/4 shared/shared/kernels.cu b/test cases/cuda/4 shared/shared/kernels.cu
new file mode 100644
index 0000000..41a9553
--- /dev/null
+++ b/test cases/cuda/4 shared/shared/kernels.cu
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include <cuda_runtime.h>
+#include "kernels.h"
+
+
+TAG_HIDDEN __global__ void kernel (void){
+}
+
+TAG_PUBLIC int run_tests(void) {
+ kernel<<<1,1>>>();
+
+ return (int)cudaDeviceSynchronize();
+}
+
diff --git a/test cases/cuda/4 shared/shared/kernels.h b/test cases/cuda/4 shared/shared/kernels.h
new file mode 100644
index 0000000..dbcb99d
--- /dev/null
+++ b/test cases/cuda/4 shared/shared/kernels.h
@@ -0,0 +1,86 @@
+/* Include Guard */
+#ifndef SHARED_KERNELS_H
+#define SHARED_KERNELS_H
+
+/**
+ * Includes
+ */
+
+#include <cuda_runtime.h>
+
+
+/**
+ * Defines
+ */
+
+/**
+ * When building a library, it is a good idea to expose as few as possible
+ * internal symbols (functions, objects, data structures). Not only does it
+ * prevent users from relying on private portions of the library that are
+ * subject to change without any notice, but it can have performance
+ * advantages:
+ *
+ * - It can make shared libraries link faster at dynamic-load time.
+ * - It can make internal function calls faster by bypassing the PLT.
+ *
+ * Thus, the compilation should by default hide all symbols, while the API
+ * headers will explicitly mark public the few symbols the users are permitted
+ * to use with a PUBLIC tag. We also define a HIDDEN tag, since it may be
+ * required to explicitly tag certain C++ types as visible in order for
+ * exceptions to function correctly.
+ *
+ * Additional complexity comes from non-POSIX-compliant systems, which
+ * artificially impose a requirement on knowing whether we are building or
+ * using a DLL.
+ *
+ * The above commentary and below code is inspired from
+ * 'https://gcc.gnu.org/wiki/Visibility'
+ */
+
+#if defined(_WIN32) || defined(__CYGWIN__)
+# define TAG_ATTRIBUTE_EXPORT __declspec(dllexport)
+# define TAG_ATTRIBUTE_IMPORT __declspec(dllimport)
+# define TAG_ATTRIBUTE_HIDDEN
+#elif __GNUC__ >= 4
+# define TAG_ATTRIBUTE_EXPORT __attribute__((visibility("default")))
+# define TAG_ATTRIBUTE_IMPORT __attribute__((visibility("default")))
+# define TAG_ATTRIBUTE_HIDDEN __attribute__((visibility("hidden")))
+#else
+# define TAG_ATTRIBUTE_EXPORT
+# define TAG_ATTRIBUTE_IMPORT
+# define TAG_ATTRIBUTE_HIDDEN
+#endif
+
+#if TAG_IS_SHARED
+# if TAG_IS_BUILDING
+# define TAG_PUBLIC TAG_ATTRIBUTE_EXPORT
+# else
+# define TAG_PUBLIC TAG_ATTRIBUTE_IMPORT
+# endif
+# define TAG_HIDDEN TAG_ATTRIBUTE_HIDDEN
+#else
+# define TAG_PUBLIC
+# define TAG_HIDDEN
+#endif
+#define TAG_STATIC static
+
+
+
+
+/* Extern "C" Guard */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+/* Function Prototypes */
+TAG_PUBLIC int run_tests(void);
+
+
+
+/* End Extern "C" and Include Guard */
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/test cases/cuda/4 shared/shared/meson.build b/test cases/cuda/4 shared/shared/meson.build
new file mode 100644
index 0000000..5987916
--- /dev/null
+++ b/test cases/cuda/4 shared/shared/meson.build
@@ -0,0 +1,5 @@
+libkernels = shared_library('kernels', 'kernels.cu',
+ cuda_args: ['-DTAG_IS_SHARED=1', '-DTAG_IS_BUILDING=1'],
+ gnu_symbol_visibility: 'hidden')
+libkernels = declare_dependency(compile_args: ['-DTAG_IS_SHARED=1'],
+ link_with: libkernels)
diff --git a/test cases/cuda/5 threads/main.cu b/test cases/cuda/5 threads/main.cu
new file mode 100644
index 0000000..d251167
--- /dev/null
+++ b/test cases/cuda/5 threads/main.cu
@@ -0,0 +1,20 @@
+#include <stdio.h>
+#include <cuda_runtime.h>
+#include "shared/kernels.h"
+
+
+int main(int argc, char **argv) {
+ int cuda_devices = 0;
+ cudaGetDeviceCount(&cuda_devices);
+ if(cuda_devices == 0) {
+ printf("No Cuda hardware found. Exiting.\n");
+ return 0;
+ }
+
+ if(run_tests() != 0){
+ printf("CUDA tests failed! Exiting.\n");
+ return 0;
+ }
+
+ return 0;
+}
diff --git a/test cases/cuda/5 threads/meson.build b/test cases/cuda/5 threads/meson.build
new file mode 100644
index 0000000..2a804a3
--- /dev/null
+++ b/test cases/cuda/5 threads/meson.build
@@ -0,0 +1,7 @@
+project('simple', 'cuda', version : '1.0.0')
+
+subdir('shared')
+
+thread_dep = dependency('threads')
+exe = executable('prog', 'main.cu', dependencies: [libkernels, thread_dep])
+test('cudatest', exe)
diff --git a/test cases/cuda/5 threads/shared/kernels.cu b/test cases/cuda/5 threads/shared/kernels.cu
new file mode 100644
index 0000000..41a9553
--- /dev/null
+++ b/test cases/cuda/5 threads/shared/kernels.cu
@@ -0,0 +1,14 @@
+#include <stdio.h>
+#include <cuda_runtime.h>
+#include "kernels.h"
+
+
+TAG_HIDDEN __global__ void kernel (void){
+}
+
+TAG_PUBLIC int run_tests(void) {
+ kernel<<<1,1>>>();
+
+ return (int)cudaDeviceSynchronize();
+}
+
diff --git a/test cases/cuda/5 threads/shared/kernels.h b/test cases/cuda/5 threads/shared/kernels.h
new file mode 100644
index 0000000..dbcb99d
--- /dev/null
+++ b/test cases/cuda/5 threads/shared/kernels.h
@@ -0,0 +1,86 @@
+/* Include Guard */
+#ifndef SHARED_KERNELS_H
+#define SHARED_KERNELS_H
+
+/**
+ * Includes
+ */
+
+#include <cuda_runtime.h>
+
+
+/**
+ * Defines
+ */
+
+/**
+ * When building a library, it is a good idea to expose as few as possible
+ * internal symbols (functions, objects, data structures). Not only does it
+ * prevent users from relying on private portions of the library that are
+ * subject to change without any notice, but it can have performance
+ * advantages:
+ *
+ * - It can make shared libraries link faster at dynamic-load time.
+ * - It can make internal function calls faster by bypassing the PLT.
+ *
+ * Thus, the compilation should by default hide all symbols, while the API
+ * headers will explicitly mark public the few symbols the users are permitted
+ * to use with a PUBLIC tag. We also define a HIDDEN tag, since it may be
+ * required to explicitly tag certain C++ types as visible in order for
+ * exceptions to function correctly.
+ *
+ * Additional complexity comes from non-POSIX-compliant systems, which
+ * artificially impose a requirement on knowing whether we are building or
+ * using a DLL.
+ *
+ * The above commentary and below code is inspired from
+ * 'https://gcc.gnu.org/wiki/Visibility'
+ */
+
+#if defined(_WIN32) || defined(__CYGWIN__)
+# define TAG_ATTRIBUTE_EXPORT __declspec(dllexport)
+# define TAG_ATTRIBUTE_IMPORT __declspec(dllimport)
+# define TAG_ATTRIBUTE_HIDDEN
+#elif __GNUC__ >= 4
+# define TAG_ATTRIBUTE_EXPORT __attribute__((visibility("default")))
+# define TAG_ATTRIBUTE_IMPORT __attribute__((visibility("default")))
+# define TAG_ATTRIBUTE_HIDDEN __attribute__((visibility("hidden")))
+#else
+# define TAG_ATTRIBUTE_EXPORT
+# define TAG_ATTRIBUTE_IMPORT
+# define TAG_ATTRIBUTE_HIDDEN
+#endif
+
+#if TAG_IS_SHARED
+# if TAG_IS_BUILDING
+# define TAG_PUBLIC TAG_ATTRIBUTE_EXPORT
+# else
+# define TAG_PUBLIC TAG_ATTRIBUTE_IMPORT
+# endif
+# define TAG_HIDDEN TAG_ATTRIBUTE_HIDDEN
+#else
+# define TAG_PUBLIC
+# define TAG_HIDDEN
+#endif
+#define TAG_STATIC static
+
+
+
+
+/* Extern "C" Guard */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+/* Function Prototypes */
+TAG_PUBLIC int run_tests(void);
+
+
+
+/* End Extern "C" and Include Guard */
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/test cases/cuda/5 threads/shared/meson.build b/test cases/cuda/5 threads/shared/meson.build
new file mode 100644
index 0000000..5987916
--- /dev/null
+++ b/test cases/cuda/5 threads/shared/meson.build
@@ -0,0 +1,5 @@
+libkernels = shared_library('kernels', 'kernels.cu',
+ cuda_args: ['-DTAG_IS_SHARED=1', '-DTAG_IS_BUILDING=1'],
+ gnu_symbol_visibility: 'hidden')
+libkernels = declare_dependency(compile_args: ['-DTAG_IS_SHARED=1'],
+ link_with: libkernels)
diff --git a/test cases/failing test/4 hard error/main.c b/test cases/failing test/4 hard error/main.c
new file mode 100644
index 0000000..a1e705a
--- /dev/null
+++ b/test cases/failing test/4 hard error/main.c
@@ -0,0 +1,3 @@
+int main(void) {
+ return 99;
+}
diff --git a/test cases/failing test/4 hard error/meson.build b/test cases/failing test/4 hard error/meson.build
new file mode 100644
index 0000000..6979b04
--- /dev/null
+++ b/test cases/failing test/4 hard error/meson.build
@@ -0,0 +1,4 @@
+project('trivial', 'c')
+
+# Exit code 99 even overrides should_fail
+test('My Test', executable('main', 'main.c'), should_fail: true)
diff --git a/test cases/failing test/5 tap tests/meson.build b/test cases/failing test/5 tap tests/meson.build
new file mode 100644
index 0000000..844c1f9
--- /dev/null
+++ b/test cases/failing test/5 tap tests/meson.build
@@ -0,0 +1,6 @@
+project('test features', 'c')
+
+tester = executable('tester', 'tester.c')
+test('nonzero return code', tester, args : [], protocol: 'tap')
+test('missing test', tester, args : ['1..1'], protocol: 'tap')
+test('incorrect skip', tester, args : ['1..1 # skip\nok 1'], protocol: 'tap')
diff --git a/test cases/failing test/5 tap tests/tester.c b/test cases/failing test/5 tap tests/tester.c
new file mode 100644
index 0000000..ac582e7
--- /dev/null
+++ b/test cases/failing test/5 tap tests/tester.c
@@ -0,0 +1,10 @@
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ if (argc != 2) {
+ fprintf(stderr, "Incorrect number of arguments, got %i\n", argc);
+ return 1;
+ }
+ puts(argv[1]);
+ return 0;
+}
diff --git a/test cases/failing/91 invalid configure file/input b/test cases/failing/91 invalid configure file/input
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/failing/91 invalid configure file/input
diff --git a/test cases/failing/91 invalid configure file/meson.build b/test cases/failing/91 invalid configure file/meson.build
new file mode 100644
index 0000000..08eca2b
--- /dev/null
+++ b/test cases/failing/91 invalid configure file/meson.build
@@ -0,0 +1,9 @@
+project('invalid configura file')
+
+configure_file(
+ configuration : configuration_data(),
+ input : 'input',
+ output : 'output',
+ install_dir : '',
+ install : true,
+)
diff --git a/test cases/failing/91 kwarg dupe/meson.build b/test cases/failing/92 kwarg dupe/meson.build
index 06821a2..06821a2 100644
--- a/test cases/failing/91 kwarg dupe/meson.build
+++ b/test cases/failing/92 kwarg dupe/meson.build
diff --git a/test cases/failing/91 kwarg dupe/prog.c b/test cases/failing/92 kwarg dupe/prog.c
index 5f3fbe6..5f3fbe6 100644
--- a/test cases/failing/91 kwarg dupe/prog.c
+++ b/test cases/failing/92 kwarg dupe/prog.c
diff --git a/test cases/failing/92 missing pch file/meson.build b/test cases/failing/93 missing pch file/meson.build
index a67b798..a67b798 100644
--- a/test cases/failing/92 missing pch file/meson.build
+++ b/test cases/failing/93 missing pch file/meson.build
diff --git a/test cases/failing/92 missing pch file/prog.c b/test cases/failing/93 missing pch file/prog.c
index 11b7fad..11b7fad 100644
--- a/test cases/failing/92 missing pch file/prog.c
+++ b/test cases/failing/93 missing pch file/prog.c
diff --git a/test cases/failing/94 pch source different folder/include/pch.h b/test cases/failing/94 pch source different folder/include/pch.h
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/failing/94 pch source different folder/include/pch.h
diff --git a/test cases/failing/94 pch source different folder/meson.build b/test cases/failing/94 pch source different folder/meson.build
new file mode 100644
index 0000000..d320717
--- /dev/null
+++ b/test cases/failing/94 pch source different folder/meson.build
@@ -0,0 +1,5 @@
+project('pch', 'c')
+# It is not allowed to have the PCH implementation in a different
+# folder than the header.
+exe = executable('prog', 'prog.c',
+ c_pch : ['include/pch.h', 'src/pch.c'])
diff --git a/test cases/failing/94 pch source different folder/prog.c b/test cases/failing/94 pch source different folder/prog.c
new file mode 100644
index 0000000..c272dab
--- /dev/null
+++ b/test cases/failing/94 pch source different folder/prog.c
@@ -0,0 +1 @@
+int main() {} \ No newline at end of file
diff --git a/test cases/failing/94 pch source different folder/src/pch.c b/test cases/failing/94 pch source different folder/src/pch.c
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/failing/94 pch source different folder/src/pch.c
diff --git a/test cases/fortran/1 basic/meson.build b/test cases/fortran/1 basic/meson.build
index 833a177..042902f 100644
--- a/test cases/fortran/1 basic/meson.build
+++ b/test cases/fortran/1 basic/meson.build
@@ -1,6 +1,9 @@
project('simple fortran', 'fortran')
-add_global_arguments('-fbounds-check', language : 'fortran')
+fc = meson.get_compiler('fortran')
+if fc == 'gcc'
+ add_global_arguments('-fbounds-check', language : 'fortran')
+endif
e = executable('simple', 'simple.f90',
fortran_args : '-ffree-form')
diff --git a/test cases/fortran/10 find library/gzip.f90 b/test cases/fortran/10 find library/gzip.f90
index 2a7e7df..32f21d7 100644
--- a/test cases/fortran/10 find library/gzip.f90
+++ b/test cases/fortran/10 find library/gzip.f90
@@ -1,32 +1,32 @@
module gzip
- interface
- function gzopen(path, mode) bind(C)
- use iso_c_binding, only: c_char, c_ptr
- implicit none
- character(c_char), intent(in) :: path(*), mode(*)
- type(c_ptr) :: gzopen
- end function gzopen
- end interface
-
- interface
- function gzwrite(file, buf, len) bind(C)
- use iso_c_binding, only: c_int, c_ptr
- implicit none
- type(c_ptr), value, intent(in) :: file
- type(*), intent(in) :: buf
- integer(c_int), value, intent(in) :: len
- integer(c_int) :: gzwrite
- end function gzwrite
- end interface
-
- interface
- function gzclose(file) bind(C)
- use iso_c_binding, only: c_int, c_ptr
- implicit none
- type(c_ptr), value, intent(in) :: file
- integer(c_int) :: gzclose
- end function gzclose
- end interface
+use iso_c_binding, only: c_char, c_ptr, c_int
+implicit none
+
+interface
+type(c_ptr) function gzopen(path, mode) bind(C)
+import c_char, c_ptr
+
+character(kind=c_char), intent(in) :: path(*), mode(*)
+end function gzopen
+end interface
+
+interface
+integer(c_int) function gzwrite(file, buf, len) bind(C)
+import c_int, c_ptr, c_char
+
+type(c_ptr), value, intent(in) :: file
+character(kind=c_char), intent(in) :: buf
+integer(c_int), value, intent(in) :: len
+end function gzwrite
+end interface
+
+interface
+integer(c_int) function gzclose(file) bind(C)
+import c_int, c_ptr
+
+type(c_ptr), value, intent(in) :: file
+end function gzclose
+end interface
end module gzip
diff --git a/test cases/fortran/10 find library/main.f90 b/test cases/fortran/10 find library/main.f90
index 2550b44..1f5c039 100644
--- a/test cases/fortran/10 find library/main.f90
+++ b/test cases/fortran/10 find library/main.f90
@@ -1,40 +1,38 @@
-program main
-
- use iso_c_binding, only: c_int, c_char, c_null_char, c_ptr
- use gzip, only: gzopen, gzwrite, gzclose
-
- implicit none
-
- character(kind=c_char,len=*), parameter :: path = &
- c_char_"test.gz"//c_null_char
- character(kind=c_char,len=*), parameter :: mode = &
- c_char_"wb9"//c_null_char
- integer(c_int), parameter :: buffer_size = 512
-
- type(c_ptr) :: file
- character(len=buffer_size) :: buffer
- integer(c_int) :: ret
- integer :: i
-
- ! open file
- file = gzopen(path, mode)
-
- ! fill buffer with data
- do i=1,buffer_size/4
- write(buffer(4*(i-1)+1:4*i), '(i3.3, a)') i, new_line('')
- end do
- ret = gzwrite(file, buffer, buffer_size)
- if (ret.ne.buffer_size) then
- write(*,'(a, i3, a, i3, a)') 'Error: ', ret, ' / ', buffer_size, &
- ' bytes written.'
- stop 1
- end if
-
- ! close file
- ret = gzclose(file)
- if (ret.ne.0) then
- print *, 'Error: failure to close file with error code ', ret
- stop 1
- end if
-
-end program main
+
+use iso_fortran_env, only: stderr=>error_unit
+use iso_c_binding, only: c_int, c_char, c_null_char, c_ptr
+use gzip, only: gzopen, gzwrite, gzclose
+
+implicit none
+
+character(kind=c_char,len=*), parameter :: path = c_char_"test.gz"//c_null_char
+character(kind=c_char,len=*), parameter :: mode = c_char_"wb9"//c_null_char
+integer(c_int), parameter :: buffer_size = 512
+
+type(c_ptr) :: file
+character(kind=c_char, len=buffer_size) :: buffer
+integer(c_int) :: ret
+integer :: i
+
+! open file
+file = gzopen(path, mode)
+
+! fill buffer with data
+do i=1,buffer_size/4
+ write(buffer(4*(i-1)+1:4*i), '(i3.3, a)') i, new_line('')
+end do
+ret = gzwrite(file, buffer, buffer_size)
+if (ret /= buffer_size) then
+ write(stderr,'(a, i3, a, i3, a)') 'Error: ', ret, ' / ', buffer_size, &
+ ' bytes written.'
+ stop 1
+end if
+
+! close file
+ret = gzclose(file)
+if (ret /= 0) then
+ write(stderr,*) 'Error: failure to close file with error code ', ret
+ stop 1
+end if
+
+end program
diff --git a/test cases/fortran/11 compiles links runs/meson.build b/test cases/fortran/11 compiles links runs/meson.build
new file mode 100644
index 0000000..81eb907
--- /dev/null
+++ b/test cases/fortran/11 compiles links runs/meson.build
@@ -0,0 +1,20 @@
+project('compiles_links_runs', 'fortran')
+
+fc = meson.get_compiler('fortran')
+
+code = '''error stop 123; end'''
+
+if not fc.compiles(code)
+ error('Fortran 2008 code failed to compile')
+endif
+
+if not fc.links(code)
+ error('Fortran 2008 code failed to link')
+endif
+
+if fc.run(code).returncode() != 123
+ error('Fortran 2008 code failed to run')
+endif
+
+
+
diff --git a/test cases/fortran/12 submodule/a1.f90 b/test cases/fortran/12 submodule/a1.f90
new file mode 100644
index 0000000..cb44916
--- /dev/null
+++ b/test cases/fortran/12 submodule/a1.f90
@@ -0,0 +1,25 @@
+module a1
+implicit none
+
+interface
+module elemental real function pi2tau(pi)
+ real, intent(in) :: pi
+end function pi2tau
+
+module real function get_pi()
+end function get_pi
+end interface
+
+end module a1
+
+program hierN
+
+use a1
+
+pi = get_pi()
+
+tau = pi2tau(pi)
+
+print *,'pi=',pi,'tau=',tau
+
+end program
diff --git a/test cases/fortran/12 submodule/a2.f90 b/test cases/fortran/12 submodule/a2.f90
new file mode 100644
index 0000000..b3ce1f0
--- /dev/null
+++ b/test cases/fortran/12 submodule/a2.f90
@@ -0,0 +1,10 @@
+submodule (a1) a2
+
+contains
+
+module procedure pi2tau
+ pi2tau = 2*pi
+end procedure pi2tau
+
+
+end submodule a2
diff --git a/test cases/fortran/12 submodule/a3.f90 b/test cases/fortran/12 submodule/a3.f90
new file mode 100644
index 0000000..d6929b0
--- /dev/null
+++ b/test cases/fortran/12 submodule/a3.f90
@@ -0,0 +1,10 @@
+submodule (a1:a2) a3
+
+contains
+
+module procedure get_pi
+ get_pi = 4.*atan(1.)
+end procedure get_pi
+
+
+end submodule a3
diff --git a/test cases/fortran/12 submodule/child.f90 b/test cases/fortran/12 submodule/child.f90
new file mode 100644
index 0000000..aa5bb5e
--- /dev/null
+++ b/test cases/fortran/12 submodule/child.f90
@@ -0,0 +1,10 @@
+submodule (mother) daughter
+
+contains
+
+module procedure pi2tau
+ pi2tau = 2*pi
+end procedure pi2tau
+
+end submodule daughter
+
diff --git a/test cases/fortran/12 submodule/meson.build b/test cases/fortran/12 submodule/meson.build
new file mode 100644
index 0000000..cd62a30
--- /dev/null
+++ b/test cases/fortran/12 submodule/meson.build
@@ -0,0 +1,7 @@
+project('submodule single level', 'fortran')
+
+hier2 = executable('single', 'parent.f90','child.f90')
+test('single-level hierarchy', hier2)
+
+hierN = executable('multi', 'a1.f90', 'a2.f90', 'a3.f90')
+test('multi-level hierarchy', hierN)
diff --git a/test cases/fortran/12 submodule/parent.f90 b/test cases/fortran/12 submodule/parent.f90
new file mode 100644
index 0000000..05fe431
--- /dev/null
+++ b/test cases/fortran/12 submodule/parent.f90
@@ -0,0 +1,23 @@
+module mother
+real, parameter :: pi = 4.*atan(1.)
+real :: tau
+
+interface
+module elemental real function pi2tau(pi)
+ real, intent(in) :: pi
+end function pi2tau
+end interface
+
+contains
+
+end module mother
+
+
+program hier1
+use mother
+
+tau = pi2tau(pi)
+
+print *,'pi=',pi, 'tau=', tau
+
+end program
diff --git a/test cases/fortran/13 coarray/main.f90 b/test cases/fortran/13 coarray/main.f90
new file mode 100644
index 0000000..be60552
--- /dev/null
+++ b/test cases/fortran/13 coarray/main.f90
@@ -0,0 +1,9 @@
+implicit none
+
+if (this_image() == 1) print *, 'number of Fortran coarray images:', num_images()
+
+sync all ! image-control barrier: ensures the message above is printed first.
+
+print *, 'Process ', this_image()
+
+end program
diff --git a/test cases/fortran/13 coarray/meson.build b/test cases/fortran/13 coarray/meson.build
new file mode 100644
index 0000000..57aa29e
--- /dev/null
+++ b/test cases/fortran/13 coarray/meson.build
@@ -0,0 +1,10 @@
+project('Fortran coarray', 'fortran')
+
+# coarray is required because single-image fallback is an intrinsic feature
+coarray = dependency('coarray', required : true)
+
+exe = executable('hello', 'main.f90',
+ dependencies : coarray)
+
+test('Coarray hello world', exe)
+
diff --git a/test cases/fortran/15 include/inc1.f90 b/test cases/fortran/15 include/inc1.f90
new file mode 100644
index 0000000..0aec9ba
--- /dev/null
+++ b/test cases/fortran/15 include/inc1.f90
@@ -0,0 +1,5 @@
+
+real :: pi = 4.*atan(1.)
+real :: tau
+
+include "inc2.f90"
diff --git a/test cases/fortran/15 include/inc2.f90 b/test cases/fortran/15 include/inc2.f90
new file mode 100644
index 0000000..065b990
--- /dev/null
+++ b/test cases/fortran/15 include/inc2.f90
@@ -0,0 +1,2 @@
+
+tau = 2*pi
diff --git a/test cases/fortran/15 include/main.f90 b/test cases/fortran/15 include/main.f90
new file mode 100644
index 0000000..661aa62
--- /dev/null
+++ b/test cases/fortran/15 include/main.f90
@@ -0,0 +1,8 @@
+
+implicit none
+
+include "inc1.f90"
+
+print *, '2*pi:', tau
+
+end program
diff --git a/test cases/fortran/15 include/meson.build b/test cases/fortran/15 include/meson.build
new file mode 100644
index 0000000..5609128
--- /dev/null
+++ b/test cases/fortran/15 include/meson.build
@@ -0,0 +1,4 @@
+project('Inclusive', 'fortran')
+
+exe = executable('incexe', 'main.f90')
+test('Fortran include files', exe)
diff --git a/test cases/fortran/4 self dependency/meson.build b/test cases/fortran/4 self dependency/meson.build
index bc5dab4..e791284 100644
--- a/test cases/fortran/4 self dependency/meson.build
+++ b/test cases/fortran/4 self dependency/meson.build
@@ -2,3 +2,7 @@ project('selfdep', 'fortran')
e = executable('selfdep', 'selfdep.f90')
test('selfdep', e)
+
+library('selfmod', 'src/selfdep_mod.f90')
+
+subproject('sub1')
diff --git a/test cases/fortran/4 self dependency/selfdep.f90 b/test cases/fortran/4 self dependency/selfdep.f90
index a272832..1a71353 100644
--- a/test cases/fortran/4 self dependency/selfdep.f90
+++ b/test cases/fortran/4 self dependency/selfdep.f90
@@ -1,11 +1,18 @@
-MODULE Circle
- REAL, PARAMETER :: Pi = 3.1415927
+MODULE geom
+
+type :: circle
+ REAL :: Pi = 4.*atan(1.)
REAL :: radius
-END MODULE Circle
+end type circle
+END MODULE geom
PROGRAM prog
-use Circle
+use geom, only : circle
IMPLICIT NONE
+type(circle) :: ell
+
+ell%radius = 3.
+
END PROGRAM prog
diff --git a/test cases/fortran/4 self dependency/src/selfdep_mod.f90 b/test cases/fortran/4 self dependency/src/selfdep_mod.f90
new file mode 100644
index 0000000..4aa0057
--- /dev/null
+++ b/test cases/fortran/4 self dependency/src/selfdep_mod.f90
@@ -0,0 +1,6 @@
+module a
+end module a
+
+module b
+use a
+end module b
diff --git a/test cases/fortran/4 self dependency/subprojects/sub1/main.f90 b/test cases/fortran/4 self dependency/subprojects/sub1/main.f90
new file mode 100644
index 0000000..873427d
--- /dev/null
+++ b/test cases/fortran/4 self dependency/subprojects/sub1/main.f90
@@ -0,0 +1,6 @@
+module a
+end
+
+program b
+ use a
+end
diff --git a/test cases/fortran/4 self dependency/subprojects/sub1/meson.build b/test cases/fortran/4 self dependency/subprojects/sub1/meson.build
new file mode 100644
index 0000000..606f338
--- /dev/null
+++ b/test cases/fortran/4 self dependency/subprojects/sub1/meson.build
@@ -0,0 +1,3 @@
+project('subproject self-def', 'fortran')
+
+library('subself', 'main.f90')
diff --git a/test cases/fortran/5 static/main.f90 b/test cases/fortran/5 static/main.f90
index dc6454c..6d878cb 100644
--- a/test cases/fortran/5 static/main.f90
+++ b/test cases/fortran/5 static/main.f90
@@ -1,6 +1,6 @@
-program hello
- use static_hello
- implicit none
- call static_say_hello()
-end program hello
+use static_hello
+implicit none
+
+call static_say_hello()
+end program
diff --git a/test cases/fortran/5 static/static_hello.f90 b/test cases/fortran/5 static/static_hello.f90
index 63415b0..5407560 100644
--- a/test cases/fortran/5 static/static_hello.f90
+++ b/test cases/fortran/5 static/static_hello.f90
@@ -1,17 +1,17 @@
module static_hello
- implicit none
+implicit none
- private
- public :: static_say_hello
+private
+public :: static_say_hello
- interface static_say_hello
- module procedure say_hello
- end interface static_say_hello
+interface static_say_hello
+ module procedure say_hello
+end interface static_say_hello
contains
- subroutine say_hello
- print *, "Static library called."
- end subroutine say_hello
+subroutine say_hello
+ print *, "Static library called."
+end subroutine say_hello
end module static_hello
diff --git a/test cases/fortran/6 dynamic/dynamic.f90 b/test cases/fortran/6 dynamic/dynamic.f90
index e78a406..6a1f359 100644
--- a/test cases/fortran/6 dynamic/dynamic.f90
+++ b/test cases/fortran/6 dynamic/dynamic.f90
@@ -1,17 +1,17 @@
module dynamic
- implicit none
+implicit none
- private
- public :: hello
+private
+public :: hello
- interface hello
- module procedure say
- end interface hello
+interface hello
+ module procedure say
+end interface hello
contains
- subroutine say
- print *, "Hello, hello..."
- end subroutine say
+subroutine say
+ print *, "Hello from shared library."
+end subroutine say
end module dynamic
diff --git a/test cases/fortran/6 dynamic/main.f90 b/test cases/fortran/6 dynamic/main.f90
index cb3a53f..fc48bcb 100644
--- a/test cases/fortran/6 dynamic/main.f90
+++ b/test cases/fortran/6 dynamic/main.f90
@@ -1,6 +1,5 @@
-program main
- use dynamic
- implicit none
+use dynamic, only: hello
+implicit none
- call hello()
-end program main
+call hello()
+end program
diff --git a/test cases/fortran/7 generated/prog.f90 b/test cases/fortran/7 generated/prog.f90
index c476e9c..8a102c0 100644
--- a/test cases/fortran/7 generated/prog.f90
+++ b/test cases/fortran/7 generated/prog.f90
@@ -1,9 +1,7 @@
program prog
- use mod2
- implicit none
+use mod2
+implicit none
- if (modval1 + modval2 /= 3) then
- stop 1
- end if
+if (modval1 + modval2 /= 3) stop 1
end program prog
diff --git a/test cases/fortran/8 module names/mod1.f90 b/test cases/fortran/8 module names/mod1.f90
index 69cc900..29cd9f4 100644
--- a/test cases/fortran/8 module names/mod1.f90
+++ b/test cases/fortran/8 module names/mod1.f90
@@ -1,6 +1,6 @@
module MyMod1
- implicit none
+implicit none
- integer, parameter :: myModVal1 = 1
+integer, parameter :: myModVal1 = 1
end module MyMod1
diff --git a/test cases/fortran/8 module names/mod2.f90 b/test cases/fortran/8 module names/mod2.f90
index 971df44..2087750 100644
--- a/test cases/fortran/8 module names/mod2.f90
+++ b/test cases/fortran/8 module names/mod2.f90
@@ -1,6 +1,6 @@
module mymod2
- implicit none
+implicit none
- integer, parameter :: myModVal2 = 2
+integer, parameter :: myModVal2 = 2
end module mymod2
diff --git a/test cases/fortran/8 module names/test.f90 b/test cases/fortran/8 module names/test.f90
index ff5a545..28847fb 100644
--- a/test cases/fortran/8 module names/test.f90
+++ b/test cases/fortran/8 module names/test.f90
@@ -1,7 +1,8 @@
-program test
- use mymod1
- use MyMod2
+use mymod1
+use MyMod2
- integer, parameter :: testVar = myModVal1 + myModVal2
+implicit none
-end program test
+integer, parameter :: testVar = myModVal1 + myModVal2
+
+end program
diff --git a/test cases/fortran/9 cpp/fortran.f b/test cases/fortran/9 cpp/fortran.f
index e694669..255872c 100644
--- a/test cases/fortran/9 cpp/fortran.f
+++ b/test cases/fortran/9 cpp/fortran.f
@@ -1,5 +1,11 @@
function fortran() bind(C)
- use, intrinsic :: iso_c_binding
- real(kind=c_double) :: fortran
- fortran = 2.0**rand(1)
+ use, intrinsic :: iso_c_binding, only: dp=>c_double
+ implicit none
+
+ real(dp) :: r, fortran
+
+ call random_number(r)
+
+ fortran = 2._dp**r
+
end function fortran
diff --git a/test cases/fortran/9 cpp/meson.build b/test cases/fortran/9 cpp/meson.build
index 93037aa..ad7d4b2 100644
--- a/test cases/fortran/9 cpp/meson.build
+++ b/test cases/fortran/9 cpp/meson.build
@@ -1,12 +1,16 @@
project('C++ and FORTRAN', 'cpp', 'fortran')
cpp = meson.get_compiler('cpp')
+fc = meson.get_compiler('fortran')
if cpp.get_id() == 'clang'
error('MESON_SKIP_TEST Clang C++ does not find -lgfortran for some reason.')
endif
-fc = meson.get_compiler('fortran')
+if build_machine.system() == 'windows' and cpp.get_id() != 'gnu'
+ error('MESON_SKIP_TEST mixing gfortran with non-GNU C++ does not work.')
+endif
+
link_with = []
if fc.get_id() == 'intel'
link_with += fc.find_library('ifport')
@@ -15,7 +19,7 @@ endif
e = executable(
'cppfort',
['main.cpp', 'fortran.f'],
- dependencies : [link_with],
+ dependencies : link_with,
)
test('C++ FORTRAN', e)
diff --git a/test cases/frameworks/17 mpi/meson.build b/test cases/frameworks/17 mpi/meson.build
index 2102b81..2d0e4d3 100644
--- a/test cases/frameworks/17 mpi/meson.build
+++ b/test cases/frameworks/17 mpi/meson.build
@@ -36,11 +36,13 @@ uburesult = run_command(ubudetector)
if uburesult.returncode() != 0 and add_languages('fortran', required : false)
mpifort = dependency('mpi', language : 'fortran')
- exef = executable('exef',
- 'main.f90',
- dependencies : [mpifort])
-
- test('MPI Fortran', exef)
+ # Mixing compilers (msvc/clang with gfortran) does not seem to work on Windows.
+ if build_machine.system() != 'windows' or cc.get_id() == 'gnu'
+ exef = executable('exef',
+ 'main.f90',
+ dependencies : [mpifort])
+ test('MPI Fortran', exef)
+ endif
endif
# Check we can apply a version constraint
diff --git a/test cases/frameworks/25 hdf5/main.c b/test cases/frameworks/25 hdf5/main.c
new file mode 100644
index 0000000..4c46310
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/main.c
@@ -0,0 +1,30 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "hdf5.h"
+
+int main(void)
+{
+herr_t ier;
+unsigned maj, min, rel;
+
+ier = H5open();
+if (ier) {
+ fprintf(stderr,"Unable to initialize HDF5: %d\n", ier);
+ return EXIT_FAILURE;
+}
+
+ier = H5get_libversion(&maj, &min, &rel);
+if (ier) {
+ fprintf(stderr,"HDF5 did not initialize!\n");
+ return EXIT_FAILURE;
+}
+printf("C HDF5 version %d.%d.%d\n", maj, min, rel);
+
+ier = H5close();
+if (ier) {
+ fprintf(stderr,"Unable to close HDF5: %d\n", ier);
+ return EXIT_FAILURE;
+}
+return EXIT_SUCCESS;
+}
diff --git a/test cases/frameworks/25 hdf5/main.cpp b/test cases/frameworks/25 hdf5/main.cpp
new file mode 100644
index 0000000..477e76b
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/main.cpp
@@ -0,0 +1,29 @@
+#include <iostream>
+#include "hdf5.h"
+
+
+int main(void)
+{
+herr_t ier;
+unsigned maj, min, rel;
+
+ier = H5open();
+if (ier) {
+ std::cerr << "Unable to initialize HDF5: " << ier << std::endl;
+ return EXIT_FAILURE;
+}
+
+ier = H5get_libversion(&maj, &min, &rel);
+if (ier) {
+ std::cerr << "HDF5 did not initialize!" << std::endl;
+ return EXIT_FAILURE;
+}
+std::cout << "C++ HDF5 version " << maj << "." << min << "." << rel << std::endl;
+
+ier = H5close();
+if (ier) {
+ std::cerr << "Unable to close HDF5: " << ier << std::endl;
+ return EXIT_FAILURE;
+}
+return EXIT_SUCCESS;
+}
diff --git a/test cases/frameworks/25 hdf5/main.f90 b/test cases/frameworks/25 hdf5/main.f90
new file mode 100644
index 0000000..b21abf1
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/main.f90
@@ -0,0 +1,17 @@
+use hdf5
+
+implicit none
+
+integer :: ier, major, minor, rel
+
+call h5open_f(ier)
+if (ier /= 0) error stop 'Unable to initialize HDF5'
+
+call h5get_libversion_f(major, minor, rel, ier)
+if (ier /= 0) error stop 'Unable to check HDF5 version'
+print '(A,I1,A1,I0.2,A1,I1)','Fortran HDF5 version ',major,'.',minor,'.',rel
+
+call h5close_f(ier)
+if (ier /= 0) error stop 'Unable to close HDF5 library'
+
+end program
diff --git a/test cases/frameworks/25 hdf5/meson.build b/test cases/frameworks/25 hdf5/meson.build
new file mode 100644
index 0000000..9033354
--- /dev/null
+++ b/test cases/frameworks/25 hdf5/meson.build
@@ -0,0 +1,43 @@
+project('hdf5_test', 'c', 'cpp')
+
+if build_machine.system() == 'darwin'
+ error('MESON_SKIP_TEST: HDF5 CI image not setup for OSX.')
+endif
+
+if build_machine.system() == 'cygwin'
+ error('MESON_SKIP_TEST: HDF5 CI image not setup for Cygwin.')
+endif
+
+
+# --- C tests
+h5c = dependency('hdf5', language : 'c', required : false)
+if not h5c.found()
+ error('MESON_SKIP_TEST: HDF5 C library not found, skipping HDF5 framework tests.')
+endif
+exec = executable('exec', 'main.c', dependencies : h5c)
+
+test('HDF5 C', exec)
+
+# --- C++ tests
+h5cpp = dependency('hdf5', language : 'cpp', required : false)
+if h5cpp.found()
+ execpp = executable('execpp', 'main.cpp', dependencies : h5cpp)
+ test('HDF5 C++', execpp)
+endif
+
+# --- Fortran tests
+if build_machine.system() != 'windows'
+ add_languages('fortran')
+
+ h5f = dependency('hdf5', language : 'fortran', required : false)
+ if h5f.found()
+ exef = executable('exef', 'main.f90', dependencies : h5f)
+
+ test('HDF5 Fortran', exef)
+ endif
+endif
+
+# Check we can apply a version constraint
+if h5c.version() != 'unknown'
+ dependency('hdf5', version: '>=@0@'.format(h5c.version()))
+endif
diff --git a/test cases/frameworks/26 netcdf/main.c b/test cases/frameworks/26 netcdf/main.c
new file mode 100644
index 0000000..e592585
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/main.c
@@ -0,0 +1,14 @@
+#include "netcdf.h"
+
+int main(void)
+{
+int ret, ncid;
+
+if ((ret = nc_create("foo.nc", NC_CLOBBER, &ncid)))
+ return ret;
+
+if ((ret = nc_close(ncid)))
+ return ret;
+
+return 0;
+}
diff --git a/test cases/frameworks/26 netcdf/main.cpp b/test cases/frameworks/26 netcdf/main.cpp
new file mode 100644
index 0000000..a3c98ef
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/main.cpp
@@ -0,0 +1,15 @@
+#include <iostream>
+#include "netcdf.h"
+
+int main(void)
+{
+int ret, ncid;
+
+if ((ret = nc_create("foo.nc", NC_CLOBBER, &ncid)))
+ return ret;
+
+if ((ret = nc_close(ncid)))
+ return ret;
+
+return EXIT_SUCCESS;
+}
diff --git a/test cases/frameworks/26 netcdf/main.f90 b/test cases/frameworks/26 netcdf/main.f90
new file mode 100644
index 0000000..3872298
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/main.f90
@@ -0,0 +1,19 @@
+use netcdf
+
+implicit none
+
+integer :: ncid
+
+call check( nf90_create("foo.nc", NF90_CLOBBER, ncid) )
+
+call check( nf90_close(ncid) )
+
+contains
+
+ subroutine check(status)
+ integer, intent (in) :: status
+
+ if(status /= nf90_noerr) error stop trim(nf90_strerror(status))
+end subroutine check
+
+end program
diff --git a/test cases/frameworks/26 netcdf/meson.build b/test cases/frameworks/26 netcdf/meson.build
new file mode 100644
index 0000000..5a10d09
--- /dev/null
+++ b/test cases/frameworks/26 netcdf/meson.build
@@ -0,0 +1,35 @@
+project('netcdf_test', 'c', 'cpp')
+
+
+# --- C tests
+nc_c = dependency('netcdf', language : 'c', required : false)
+if not nc_c.found()
+ error('MESON_SKIP_TEST: NetCDF C library not found, skipping NetCDF framework tests.')
+endif
+exec = executable('exec', 'main.c', dependencies : nc_c)
+
+test('NetCDF C', exec)
+
+# --- C++ tests
+nc_cpp = dependency('netcdf', language : 'cpp', required : false)
+if nc_cpp.found()
+ execpp = executable('execpp', 'main.cpp', dependencies : nc_cpp)
+ test('NetCDF C++', execpp)
+endif
+
+# --- Fortran tests
+if build_machine.system() != 'windows'
+ add_languages('fortran')
+
+ nc_f = dependency('netcdf', language : 'fortran', required : false)
+ if nc_f.found()
+ exef = executable('exef', 'main.f90', dependencies : nc_f)
+
+ test('NetCDF Fortran', exef)
+ endif
+endif
+
+# Check we can apply a version constraint
+if nc_c.version() != 'unknown'
+ dependency('netcdf', version: '>=@0@'.format(nc_c.version()))
+endif
diff --git a/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake b/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake
new file mode 100644
index 0000000..a2f8456
--- /dev/null
+++ b/test cases/linuxlike/13 cmake dependency/cmake/FindSomethingLikeZLIB.cmake
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+ set(SomethingLikeZLIB_FOUND ON)
+ set(SomethingLikeZLIB_LIBRARIES ${ZLIB_LIBRARY})
+ set(SomethingLikeZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+ set(SomethingLikeZLIB_FOUND OFF)
+endif()
diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build
index 72773b2..a18cd84 100644
--- a/test cases/linuxlike/13 cmake dependency/meson.build
+++ b/test cases/linuxlike/13 cmake dependency/meson.build
@@ -36,6 +36,12 @@ depf2 = dependency('ZLIB', required : false, method : 'cmake', modules : 'dfggh:
assert(depf2.found() == false, 'Invalid CMake targets should fail')
+# Try to find a dependency with a custom CMake module
+
+depm1 = dependency('SomethingLikeZLIB', required : true, method : 'cmake', cmake_module_path : 'cmake')
+depm2 = dependency('SomethingLikeZLIB', required : true, method : 'cmake', cmake_module_path : ['cmake'])
+depm3 = dependency('SomethingLikeZLIB', required : true, cmake_module_path : 'cmake')
+
# Try to compile a test that takes a dep and an include_directories
cc = meson.get_compiler('c')
diff --git a/test cases/linuxlike/14 static dynamic linkage/main.c b/test cases/linuxlike/14 static dynamic linkage/main.c
new file mode 100644
index 0000000..628a200
--- /dev/null
+++ b/test cases/linuxlike/14 static dynamic linkage/main.c
@@ -0,0 +1,7 @@
+#include "stdio.h"
+#include "zlib.h"
+
+int main() {
+ printf("%s\n", zlibVersion());
+ return 0;
+}
diff --git a/test cases/linuxlike/14 static dynamic linkage/meson.build b/test cases/linuxlike/14 static dynamic linkage/meson.build
new file mode 100644
index 0000000..fc3c38a
--- /dev/null
+++ b/test cases/linuxlike/14 static dynamic linkage/meson.build
@@ -0,0 +1,20 @@
+project('static dynamic', 'c')
+
+
+cc = meson.get_compiler('c')
+
+z_default = cc.find_library('z')
+z_static = cc.find_library('z', static: true)
+z_dynamic = cc.find_library('z', static: false)
+
+exe_default = executable('main_default', 'main.c', dependencies: [z_default])
+exe_static = executable('main_static', 'main.c', dependencies: [z_static])
+exe_dynamic = executable('main_dynamic', 'main.c', dependencies: [z_dynamic])
+
+test('test default', exe_default)
+test('test static', exe_static)
+test('test dynamic', exe_dynamic)
+
+test('verify static linking', find_program('verify_static.py'), args:exe_static.full_path())
+test('verify dynamic linking', find_program('verify_static.py'), args:exe_dynamic.full_path(),
+ should_fail: true)
diff --git a/test cases/linuxlike/14 static dynamic linkage/verify_static.py b/test cases/linuxlike/14 static dynamic linkage/verify_static.py
new file mode 100755
index 0000000..92cc308
--- /dev/null
+++ b/test cases/linuxlike/14 static dynamic linkage/verify_static.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+"""Test script that checks whether zlib was statically linked into the executable."""
+import subprocess
+import sys
+
+def main():
+ """Main function"""
+ output = subprocess.check_output(['nm', sys.argv[1]]).decode('utf-8')
+
+ if 'T zlibVersion' in output:
+ sys.exit(0)
+
+ sys.exit(1)
+
+if __name__ == '__main__':
+ main()
diff --git a/test cases/osx/2 library versions/meson.build b/test cases/osx/2 library versions/meson.build
index 26f945a..0d21a3a 100644
--- a/test cases/osx/2 library versions/meson.build
+++ b/test cases/osx/2 library versions/meson.build
@@ -1,15 +1,27 @@
project('library versions', 'c')
-zlib_dep = dependency('zlib')
-
-some = shared_library('some', 'lib.c',
- # duplicate the rpath again, in order
- # to test Meson's RPATH deduplication
- build_rpath : zlib_dep.get_pkgconfig_variable('libdir'),
- dependencies : zlib_dep,
- version : '1.2.3',
- soversion : '7',
- install : true)
+if run_command(find_program('require_pkgconfig.py'), check: true).stdout().strip() == 'yes'
+ required = true
+else
+ required = false
+endif
+
+zlib_dep = dependency('zlib', required: required)
+if zlib_dep.found()
+ some = shared_library('some', 'lib.c',
+ # duplicate the rpath again, in order
+ # to test Meson's RPATH deduplication
+ build_rpath : zlib_dep.get_pkgconfig_variable('libdir'),
+ dependencies : zlib_dep,
+ version : '1.2.3',
+ soversion : '7',
+ install : true)
+else
+ some = shared_library('some', 'lib.c',
+ version : '1.2.3',
+ soversion : '7',
+ install : true)
+endif
noversion = shared_library('noversion', 'lib.c',
install : true)
diff --git a/test cases/osx/2 library versions/require_pkgconfig.py b/test cases/osx/2 library versions/require_pkgconfig.py
new file mode 100644
index 0000000..3d228aa
--- /dev/null
+++ b/test cases/osx/2 library versions/require_pkgconfig.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python3
+
+import os
+import shutil
+
+if 'CI' in os.environ or shutil.which('pkg-config'):
+ print('yes')
+else:
+ print('no')
diff --git a/test cases/osx/5 extra frameworks/installed_files.txt b/test cases/osx/5 extra frameworks/installed_files.txt
new file mode 100644
index 0000000..2c6bd93
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/installed_files.txt
@@ -0,0 +1,2 @@
+usr/bin/prog
+usr/lib/libstat.a
diff --git a/test cases/osx/5 extra frameworks/meson.build b/test cases/osx/5 extra frameworks/meson.build
new file mode 100644
index 0000000..cb4847e
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/meson.build
@@ -0,0 +1,13 @@
+project('xcode extra framework test', 'c')
+
+dep_libs = dependency('OpenGL', method : 'extraframework')
+assert(dep_libs.type_name() == 'extraframeworks', 'type_name is ' + dep_libs.type_name())
+
+dep_main = dependency('Foundation')
+assert(dep_main.type_name() == 'extraframeworks', 'type_name is ' + dep_main.type_name())
+
+dep_py = dependency('python', method : 'extraframework')
+assert(dep_main.type_name() == 'extraframeworks', 'type_name is ' + dep_main.type_name())
+
+stlib = static_library('stat', 'stat.c', install : true, dependencies: dep_libs)
+exe = executable('prog', 'prog.c', install : true, dependencies: dep_main)
diff --git a/test cases/osx/5 extra frameworks/prog.c b/test cases/osx/5 extra frameworks/prog.c
new file mode 100644
index 0000000..11b7fad
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/prog.c
@@ -0,0 +1,3 @@
+int main(int argc, char **argv) {
+ return 0;
+}
diff --git a/test cases/osx/5 extra frameworks/stat.c b/test cases/osx/5 extra frameworks/stat.c
new file mode 100644
index 0000000..fa76a65
--- /dev/null
+++ b/test cases/osx/5 extra frameworks/stat.c
@@ -0,0 +1 @@
+int func() { return 933; }
diff --git a/test cases/osx/6 multiframework/main.m b/test cases/osx/6 multiframework/main.m
new file mode 100644
index 0000000..8a6799b
--- /dev/null
+++ b/test cases/osx/6 multiframework/main.m
@@ -0,0 +1,5 @@
+#import <Cocoa/Cocoa.h>
+
+int main(int argc, const char * argv[]) {
+ return NSApplicationMain(argc, argv);
+}
diff --git a/test cases/osx/6 multiframework/meson.build b/test cases/osx/6 multiframework/meson.build
new file mode 100644
index 0000000..2884624
--- /dev/null
+++ b/test cases/osx/6 multiframework/meson.build
@@ -0,0 +1,13 @@
+project('multiframework', 'objc')
+
+# In theory only 'AppKit' would be enough but there was a bug
+# that caused a build failure when defining two modules. The
+# arguments for the latter module overwrote the arguments for
+# the first one rather than adding to them.
+cocoa_dep = dependency('appleframeworks', modules : ['AppKit', 'foundation'])
+
+executable('deptester',
+ 'main.m',
+ objc_args : ['-fobjc-arc'],
+ dependencies : [cocoa_dep],
+)
diff --git a/test cases/rewrite/1 basic/addSrc.json b/test cases/rewrite/1 basic/addSrc.json
new file mode 100644
index 0000000..b8bc439
--- /dev/null
+++ b/test cases/rewrite/1 basic/addSrc.json
@@ -0,0 +1,94 @@
+[
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "src_add",
+ "sources": ["a2.cpp", "a1.cpp", "a2.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog2",
+ "operation": "src_add",
+ "sources": ["a7.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "src_add",
+ "sources": ["a5.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "src_add",
+ "sources": ["a5.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "src_add",
+ "sources": ["a3.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "src_add",
+ "sources": ["a4.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "src_add",
+ "sources": ["a6.cpp", "a1.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog0",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog2",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog8",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/1 basic/addTgt.json b/test cases/rewrite/1 basic/addTgt.json
new file mode 100644
index 0000000..2f4e7e2
--- /dev/null
+++ b/test cases/rewrite/1 basic/addTgt.json
@@ -0,0 +1,9 @@
+[
+ {
+ "type": "target",
+ "target": "trivialprog10",
+ "operation": "target_add",
+ "sources": ["new1.cpp", "new2.cpp"],
+ "target_type": "shared_library"
+ }
+]
diff --git a/test cases/rewrite/1 basic/added.txt b/test cases/rewrite/1 basic/added.txt
deleted file mode 100644
index 657dd42..0000000
--- a/test cases/rewrite/1 basic/added.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-project('rewritetest', 'c')
-
-sources = ['trivial.c']
-
-exe = executable('trivialprog', 'notthere.c', sources)
diff --git a/test cases/rewrite/1 basic/info.json b/test cases/rewrite/1 basic/info.json
new file mode 100644
index 0000000..0f1a3bd
--- /dev/null
+++ b/test cases/rewrite/1 basic/info.json
@@ -0,0 +1,57 @@
+[
+ {
+ "type": "target",
+ "target": "trivialprog0",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "exe2",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "exe4",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "exe6",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "exe8",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog10",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/1 basic/meson.build b/test cases/rewrite/1 basic/meson.build
index a0485d0..0f87c45 100644
--- a/test cases/rewrite/1 basic/meson.build
+++ b/test cases/rewrite/1 basic/meson.build
@@ -1,5 +1,19 @@
-project('rewritetest', 'c')
+project('rewritetest', 'cpp')
-sources = ['trivial.c', 'notthere.c']
+src1 = ['main.cpp', 'fileA.cpp']
+src2 = files(['fileB.cpp', 'fileC.cpp'])
+src3 = src1
+src4 = [src3]
-exe = executable('trivialprog', sources)
+# Magic comment
+
+exe0 = executable('trivialprog0', src1 + src2)
+exe1 = executable('trivialprog1', src1)
+exe2 = executable('trivialprog2', [src2])
+exe3 = executable('trivialprog3', ['main.cpp', 'fileA.cpp'])
+exe4 = executable('trivialprog4', ['main.cpp', ['fileA.cpp']])
+exe5 = executable('trivialprog5', [src2, 'main.cpp'])
+exe6 = executable('trivialprog6', 'main.cpp', 'fileA.cpp')
+exe7 = executable('trivialprog7', 'fileB.cpp', src1, 'fileC.cpp')
+exe8 = executable('trivialprog8', src3)
+executable('trivialprog9', src4)
diff --git a/test cases/rewrite/1 basic/removed.txt b/test cases/rewrite/1 basic/removed.txt
deleted file mode 100644
index 5519214..0000000
--- a/test cases/rewrite/1 basic/removed.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-project('rewritetest', 'c')
-
-sources = ['trivial.c']
-
-exe = executable('trivialprog', sources)
diff --git a/test cases/rewrite/1 basic/rmSrc.json b/test cases/rewrite/1 basic/rmSrc.json
new file mode 100644
index 0000000..2e7447c
--- /dev/null
+++ b/test cases/rewrite/1 basic/rmSrc.json
@@ -0,0 +1,88 @@
+[
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "src_rm",
+ "sources": ["fileB.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "src_rm",
+ "sources": ["fileA.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "src_rm",
+ "sources": ["fileB.cpp"]
+ },
+ {
+ "type": "target",
+ "target": "trivialprog0",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog2",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog3",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog4",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog5",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog6",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog7",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog8",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/1 basic/rmTgt.json b/test cases/rewrite/1 basic/rmTgt.json
new file mode 100644
index 0000000..dbaf025
--- /dev/null
+++ b/test cases/rewrite/1 basic/rmTgt.json
@@ -0,0 +1,17 @@
+[
+ {
+ "type": "target",
+ "target": "exe0",
+ "operation": "target_rm"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog1",
+ "operation": "target_rm"
+ },
+ {
+ "type": "target",
+ "target": "trivialprog9",
+ "operation": "target_rm"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/addSrc.json b/test cases/rewrite/2 subdirs/addSrc.json
new file mode 100644
index 0000000..1a5c13e
--- /dev/null
+++ b/test cases/rewrite/2 subdirs/addSrc.json
@@ -0,0 +1,13 @@
+[
+ {
+ "type": "target",
+ "target": "something",
+ "operation": "src_add",
+ "sources": ["third.c"]
+ },
+ {
+ "type": "target",
+ "target": "something",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/addTgt.json b/test cases/rewrite/2 subdirs/addTgt.json
new file mode 100644
index 0000000..2e1e8bc
--- /dev/null
+++ b/test cases/rewrite/2 subdirs/addTgt.json
@@ -0,0 +1,10 @@
+[
+ {
+ "type": "target",
+ "target": "newLib",
+ "operation": "target_add",
+ "sources": ["new1.cpp", "new2.cpp"],
+ "target_type": "shared_library",
+ "subdir": "sub2"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/info.json b/test cases/rewrite/2 subdirs/info.json
new file mode 100644
index 0000000..dba2cd6
--- /dev/null
+++ b/test cases/rewrite/2 subdirs/info.json
@@ -0,0 +1,12 @@
+[
+ {
+ "type": "target",
+ "target": "something",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "newLib",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/meson.build b/test cases/rewrite/2 subdirs/meson.build
index 79b7ad7..c7f3fec 100644
--- a/test cases/rewrite/2 subdirs/meson.build
+++ b/test cases/rewrite/2 subdirs/meson.build
@@ -2,4 +2,3 @@ project('subdir rewrite', 'c')
subdir('sub1')
subdir('sub2')
-
diff --git a/test cases/rewrite/2 subdirs/rmTgt.json b/test cases/rewrite/2 subdirs/rmTgt.json
new file mode 100644
index 0000000..9b112f9
--- /dev/null
+++ b/test cases/rewrite/2 subdirs/rmTgt.json
@@ -0,0 +1,7 @@
+[
+ {
+ "type": "target",
+ "target": "something",
+ "operation": "target_rm"
+ }
+]
diff --git a/test cases/rewrite/2 subdirs/sub1/after.txt b/test cases/rewrite/2 subdirs/sub1/after.txt
deleted file mode 100644
index 53ceaff..0000000
--- a/test cases/rewrite/2 subdirs/sub1/after.txt
+++ /dev/null
@@ -1 +0,0 @@
-srcs = ['first.c']
diff --git a/test cases/rewrite/2 subdirs/sub2/meson.build b/test cases/rewrite/2 subdirs/sub2/meson.build
index 0d92e7f..44b4075 100644
--- a/test cases/rewrite/2 subdirs/sub2/meson.build
+++ b/test cases/rewrite/2 subdirs/sub2/meson.build
@@ -1,2 +1 @@
executable('something', srcs)
-
diff --git a/test cases/rewrite/3 kwargs/add.json b/test cases/rewrite/3 kwargs/add.json
new file mode 100644
index 0000000..2148a1e
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/add.json
@@ -0,0 +1,29 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": {
+ "license": "GPL"
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "add",
+ "kwargs": {
+ "license": ["MIT"]
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "add",
+ "kwargs": {
+ "license": "BSD"
+ }
+ }
+]
diff --git a/test cases/rewrite/3 kwargs/defopts_delete.json b/test cases/rewrite/3 kwargs/defopts_delete.json
new file mode 100644
index 0000000..4fe39e2
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/defopts_delete.json
@@ -0,0 +1,18 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": {
+ "default_options": ["cpp_std=c++14", "buildtype=release", "debug=true"]
+ }
+ },
+ {
+ "type": "default_options",
+ "operation": "delete",
+ "options": {
+ "buildtype": null
+ }
+ }
+]
diff --git a/test cases/rewrite/3 kwargs/defopts_set.json b/test cases/rewrite/3 kwargs/defopts_set.json
new file mode 100644
index 0000000..f8f855f
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/defopts_set.json
@@ -0,0 +1,24 @@
+[
+ {
+ "type": "default_options",
+ "operation": "set",
+ "options": {
+ "cpp_std": "c++14"
+ }
+ },
+ {
+ "type": "default_options",
+ "operation": "set",
+ "options": {
+ "buildtype": "release",
+ "debug": true
+ }
+ },
+ {
+ "type": "default_options",
+ "operation": "set",
+ "options": {
+ "cpp_std": "c++11"
+ }
+ }
+]
diff --git a/test cases/rewrite/3 kwargs/delete.json b/test cases/rewrite/3 kwargs/delete.json
new file mode 100644
index 0000000..2f75484
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/delete.json
@@ -0,0 +1,20 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "",
+ "operation": "delete",
+ "kwargs": {
+ "version": null
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "target",
+ "id": "helloWorld",
+ "operation": "delete",
+ "kwargs": {
+ "build_by_default": false
+ }
+ }
+]
diff --git a/test cases/rewrite/3 kwargs/info.json b/test cases/rewrite/3 kwargs/info.json
new file mode 100644
index 0000000..0eed404
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/info.json
@@ -0,0 +1,20 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "info"
+ },
+ {
+ "type": "kwargs",
+ "function": "target",
+ "id": "tgt1",
+ "operation": "info"
+ },
+ {
+ "type": "kwargs",
+ "function": "dependency",
+ "id": "dep1",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/3 kwargs/meson.build b/test cases/rewrite/3 kwargs/meson.build
new file mode 100644
index 0000000..13b336c
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/meson.build
@@ -0,0 +1,7 @@
+project('rewritetest', 'cpp', version: '0.0.1')
+
+# Find ZLIB
+dep1 = dependency('zlib', required: false)
+
+# Make a test exe
+tgt1 = executable('helloWorld', 'main.cpp', build_by_default: true)
diff --git a/test cases/rewrite/3 kwargs/remove.json b/test cases/rewrite/3 kwargs/remove.json
new file mode 100644
index 0000000..5dc7836
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/remove.json
@@ -0,0 +1,29 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": {
+ "license": ["GPL", "MIT", "BSD"]
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "remove",
+ "kwargs": {
+ "license": ["MIT"]
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "remove",
+ "kwargs": {
+ "license": "BSD"
+ }
+ }
+]
diff --git a/test cases/rewrite/3 kwargs/remove_regex.json b/test cases/rewrite/3 kwargs/remove_regex.json
new file mode 100644
index 0000000..1043101
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/remove_regex.json
@@ -0,0 +1,20 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": {
+ "default_options": ["cpp_std=c++14", "buildtype=release", "debug=true"]
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "remove_regex",
+ "kwargs": {
+ "default_options": ["cpp_std=.*"]
+ }
+ }
+]
diff --git a/test cases/rewrite/3 kwargs/set.json b/test cases/rewrite/3 kwargs/set.json
new file mode 100644
index 0000000..a56c599
--- /dev/null
+++ b/test cases/rewrite/3 kwargs/set.json
@@ -0,0 +1,34 @@
+[
+ {
+ "type": "kwargs",
+ "function": "project",
+ "id": "/",
+ "operation": "set",
+ "kwargs": {
+ "version": "0.0.2",
+ "meson_version": "0.50.0",
+ "license": ["GPL", "MIT"]
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "target",
+ "id": "helloWorld",
+ "operation": "set",
+ "kwargs": {
+ "build_by_default": false,
+ "build_rpath": "/usr/local",
+ "dependencies": "dep1"
+ }
+ },
+ {
+ "type": "kwargs",
+ "function": "dependency",
+ "id": "zlib",
+ "operation": "set",
+ "kwargs": {
+ "required": true,
+ "method": "cmake"
+ }
+ }
+]
diff --git a/test cases/rewrite/4 same name targets/addSrc.json b/test cases/rewrite/4 same name targets/addSrc.json
new file mode 100644
index 0000000..98d0d1e
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/addSrc.json
@@ -0,0 +1,8 @@
+[
+ {
+ "type": "target",
+ "target": "myExe",
+ "operation": "src_add",
+ "sources": ["a1.cpp", "a2.cpp"]
+ }
+]
diff --git a/test cases/rewrite/4 same name targets/info.json b/test cases/rewrite/4 same name targets/info.json
new file mode 100644
index 0000000..a9fc2dd
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/info.json
@@ -0,0 +1,12 @@
+[
+ {
+ "type": "target",
+ "target": "exe1",
+ "operation": "info"
+ },
+ {
+ "type": "target",
+ "target": "exe2",
+ "operation": "info"
+ }
+]
diff --git a/test cases/rewrite/4 same name targets/meson.build b/test cases/rewrite/4 same name targets/meson.build
new file mode 100644
index 0000000..384fa2b
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/meson.build
@@ -0,0 +1,6 @@
+project('rewrite same name targets', 'cpp')
+
+src1 = ['main.cpp']
+
+exe1 = executable('myExe', src1)
+subdir('sub1')
diff --git a/test cases/rewrite/4 same name targets/sub1/meson.build b/test cases/rewrite/4 same name targets/sub1/meson.build
new file mode 100644
index 0000000..ac53667
--- /dev/null
+++ b/test cases/rewrite/4 same name targets/sub1/meson.build
@@ -0,0 +1,3 @@
+src2 = ['main.cpp']
+
+exe2 = executable('myExe', src2)
diff --git a/test cases/rewrite/5 sorting/meson.build b/test cases/rewrite/5 sorting/meson.build
new file mode 100644
index 0000000..80934a0
--- /dev/null
+++ b/test cases/rewrite/5 sorting/meson.build
@@ -0,0 +1,33 @@
+project('rewriter source sorting', ['c', 'cpp'])
+
+src1 = files([
+ 'a1.c',
+ 'a10.c',
+ 'a2.c',
+ 'a3.c',
+ 'bbb/a/b1.c',
+ 'bbb/a4.c',
+ 'bbb/b3.c',
+ 'bbb/b4.c',
+ 'bbb/b/b2.c',
+ 'bbb/c1/b5.c',
+ 'bbb/c10/b6.c',
+ 'bbb/c2/b7.c',
+ 'bbb/b5.c',
+ 'a110.c',
+ 'aaa/f1.c',
+ 'aaa/f2.c',
+ 'aaa/f3.c',
+ 'a20.c',
+ 'b1.c',
+ 'aaa/b/b1.c',
+ 'aaa/b/b2.c',
+ 'a30.c',
+ 'a100.c',
+ 'aaa/a/a1.c',
+ 'a101.c',
+ 'a210.c',
+ 'c2.c'
+])
+
+exe1 = executable('exe1', src1)
diff --git a/test cases/unit/17 prebuilt shared/patron.c b/test cases/unit/17 prebuilt shared/patron.c
index 82d9678..461d7b4 100644
--- a/test cases/unit/17 prebuilt shared/patron.c
+++ b/test cases/unit/17 prebuilt shared/patron.c
@@ -5,4 +5,5 @@ int main(int argc, char **argv) {
printf("You are standing outside the Great Library of Alexandria.\n");
printf("You decide to go inside.\n\n");
alexandria_visit();
+ return 0;
}
diff --git a/test cases/unit/20 subproj dep variables/meson.build b/test cases/unit/20 subproj dep variables/meson.build
index f1622f9..954463b 100644
--- a/test cases/unit/20 subproj dep variables/meson.build
+++ b/test cases/unit/20 subproj dep variables/meson.build
@@ -11,3 +11,6 @@ dependency('somenotfounddep', required : false,
dependency('zlibproxy', required : true,
fallback : ['somesubproj', 'zlibproxy_dep'])
+
+dependency('somedep', required : false,
+ fallback : ['nestedsubproj', 'nestedsubproj_dep'])
diff --git a/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build
new file mode 100644
index 0000000..4bf549e
--- /dev/null
+++ b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/meson.build
@@ -0,0 +1,3 @@
+project('dep', 'c')
+
+subproject('subsubproject')
diff --git a/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap
new file mode 100644
index 0000000..11b2178
--- /dev/null
+++ b/test cases/unit/20 subproj dep variables/subprojects/nestedsubproj/subprojects/subsubproject.wrap
@@ -0,0 +1 @@
+[wrap-file]
diff --git a/test cases/unit/46 native file binary/meson.build b/test cases/unit/47 native file binary/meson.build
index 4489ac1..4489ac1 100644
--- a/test cases/unit/46 native file binary/meson.build
+++ b/test cases/unit/47 native file binary/meson.build
diff --git a/test cases/unit/46 native file binary/meson_options.txt b/test cases/unit/47 native file binary/meson_options.txt
index 651da0e..651da0e 100644
--- a/test cases/unit/46 native file binary/meson_options.txt
+++ b/test cases/unit/47 native file binary/meson_options.txt
diff --git a/test cases/unit/46 reconfigure/main.c b/test cases/unit/48 reconfigure/main.c
index 25927f5..25927f5 100644
--- a/test cases/unit/46 reconfigure/main.c
+++ b/test cases/unit/48 reconfigure/main.c
diff --git a/test cases/unit/46 reconfigure/meson.build b/test cases/unit/48 reconfigure/meson.build
index 6eaac5d..6eaac5d 100644
--- a/test cases/unit/46 reconfigure/meson.build
+++ b/test cases/unit/48 reconfigure/meson.build
diff --git a/test cases/unit/46 reconfigure/meson_options.txt b/test cases/unit/48 reconfigure/meson_options.txt
index 728f7b7..728f7b7 100644
--- a/test cases/unit/46 reconfigure/meson_options.txt
+++ b/test cases/unit/48 reconfigure/meson_options.txt
diff --git a/test cases/unit/47 testsetup default/envcheck.py b/test cases/unit/49 testsetup default/envcheck.py
index 6ba3093..6ba3093 100644
--- a/test cases/unit/47 testsetup default/envcheck.py
+++ b/test cases/unit/49 testsetup default/envcheck.py
diff --git a/test cases/unit/47 testsetup default/meson.build b/test cases/unit/49 testsetup default/meson.build
index bdd35b8..bdd35b8 100644
--- a/test cases/unit/47 testsetup default/meson.build
+++ b/test cases/unit/49 testsetup default/meson.build
diff --git a/test cases/unit/48 pkgconfig csharp library/meson.build b/test cases/unit/50 pkgconfig csharp library/meson.build
index 148d40f..148d40f 100644
--- a/test cases/unit/48 pkgconfig csharp library/meson.build
+++ b/test cases/unit/50 pkgconfig csharp library/meson.build
diff --git a/test cases/unit/48 pkgconfig csharp library/somelib.cs b/test cases/unit/50 pkgconfig csharp library/somelib.cs
index 24d37ed..24d37ed 100644
--- a/test cases/unit/48 pkgconfig csharp library/somelib.cs
+++ b/test cases/unit/50 pkgconfig csharp library/somelib.cs
diff --git a/test cases/unit/49 ldflagdedup/bob.c b/test cases/unit/51 ldflagdedup/bob.c
index a68d4b1..a68d4b1 100644
--- a/test cases/unit/49 ldflagdedup/bob.c
+++ b/test cases/unit/51 ldflagdedup/bob.c
diff --git a/test cases/unit/49 ldflagdedup/meson.build b/test cases/unit/51 ldflagdedup/meson.build
index 0bbcc50..0bbcc50 100644
--- a/test cases/unit/49 ldflagdedup/meson.build
+++ b/test cases/unit/51 ldflagdedup/meson.build
diff --git a/test cases/unit/49 ldflagdedup/prog.c b/test cases/unit/51 ldflagdedup/prog.c
index 02c599d..02c599d 100644
--- a/test cases/unit/49 ldflagdedup/prog.c
+++ b/test cases/unit/51 ldflagdedup/prog.c
diff --git a/test cases/unit/50 pkgconfig static link order/meson.build b/test cases/unit/52 pkgconfig static link order/meson.build
index b61de9a..b61de9a 100644
--- a/test cases/unit/50 pkgconfig static link order/meson.build
+++ b/test cases/unit/52 pkgconfig static link order/meson.build
diff --git a/test cases/unit/51 clang-format/.clang-format b/test cases/unit/53 clang-format/.clang-format
index 5c60ac9..5c60ac9 100644
--- a/test cases/unit/51 clang-format/.clang-format
+++ b/test cases/unit/53 clang-format/.clang-format
diff --git a/test cases/unit/51 clang-format/meson.build b/test cases/unit/53 clang-format/meson.build
index 1b93cd5..1b93cd5 100644
--- a/test cases/unit/51 clang-format/meson.build
+++ b/test cases/unit/53 clang-format/meson.build
diff --git a/test cases/unit/51 clang-format/prog_expected_c b/test cases/unit/53 clang-format/prog_expected_c
index a045966..a045966 100644
--- a/test cases/unit/51 clang-format/prog_expected_c
+++ b/test cases/unit/53 clang-format/prog_expected_c
diff --git a/test cases/unit/51 clang-format/prog_orig_c b/test cases/unit/53 clang-format/prog_orig_c
index f098bbc..f098bbc 100644
--- a/test cases/unit/51 clang-format/prog_orig_c
+++ b/test cases/unit/53 clang-format/prog_orig_c
diff --git a/test cases/unit/54 introspect buildoptions/subprojects/projectBad/meson.build b/test cases/unit/54 introspect buildoptions/subprojects/projectBad/meson.build
new file mode 100644
index 0000000..4d0aeeb
--- /dev/null
+++ b/test cases/unit/54 introspect buildoptions/subprojects/projectBad/meson.build
@@ -0,0 +1,9 @@
+pfggggaergaeg(sdgrgjgn)aga
+
+rgqeh
+th
+thtr
+e
+tb
+tbqebt
+tbqebttrtt
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson_options.txt b/test cases/unit/54 introspect buildoptions/subprojects/projectBad/meson_options.txt
index f15d352..f15d352 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson_options.txt
+++ b/test cases/unit/54 introspect buildoptions/subprojects/projectBad/meson_options.txt
diff --git a/test cases/unit/55 dedup compiler libs/app/app.c b/test cases/unit/55 dedup compiler libs/app/app.c
new file mode 100644
index 0000000..4e215b3
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/app/app.c
@@ -0,0 +1,13 @@
+#include <stdio.h>
+#include <liba.h>
+#include <libb.h>
+
+int
+main(void)
+{
+ printf("start value = %d\n", liba_get());
+ liba_add(2);
+ libb_mul(5);
+ printf("end value = %d\n", liba_get());
+ return 0;
+}
diff --git a/test cases/unit/55 dedup compiler libs/app/meson.build b/test cases/unit/55 dedup compiler libs/app/meson.build
new file mode 100644
index 0000000..82ac306
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/app/meson.build
@@ -0,0 +1,2 @@
+executable('app', 'app.c',
+ dependencies: [liba_dep, libb_dep])
diff --git a/test cases/unit/55 dedup compiler libs/liba/liba.c b/test cases/unit/55 dedup compiler libs/liba/liba.c
new file mode 100644
index 0000000..962d47f
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/liba/liba.c
@@ -0,0 +1,18 @@
+#include "liba.h"
+
+static int val;
+
+void liba_add(int x)
+{
+ val += x;
+}
+
+void liba_sub(int x)
+{
+ val -= x;
+}
+
+int liba_get(void)
+{
+ return val;
+}
diff --git a/test cases/unit/55 dedup compiler libs/liba/liba.h b/test cases/unit/55 dedup compiler libs/liba/liba.h
new file mode 100644
index 0000000..a980cdc
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/liba/liba.h
@@ -0,0 +1,8 @@
+#ifndef LIBA_H_
+#define LIBA_H_
+
+void liba_add(int x);
+void liba_sub(int x);
+int liba_get(void);
+
+#endif
diff --git a/test cases/unit/55 dedup compiler libs/liba/meson.build b/test cases/unit/55 dedup compiler libs/liba/meson.build
new file mode 100644
index 0000000..eccfa46
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/liba/meson.build
@@ -0,0 +1,8 @@
+deps = [dependency('threads'), cc.find_library('dl'), cc.find_library('m')]
+
+liba = library('a', 'liba.c',
+ dependencies: deps)
+
+liba_dep = declare_dependency(link_with: liba,
+ include_directories: include_directories('.'),
+ dependencies: deps)
diff --git a/test cases/unit/55 dedup compiler libs/libb/libb.c b/test cases/unit/55 dedup compiler libs/libb/libb.c
new file mode 100644
index 0000000..3720868
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/libb/libb.c
@@ -0,0 +1,7 @@
+#include <liba.h>
+#include "libb.h"
+
+void libb_mul(int x)
+{
+ liba_add(liba_get() * (x - 1));
+}
diff --git a/test cases/unit/55 dedup compiler libs/libb/libb.h b/test cases/unit/55 dedup compiler libs/libb/libb.h
new file mode 100644
index 0000000..2e4ddd0
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/libb/libb.h
@@ -0,0 +1,6 @@
+#ifndef _LIBB_H_
+#define _LIBB_H_
+
+void libb_mul(int x);
+
+#endif
diff --git a/test cases/unit/55 dedup compiler libs/libb/meson.build b/test cases/unit/55 dedup compiler libs/libb/meson.build
new file mode 100644
index 0000000..d59206f
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/libb/meson.build
@@ -0,0 +1,6 @@
+libb = library('b', 'libb.c',
+ dependencies: liba_dep)
+
+libb_dep = declare_dependency(link_with: libb,
+ include_directories: include_directories('.'),
+ dependencies: liba_dep)
diff --git a/test cases/unit/55 dedup compiler libs/meson.build b/test cases/unit/55 dedup compiler libs/meson.build
new file mode 100644
index 0000000..fad0ed5
--- /dev/null
+++ b/test cases/unit/55 dedup compiler libs/meson.build
@@ -0,0 +1,7 @@
+project('temp', 'c')
+
+cc = meson.get_compiler('c')
+
+subdir('liba')
+subdir('libb')
+subdir('app')
diff --git a/test cases/unit/55 introspection/meson.build b/test cases/unit/55 introspection/meson.build
new file mode 100644
index 0000000..98f6f22
--- /dev/null
+++ b/test cases/unit/55 introspection/meson.build
@@ -0,0 +1,20 @@
+project('introspection', ['c', 'cpp'], version: '1.2.3', default_options: ['cpp_std=c++11', 'buildtype=debug'])
+
+dep1 = dependency('threads')
+dep2 = dependency('zlib', required: false)
+
+if false
+ dependency('somethingthatdoesnotexist', required: true)
+ dependency('look_i_have_a_fallback', fallback: ['oh_no', 'the_subproject_does_not_exist'])
+endif
+
+subdir('sharedlib')
+subdir('staticlib')
+
+t1 = executable('test1', 't1.cpp', link_with: [sharedlib], install: true)
+t2 = executable('test2', 't2.cpp', link_with: [staticlib])
+t3 = executable('test3', 't3.cpp', link_with: [sharedlib, staticlib], dependencies: [dep1])
+
+test('test case 1', t1)
+test('test case 2', t2)
+benchmark('benchmark 1', t3)
diff --git a/test cases/unit/55 introspection/sharedlib/meson.build b/test cases/unit/55 introspection/sharedlib/meson.build
new file mode 100644
index 0000000..3de3493
--- /dev/null
+++ b/test cases/unit/55 introspection/sharedlib/meson.build
@@ -0,0 +1,2 @@
+SRC_shared = ['shared.cpp']
+sharedlib = shared_library('sharedTestLib', SRC_shared)
diff --git a/test cases/unit/55 introspection/sharedlib/shared.cpp b/test cases/unit/55 introspection/sharedlib/shared.cpp
new file mode 100644
index 0000000..5030ab7
--- /dev/null
+++ b/test cases/unit/55 introspection/sharedlib/shared.cpp
@@ -0,0 +1,9 @@
+#include "shared.hpp"
+
+void SharedClass::doStuff() {
+ number++;
+}
+
+int SharedClass::getNumber() const {
+ return number;
+}
diff --git a/test cases/unit/55 introspection/sharedlib/shared.hpp b/test cases/unit/55 introspection/sharedlib/shared.hpp
new file mode 100644
index 0000000..dc9b2da
--- /dev/null
+++ b/test cases/unit/55 introspection/sharedlib/shared.hpp
@@ -0,0 +1,10 @@
+#pragma once
+
+class SharedClass {
+ private:
+ int number = 42;
+ public:
+ SharedClass() = default;
+ void doStuff();
+ int getNumber() const;
+}; \ No newline at end of file
diff --git a/test cases/unit/55 introspection/staticlib/meson.build b/test cases/unit/55 introspection/staticlib/meson.build
new file mode 100644
index 0000000..b1b9afe
--- /dev/null
+++ b/test cases/unit/55 introspection/staticlib/meson.build
@@ -0,0 +1,2 @@
+SRC_static = ['static.c']
+staticlib = static_library('staticTestLib', SRC_static)
diff --git a/test cases/unit/55 introspection/staticlib/static.c b/test cases/unit/55 introspection/staticlib/static.c
new file mode 100644
index 0000000..37ebc0d
--- /dev/null
+++ b/test cases/unit/55 introspection/staticlib/static.c
@@ -0,0 +1,5 @@
+#include "static.h"
+
+int add_numbers(int a, int b) {
+ return a + b;
+} \ No newline at end of file
diff --git a/test cases/unit/55 introspection/staticlib/static.h b/test cases/unit/55 introspection/staticlib/static.h
new file mode 100644
index 0000000..506784e
--- /dev/null
+++ b/test cases/unit/55 introspection/staticlib/static.h
@@ -0,0 +1,3 @@
+#pragma once
+
+int add_numbers(int a, int b); \ No newline at end of file
diff --git a/test cases/unit/55 introspection/t1.cpp b/test cases/unit/55 introspection/t1.cpp
new file mode 100644
index 0000000..2bcaab8
--- /dev/null
+++ b/test cases/unit/55 introspection/t1.cpp
@@ -0,0 +1,13 @@
+#include "sharedlib/shared.hpp"
+
+int main() {
+ SharedClass cl1;
+ if(cl1.getNumber() != 42) {
+ return 1;
+ }
+ cl1.doStuff();
+ if(cl1.getNumber() != 43) {
+ return 2;
+ }
+ return 0;
+}
diff --git a/test cases/unit/55 introspection/t2.cpp b/test cases/unit/55 introspection/t2.cpp
new file mode 100644
index 0000000..fee5097
--- /dev/null
+++ b/test cases/unit/55 introspection/t2.cpp
@@ -0,0 +1,8 @@
+#include "staticlib/static.h"
+
+int main() {
+ if(add_numbers(1, 2) != 3) {
+ return 1;
+ }
+ return 0;
+}
diff --git a/test cases/unit/55 introspection/t3.cpp b/test cases/unit/55 introspection/t3.cpp
new file mode 100644
index 0000000..8a906e0
--- /dev/null
+++ b/test cases/unit/55 introspection/t3.cpp
@@ -0,0 +1,16 @@
+#include "sharedlib/shared.hpp"
+#include "staticlib/static.h"
+
+int main() {
+ for(int i = 0; i < 1000; add_numbers(i, 1)) {
+ SharedClass cl1;
+ if(cl1.getNumber() != 42) {
+ return 1;
+ }
+ cl1.doStuff();
+ if(cl1.getNumber() != 43) {
+ return 2;
+ }
+ }
+ return 0;
+}
diff --git a/test cases/unit/51 introspect buildoptions/c_compiler.py b/test cases/unit/56 introspect buildoptions/c_compiler.py
index c7241e7..c7241e7 100644
--- a/test cases/unit/51 introspect buildoptions/c_compiler.py
+++ b/test cases/unit/56 introspect buildoptions/c_compiler.py
diff --git a/test cases/unit/56 introspect buildoptions/main.c b/test cases/unit/56 introspect buildoptions/main.c
new file mode 100644
index 0000000..ef99ae6
--- /dev/null
+++ b/test cases/unit/56 introspect buildoptions/main.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main() {
+ printf("Hello World");
+ return 0;
+}
diff --git a/test cases/unit/51 introspect buildoptions/meson.build b/test cases/unit/56 introspect buildoptions/meson.build
index e94ef61..8052b5f 100644
--- a/test cases/unit/51 introspect buildoptions/meson.build
+++ b/test cases/unit/56 introspect buildoptions/meson.build
@@ -2,6 +2,11 @@ project('introspect buildargs', ['c'], default_options: ['c_std=c11', 'cpp_std=c
subA = subproject('projectA')
+target_name = 'MAIN'
+target_src = ['main.c']
+
+executable(target_name, target_src)
+
r = run_command(find_program('c_compiler.py'))
if r.returncode() != 0
error('FAILED')
diff --git a/test cases/unit/51 introspect buildoptions/meson_options.txt b/test cases/unit/56 introspect buildoptions/meson_options.txt
index 61f9a8d..61f9a8d 100644
--- a/test cases/unit/51 introspect buildoptions/meson_options.txt
+++ b/test cases/unit/56 introspect buildoptions/meson_options.txt
diff --git a/test cases/unit/56 introspect buildoptions/subprojects/evilFile.txt b/test cases/unit/56 introspect buildoptions/subprojects/evilFile.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/unit/56 introspect buildoptions/subprojects/evilFile.txt
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson.build b/test cases/unit/56 introspect buildoptions/subprojects/projectA/meson.build
index 1ab9ee8..1ab9ee8 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson.build
+++ b/test cases/unit/56 introspect buildoptions/subprojects/projectA/meson.build
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson_options.txt b/test cases/unit/56 introspect buildoptions/subprojects/projectA/meson_options.txt
index fa77f95..fa77f95 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/projectA/meson_options.txt
+++ b/test cases/unit/56 introspect buildoptions/subprojects/projectA/meson_options.txt
diff --git a/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson.build b/test cases/unit/56 introspect buildoptions/subprojects/projectBad/meson.build
index 500c1b9..500c1b9 100644
--- a/test cases/unit/51 introspect buildoptions/subprojects/projectBad/meson.build
+++ b/test cases/unit/56 introspect buildoptions/subprojects/projectBad/meson.build
diff --git a/test cases/unit/56 introspect buildoptions/subprojects/projectBad/meson_options.txt b/test cases/unit/56 introspect buildoptions/subprojects/projectBad/meson_options.txt
new file mode 100644
index 0000000..f15d352
--- /dev/null
+++ b/test cases/unit/56 introspect buildoptions/subprojects/projectBad/meson_options.txt
@@ -0,0 +1 @@
+option('should_not_appear', type: 'integer', min: 0, value: 125)
diff --git a/test cases/unit/57 native file override/crossfile b/test cases/unit/57 native file override/crossfile
new file mode 100644
index 0000000..9dc4fbc
--- /dev/null
+++ b/test cases/unit/57 native file override/crossfile
@@ -0,0 +1,16 @@
+[paths]
+bindir = 'binbar'
+datadir = 'databar'
+includedir = 'includebar'
+infodir = 'infobar'
+libdir = 'libbar'
+libexecdir = 'libexecbar'
+localedir = 'localebar'
+localstatedir = 'localstatebar'
+mandir = 'manbar'
+prefix = '/prefix'
+sbindir = 'sbinbar'
+sharedstatedir = 'sharedstatebar'
+sysconfdir = 'sysconfbar'
+
+; vim: ft=dosini
diff --git a/test cases/unit/57 native file override/meson.build b/test cases/unit/57 native file override/meson.build
new file mode 100644
index 0000000..8318aba
--- /dev/null
+++ b/test cases/unit/57 native file override/meson.build
@@ -0,0 +1,10 @@
+project('native file overrides')
+
+foreach o : ['bindir', 'datadir', 'includedir', 'infodir', 'libdir',
+ 'libexecdir', 'localedir', 'localstatedir', 'mandir', 'prefix',
+ 'sbindir', 'sharedstatedir', 'sysconfdir']
+ expected = get_option('def_' + o)
+ actual = get_option(o)
+ assert(expected == actual,
+ '@0@ should have been @1@, but was @2@!'.format(o, expected, actual))
+endforeach
diff --git a/test cases/unit/57 native file override/meson_options.txt b/test cases/unit/57 native file override/meson_options.txt
new file mode 100644
index 0000000..4d2abf9
--- /dev/null
+++ b/test cases/unit/57 native file override/meson_options.txt
@@ -0,0 +1,13 @@
+option('def_bindir', type: 'string', value : 'binfoo',)
+option('def_datadir', type: 'string', value : 'datafoo',)
+option('def_includedir', type: 'string', value : 'includefoo',)
+option('def_infodir', type: 'string', value : 'infofoo',)
+option('def_libdir', type: 'string', value : 'libfoo',)
+option('def_libexecdir', type: 'string', value : 'libexecfoo',)
+option('def_localedir', type: 'string', value : 'localefoo',)
+option('def_localstatedir', type: 'string', value : 'localstatefoo',)
+option('def_mandir', type: 'string', value : 'manfoo',)
+option('def_prefix', type: 'string', value : '/prefix',)
+option('def_sbindir', type: 'string', value : 'sbinfoo',)
+option('def_sharedstatedir', type: 'string', value : 'sharedstatefoo',)
+option('def_sysconfdir', type: 'string', value : 'sysconffoo',)
diff --git a/test cases/unit/57 native file override/nativefile b/test cases/unit/57 native file override/nativefile
new file mode 100644
index 0000000..a390725
--- /dev/null
+++ b/test cases/unit/57 native file override/nativefile
@@ -0,0 +1,16 @@
+[paths]
+bindir = 'binfoo'
+datadir = 'datafoo'
+includedir = 'includefoo'
+infodir = 'infofoo'
+libdir = 'libfoo'
+libexecdir = 'libexecfoo'
+localedir = 'localefoo'
+localstatedir = 'localstatefoo'
+mandir = 'manfoo'
+prefix = '/prefix'
+sbindir = 'sbinfoo'
+sharedstatedir = 'sharedstatefoo'
+sysconfdir = 'sysconffoo'
+
+; vim: ft=dosini
diff --git a/test cases/unit/58 pkgconfig relative paths/pkgconfig/librelativepath.pc b/test cases/unit/58 pkgconfig relative paths/pkgconfig/librelativepath.pc
new file mode 100644
index 0000000..dae1eed
--- /dev/null
+++ b/test cases/unit/58 pkgconfig relative paths/pkgconfig/librelativepath.pc
@@ -0,0 +1,9 @@
+prefix=../relativepath
+exec_prefix=${prefix}
+libdir=${exec_prefix}/lib
+
+Name: Relative path
+Description: Relative path library
+Version: 0.0.1
+Libs: -L${libdir} -lrelativepath
+Cflags:
diff --git a/test cases/warning/1 version for string div/a/b.c b/test cases/warning/1 version for string div/a/b.c
new file mode 100644
index 0000000..5047a34
--- /dev/null
+++ b/test cases/warning/1 version for string div/a/b.c
@@ -0,0 +1,3 @@
+int main()
+{
+}
diff --git a/test cases/warning/1 version for string div/meson.build b/test cases/warning/1 version for string div/meson.build
new file mode 100644
index 0000000..54e9708
--- /dev/null
+++ b/test cases/warning/1 version for string div/meson.build
@@ -0,0 +1,3 @@
+project('warn on string division', 'c', meson_version: '>=0.48.0')
+
+executable('prog', 'a' / 'b.c')
diff --git a/tools/cmake2meson.py b/tools/cmake2meson.py
index 5fe433c..9dc9f6e 100755
--- a/tools/cmake2meson.py
+++ b/tools/cmake2meson.py
@@ -14,9 +14,12 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys, os
+from typing import List
+from pathlib import Path
+import sys
import re
+
class Token:
def __init__(self, tid, value):
self.tid = tid
@@ -83,7 +86,7 @@ class Lexer:
raise RuntimeError('Lexer got confused line %d column %d' % (lineno, col))
class Parser:
- def __init__(self, code):
+ def __init__(self, code: str):
self.stream = Lexer().lex(code)
self.getsym()
@@ -140,13 +143,13 @@ class Converter:
'enable_testing': True,
'include': True}
- def __init__(self, cmake_root):
- self.cmake_root = cmake_root
+ def __init__(self, cmake_root: str):
+ self.cmake_root = Path(cmake_root).expanduser()
self.indent_unit = ' '
self.indent_level = 0
- self.options = []
+ self.options = [] # type: List[tuple]
- def convert_args(self, args, as_array=True):
+ def convert_args(self, args: List[Token], as_array: bool = True):
res = []
if as_array:
start = '['
@@ -200,7 +203,7 @@ class Converter:
libcmd = 'static_library'
args = [t.args[0]] + t.args[2:]
else:
- libcmd = 'static_library'
+ libcmd = 'library'
args = t.args
line = '%s_lib = %s(%s)' % (t.args[0].value, libcmd, self.convert_args(args, False))
elif t.name == 'add_test':
@@ -223,13 +226,24 @@ class Converter:
l = 'cpp'
args.append(l)
args = ["'%s'" % i for i in args]
- line = 'project(' + ', '.join(args) + ')'
+ line = 'project(' + ', '.join(args) + ", default_options : ['default_library=static'])"
elif t.name == 'set':
varname = t.args[0].value.lower()
line = '%s = %s\n' % (varname, self.convert_args(t.args[1:]))
elif t.name == 'if':
postincrement = 1
- line = 'if %s' % self.convert_args(t.args, False)
+ try:
+ line = 'if %s' % self.convert_args(t.args, False)
+ except AttributeError: # complex if statements
+ line = t.name
+ for arg in t.args:
+ if isinstance(arg, Token):
+ line += ' ' + arg.value
+ elif isinstance(arg, list):
+ line += ' ('
+ for a in arg:
+ line += ' ' + a.value
+ line += ' )'
elif t.name == 'elseif':
preincrement = -1
postincrement = 1
@@ -251,32 +265,32 @@ class Converter:
outfile.write('\n')
self.indent_level += postincrement
- def convert(self, subdir=''):
- if subdir == '':
+ def convert(self, subdir: Path = None):
+ if not subdir:
subdir = self.cmake_root
- cfile = os.path.join(subdir, 'CMakeLists.txt')
+ cfile = Path(subdir).expanduser() / 'CMakeLists.txt'
try:
- with open(cfile) as f:
+ with cfile.open() as f:
cmakecode = f.read()
except FileNotFoundError:
- print('\nWarning: No CMakeLists.txt in', subdir, '\n')
+ print('\nWarning: No CMakeLists.txt in', subdir, '\n', file=sys.stderr)
return
p = Parser(cmakecode)
- with open(os.path.join(subdir, 'meson.build'), 'w') as outfile:
+ with (subdir / 'meson.build').open('w') as outfile:
for t in p.parse():
if t.name == 'add_subdirectory':
# print('\nRecursing to subdir',
- # os.path.join(self.cmake_root, t.args[0].value),
+ # self.cmake_root / t.args[0].value,
# '\n')
- self.convert(os.path.join(subdir, t.args[0].value))
+ self.convert(subdir / t.args[0].value)
# print('\nReturning to', self.cmake_root, '\n')
self.write_entry(outfile, t)
if subdir == self.cmake_root and len(self.options) > 0:
self.write_options()
def write_options(self):
- filename = os.path.join(self.cmake_root, 'meson_options.txt')
- with open(filename, 'w') as optfile:
+ filename = self.cmake_root / 'meson_options.txt'
+ with filename.open('w') as optfile:
for o in self.options:
(optname, description, default) = o
if default is None: