aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.github/workflows/images.yml1
-rw-r--r--.github/workflows/lint_mypy.yml5
-rw-r--r--.github/workflows/os_comp.yml5
-rw-r--r--.github/workflows/unusedargs_missingreturn.yml6
-rw-r--r--.gitignore1
-rw-r--r--.mypy.ini (renamed from mypy.ini)0
-rw-r--r--.travis.yml3
-rw-r--r--MANIFEST.in6
-rw-r--r--azure-pipelines.yml29
-rwxr-xr-xci/ciimage/arch/install.sh4
-rw-r--r--ci/ciimage/bionic/image.json8
-rwxr-xr-xci/ciimage/bionic/install.sh59
-rwxr-xr-xci/ciimage/build.py5
-rwxr-xr-xci/ciimage/eoan/install.sh4
-rwxr-xr-xci/ciimage/fedora/install.sh4
-rwxr-xr-xci/ciimage/opensuse/install.sh8
-rw-r--r--ci/run.ps13
-rwxr-xr-xci/travis_install.sh4
-rwxr-xr-xci/travis_script.sh8
-rw-r--r--cross/armcc.txt2
-rw-r--r--cross/armclang-linux.txt5
-rw-r--r--cross/armclang.txt2
-rw-r--r--cross/c2000.txt6
-rw-r--r--cross/ccrx.txt2
-rw-r--r--cross/iphone.txt6
-rw-r--r--cross/tvos.txt7
-rw-r--r--cross/ubuntu-armhf.txt6
-rw-r--r--cross/wasm.txt3
-rw-r--r--cross/xc16.txt2
-rw-r--r--data/macros.meson25
-rw-r--r--data/schema.xsd96
-rw-r--r--data/syntax-highlighting/vim/syntax/meson.vim3
-rw-r--r--data/test.schema.json25
-rw-r--r--docs/markdown/Adding-new-projects-to-wrapdb.md53
-rw-r--r--docs/markdown/Build-options.md3
-rw-r--r--docs/markdown/Builtin-options.md105
-rw-r--r--docs/markdown/CMake-module.md77
-rw-r--r--docs/markdown/Configuring-a-build-directory.md4
-rw-r--r--docs/markdown/Continuous-Integration.md18
-rw-r--r--docs/markdown/Contributing.md41
-rw-r--r--docs/markdown/Creating-OSX-packages.md2
-rw-r--r--docs/markdown/Creating-releases.md67
-rw-r--r--docs/markdown/Cross-compilation.md9
-rw-r--r--docs/markdown/Dependencies.md13
-rw-r--r--docs/markdown/Design-rationale.md4
-rw-r--r--docs/markdown/FAQ.md4
-rw-r--r--docs/markdown/Feature-autodetection.md4
-rw-r--r--docs/markdown/Gnome-module.md10
-rw-r--r--docs/markdown/IDE-integration.md96
-rw-r--r--docs/markdown/IndepthTutorial.md6
-rw-r--r--docs/markdown/Installing.md20
-rw-r--r--docs/markdown/Keyval-module.md (renamed from docs/markdown/Kconfig-module.md)27
-rw-r--r--docs/markdown/Localisation.md4
-rw-r--r--docs/markdown/Machine-files.md192
-rw-r--r--docs/markdown/Meson-sample.md2
-rw-r--r--docs/markdown/MesonCI.md53
-rw-r--r--docs/markdown/Precompiled-headers.md2
-rw-r--r--docs/markdown/Project-templates.md27
-rw-r--r--docs/markdown/Qt5-module.md2
-rw-r--r--docs/markdown/Quick-guide.md16
-rw-r--r--docs/markdown/Reference-manual.md988
-rw-r--r--docs/markdown/Reference-tables.md135
-rw-r--r--docs/markdown/Release-notes-for-0.54.0.md7
-rw-r--r--docs/markdown/Release-notes-for-0.55.0.md307
-rw-r--r--docs/markdown/Run-targets.md2
-rw-r--r--docs/markdown/Running-Meson.md45
-rw-r--r--docs/markdown/Style-guide.md6
-rw-r--r--docs/markdown/Subprojects.md38
-rw-r--r--docs/markdown/Syntax.md190
-rw-r--r--docs/markdown/Tutorial.md6
-rw-r--r--docs/markdown/Unit-tests.md153
-rw-r--r--docs/markdown/Users.md5
-rw-r--r--docs/markdown/Using-multiple-build-directories.md4
-rw-r--r--docs/markdown/Vala.md2
-rw-r--r--docs/markdown/Vs-External.md6
-rw-r--r--docs/markdown/Wrap-dependency-system-manual.md125
-rw-r--r--docs/markdown/Wrap-maintainer-tools.md17
-rw-r--r--docs/markdown/Wrap-review-guidelines.md42
-rw-r--r--docs/markdown/howtox.md46
-rw-r--r--docs/markdown/legal.md2
-rw-r--r--docs/markdown/snippets/dist_not_tests.md5
-rw-r--r--docs/markdown/snippets/keyval.md7
-rw-r--r--docs/markdown/snippets/per_subproject.md4
-rw-r--r--docs/markdown/snippets/project_options_in_machine_files.md52
-rw-r--r--docs/markdown/snippets/wrap_fallback.md4
-rw-r--r--docs/markdown_dynamic/Commands.md296
-rw-r--r--docs/meson.build32
-rw-r--r--docs/sitemap.txt6
-rw-r--r--docs/theme/extra/templates/navbar_links.html2
-rw-r--r--man/meson.12
-rw-r--r--mesonbuild/arglist.py331
-rw-r--r--mesonbuild/ast/__init__.py3
-rw-r--r--mesonbuild/ast/interpreter.py5
-rw-r--r--mesonbuild/ast/introspection.py2
-rw-r--r--mesonbuild/ast/printer.py160
-rw-r--r--mesonbuild/ast/visitor.py3
-rw-r--r--mesonbuild/backend/backends.py253
-rw-r--r--mesonbuild/backend/ninjabackend.py476
-rw-r--r--mesonbuild/backend/vs2010backend.py79
-rw-r--r--mesonbuild/build.py57
-rw-r--r--mesonbuild/cmake/__init__.py5
-rw-r--r--mesonbuild/cmake/common.py95
-rwxr-xr-xmesonbuild/cmake/data/run_ctgt.py96
-rw-r--r--mesonbuild/cmake/executor.py51
-rw-r--r--mesonbuild/cmake/interpreter.py77
-rw-r--r--mesonbuild/cmake/traceparser.py26
-rw-r--r--mesonbuild/compilers/__init__.py2
-rw-r--r--mesonbuild/compilers/c.py5
-rw-r--r--mesonbuild/compilers/c_function_attributes.py2
-rw-r--r--mesonbuild/compilers/compilers.py394
-rw-r--r--mesonbuild/compilers/cpp.py11
-rw-r--r--mesonbuild/compilers/cuda.py10
-rw-r--r--mesonbuild/compilers/d.py28
-rw-r--r--mesonbuild/compilers/fortran.py14
-rw-r--r--mesonbuild/compilers/mixins/arm.py28
-rw-r--r--mesonbuild/compilers/mixins/clang.py17
-rw-r--r--mesonbuild/compilers/mixins/clike.py185
-rw-r--r--mesonbuild/compilers/mixins/gnu.py2
-rw-r--r--mesonbuild/compilers/mixins/islinker.py7
-rw-r--r--mesonbuild/compilers/mixins/visualstudio.py5
-rw-r--r--mesonbuild/compilers/objc.py2
-rw-r--r--mesonbuild/compilers/objcpp.py2
-rw-r--r--mesonbuild/coredata.py291
-rw-r--r--mesonbuild/dependencies/base.py164
-rw-r--r--mesonbuild/dependencies/boost.py319
-rw-r--r--mesonbuild/dependencies/cuda.py16
-rw-r--r--mesonbuild/dependencies/misc.py54
-rw-r--r--mesonbuild/dependencies/ui.py61
-rw-r--r--mesonbuild/envconfig.py83
-rw-r--r--mesonbuild/environment.py272
-rw-r--r--mesonbuild/interpreter.py486
-rw-r--r--mesonbuild/interpreterbase.py106
-rw-r--r--mesonbuild/linkers.py105
-rw-r--r--mesonbuild/mcompile.py308
-rw-r--r--mesonbuild/mconf.py31
-rw-r--r--mesonbuild/mdist.py4
-rw-r--r--mesonbuild/mesondata.py374
-rw-r--r--mesonbuild/mesonlib.py48
-rw-r--r--mesonbuild/minit.py6
-rw-r--r--mesonbuild/minstall.py2
-rw-r--r--mesonbuild/mintro.py52
-rw-r--r--mesonbuild/mlog.py33
-rw-r--r--mesonbuild/modules/__init__.py11
-rw-r--r--mesonbuild/modules/cmake.py113
-rw-r--r--mesonbuild/modules/gnome.py134
-rw-r--r--mesonbuild/modules/keyval.py (renamed from mesonbuild/modules/unstable_kconfig.py)10
-rw-r--r--mesonbuild/modules/pkgconfig.py126
-rw-r--r--mesonbuild/modules/python.py12
-rw-r--r--mesonbuild/modules/qt.py58
-rw-r--r--mesonbuild/modules/qt4.py3
-rw-r--r--mesonbuild/modules/qt5.py3
-rw-r--r--mesonbuild/modules/windows.py2
-rw-r--r--mesonbuild/mparser.py12
-rw-r--r--mesonbuild/msetup.py2
-rw-r--r--mesonbuild/mtest.py281
-rw-r--r--mesonbuild/optinterpreter.py21
-rwxr-xr-xmesonbuild/scripts/cmake_run_ctgt.py100
-rw-r--r--mesonbuild/scripts/coverage.py46
-rw-r--r--mesonbuild/scripts/depfixer.py32
-rw-r--r--mesonbuild/scripts/gtkdochelper.py8
-rw-r--r--mesonbuild/scripts/symbolextractor.py35
-rw-r--r--mesonbuild/wrap/wrap.py207
-rw-r--r--msi/createmsi.py14
-rwxr-xr-xrun_cross_test.py6
-rwxr-xr-xrun_meson_command_tests.py10
-rwxr-xr-xrun_project_tests.py258
-rwxr-xr-xrun_tests.py11
-rwxr-xr-xrun_unittests.py1321
-rw-r--r--setup.py5
-rw-r--r--test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt2
-rw-r--r--test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp4
-rw-r--r--test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp4
-rw-r--r--test cases/cmake/10 header only/main.cpp6
-rw-r--r--test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt1
-rw-r--r--test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp5
-rw-r--r--test cases/cmake/19 advanced options/main.cpp18
-rw-r--r--test cases/cmake/19 advanced options/meson.build29
-rw-r--r--test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt18
-rw-r--r--test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp31
-rw-r--r--test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp14
-rw-r--r--test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp25
-rw-r--r--test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp3
-rw-r--r--test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp10
-rw-r--r--test cases/cmake/19 advanced options/test.json8
-rw-r--r--test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt2
-rw-r--r--test cases/cmake/2 advanced/test.json5
-rw-r--r--test cases/cmake/20 cmake file/foolib.cmake.in1
-rw-r--r--test cases/cmake/20 cmake file/meson.build14
-rw-r--r--test cases/cmake/20 cmake file/test.json5
-rw-r--r--test cases/cmake/3 advanced no dep/test.json5
-rw-r--r--test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt5
-rw-r--r--test cases/cmake/7 cmake options/test.json9
-rw-r--r--test cases/common/102 subproject subdir/meson.build29
-rw-r--r--test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap6
-rw-r--r--test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build11
-rw-r--r--test cases/common/104 postconf with args/meson.build7
-rw-r--r--test cases/common/109 generatorcustom/meson.build4
-rwxr-xr-xtest cases/common/125 object only target/obj_generator.py2
-rw-r--r--test cases/common/145 special characters/arg-char-test.c10
-rw-r--r--test cases/common/145 special characters/arg-string-test.c12
-rw-r--r--test cases/common/145 special characters/arg-unquoted-test.c17
-rw-r--r--test cases/common/145 special characters/meson.build38
-rw-r--r--test cases/common/157 wrap file should not failed/meson.build8
-rw-r--r--test cases/common/157 wrap file should not failed/src/meson.build4
-rw-r--r--test cases/common/157 wrap file should not failed/src/test.c9
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/.gitignore2
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/bar.wrap8
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c3
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build2
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/foo.wrap4
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xzbin232 -> 244 bytes
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xzbin180 -> 196 bytes
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xzbin0 -> 244 bytes
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xzbin0 -> 200 bytes
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build2
-rw-r--r--test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap9
-rw-r--r--test cases/common/163 disabler/meson.build28
-rw-r--r--test cases/common/187 find override/meson.build3
-rw-r--r--test cases/common/187 find override/subprojects/sub.wrap5
-rw-r--r--test cases/common/187 find override/subprojects/sub/meson.build4
-rw-r--r--test cases/common/201 override with exe/meson.build8
-rw-r--r--test cases/common/201 override with exe/subprojects/sub/meson.build2
-rw-r--r--test cases/common/222 source set realistic example/meson.build6
-rw-r--r--test cases/common/226 include_type dependency/main.cpp8
-rw-r--r--test cases/common/226 include_type dependency/meson.build10
-rw-r--r--test cases/common/226 include_type dependency/pch/test.hpp1
-rw-r--r--test cases/common/230 persubproject options/meson.build4
-rw-r--r--test cases/common/230 persubproject options/subprojects/sub1/foo.c3
-rw-r--r--test cases/common/230 persubproject options/subprojects/sub1/meson.build4
-rw-r--r--test cases/common/232 link language/c_linkage.cpp5
-rw-r--r--test cases/common/232 link language/c_linkage.h10
-rw-r--r--test cases/common/232 link language/lib.cpp5
-rw-r--r--test cases/common/232 link language/main.c5
-rw-r--r--test cases/common/232 link language/meson.build18
-rw-r--r--test cases/common/233 link depends indexed custom target/foo.c15
-rw-r--r--test cases/common/233 link depends indexed custom target/make_file.py8
-rw-r--r--test cases/common/233 link depends indexed custom target/meson.build19
-rwxr-xr-xtest cases/common/234 very long commmand line/codegen.py6
-rw-r--r--test cases/common/234 very long commmand line/main.c5
-rw-r--r--test cases/common/234 very long commmand line/meson.build44
-rwxr-xr-xtest cases/common/234 very long commmand line/seq.py6
-rw-r--r--test cases/common/36 tryrun/meson.build2
-rw-r--r--test cases/common/38 string operations/meson.build15
-rw-r--r--test cases/common/43 options/meson_options.txt6
-rw-r--r--test cases/common/47 pkgconfig-gen/dependencies/main.c4
-rw-r--r--test cases/common/47 pkgconfig-gen/meson.build39
-rw-r--r--test cases/common/47 pkgconfig-gen/simple5.c6
-rw-r--r--test cases/common/47 pkgconfig-gen/test.json6
-rwxr-xr-xtest cases/common/56 install script/customtarget.py19
-rw-r--r--test cases/common/56 install script/meson.build26
-rw-r--r--test cases/common/56 install script/myinstall.py29
-rw-r--r--test cases/common/56 install script/src/a file.txt (renamed from test cases/unit/74 dep files/foo.c)0
-rw-r--r--test cases/common/56 install script/src/exe.c24
-rw-r--r--test cases/common/56 install script/src/meson.build4
-rw-r--r--test cases/common/56 install script/src/myinstall.py4
-rw-r--r--test cases/common/56 install script/test.json10
-rwxr-xr-xtest cases/common/56 install script/wrap.py6
-rw-r--r--test cases/common/93 selfbuilt custom/meson.build2
-rw-r--r--test cases/failing/1 project not first/test.json7
-rw-r--r--test cases/failing/10 out of bounds/test.json7
-rw-r--r--test cases/failing/100 fallback consistency/test.json7
-rw-r--r--test cases/failing/101 no native compiler/test.json7
-rw-r--r--test cases/failing/102 subdir parse error/test.json7
-rw-r--r--test cases/failing/103 invalid option file/test.json7
-rw-r--r--test cases/failing/104 no lang/test.json7
-rw-r--r--test cases/failing/105 no glib-compile-resources/test.json7
-rw-r--r--test cases/failing/106 fallback consistency/meson.build3
-rw-r--r--test cases/failing/106 fallback consistency/subprojects/foo.wrap6
-rw-r--r--test cases/failing/106 fallback consistency/subprojects/foo/meson.build6
-rw-r--r--test cases/failing/106 fallback consistency/test.json7
-rw-r--r--test cases/failing/107 number in combo/meson.build1
-rw-r--r--test cases/failing/107 number in combo/nativefile.ini2
-rw-r--r--test cases/failing/107 number in combo/test.json5
-rw-r--r--test cases/failing/108 bool in combo/meson.build1
-rw-r--r--test cases/failing/108 bool in combo/meson_options.txt5
-rw-r--r--test cases/failing/108 bool in combo/nativefile.ini2
-rw-r--r--test cases/failing/108 bool in combo/test.json5
-rw-r--r--test cases/failing/11 object arithmetic/test.json8
-rw-r--r--test cases/failing/12 string arithmetic/test.json8
-rw-r--r--test cases/failing/13 array arithmetic/test.json7
-rw-r--r--test cases/failing/14 invalid option name/test.json7
-rw-r--r--test cases/failing/15 kwarg before arg/test.json7
-rw-r--r--test cases/failing/16 extract from subproject/test.json7
-rw-r--r--test cases/failing/17 same target/test.json7
-rw-r--r--test cases/failing/18 wrong plusassign/test.json7
-rw-r--r--test cases/failing/19 target clash/meson.build2
-rw-r--r--test cases/failing/19 target clash/test.json7
-rw-r--r--test cases/failing/2 missing file/test.json7
-rw-r--r--test cases/failing/20 version/test.json8
-rw-r--r--test cases/failing/21 subver/test.json7
-rw-r--r--test cases/failing/22 assert/test.json7
-rw-r--r--test cases/failing/23 rel testdir/test.json7
-rw-r--r--test cases/failing/24 int conversion/test.json7
-rw-r--r--test cases/failing/25 badlang/test.json7
-rw-r--r--test cases/failing/26 output subdir/test.json7
-rw-r--r--test cases/failing/27 noprog use/test.json7
-rw-r--r--test cases/failing/28 no crossprop/test.json7
-rw-r--r--test cases/failing/29 nested ternary/test.json7
-rw-r--r--test cases/failing/3 missing subdir/test.json9
-rw-r--r--test cases/failing/30 invalid man extension/test.json7
-rw-r--r--test cases/failing/31 no man extension/test.json7
-rw-r--r--test cases/failing/32 exe static shared/meson.build2
-rw-r--r--test cases/failing/32 exe static shared/test.json7
-rw-r--r--test cases/failing/33 non-root subproject/test.json7
-rw-r--r--test cases/failing/34 dependency not-required then required/test.json8
-rw-r--r--test cases/failing/35 project argument after target/test.json7
-rw-r--r--test cases/failing/36 pkgconfig dependency impossible conditions/meson.build4
-rw-r--r--test cases/failing/36 pkgconfig dependency impossible conditions/test.json7
-rw-r--r--test cases/failing/37 has function external dependency/test.json7
-rw-r--r--test cases/failing/38 libdir must be inside prefix/test.json9
-rw-r--r--test cases/failing/39 prefix absolute/test.json10
-rw-r--r--test cases/failing/4 missing meson.build/test.json9
-rw-r--r--test cases/failing/40 kwarg assign/test.json7
-rw-r--r--test cases/failing/41 custom target plainname many inputs/test.json7
-rw-r--r--test cases/failing/42 custom target outputs not matching install_dirs/meson.build2
-rw-r--r--test cases/failing/42 custom target outputs not matching install_dirs/test.json35
-rw-r--r--test cases/failing/43 project name colon/test.json7
-rw-r--r--test cases/failing/44 abs subdir/test.json7
-rw-r--r--test cases/failing/45 abspath to srcdir/test.json7
-rw-r--r--test cases/failing/46 pkgconfig variables reserved/test.json7
-rw-r--r--test cases/failing/47 pkgconfig variables zero length/test.json7
-rw-r--r--test cases/failing/48 pkgconfig variables zero length value/test.json7
-rw-r--r--test cases/failing/49 pkgconfig variables not key value/test.json7
-rw-r--r--test cases/failing/5 misplaced option/test.json7
-rw-r--r--test cases/failing/50 executable comparison/test.json7
-rw-r--r--test cases/failing/51 inconsistent comparison/test.json7
-rw-r--r--test cases/failing/52 slashname/test.json7
-rw-r--r--test cases/failing/53 reserved meson prefix/test.json7
-rw-r--r--test cases/failing/54 wrong shared crate type/meson.build6
-rw-r--r--test cases/failing/54 wrong shared crate type/test.json7
-rw-r--r--test cases/failing/55 wrong static crate type/meson.build6
-rw-r--r--test cases/failing/55 wrong static crate type/test.json7
-rw-r--r--test cases/failing/56 or on new line/test.json7
-rw-r--r--test cases/failing/57 kwarg in module/test.json7
-rw-r--r--test cases/failing/58 link with executable/test.json7
-rw-r--r--test cases/failing/59 assign custom target index/test.json7
-rw-r--r--test cases/failing/6 missing incdir/test.json7
-rw-r--r--test cases/failing/60 getoption prefix/test.json7
-rw-r--r--test cases/failing/61 bad option argument/test.json7
-rw-r--r--test cases/failing/62 subproj filegrab/test.json7
-rw-r--r--test cases/failing/63 grab subproj/test.json7
-rw-r--r--test cases/failing/64 grab sibling/test.json7
-rw-r--r--test cases/failing/65 string as link target/test.json7
-rw-r--r--test cases/failing/66 dependency not-found and required/test.json7
-rw-r--r--test cases/failing/67 subproj different versions/test.json7
-rw-r--r--test cases/failing/68 wrong boost module/meson.build4
-rw-r--r--test cases/failing/68 wrong boost module/test.json7
-rw-r--r--test cases/failing/69 install_data rename bad size/test.json7
-rw-r--r--test cases/failing/7 go to subproject/test.json7
-rw-r--r--test cases/failing/70 skip only subdir/test.json7
-rw-r--r--test cases/failing/71 dual override/test.json7
-rw-r--r--test cases/failing/72 override used/test.json7
-rw-r--r--test cases/failing/73 run_command unclean exit/test.json8
-rw-r--r--test cases/failing/74 int literal leading zero/test.json8
-rw-r--r--test cases/failing/75 configuration immutable/test.json7
-rw-r--r--test cases/failing/76 link with shared module on osx/meson.build2
-rw-r--r--test cases/failing/76 link with shared module on osx/test.json7
-rw-r--r--test cases/failing/77 non ascii in ascii encoded configure file/test.json8
-rw-r--r--test cases/failing/78 subproj dependency not-found and required/test.json7
-rw-r--r--test cases/failing/79 unfound run/test.json7
-rw-r--r--test cases/failing/8 recursive/test.json7
-rw-r--r--test cases/failing/80 framework dependency with version/meson.build6
-rw-r--r--test cases/failing/80 framework dependency with version/test.json7
-rw-r--r--test cases/failing/81 override exe config/test.json7
-rw-r--r--test cases/failing/82 gl dependency with version/meson.build2
-rw-r--r--test cases/failing/82 gl dependency with version/test.json7
-rw-r--r--test cases/failing/83 threads dependency with version/test.json7
-rw-r--r--test cases/failing/84 gtest dependency with version/meson.build5
-rw-r--r--test cases/failing/84 gtest dependency with version/test.json7
-rw-r--r--test cases/failing/85 dub libray/meson.build10
-rw-r--r--test cases/failing/85 dub libray/test.json7
-rw-r--r--test cases/failing/86 dub executable/meson.build10
-rw-r--r--test cases/failing/86 dub executable/test.json7
-rw-r--r--test cases/failing/87 dub compiler/meson.build10
-rw-r--r--test cases/failing/87 dub compiler/test.json14
-rw-r--r--test cases/failing/88 subproj not-found dep/test.json7
-rw-r--r--test cases/failing/89 invalid configure file/test.json7
-rw-r--r--test cases/failing/9 missing extra file/test.json7
-rw-r--r--test cases/failing/90 kwarg dupe/test.json7
-rw-r--r--test cases/failing/91 missing pch file/test.json8
-rw-r--r--test cases/failing/92 pch source different folder/test.json7
-rw-r--r--test cases/failing/93 vala without c/test.json7
-rw-r--r--test cases/failing/94 unknown config tool/test.json7
-rw-r--r--test cases/failing/95 custom target install data/test.json7
-rw-r--r--test cases/failing/96 add dict non string key/test.json7
-rw-r--r--test cases/failing/97 add dict duplicate keys/test.json7
-rw-r--r--test cases/failing/98 fallback consistency/test.json7
-rw-r--r--test cases/failing/99 no native prop/test.json7
-rw-r--r--test cases/fortran/7 generated/meson.build11
-rw-r--r--test cases/fortran/7 generated/mod1.fpp4
-rw-r--r--test cases/fortran/7 generated/mod2.fpp6
-rw-r--r--test cases/fortran/7 generated/mod3.fpp6
-rw-r--r--test cases/fortran/7 generated/prog.f909
-rw-r--r--test cases/frameworks/1 boost/meson.build2
-rw-r--r--test cases/frameworks/2 gtest/meson.build4
-rw-r--r--test cases/frameworks/21 libwmf/meson.build2
-rw-r--r--test cases/frameworks/23 hotdoc/test.json5
-rw-r--r--test cases/frameworks/32 boost root/boost/include/boost/version.hpp3
-rw-r--r--test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib0
-rw-r--r--test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib0
-rw-r--r--test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.00
-rw-r--r--test cases/frameworks/32 boost root/meson.build6
-rw-r--r--test cases/frameworks/32 boost root/nativefile.ini.in2
-rw-r--r--test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp3
-rw-r--r--test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib0
-rw-r--r--test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib0
-rw-r--r--test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.00
-rw-r--r--test cases/frameworks/33 boost split root/meson.build6
-rw-r--r--test cases/frameworks/33 boost split root/nativefile.ini.in3
-rw-r--r--test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c124
-rw-r--r--test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h21
-rw-r--r--test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build22
-rw-r--r--test cases/frameworks/7 gnome/gir/dep1/meson.build5
-rw-r--r--test cases/frameworks/7 gnome/gir/meson.build2
-rw-r--r--test cases/frameworks/7 gnome/mkenums/meson.build8
-rw-r--r--test cases/frameworks/7 gnome/test.json4
-rw-r--r--test cases/java/3 args/meson.build4
-rw-r--r--test cases/keyval/1 basic/.config (renamed from test cases/kconfig/1 basic/.config)0
-rw-r--r--test cases/keyval/1 basic/meson.build (renamed from test cases/kconfig/1 basic/meson.build)6
-rw-r--r--test cases/keyval/1 basic/test.json7
-rw-r--r--test cases/keyval/2 subdir/.config (renamed from test cases/kconfig/2 subdir/.config)0
-rw-r--r--test cases/keyval/2 subdir/dir/meson.build (renamed from test cases/kconfig/2 subdir/dir/meson.build)2
-rw-r--r--test cases/keyval/2 subdir/meson.build (renamed from test cases/kconfig/3 load_config files/meson.build)2
-rw-r--r--test cases/keyval/3 load_config files/dir/config (renamed from test cases/kconfig/3 load_config files/dir/config)0
-rw-r--r--test cases/keyval/3 load_config files/dir/meson.build (renamed from test cases/kconfig/3 load_config files/dir/meson.build)2
-rw-r--r--test cases/keyval/3 load_config files/meson.build (renamed from test cases/kconfig/2 subdir/meson.build)2
-rw-r--r--test cases/keyval/4 load_config builddir/config (renamed from test cases/kconfig/4 load_config builddir/config)0
-rw-r--r--test cases/keyval/4 load_config builddir/meson.build (renamed from test cases/kconfig/4 load_config builddir/meson.build)4
-rwxr-xr-xtest cases/linuxlike/13 cmake dependency/cmVers.sh6
-rw-r--r--test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake9
-rw-r--r--test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake9
-rw-r--r--test cases/linuxlike/13 cmake dependency/meson.build21
-rw-r--r--test cases/linuxlike/13 cmake dependency/test.json2
-rw-r--r--test cases/linuxlike/3 linker script/meson.build6
-rw-r--r--test cases/linuxlike/5 dependency versions/meson.build4
-rw-r--r--test cases/python/1 basic/meson.build3
-rwxr-xr-xtest cases/python/1 basic/prog.py3
-rwxr-xr-xtest cases/python/1 basic/subdir/subprog.py3
-rwxr-xr-xtest cases/python/2 extmodule/blaster.py7
-rw-r--r--test cases/python/2 extmodule/meson.build31
-rwxr-xr-xtest cases/python/3 cython/cytest.py10
-rw-r--r--test cases/python/3 cython/meson.build34
-rw-r--r--test cases/python/4 custom target depends extmodule/blaster.py6
-rw-r--r--test cases/python/4 custom target depends extmodule/meson.build39
-rw-r--r--test cases/python/5 modules kwarg/meson.build2
-rw-r--r--test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap3
-rw-r--r--test cases/unit/35 dist script/meson.build1
-rw-r--r--test cases/unit/36 exe_wrapper behaviour/meson.build2
-rw-r--r--test cases/unit/40 external, internal library rpath/built library/meson.build5
-rw-r--r--test cases/unit/40 external, internal library rpath/external library/meson.build6
-rw-r--r--test cases/unit/57 introspection/meson.build17
-rwxr-xr-xtest cases/unit/61 identity cross/build_wrapper.py10
-rwxr-xr-xtest cases/unit/61 identity cross/host_wrapper.py10
-rwxr-xr-xtest cases/unit/72 cross test passed/exewrapper.py24
-rw-r--r--test cases/unit/72 cross test passed/meson.build19
-rw-r--r--test cases/unit/72 cross test passed/meson_options.txt5
-rw-r--r--test cases/unit/72 cross test passed/script.py7
-rw-r--r--test cases/unit/72 cross test passed/src/main.c6
-rw-r--r--test cases/unit/73 summary/meson.build (renamed from test cases/unit/72 summary/meson.build)1
-rw-r--r--test cases/unit/73 summary/subprojects/sub/meson.build (renamed from test cases/unit/72 summary/subprojects/sub/meson.build)0
-rw-r--r--test cases/unit/73 summary/subprojects/sub2/meson.build (renamed from test cases/unit/72 summary/subprojects/sub2/meson.build)0
-rw-r--r--test cases/unit/74 wrap file url/meson.build (renamed from test cases/unit/73 wrap file url/meson.build)0
-rw-r--r--test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz (renamed from test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz)bin228 -> 228 bytes
-rw-r--r--test cases/unit/74 wrap file url/subprojects/foo.tar.xz (renamed from test cases/unit/73 wrap file url/subprojects/foo.tar.xz)bin216 -> 216 bytes
-rw-r--r--test cases/unit/75 dep files/foo.c0
-rw-r--r--test cases/unit/75 dep files/meson.build (renamed from test cases/unit/74 dep files/meson.build)0
-rw-r--r--test cases/unit/77 pkgconfig prefixes/client/client.c8
-rw-r--r--test cases/unit/77 pkgconfig prefixes/client/meson.build3
-rw-r--r--test cases/unit/77 pkgconfig prefixes/val1/meson.build5
-rw-r--r--test cases/unit/77 pkgconfig prefixes/val1/val1.c3
-rw-r--r--test cases/unit/77 pkgconfig prefixes/val1/val1.h1
-rw-r--r--test cases/unit/77 pkgconfig prefixes/val2/meson.build8
-rw-r--r--test cases/unit/77 pkgconfig prefixes/val2/val2.c4
-rw-r--r--test cases/unit/77 pkgconfig prefixes/val2/val2.h1
-rw-r--r--test cases/unit/78 subdir libdir/meson.build2
-rw-r--r--test cases/unit/78 subdir libdir/subprojects/flub/meson.build1
-rw-r--r--test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore1
-rw-r--r--test cases/unit/79 user options for subproject/75 user options for subproject/meson.build3
-rw-r--r--test cases/unit/80 global-rpath/meson.build3
-rw-r--r--test cases/unit/80 global-rpath/rpathified.cpp6
-rw-r--r--test cases/unit/80 global-rpath/yonder/meson.build5
-rw-r--r--test cases/unit/80 global-rpath/yonder/yonder.cpp3
-rw-r--r--test cases/unit/80 global-rpath/yonder/yonder.h1
-rw-r--r--test cases/unit/81 wrap-git/meson.build4
-rw-r--r--test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build3
-rw-r--r--test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c4
-rw-r--r--test cases/warning/1 version for string div/test.json8
-rw-r--r--test cases/warning/2 languages missing native/meson.build3
-rw-r--r--test cases/warning/2 languages missing native/test.json7
-rw-r--r--test cases/windows/17 msvc ndebug/main.cpp9
-rw-r--r--test cases/windows/17 msvc ndebug/meson.build7
-rwxr-xr-xtools/boost_names.py56
-rwxr-xr-xtools/build_website.py51
-rw-r--r--tools/copy_files.py55
-rwxr-xr-xtools/dircondenser.py4
-rwxr-xr-xtools/gen_data.py139
-rwxr-xr-xtools/regenerate_docs.py150
497 files changed, 11374 insertions, 3367 deletions
diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml
index bde2223..0cf9156 100644
--- a/.github/workflows/images.yml
+++ b/.github/workflows/images.yml
@@ -31,6 +31,7 @@ jobs:
- { name: CUDA (on Arch), id: cuda }
- { name: Fedora, id: fedora }
- { name: OpenSUSE, id: opensuse }
+ - { name: Ubuntu Bionic, id: bionic }
- { name: Ubuntu Eoan, id: eoan }
steps:
- uses: actions/checkout@v2
diff --git a/.github/workflows/lint_mypy.yml b/.github/workflows/lint_mypy.yml
index c826729..056f96e 100644
--- a/.github/workflows/lint_mypy.yml
+++ b/.github/workflows/lint_mypy.yml
@@ -19,7 +19,8 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: '3.x'
- - run: python -m pip install pylint
+ # pylint version constraint can be removed when https://github.com/PyCQA/pylint/issues/3524 is resolved
+ - run: python -m pip install pylint==2.4.4
- run: pylint mesonbuild
mypy:
@@ -30,4 +31,4 @@ jobs:
with:
python-version: '3.x'
- run: python -m pip install mypy
- - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py
+ - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py mesonbuild/arglist.py
diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml
index 7f3437e..a5abf7d 100644
--- a/.github/workflows/os_comp.yml
+++ b/.github/workflows/os_comp.yml
@@ -11,7 +11,7 @@ jobs:
name: Ubuntu 16.04
runs-on: ubuntu-16.04
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Install Dependencies
run: |
sudo apt update -yq
@@ -39,9 +39,10 @@ jobs:
- { name: CUDA (on Arch), id: cuda }
- { name: Fedora, id: fedora }
- { name: OpenSUSE, id: opensuse }
+ - { name: Ubuntu Bionic, id: bionic }
container: mesonbuild/${{ matrix.cfg.id }}:latest
steps:
- - uses: actions/checkout@v1
+ - uses: actions/checkout@v2
- name: Run tests
# All environment variables are stored inside the docker image in /ci/env_vars.sh
# They are defined in the `env` section in each image.json. CI_ARGS should be set
diff --git a/.github/workflows/unusedargs_missingreturn.yml b/.github/workflows/unusedargs_missingreturn.yml
index fa4405b..859dec2 100644
--- a/.github/workflows/unusedargs_missingreturn.yml
+++ b/.github/workflows/unusedargs_missingreturn.yml
@@ -55,7 +55,11 @@ jobs:
- uses: actions/setup-python@v1
with:
python-version: '3.x'
- - run: pip install ninja pefile
+ # ninja==1.10 pypi release didn't ship with windows binaries, which causes
+ # pip to try to build it which fails on Windows. Pin the previous version
+ # for now. We can update once that's fixed.
+ # https://pypi.org/project/ninja/1.10.0/#files
+ - run: pip install ninja==1.9.0.post1 pefile
- run: python run_project_tests.py --only platform-windows
env:
CI: "1"
diff --git a/.gitignore b/.gitignore
index f87f562..fea337e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -30,3 +30,4 @@ packagecache
/docs/hotdoc-private*
*.pyc
+/*venv*
diff --git a/mypy.ini b/.mypy.ini
index b8dad03..b8dad03 100644
--- a/mypy.ini
+++ b/.mypy.ini
diff --git a/.travis.yml b/.travis.yml
index f5a32a6..22d76e7 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -31,9 +31,10 @@ matrix:
compiler: gcc
include:
# Test cross builds separately, they do not use the global compiler
+ # Also hijack one cross build to test long commandline handling codepath (and avoid overloading Travis)
- os: linux
compiler: gcc
- env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt"
+ env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_RSP_THRESHOLD=0
- os: linux
compiler: gcc
env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_ARGS="--unity=on"
diff --git a/MANIFEST.in b/MANIFEST.in
index 13f7949..11c804a 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -4,10 +4,9 @@ graft cross
graft data
graft graphics
graft man
-graft syntax-highlighting
graft tools
-include authors.txt
-include contributing.txt
+
+include contributing.md
include COPYING
include README.md
include run_cross_test.py
@@ -15,7 +14,6 @@ include run_tests.py
include run_unittests.py
include run_meson_command_tests.py
include run_project_tests.py
-include mesonrewriter.py
include ghwt.py
include __main__.py
include meson.py
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 066f1a5..1e12f14 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -13,6 +13,7 @@ variables:
jobs:
- job: vs2017
+ timeoutInMinutes: 120
pool:
vmImage: VS2017-Win2016
@@ -22,6 +23,7 @@ jobs:
arch: x86
compiler: msvc2017
backend: ninja
+ MESON_RSP_THRESHOLD: 0
vc2017x64vs:
arch: x64
compiler: msvc2017
@@ -40,6 +42,7 @@ jobs:
- template: ci/azure-steps.yml
- job: vs2019
+ timeoutInMinutes: 120
pool:
vmImage: windows-2019
@@ -63,6 +66,7 @@ jobs:
- template: ci/azure-steps.yml
- job: cygwin
+ timeoutInMinutes: 120
pool:
vmImage: VS2017-Win2016
strategy:
@@ -82,6 +86,7 @@ jobs:
gcc-objc,^
git,^
gobject-introspection,^
+ gtk-doc,^
libarchive13,^
libboost-devel,^
libglib2.0-devel,^
@@ -89,9 +94,15 @@ jobs:
libjsoncpp19,^
librhash0,^
libuv1,^
+ libxml2,^
+ libxml2-devel,^
+ libxslt,^
+ libxslt-devel,^
ninja,^
python2-devel,^
python3-devel,^
+ python3-libxml2,^
+ python3-libxslt,^
python36-pip,^
vala,^
wget,^
@@ -100,8 +111,8 @@ jobs:
displayName: Install Dependencies
- script: |
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
- env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist jsonschema
- displayName: pip install pefile pytest-xdist jsonschema
+ env.exe -- python3 -m pip --disable-pip-version-check install gcovr pefile pytest-xdist jsonschema
+ displayName: pip install gcovr pefile pytest-xdist jsonschema
- script: |
set BOOST_ROOT=
set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32
@@ -138,6 +149,7 @@ jobs:
gccx64ninja:
MSYSTEM: MINGW64
MSYS2_ARCH: x86_64
+ MESON_RSP_THRESHOLD: 0
compiler: gcc
clangx64ninja:
MSYSTEM: MINGW64
@@ -151,7 +163,13 @@ jobs:
displayName: Install MSYS2
- script: |
set PATH=%MSYS2_ROOT%\usr\bin;%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem
- %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syyuu
+ # Remove this line when https://github.com/msys2/MSYS2-packages/pull/2022 is merged
+ %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Sy dash
+ echo Updating msys2
+ %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu || echo system update failed, ignoring
+ echo Killing all msys2 processes
+ taskkill /F /FI "MODULES eq msys-2.0.dll"
+ echo Updating msys2 (again)
%MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu
displayName: Update MSYS2
- script: |
@@ -162,14 +180,17 @@ jobs:
git ^
mercurial ^
mingw-w64-$(MSYS2_ARCH)-cmake ^
+ mingw-w64-$(MSYS2_ARCH)-lcov ^
+ mingw-w64-$(MSYS2_ARCH)-libxml2 ^
mingw-w64-$(MSYS2_ARCH)-ninja ^
mingw-w64-$(MSYS2_ARCH)-pkg-config ^
mingw-w64-$(MSYS2_ARCH)-python2 ^
mingw-w64-$(MSYS2_ARCH)-python3 ^
+ mingw-w64-$(MSYS2_ARCH)-python3-lxml ^
mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^
mingw-w64-$(MSYS2_ARCH)-python3-pip ^
%TOOLCHAIN%
- %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile jsonschema"
+ %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install gcovr jsonschema pefile"
displayName: Install Dependencies
- script: |
set BOOST_ROOT=
diff --git a/ci/ciimage/arch/install.sh b/ci/ciimage/arch/install.sh
index 7fe139e..fb27c26 100755
--- a/ci/ciimage/arch/install.sh
+++ b/ci/ciimage/arch/install.sh
@@ -12,12 +12,12 @@ pkgs=(
itstool gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz
doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools
libwmf valgrind cmake netcdf-fortran openmpi nasm gnustep-base gettext
- python-jsonschema
+ python-jsonschema python-lxml
# cuda
)
aur_pkgs=(scalapack)
-pip_pkgs=(hotdoc)
+pip_pkgs=(hotdoc gcovr)
cleanup_pkgs=(go)
AUR_USER=docker
diff --git a/ci/ciimage/bionic/image.json b/ci/ciimage/bionic/image.json
new file mode 100644
index 0000000..6a3d723
--- /dev/null
+++ b/ci/ciimage/bionic/image.json
@@ -0,0 +1,8 @@
+{
+ "base_image": "ubuntu:bionic",
+ "env": {
+ "CI": "1",
+ "SKIP_SCIENTIFIC": "1",
+ "DC": "gdc"
+ }
+}
diff --git a/ci/ciimage/bionic/install.sh b/ci/ciimage/bionic/install.sh
new file mode 100755
index 0000000..0bfcdfb
--- /dev/null
+++ b/ci/ciimage/bionic/install.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+set -e
+
+export DEBIAN_FRONTEND=noninteractive
+export LANG='C.UTF-8'
+export DC=gdc
+
+pkgs=(
+ python3-pytest-xdist
+ python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev
+ wget unzip cmake doxygen
+ clang
+ pkg-config-arm-linux-gnueabihf
+ qt4-linguist-tools qt5-default qtbase5-private-dev
+ python-dev
+ libomp-dev
+ llvm lcov
+ ldc
+ libclang-dev
+ libgcrypt20-dev
+ libgpgme-dev
+ libhdf5-dev openssh-server
+ libboost-python-dev libboost-regex-dev
+ libblocksruntime-dev
+ libperl-dev libscalapack-mpi-dev libncurses-dev
+)
+
+boost_pkgs=(atomic chrono date-time filesystem log regex serialization system test thread)
+
+sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list"
+apt-get -y update
+apt-get -y upgrade
+apt-get -y install eatmydata
+
+# Base stuff
+eatmydata apt-get -y build-dep meson
+
+# Add boost packages
+for i in "${boost_pkgs[@]}"; do
+ for j in "1.62.0" "1.65.1"; do
+ pkgs+=("libboost-${i}${j}")
+ done
+done
+
+# packages
+eatmydata apt-get -y install "${pkgs[@]}"
+
+eatmydata python3 -m pip install codecov gcovr jsonschema
+
+# Install ninja 1.10.0
+wget https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-linux.zip
+unzip ninja-linux.zip -d /ci
+
+# cleanup
+apt-get -y remove ninja-build
+apt-get -y clean
+apt-get -y autoclean
+rm ninja-linux.zip
diff --git a/ci/ciimage/build.py b/ci/ciimage/build.py
index 34a92fa..e623a7e 100755
--- a/ci/ciimage/build.py
+++ b/ci/ciimage/build.py
@@ -71,6 +71,9 @@ class Builder(BuilderBase):
for key, val in self.image_def.env.items():
out_data += f'export {key}="{val}"\n'
+ # Also add /ci to PATH
+ out_data += 'export PATH="/ci:$PATH"\n'
+
out_file.write_text(out_data)
# make it executable
@@ -157,7 +160,7 @@ class ImageTester(BuilderBase):
test_cmd = [
self.docker, 'run', '--rm', '-t', 'meson_test_image',
- '/usr/bin/bash', '-c', 'source /ci/env_vars.sh; cd meson; ./run_tests.py $CI_ARGS'
+ '/bin/bash', '-c', 'source /ci/env_vars.sh; cd meson; ./run_tests.py $CI_ARGS'
]
if subprocess.run(test_cmd).returncode != 0:
raise RuntimeError('Running tests failed')
diff --git a/ci/ciimage/eoan/install.sh b/ci/ciimage/eoan/install.sh
index 4b3b746..36dec72 100755
--- a/ci/ciimage/eoan/install.sh
+++ b/ci/ciimage/eoan/install.sh
@@ -11,12 +11,14 @@ export DC=gdc
pkgs=(
python3-pytest-xdist
python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev
+ python3-lxml
wget unzip
qt5-default clang
pkg-config-arm-linux-gnueabihf
qt4-linguist-tools
python-dev
libomp-dev
+ llvm lcov
dub ldc
mingw-w64 mingw-w64-tools nim
libclang-dev
@@ -41,7 +43,7 @@ eatmydata apt-get -y build-dep meson
eatmydata apt-get -y install "${pkgs[@]}"
eatmydata apt-get -y install --no-install-recommends wine-stable # Wine is special
-eatmydata python3 -m pip install hotdoc codecov jsonschema
+eatmydata python3 -m pip install hotdoc codecov gcovr jsonschema
# dub stuff
dub_fetch urld
diff --git a/ci/ciimage/fedora/install.sh b/ci/ciimage/fedora/install.sh
index 242d677..3beb11c 100755
--- a/ci/ciimage/fedora/install.sh
+++ b/ci/ciimage/fedora/install.sh
@@ -13,7 +13,7 @@ pkgs=(
doxygen vulkan-devel vulkan-validation-layers-devel openssh mercurial gtk-sharp2-devel libpcap-devel gpgme-devel
qt5-qtbase-devel qt5-qttools-devel qt5-linguist qt5-qtbase-private-devel
libwmf-devel valgrind cmake openmpi-devel nasm gnustep-base-devel gettext-devel ncurses-devel
- libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel
+ libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel python3-lxml
)
# Sys update
@@ -21,7 +21,7 @@ dnf -y upgrade
# Install deps
dnf -y install "${pkgs[@]}"
-python3 -m pip install hotdoc gobject PyGObject
+python3 -m pip install hotdoc gcovr gobject PyGObject
# Cleanup
dnf -y clean all
diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh
index c5dd6df..4c8e770 100755
--- a/ci/ciimage/opensuse/install.sh
+++ b/ci/ciimage/opensuse/install.sh
@@ -5,9 +5,9 @@ set -e
source /ci/common.sh
pkgs=(
- python3-setuptools python3-wheel python3-pip python3-pytest-xdist python3
+ python3-setuptools python3-wheel python3-pip python3-pytest-xdist python3 python3-lxml
ninja make git autoconf automake patch python3-Cython python3-jsonschema
- elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl
+ elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl lcov
mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel
itstool gtk3-devel java-15-openjdk-devel gtk-doc llvm-devel clang-devel libSDL2-devel graphviz-devel zlib-devel zlib-devel-static
#hdf5-devel netcdf-devel libscalapack2-openmpi3-devel libscalapack2-gnu-openmpi3-hpc-devel openmpi3-devel
@@ -17,7 +17,7 @@ pkgs=(
libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel
boost-devel libboost_date_time-devel libboost_filesystem-devel libboost_locale-devel libboost_system-devel
libboost_test-devel libboost_log-devel libboost_regex-devel
- libboost_python-devel libboost_python-py3-1_71_0-devel libboost_regex-devel
+ libboost_python3-devel libboost_regex-devel
)
# Sys update
@@ -26,7 +26,7 @@ zypper --non-interactive update
# Install deps
zypper install -y "${pkgs[@]}"
-python3 -m pip install hotdoc gobject PyGObject
+python3 -m pip install hotdoc gcovr gobject PyGObject
echo 'export PKG_CONFIG_PATH="/usr/lib64/mpi/gcc/openmpi3/lib64/pkgconfig:$PKG_CONFIG_PATH"' >> /ci/env_vars.sh
diff --git a/ci/run.ps1 b/ci/run.ps1
index 34856c0..5065b87 100644
--- a/ci/run.ps1
+++ b/ci/run.ps1
@@ -4,7 +4,8 @@ if ($LastExitCode -ne 0) {
}
# remove Chocolately, MinGW, Strawberry Perl from path, so we don't find gcc/gfortran and try to use it
-$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey' }) -join ';'
+# remove PostgreSQL from path so we don't pickup a broken zlib from it
+$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';'
# Rust puts its shared stdlib in a secret place, but it is needed to run tests.
$env:Path += ";$HOME/.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin"
diff --git a/ci/travis_install.sh b/ci/travis_install.sh
index 5d191f1..d9d308a 100755
--- a/ci/travis_install.sh
+++ b/ci/travis_install.sh
@@ -7,9 +7,11 @@ msg() { echo -e "\x1b[1;32mINFO: \x1b[37m$*\x1b[0m"; }
if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
msg "Running OSX setup"
brew update
+ # Run one macOS build with pkg-config available (pulled in by qt), and the
+ # other (unity=on) without pkg-config
brew install qt ldc llvm ninja
if [[ "$MESON_ARGS" =~ .*unity=on.* ]]; then
- which pkg-config || brew install pkg-config
+ which pkg-config && rm -f $(which pkg-config)
fi
python3 -m pip install jsonschema
elif [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
diff --git a/ci/travis_script.sh b/ci/travis_script.sh
index a91a5dd..bdfd4c2 100755
--- a/ci/travis_script.sh
+++ b/ci/travis_script.sh
@@ -23,6 +23,10 @@ export CXX=$CXX
export OBJC=$CC
export OBJCXX=$CXX
export PATH=/root/tools:$PATH
+if test "$MESON_RSP_THRESHOLD" != ""
+then
+ export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD
+fi
source /ci/env_vars.sh
cd /root
@@ -55,5 +59,9 @@ elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
export OBJC=$CC
export OBJCXX=$CXX
export PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH:$(brew --prefix llvm)/bin
+ if test "$MESON_RSP_THRESHOLD" != ""
+ then
+ export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD
+ fi
./run_tests.py $RUN_TESTS_ARGS --backend=ninja -- $MESON_ARGS
fi
diff --git a/cross/armcc.txt b/cross/armcc.txt
index c884ffa..ae65c9e 100644
--- a/cross/armcc.txt
+++ b/cross/armcc.txt
@@ -7,7 +7,7 @@ cpp = 'armcc'
ar = 'armar'
strip = 'armar'
-[properties]
+[built-in options]
# The '--cpu' option with the appropriate target type should be mentioned
# to cross compile c/c++ code with armcc,.
c_args = ['--cpu=Cortex-M0plus']
diff --git a/cross/armclang-linux.txt b/cross/armclang-linux.txt
index 6df78d6..10f6fa4 100644
--- a/cross/armclang-linux.txt
+++ b/cross/armclang-linux.txt
@@ -12,7 +12,7 @@
# Armcc is only available in toolchain version 5.
# Armclang is only available in toolchain version 6.
# Start shell with /opt/arm/developmentstudio-2019.0/bin/suite_exec zsh
-# Now the compilers will work.
+# Now the compilers will work.
[binaries]
# we could set exe_wrapper = qemu-arm-static but to test the case
@@ -24,8 +24,7 @@ ar = '/opt/arm/developmentstudio-2019.0/sw/ARMCompiler6.12/bin/armar'
#strip = '/usr/arm-linux-gnueabihf/bin/strip'
#pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config'
-[properties]
-
+[built-in options]
c_args = ['--target=aarch64-arm-none-eabi']
[host_machine]
diff --git a/cross/armclang.txt b/cross/armclang.txt
index 955b7ef..6146e0d 100644
--- a/cross/armclang.txt
+++ b/cross/armclang.txt
@@ -7,7 +7,7 @@ cpp = 'armclang'
ar = 'armar'
strip = 'armar'
-[properties]
+[built-in options]
# The '--target', '-mcpu' options with the appropriate values should be mentioned
# to cross compile c/c++ code with armclang.
c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus']
diff --git a/cross/c2000.txt b/cross/c2000.txt
index e624f25..61c0310 100644
--- a/cross/c2000.txt
+++ b/cross/c2000.txt
@@ -12,8 +12,7 @@ cpu_family = 'c2000'
cpu = 'c28x'
endian = 'little'
-[properties]
-needs_exe_wrapper = true
+[built-in options]
c_args = [
'-v28',
'-ml',
@@ -24,3 +23,6 @@ c_link_args = [
'\f28004x_flash.cmd']
cpp_args = []
cpp_link_args = []
+
+[properties]
+needs_exe_wrapper = true
diff --git a/cross/ccrx.txt b/cross/ccrx.txt
index 097ec06..f1b536c 100644
--- a/cross/ccrx.txt
+++ b/cross/ccrx.txt
@@ -7,7 +7,7 @@ cpp = 'ccrx'
ar = 'rlink'
strip = 'rlink'
-[properties]
+[built-in options]
# The '--cpu' option with the appropriate target type should be mentioned
# to cross compile c/c++ code with ccrx,.
c_args = ['-cpu=rx600']
diff --git a/cross/iphone.txt b/cross/iphone.txt
index e714da5..9659407 100644
--- a/cross/iphone.txt
+++ b/cross/iphone.txt
@@ -8,14 +8,14 @@ cpp = 'clang++'
ar = 'ar'
strip = 'strip'
-[properties]
-root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer'
-
+[built-in options]
c_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
cpp_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
c_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
cpp_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk']
+[properties]
+root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer'
has_function_printf = true
has_function_hfkerhisadf = false
diff --git a/cross/tvos.txt b/cross/tvos.txt
index dd6d5c1..833f04b 100644
--- a/cross/tvos.txt
+++ b/cross/tvos.txt
@@ -8,14 +8,15 @@ cpp = 'clang++'
ar = 'ar'
strip = 'strip'
-[properties]
-root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer'
-
+[built-in options]
c_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
cpp_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
c_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
cpp_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk']
+[properties]
+root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer'
+
has_function_printf = true
has_function_hfkerhisadf = false
diff --git a/cross/ubuntu-armhf.txt b/cross/ubuntu-armhf.txt
index 4600c22..69e0c86 100644
--- a/cross/ubuntu-armhf.txt
+++ b/cross/ubuntu-armhf.txt
@@ -9,12 +9,14 @@ strip = '/usr/arm-linux-gnueabihf/bin/strip'
pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config'
ld = '/usr/bin/arm-linux/gnueabihf-ld'
-[properties]
-root = '/usr/arm-linux-gnueabihf'
+[built-in options]
# Used in unit test '140 get define'
c_args = ['-DMESON_TEST_ISSUE_1665=1']
cpp_args = '-DMESON_TEST_ISSUE_1665=1'
+[properties]
+root = '/usr/arm-linux-gnueabihf'
+
has_function_printf = true
has_function_hfkerhisadf = false
diff --git a/cross/wasm.txt b/cross/wasm.txt
index a43636f..f2d0cd7 100644
--- a/cross/wasm.txt
+++ b/cross/wasm.txt
@@ -3,8 +3,7 @@ c = '/home/jpakkane/emsdk/fastcomp/emscripten/emcc'
cpp = '/home/jpakkane/emsdk/fastcomp/emscripten/em++'
ar = '/home/jpakkane/emsdk/fastcomp/emscripten/emar'
-[properties]
-
+[built-in options]
c_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1']
c_link_args = ['-s','EXPORT_ALL=1']
cpp_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1']
diff --git a/cross/xc16.txt b/cross/xc16.txt
index 1e67362..c66889d 100644
--- a/cross/xc16.txt
+++ b/cross/xc16.txt
@@ -14,6 +14,8 @@ endian = 'little'
[properties]
needs_exe_wrapper = true
+
+[built-in options]
c_args = [
'-c',
'-mcpu=33EP64MC203',
diff --git a/data/macros.meson b/data/macros.meson
index c5b90de..cc4953c 100644
--- a/data/macros.meson
+++ b/data/macros.meson
@@ -2,12 +2,6 @@
%__meson_wrap_mode nodownload
%__meson_auto_features enabled
-%_smp_mesonflags %([ -z "$MESON_BUILD_NCPUS" ] \\\
- && MESON_BUILD_NCPUS="`/usr/bin/getconf _NPROCESSORS_ONLN`"; \\\
- ncpus_max=%{?_smp_ncpus_max}; \\\
- if [ -n "$ncpus_max" ] && [ "$ncpus_max" -gt 0 ] && [ "$MESON_BUILD_NCPUS" -gt "$ncpus_max" ]; then MESON_BUILD_NCPUS="$ncpus_max"; fi; \\\
- if [ "$MESON_BUILD_NCPUS" -gt 1 ]; then echo "--num-processes $MESON_BUILD_NCPUS"; fi)
-
%meson \
%set_build_flags \
%{shrink:%{__meson} \
@@ -28,17 +22,24 @@
--wrap-mode=%{__meson_wrap_mode} \
--auto-features=%{__meson_auto_features} \
%{_vpath_srcdir} %{_vpath_builddir} \
- %{nil}}
+ %{nil}}
%meson_build \
- %ninja_build -C %{_vpath_builddir}
+ %{shrink:%{__meson} compile \
+ -C %{_vpath_builddir} \
+ -j %{_smp_build_ncpus} \
+ --verbose \
+ %{nil}}
%meson_install \
- %ninja_install -C %{_vpath_builddir}
+ %{shrink:DESTDIR=%{buildroot} %{__meson} install \
+ -C %{_vpath_builddir} \
+ --no-rebuild \
+ %{nil}}
%meson_test \
- %{shrink: %{__meson} test \
+ %{shrink:%{__meson} test \
-C %{_vpath_builddir} \
- %{?_smp_mesonflags} \
+ --num-processes %{_smp_build_ncpus} \
--print-errorlogs \
- %{nil}}
+ %{nil}}
diff --git a/data/schema.xsd b/data/schema.xsd
new file mode 100644
index 0000000..58c6bfd
--- /dev/null
+++ b/data/schema.xsd
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!-- from https://svn.jenkins-ci.org/trunk/hudson/dtkit/dtkit-format/dtkit-junit-model/src/main/resources/com/thalesgroup/dtkit/junit/model/xsd/junit-4.xsd -->
+<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
+
+ <xs:element name="failure">
+ <xs:complexType mixed="true">
+ <xs:attribute name="type" type="xs:string" use="optional"/>
+ <xs:attribute name="message" type="xs:string" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="error">
+ <xs:complexType mixed="true">
+ <xs:attribute name="type" type="xs:string" use="optional"/>
+ <xs:attribute name="message" type="xs:string" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="properties">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="property" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="property">
+ <xs:complexType>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="value" type="xs:string" use="required"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="skipped">
+ <xs:complexType mixed="true">
+ <xs:attribute name="message" type="xs:string" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="system-err" type="xs:string"/>
+ <xs:element name="system-out" type="xs:string"/>
+
+ <xs:element name="testcase">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="skipped" minOccurs="0" maxOccurs="1"/>
+ <xs:element ref="error" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="failure" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="system-out" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="system-err" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="assertions" type="xs:string" use="optional"/>
+ <xs:attribute name="time" type="xs:string" use="optional"/>
+ <xs:attribute name="classname" type="xs:string" use="optional"/>
+ <xs:attribute name="status" type="xs:string" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="testsuite">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="properties" minOccurs="0" maxOccurs="1"/>
+ <xs:element ref="testcase" minOccurs="0" maxOccurs="unbounded"/>
+ <xs:element ref="system-out" minOccurs="0" maxOccurs="1"/>
+ <xs:element ref="system-err" minOccurs="0" maxOccurs="1"/>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:string" use="required"/>
+ <xs:attribute name="tests" type="xs:string" use="required"/>
+ <xs:attribute name="failures" type="xs:string" use="optional"/>
+ <xs:attribute name="errors" type="xs:string" use="optional"/>
+ <xs:attribute name="time" type="xs:string" use="optional"/>
+ <xs:attribute name="disabled" type="xs:string" use="optional"/>
+ <xs:attribute name="skipped" type="xs:string" use="optional"/>
+ <xs:attribute name="timestamp" type="xs:string" use="optional"/>
+ <xs:attribute name="hostname" type="xs:string" use="optional"/>
+ <xs:attribute name="id" type="xs:string" use="optional"/>
+ <xs:attribute name="package" type="xs:string" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+
+ <xs:element name="testsuites">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element ref="testsuite" minOccurs="0" maxOccurs="unbounded"/>
+ </xs:sequence>
+ <xs:attribute name="name" type="xs:string" use="optional"/>
+ <xs:attribute name="time" type="xs:string" use="optional"/>
+ <xs:attribute name="tests" type="xs:string" use="optional"/>
+ <xs:attribute name="failures" type="xs:string" use="optional"/>
+ <xs:attribute name="disabled" type="xs:string" use="optional"/>
+ <xs:attribute name="errors" type="xs:string" use="optional"/>
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
diff --git a/data/syntax-highlighting/vim/syntax/meson.vim b/data/syntax-highlighting/vim/syntax/meson.vim
index d0d15d9..1100113 100644
--- a/data/syntax-highlighting/vim/syntax/meson.vim
+++ b/data/syntax-highlighting/vim/syntax/meson.vim
@@ -32,8 +32,9 @@ set cpo&vim
" http://mesonbuild.com/Syntax.html
syn keyword mesonConditional elif else if endif
-syn keyword mesonRepeat foreach endforeach
+syn keyword mesonRepeat foreach endforeach
syn keyword mesonOperator and not or in
+syn keyword mesonStatement continue break
syn match mesonComment "#.*$" contains=mesonTodo,@Spell
syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained
diff --git a/data/test.schema.json b/data/test.schema.json
index 72f160f..d3b80d0 100644
--- a/data/test.schema.json
+++ b/data/test.schema.json
@@ -1,5 +1,6 @@
{
"type": "object",
+ "additionalProperties": false,
"properties": {
"env": {
"type": "object",
@@ -100,6 +101,30 @@
"prefix"
]
}
+ },
+ "tools": {
+ "type": "object"
+ },
+ "stdout": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "line": {
+ "type": "string"
+ },
+ "match": {
+ "type": "string",
+ "enum": [
+ "literal",
+ "re"
+ ]
+ }
+ },
+ "required": [
+ "line"
+ ]
+ }
}
}
}
diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md
index bbe945d..25fb61c 100644
--- a/docs/markdown/Adding-new-projects-to-wrapdb.md
+++ b/docs/markdown/Adding-new-projects-to-wrapdb.md
@@ -6,14 +6,17 @@
Each wrap repository has a master branch with only one initial commit and *no* wrap files.
And that is the only commit ever made on that branch.
-For every release of a project a new branch is created. The new branch is named after the
-the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for
+For every release of a project a new branch is created. The new branch is named after
+the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for
this particular release.
There are two types of wraps on WrapDB - regular wraps and wraps with Meson build
definition patches. A wrap file in a repository on WrapDB must have a name `upstream.wrap`.
-Wraps with Meson build definition patches work in much the same way as Debian: we take the unaltered upstream source package and add a new build system to it as a patch. These build systems are stored as Git repositories on GitHub. They only contain build definition files. You may also think of them as an overlay to upstream source.
+Wraps with Meson build definition patches work in much the same way as Debian:
+we take the unaltered upstream source package and add a new build system to it as a patch.
+These build systems are stored as Git repositories on GitHub. They only contain build definition files.
+You may also think of them as an overlay to upstream source.
Whenever a new commit is pushed into GitHub's project branch, a new wrap is generated
with an incremented version number. All the old releases remain unaltered.
@@ -21,13 +24,15 @@ New commits are always done via GitHub merge requests and must be reviewed by
someone other than the submitter.
Note that your Git repo with wrap must not contain the subdirectory of the source
-release. That gets added automatically by the service. You also must not commit
+release. That gets added automatically by the service. You also must not commit
any source code from the original tarball into the wrap repository.
## Choosing the repository name
Wrapped subprojects are used much like external dependencies. Thus
-they should have the same name as the upstream projects.
+they should have the same name as the upstream projects.
+
+NOTE: Repo names must fully match this regexp: `[a-z0-9._]+`.
If the project provides a pkg-config file, then the repository name should be
the same as the pkg-config name. Usually this is the name of the
@@ -36,16 +41,19 @@ however. As an example the libogg project's chosen pkg-config name is
`ogg` instead of `libogg`, which is the reason why the repository is
named plain `ogg`.
-If there is no a pkg-config file, the name the project uses/promotes should be used,
+If there is no pkg-config file, the name the project uses/promotes should be used,
lowercase only (Catch2 -> catch2).
+If the project name is too generic or ambiguous (e.g. `benchmark`),
+consider using `organization-project` naming format (e.g. `google-benchmark`).
+
## How to contribute a new wrap
If the project already uses Meson build system, then only a wrap file - `upstream.wrap`
-should be provided. In other case a Meson build definition patch - a set of `meson.build`
+should be provided. In other case a Meson build definition patch - a set of `meson.build`
files - should be also provided.
-### Request a new repository or branch
+### Request a new repository
Create an issue on the [wrapdb bug tracker](https://github.com/mesonbuild/wrapdb/issues)
using *Title* and *Description* below as a template.
@@ -61,6 +69,9 @@ version: <version_you_have_a_wrap_for>
Wait until the new repository or branch is created. A link to the new repository or branch
will be posted in a comment to this issue.
+NOTE: Requesting a branch is not necessary. WrapDB maintainer can create the branch and
+modify the PR accordingly if the project repository exists.
+
### Add a new wrap
First you need to fork the repository to your own page.
@@ -80,28 +91,28 @@ git commit -a -m 'Add wrap files for libfoo-1.0.0'
git push origin 1.0.0
```
-Now you should create a pull request on GitHub. Remember to create it against the
-correct branch rather than master (`1.0.0` branch in this example). GitHub should do
+Now you should create a pull request on GitHub. Remember to create it against the
+correct branch rather than master (`1.0.0` branch in this example). GitHub should do
this automatically.
+If the branch doesn't exist file a pull request against master.
+WrapDB maintainers can fix it before merging.
+
## What is done by WrapDB maintainers
+[mesonwrap tools](Wrap-maintainer-tools.md) must be used for the tasks below.
+
### Adding new project to the Wrap provider service
Each project gets its own repo. It is initialized like this:
```
-git init
-git add readme.txt
-git add LICENSE.build
-git commit -a -m 'Create project foobar'
-git remote add origin <repo url>
-git push -u origin master
+mesonwrap new_repo --homepage=$HOMEPAGE --directory=$NEW_LOCAL_PROJECT_DIR $PROJECT_NAME
```
-Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches.
+The command creates a new repository and uploads it to Github.
-Repo names must fully match this regexp: `[a-z0-9._]+`.
+`--version` flag may be used to create a branch immediately.
### Adding a new branch to an existing project
@@ -129,12 +140,6 @@ to functionality. All such changes must be submitted to upstream. You
may also host your own Git repo with the changes if you wish. The Wrap
system has native support for Git subprojects.
-## Creator script
-
-The WrapDB repository has a
-[helper script](https://github.com/mesonbuild/mesonwrap/blob/master/mesonwrap.py)
-to generate new repositories, verify them and update them.
-
## Reviewing wraps
See [Wrap review guidelines](Wrap-review-guidelines.md).
diff --git a/docs/markdown/Build-options.md b/docs/markdown/Build-options.md
index 2d53e28..429b9b2 100644
--- a/docs/markdown/Build-options.md
+++ b/docs/markdown/Build-options.md
@@ -20,6 +20,9 @@ option('integer_opt', type : 'integer', min : 0, max : 5, value : 3) # Since 0.4
option('free_array_opt', type : 'array', value : ['one', 'two']) # Since 0.44.0
option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two'])
option('some_feature', type : 'feature', value : 'enabled') # Since 0.47.0
+option('long_desc', type : 'string', value : 'optval',
+       description : 'An option with a very long description ' +
+                     'that does something in a specific context') # Since 0.55.0
```
For built-in options, see [Built-in options][builtin_opts].
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index aa7d500..de801ab 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -17,7 +17,7 @@ by setting them inside `default_options` of `project()` in your `meson.build`.
For legacy reasons `--warnlevel` is the cli argument for the `warning_level` option.
-They can also be edited after setup using `meson configure`.
+They can also be edited after setup using `meson configure -Doption=value`.
Installation options are all relative to the prefix, except:
@@ -55,37 +55,31 @@ particularly the paths section may be necessary.
### Core options
-Options that are labeled "per machine" in the table are set per machine.
-Prefixing the option with `build.` just affects the build machine configuration,
-while unprefixed just affects the host machine configuration, respectively.
-Using the option as-is with no prefix affects all machines. For example:
-
- - `build.pkg_config_path` controls the paths pkg-config will search for just
- `native: true` dependencies (build machine).
-
- - `pkg_config_path` controls the paths pkg-config will search for just
- `native: false` dependencies (host machine).
-
-| Option | Default value | Description | Is per machine |
-| ------ | ------------- | ----------- | -------------- |
-| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no |
-| backend {ninja, vs,<br>vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no |
-| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no |
-| debug | true | Debug | no |
-| default_library {shared, static, both} | shared | Default library type | no |
-| errorlogs | true | Whether to print the logs from failing tests. | no |
-| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no |
-| layout {mirror,flat} | mirror | Build directory layout | no |
-| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no |
-| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes |
-| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes |
-| stdsplit | true | Split stdout and stderr in test logs | no |
-| strip | false | Strip targets on install | no |
-| unity {on, off, subprojects} | off | Unity build | no |
-| unity_size {>=2} | 4 | Unity file block size | no |
-| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no |
-| werror | false | Treat warnings as errors | no |
-| wrap_mode {default, nofallback,<br>nodownload, forcefallback} | default | Wrap mode to use | no |
+Options that are labeled "per machine" in the table are set per machine. See
+the [specifying options per machine](#Specifying-options-per-machine) section
+for details.
+
+| Option | Default value | Description | Is per machine | Is per subproject |
+| ------ | ------------- | ----------- | -------------- | ----------------- |
+| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | no |
+| backend {ninja, vs,<br>vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no | no |
+| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no | no |
+| debug | true | Debug | no | no |
+| default_library {shared, static, both} | shared | Default library type | no | yes |
+| errorlogs | true | Whether to print the logs from failing tests. | no | no |
+| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | no |
+| layout {mirror,flat} | mirror | Build directory layout | no | no |
+| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no | no |
+| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes | no |
+| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes | no |
+| stdsplit | true | Split stdout and stderr in test logs | no | no |
+| strip | false | Strip targets on install | no | no |
+| unity {on, off, subprojects} | off | Unity build | no | no |
+| unity_size {>=2} | 4 | Unity file block size | no | no |
+| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | yes |
+| werror | false | Treat warnings as errors | no | yes |
+| wrap_mode {default, nofallback,<br>nodownload, forcefallback} | default | Wrap mode to use | no | no |
+| force_fallback_for | [] | Force fallback for those dependencies | no | no |
<a name="build-type-options"></a>
For setting optimization levels and toggling debug, you can either set the
@@ -186,9 +180,9 @@ The default values of `c_winlibs` and `cpp_winlibs` are in compiler-specific
argument forms, but the libraries are: kernel32, user32, gdi32, winspool,
shell32, ole32, oleaut32, uuid, comdlg32, advapi32.
-c_args, cpp_args, c_link_args, and cpp_link_args only affect native builds,
-when cross compiling they will not be applied to binaries or libraries
-targeting the host system, only those being run on the build system.
+All these `<lang>_*` options are specified per machine. See below in the
+[specifying options per machine](#Specifying-options-per-machine) section on
+how to do this in cross builds.
When using MSVC, `cpp_eh=none` will result in no exception flags being passed,
while the `cpp_eh=[value]` will result in `/EH[value]`.
@@ -199,3 +193,44 @@ gcc-style compilers, nothing is passed (allowing exceptions to work), while
Since *0.54.0* The `<lang>_thread_count` option can be used to control the
value passed to `-s PTHREAD_POOL_SIZE` when using emcc. No other c/c++
compiler supports this option.
+
+## Specifying options per machine
+
+Since *0.51.0*, some options are specified per machine rather than globally for
+all machine configurations. Prefixing the option with `build.` just affects the
+build machine configuration, while unprefixed just affects the host machine
+configuration, respectively. For example:
+
+ - `build.pkg_config_path` controls the paths pkg-config will search for just
+ `native: true` dependencies (build machine).
+
+ - `pkg_config_path` controls the paths pkg-config will search for just
+ `native: false` dependencies (host machine).
+
+This is useful for cross builds. In the native builds, build = host, and the
+unprefixed option alone will suffice.
+
+Prior to *0.51.0*, these options just affected native builds when specified on
+the command line, as there was no `build.` prefix. Similarly named fields in
+the `[properties]` section of the cross file would affect cross compilers, but
+the code paths were fairly different, allowing differences in behavior to crop
+up.
+
+## Specifying options per subproject
+
+Since *0.54.0* `default_library` and `werror` built-in options can be defined
+per subproject. This is useful for example when building shared libraries in the
+main project, but static link a subproject, or when the main project must build
+with no warnings but some subprojects cannot.
+
+Most of the time this would be used either by the parent project by setting
+subproject's default_options (e.g. `subproject('foo', default_options: 'default_library=static')`),
+or by the user using the command line `-Dfoo:default_library=static`.
+
+The value is overridden in this order:
+- Value from parent project
+- Value from subproject's default_options if set
+- Value from subproject() default_options if set
+- Value from command line if set
+
+Since 0.56.0 `warning_level` can also be defined per subproject.
diff --git a/docs/markdown/CMake-module.md b/docs/markdown/CMake-module.md
index 7103608..fc6157e 100644
--- a/docs/markdown/CMake-module.md
+++ b/docs/markdown/CMake-module.md
@@ -48,8 +48,6 @@ The `subproject` method is almost identical to the normal meson
`subproject` function. The only difference is that a CMake project
instead of a meson project is configured.
-Also, project specific CMake options can be added with the `cmake_options` key.
-
The returned `sub_proj` supports the same options as a "normal" subproject.
Meson automatically detects CMake build targets, which can be accessed with
the methods listed [below](#subproject-object).
@@ -87,6 +85,49 @@ It should be noted that not all projects are guaranteed to work. The
safest approach would still be to create a `meson.build` for the
subprojects in question.
+### Configuration options
+
+*New in meson 0.55.0*
+
+Meson also supports passing configuration options to CMake and overriding
+certain build details extracted from the CMake subproject.
+
+```meson
+cmake = import('cmake')
+opt_var = cmake.subproject_options()
+
+# Call CMake with `-DSOME_OTHER_VAR=ON`
+opt_var.add_cmake_defines({'SOME_OTHER_VAR': true})
+
+# Globally override the C++ standard to c++11
+opt_var.set_override_option('cpp_std', 'c++11')
+
+# Override the previous global C++ standard
+# with c++14 only for the CMake target someLib
+opt_var.set_override_option('cpp_std', 'c++14', target: 'someLib')
+
+sub_pro = cmake.subproject('someLibProject', options: opt_var)
+
+# Further changes to opt_var have no effect
+```
+
+See [the CMake options object](#cmake-options-object) for a complete reference
+of all supported functions.
+
+The CMake configuration options object is very similar to the
+[configuration data object](Reference-manual.md#configuration-data-object)
+returned by [`configuration_data`](Reference-manual.md#configuration_data). It
+is generated by the `subproject_options` function.
+
+All configuration options have to be set *before* the subproject is configured
+and must be passed to the `subproject` method via the `options` key. Altering
+the configuration object won't have any effect on previous `cmake.subproject`
+calls.
+
+In earlier meson versions CMake command-line parameters could be set with the
+`cmake_options` kwarg. However, this feature is deprecated since 0.55.0 and only
+kept for compatibility. It will not work together with the `options` kwarg.
+
### `subproject` object
This object is returned by the `subproject` function described above
@@ -103,7 +144,37 @@ and supports the following methods:
the subproject. Usually `dependency()` or `target()` should be
preferred to extract build targets.
- `found` returns true if the subproject is available, otherwise false
- *new in in 0.53.2*
+ *new in meson 0.53.2*
+
+### `cmake options` object
+
+This object is returned by the `subproject_options()` function and consumed by
+the `options` kwarg of the `subproject` function. The following methods are
+supported:
+
+ - `add_cmake_defines({'opt1': val1, ...})` add additional CMake commandline defines
+ - `set_override_option(opt, val)` set specific [build options](Build-options.md)
+ for targets. This will effectively add `opt=val` to the `override_options`
+ array of the [build target](Reference-manual.md#executable)
+ - `set_install(bool)` override whether targets should be installed or not
+ - `append_compile_args(lang, arg1, ...)` append compile flags for a specific
+ language to the targets
+ - `append_link_args(arg1, ...)` append linker args to the targets
+ - `clear()` reset all data in the `cmake options` object
+
+The methods `set_override_option`, `set_install`, `append_compile_args` and
+`append_link_args` support the optional `target` kwarg. If specified, the set
+options affect the specific target. The effect of the option is global for the
+subproject otherwise.
+
+If, for instance, `opt_var.set_install(false)` is called, no target will be
+installed regardless of what is set by CMake. However, it is still possible to
+install specific targets (here `foo`) by setting the `target` kwarg:
+`opt_var.set_install(true, target: 'foo')`
+
+Options that are not set won't affect the generated subproject. So, if for
+instance, `set_install` was not called then the values extracted from CMake will
+be used.
## CMake configuration files
diff --git a/docs/markdown/Configuring-a-build-directory.md b/docs/markdown/Configuring-a-build-directory.md
index 1387a46..330899f 100644
--- a/docs/markdown/Configuring-a-build-directory.md
+++ b/docs/markdown/Configuring-a-build-directory.md
@@ -109,11 +109,11 @@ you would issue the following command.
meson configure -Dprefix=/tmp/testroot
-Then you would run your build command (usually `ninja`), which would
+Then you would run your build command (usually `meson compile`), which would
cause Meson to detect that the build setup has changed and do all the
work required to bring your build tree up to date.
Since 0.50.0, it is also possible to get a list of all build options
-by invoking `meson configure` with the project source directory or
+by invoking [`meson configure`](Commands.md#configure) with the project source directory or
the path to the root `meson.build`. In this case, meson will print the
default values of all options similar to the example output from above.
diff --git a/docs/markdown/Continuous-Integration.md b/docs/markdown/Continuous-Integration.md
index 0846f2d..76a05a3 100644
--- a/docs/markdown/Continuous-Integration.md
+++ b/docs/markdown/Continuous-Integration.md
@@ -36,8 +36,8 @@ script:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM YOUR/REPO:eoan > Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo ADD . /root >> Dockerfile; fi
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker build -t withgit .; fi
- - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && ninja -C builddir test"; fi
- - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && ninja -C builddir test; fi
+ - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && meson test -C builddir"; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && meson test -C builddir; fi
```
## CircleCi for Linux (with Docker)
@@ -69,7 +69,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- - run: ninja -C builddir
+ - run: meson compile -C builddir
- run: meson test -C builddir
meson_debian_build:
@@ -77,7 +77,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- - run: ninja -C builddir
+ - run: meson compile -C builddir
- run: meson test -C builddir
meson_fedora_build:
@@ -85,7 +85,7 @@ jobs:
steps:
- checkout
- run: meson setup builddir --backend ninja
- - run: ninja -C builddir
+ - run: meson compile -C builddir
- run: meson test -C builddir
workflows:
@@ -138,10 +138,10 @@ install:
build_script:
- cmd: echo Building on %arch% with %compiler%
- cmd: meson --backend=ninja builddir
- - cmd: ninja -C builddir
+ - cmd: meson compile -C builddir
test_script:
- - cmd: ninja -C builddir test
+ - cmd: meson test -C builddir
```
### Qt
@@ -187,8 +187,8 @@ install:
script:
- meson builddir
- - ninja -C builddir
- - ninja -C builddir test
+ - meson compile -C builddir
+ - meson test -C builddir
```
## GitHub Actions
diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md
index 5332938..b16f615 100644
--- a/docs/markdown/Contributing.md
+++ b/docs/markdown/Contributing.md
@@ -174,7 +174,7 @@ contents of an additional file into the CI log on failure.
Projects needed by unit tests are in the `test cases/unit`
subdirectory. They are not run as part of `./run_project_tests.py`.
-#### Configuring project tests
+### Configuring project tests
The (optional) `test.json` file, in the root of a test case, is used
for configuring the test. All of the following root entries in the `test.json`
@@ -209,17 +209,20 @@ Exanple `test.json`:
{ "opt1": "qwert", "opt2": "false" },
{ "opt1": "bad" }
]
+ },
+ "tools": {
+ "cmake": ">=3.11"
}
}
```
-##### env
+#### env
The `env` key contains a dictionary which specifies additional
environment variables to be set during the configure step of the test. `@ROOT@`
is replaced with the absolute path of the source directory.
-##### installed
+#### installed
The `installed` dict contains a list of dicts, describing which files are expected
to be installed. Each dict contains the following keys:
@@ -277,7 +280,7 @@ the platform matches. The following values for `platform` are currently supporte
| `cygwin` | Matches when the platform is cygwin |
| `!cygwin` | Not `cygwin` |
-##### matrix
+#### matrix
The `matrix` section can be used to define a test matrix to run project tests
with different meson options.
@@ -318,12 +321,40 @@ The above example will produce the following matrix entries:
- `opt1=qwert`
- `opt1=qwert opt2=true`
-##### do_not_set_opts
+#### do_not_set_opts
Currently supported values are:
- `prefix`
- `libdir`
+#### tools
+
+This section specifies a dict of tool requirements in a simple key-value format.
+If a tool is specified, it has to be present in the environment, and the version
+requirement must be fulfilled. Otherwise, the entire test is skipped (including
+every element in the test matrix).
+
+#### stdout
+
+The `stdout` key contains a list of dicts, describing the expected stdout.
+
+Each dict contains the following keys:
+
+- `line`
+- `match` (optional)
+
+Each item in the list is matched, in order, against the remaining actual stdout
+lines, after any previous matches. If the actual stdout is exhausted before
+every item in the list is matched, the expected output has not been seen, and
+the test has failed.
+
+The `match` element of the dict determines how the `line` element is matched:
+
+| Type | Description |
+| -------- | ----------------------- |
+| `literal` | Literal match (default) |
+| `re` | regex match |
+
### Skipping integration tests
Meson uses several continuous integration testing systems that have slightly
diff --git a/docs/markdown/Creating-OSX-packages.md b/docs/markdown/Creating-OSX-packages.md
index bda06a3..849d5fd 100644
--- a/docs/markdown/Creating-OSX-packages.md
+++ b/docs/markdown/Creating-OSX-packages.md
@@ -39,7 +39,7 @@ $ meson --prefix=/tmp/myapp.app \
<other flags you might need>
```
-Now when we do `ninja install` the bundle is properly staged. If you
+Now when we do `meson install` the bundle is properly staged. If you
have any resource files or data, you need to install them into
`Contents/Resources` either by custom install commands or specifying
more install paths to the Meson command.
diff --git a/docs/markdown/Creating-releases.md b/docs/markdown/Creating-releases.md
index 45c4b4e..040fb53 100644
--- a/docs/markdown/Creating-releases.md
+++ b/docs/markdown/Creating-releases.md
@@ -5,27 +5,56 @@ short-description: Creating releases
# Creating releases
In addition to development, almost all projects provide periodical
-source releases. These are standalone packages (usually either in tar
-or zip format) of the source code. They do not contain any revision
-control metadata, only the source code.
+source releases. These are standalone packages (usually either in
+tar or zip format) of the source code. They do not contain any
+revision control metadata, only the source code. Meson provides
+a simple way of generating these, with the `meson dist` command.
Meson provides a simple way of generating these. It consists of a
-single command:
+single command *(available since 0.52.0)*:
- ninja dist
+```sh
+meson dist
+```
+
+or alternatively (on older meson versions with `ninja` backend):
+
+```sh
+ninja dist
+```
This creates a file called `projectname-version.tar.xz` in the build
-tree subdirectory `meson-dist`. This archive contains the full
-contents of the latest commit in revision control including all the
-submodules (recursively). All revision control metadata is removed.
-Meson then takes
-this archive and tests that it works by doing a full compile + test +
-install cycle. If all these pass, Meson will then create a SHA-256
-checksum file next to the archive.
-
-**Note**: Meson behaviour is different from Autotools. The Autotools
-"dist" target packages up the current source tree. Meson packages
-the latest revision control commit. The reason for this is that it
-prevents developers from doing accidental releases where the
-distributed archive does not match any commit in revision control
-(especially the one tagged for the release).
+tree subdirectory `meson-dist`. This archive contains the full contents
+of the latest commit in revision control including all the submodules
+(recursively). All revision control metadata is removed. Meson then
+takes this archive and tests that it works by doing a full
+`compile` + `test` + `install` cycle. If all these pass, Meson will
+then create a `SHA-256` checksum file next to the archive.
+
+
+## Autotools dist VS Meson dist
+
+Meson behaviour is different from Autotools. The Autotools "dist"
+target packages up the current source tree. Meson packages the latest
+revision control commit. The reason for this is that it prevents developers
+from doing accidental releases where the distributed archive does not match
+any commit in revision control (especially the one tagged for the release).
+
+
+## Include subprojects in your release
+
+The `meson dist` command has `--include-subprojects` command line option.
+When enabled, the source tree of all subprojects used by the current build
+will also be included in the final tarball. This is useful to distribute a
+self-contained tarball that can be built offline (i.e. `--wrap-mode=nodownload`).
+
+
+## Skip build and test with `--no-tests`
+
+The `meson dist` command has a `--no-tests` option to skip build and
+tests steps of generated packages. It can be used to not waste time
+for example when done in CI that already does its own testing.
+
+So with `--no-tests` you can tell Meson "Do not build and test generated
+packages".
+
diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md
index 4c4b7bf..d86d417 100644
--- a/docs/markdown/Cross-compilation.md
+++ b/docs/markdown/Cross-compilation.md
@@ -222,7 +222,7 @@ Once you have the cross file, starting a build is simple
$ meson srcdir builddir --cross-file cross_file.txt
```
-Once configuration is done, compilation is started by invoking Ninja
+Once configuration is done, compilation is started by invoking `meson compile`
in the usual way.
## Introspection and system checks
@@ -231,13 +231,10 @@ The main *meson* object provides two functions to determine cross
compilation status.
```meson
-meson.is_cross_build() # returns true when cross compiling
-meson.has_exe_wrapper() # returns true if an exe wrapper has been defined
+meson.is_cross_build() # returns true when cross compiling
+meson.can_run_host_binaries() # returns true if the host binaries can be run, either with a wrapper or natively
```
-Note that the latter gives undefined return value when doing a native
-build.
-
You can run system checks on both the system compiler or the cross
compiler. You just have to specify which one to use.
diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md
index 17c9991..b89a0aa 100644
--- a/docs/markdown/Dependencies.md
+++ b/docs/markdown/Dependencies.md
@@ -76,7 +76,7 @@ and config-tool based variables.
```meson
foo_dep = dependency('foo')
-var = foo.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', configtool : 'get-var', default_value : 'default')
+var = foo_dep.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', configtool : 'get-var', default_value : 'default')
```
It accepts the keywords 'cmake', 'pkgconfig', 'pkgconfig_define',
@@ -242,6 +242,9 @@ libgcrypt_dep = dependency('libgcrypt', version: '>= 1.8')
gpgme_dep = dependency('gpgme', version: '>= 1.0')
```
+*Since 0.55.0* Meson won't search $PATH any more for a config tool binary when
+cross compiling if the config tool did not have an entry in the cross file.
+
## AppleFrameworks
Use the `modules` keyword to list frameworks required, e.g.
@@ -285,8 +288,12 @@ You can call `dependency` multiple times with different modules and
use those to link against your targets.
If your boost headers or libraries are in non-standard locations you
-can set the BOOST_ROOT, BOOST_INCLUDEDIR, and/or BOOST_LIBRARYDIR
-environment variables.
+can set the `BOOST_ROOT`, or the `BOOST_INCLUDEDIR` and `BOOST_LIBRARYDIR`
+environment variables. *(added in 0.56.0)* You can also set these
+parameters as `boost_root`, `boost_include`, and `boost_librarydir` in your
+native or cross machine file. Note that machine file variables are
+preferred to environment variables, and that specifying any of these
+disables system-wide search for boost.
You can set the argument `threading` to `single` to use boost
libraries that have been compiled for single-threaded use instead.
diff --git a/docs/markdown/Design-rationale.md b/docs/markdown/Design-rationale.md
index 57aaee4..7121192 100644
--- a/docs/markdown/Design-rationale.md
+++ b/docs/markdown/Design-rationale.md
@@ -223,11 +223,11 @@ add_test('test library', exe)
```
First we build a shared library named foobar. It is marked
-installable, so running `ninja install` installs it to the library
+installable, so running `meson install` installs it to the library
directory (the system knows which one so the user does not have to
care). Then we build a test executable which is linked against the
library. It will not be installed, but instead it is added to the list
-of unit tests, which can be run with the command `ninja test`.
+of unit tests, which can be run with the command `meson test`.
Above we mentioned precompiled headers as a feature not supported by
other build systems. Here's how you would use them.
diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md
index e43c857..7a41443 100644
--- a/docs/markdown/FAQ.md
+++ b/docs/markdown/FAQ.md
@@ -51,7 +51,7 @@ $ /path/to/meson.py <options>
After this you don't have to care about invoking Meson any more. It
remembers where it was originally invoked from and calls itself
appropriately. As a user the only thing you need to do is to `cd` into
-your build directory and invoke `ninja`.
+your build directory and invoke `meson compile`.
## Why can't I specify target files with a wildcard?
@@ -432,7 +432,7 @@ sources in the build target:
libfoo_gen_headers = custom_target('gen-headers', ..., output: 'foo-gen.h')
libfoo_sources = files('foo-utils.c', 'foo-lib.c')
# Add generated headers to the list of sources for the build target
-libfoo = library('foo', sources: libfoo_sources + libfoo_gen_headers)
+libfoo = library('foo', sources: [libfoo_sources + libfoo_gen_headers])
```
Now let's say you have a new target that links to `libfoo`:
diff --git a/docs/markdown/Feature-autodetection.md b/docs/markdown/Feature-autodetection.md
index c1b7659..4d366d9 100644
--- a/docs/markdown/Feature-autodetection.md
+++ b/docs/markdown/Feature-autodetection.md
@@ -28,12 +28,12 @@ the binaries `gcovr`, `lcov` and `genhtml`. If version 3.3 or higher
of the first is found, targets called *coverage-text*, *coverage-xml*
and *coverage-html* are generated. Alternatively, if the latter two
are found, only the target *coverage-html* is generated. Coverage
-reports can then be produced simply by calling e.g. `ninja
+reports can then be produced simply by calling e.g. `meson compile
coverage-xml`. As a convenience, a high-level *coverage* target is
also generated which will produce all 3 coverage report types, if
possible.
Note that generating any of the coverage reports described above
-requires the tests (i.e. `ninja test`) to finish running so the
+requires the tests (i.e. `meson test`) to finish running so the
information about the functions that are called in the tests can be
gathered for the report.
diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md
index a9c4531..3d06233 100644
--- a/docs/markdown/Gnome-module.md
+++ b/docs/markdown/Gnome-module.md
@@ -88,7 +88,6 @@ There are several keyword arguments. Many of these map directly to the
e.g. `Gtk`
* `includes`: list of gir names to be included, can also be a GirTarget
* `header`: *(Added 0.43.0)* name of main c header to include for the library, e.g. `glib.h`
-* `dependencies`: deps to use during introspection scanning
* `include_directories`: extra include paths to look for gir files
* `install`: if true, install the generated files
* `install_dir_gir`: (*Added 0.35.0*) which directory to install the
@@ -98,6 +97,7 @@ There are several keyword arguments. Many of these map directly to the
* `link_with`: list of libraries to link with
* `symbol_prefix`: the symbol prefix for the gir object, e.g. `gtk`,
(*Since 0.43.0*) an ordered list of multiple prefixes is allowed
+* `fatal_warnings`: *Since 0.55.0* turn scanner warnings into fatal errors.
Returns an array of two elements which are: `[gir_target,
typelib_target]`
@@ -223,7 +223,7 @@ directory. Note that this is not for installing schemas and is only
useful when running the application locally for example during tests.
* `build_by_default`: causes, when set to true, to have this target be
- built by default, that is, when invoking plain `ninja`, the default
+ built by default, that is, when invoking plain `meson compile`, the default
value is true for all built target types
* `depend_files`: files ([`string`](Reference-manual.md#string-object),
[`files()`](Reference-manual.md#files), or
@@ -246,7 +246,7 @@ one XML file.
* `annotations`: *(Added 0.43.0)* list of lists of 3 strings for the annotation for `'ELEMENT', 'KEY', 'VALUE'`
* `docbook`: *(Added 0.43.0)* prefix to generate `'PREFIX'-NAME.xml` docbooks
* `build_by_default`: causes, when set to true, to have this target be
- built by default, that is, when invoking plain `ninja`, the default
+ built by default, that is, when invoking plain `meson compile`, the default
value is true for all built target types
* `install_dir`: (*Added 0.46.0*) location to install the header or
bundle depending on previous options
@@ -344,8 +344,8 @@ of the module.
Note that this has the downside of rebuilding the doc for each build, which is
often very slow. It usually should be enabled only in CI.
-This creates a `$module-doc` target that can be ran to build docs and
-normally these are only built on install.
+This also creates a `$module-doc` target that can be run to build documentation.
+Normally the documentation is only built on install.
*Since 0.52.0* Returns a target object that can be passed as dependency to other
targets using generated doc files (e.g. in `content_files` of another doc).
diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md
index a6c6f4b..ee51b64 100644
--- a/docs/markdown/IDE-integration.md
+++ b/docs/markdown/IDE-integration.md
@@ -25,20 +25,21 @@ With this command meson will configure the project and also generate
introspection information that is stored in `intro-*.json` files in the
`meson-info` directory. The introspection dump will be automatically updated
when meson is (re)configured, or the build options change. Thus, an IDE can
-watch for changes in this directory to know when something changed.
+watch for changes in this directory to know when something changed. Note that
+`meson-info.json` is guaranteed to be the last file written.
The `meson-info` directory should contain the following files:
-| File | Description |
-| ---- | ----------- |
-| `intro-benchmarks.json` | Lists all benchmarks |
-| `intro-buildoptions.json` | Contains a full list of meson configuration options for the project |
-| `intro-buildsystem_files.json` | Full list of all meson build files |
-| `intro-dependencies.json` | Lists all dependencies used in the project |
-| `intro-installed.json` | Contains mapping of files to their installed location |
-| `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.) |
-| `intro-targets.json` | Full list of all build targets |
-| `intro-tests.json` | Lists all tests with instructions how to run them |
+| File | Description |
+| ------------------------------ | ------------------------------------------------------------------- |
+| `intro-benchmarks.json` | Lists all benchmarks |
+| `intro-buildoptions.json` | Contains a full list of meson configuration options for the project |
+| `intro-buildsystem_files.json` | Full list of all meson build files |
+| `intro-dependencies.json` | Lists all dependencies used in the project |
+| `intro-installed.json` | Contains mapping of files to their installed location |
+| `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.) |
+| `intro-targets.json` | Full list of all build targets |
+| `intro-tests.json` | Lists all tests with instructions how to run them |
The content of the JSON files is further specified in the remainder of this document.
@@ -99,15 +100,15 @@ for actual compilation.
The following table shows all valid types for a target.
-| value of `type` | Description |
-| --------------- | ----------- |
-| `executable` | This target will generate an executable file |
-| `static library` | Target for a static library |
-| `shared library` | Target for a shared library |
+| value of `type` | Description |
+| ---------------- | --------------------------------------------------------------------------------------------- |
+| `executable` | This target will generate an executable file |
+| `static library` | Target for a static library |
+| `shared library` | Target for a shared library |
| `shared module` | A shared library that is meant to be used with dlopen rather than linking into something else |
-| `custom` | A custom target |
-| `run` | A Meson run target |
-| `jar` | A Java JAR target |
+| `custom` | A custom target |
+| `run` | A Meson run target |
+| `jar` | A Java JAR target |
### Using `--targets` without a build directory
@@ -227,8 +228,8 @@ in the `meson.build`.
## Tests
-Compilation and unit tests are done as usual by running the `ninja` and
-`ninja test` commands. A JSON formatted result log can be found in
+Compilation and unit tests are done as usual by running the `meson compile` and
+`meson test` commands. A JSON formatted result log can be found in
`workspace/project/builddir/meson-logs/testlog.json`.
When these tests fail, the user probably wants to run the failing test in a
@@ -275,11 +276,62 @@ command line. Use `meson introspect -h` to see all available options.
This API can also work without a build directory for the `--projectinfo` command.
+# AST of a `meson.build`
+
+Since meson *0.55.0* it is possible to dump the AST of a `meson.build` as a JSON
+object. The interface for this is `meson introspect --ast /path/to/meson.build`.
+
+Each node of the AST has at least the following entries:
+
+| Key | Description |
+| ------------ | ------------------------------------------------------- |
+| `node` | Type of the node (see following table) |
+| `lineno` | Line number of the node in the file |
+| `colno` | Column number of the node in the file |
+| `end_lineno` | Marks the end of the node (may be the same as `lineno`) |
+| `end_colno` | Marks the end of the node (may be the same as `colno`) |
+
+Possible values for `node` with additional keys:
+
+| Node type | Additional keys |
+| -------------------- | ------------------------------------------------ |
+| `BooleanNode` | `value`: bool |
+| `IdNode` | `value`: str |
+| `NumberNode` | `value`: int |
+| `StringNode` | `value`: str |
+| `ContinueNode` | |
+| `BreakNode` | |
+| `ArgumentNode` | `positional`: node list; `kwargs`: accept_kwargs |
+| `ArrayNode` | `args`: node |
+| `DictNode` | `args`: node |
+| `EmptyNode` | |
+| `OrNode` | `left`: node; `right`: node |
+| `AndNode` | `left`: node; `right`: node |
+| `ComparisonNode` | `left`: node; `right`: node; `ctype`: str |
+| `ArithmeticNode` | `left`: node; `right`: node; `op`: str |
+| `NotNode` | `right`: node |
+| `CodeBlockNode` | `lines`: node list |
+| `IndexNode` | `object`: node; `index`: node |
+| `MethodNode` | `object`: node; `args`: node; `name`: str |
+| `FunctionNode` | `args`: node; `name`: str |
+| `AssignmentNode` | `value`: node; `var_name`: str |
+| `PlusAssignmentNode` | `value`: node; `var_name`: str |
+| `ForeachClauseNode` | `items`: node; `block`: node; `varnames`: list |
+| `IfClauseNode` | `ifs`: node list; `else`: node |
+| `IfNode` | `condition`: node; `block`: node |
+| `UMinusNode` | `right`: node |
+| `TernaryNode` | `condition`: node; `true`: node; `false`: node |
+
+We do not guarantee the stability of this format since it is heavily linked to
+the internal Meson AST. However, breaking changes (removal of a node type or the
+removal of a key) are unlikely and will be announced in the release notes.
+
+
# Existing integrations
- [Gnome Builder](https://wiki.gnome.org/Apps/Builder)
- [KDevelop](https://www.kdevelop.org)
- [Eclipse CDT](https://www.eclipse.org/cdt/) (experimental)
-- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs)
+- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs) (currently unmaintained!)
- [Meson-UI](https://github.com/michaelbadcrumble/meson-ui) (Meson build GUI)
- [Meson Syntax Highlighter](https://plugins.jetbrains.com/plugin/13269-meson-syntax-highlighter) plugin for JetBrains IDEs.
diff --git a/docs/markdown/IndepthTutorial.md b/docs/markdown/IndepthTutorial.md
index dd93f82..d2e2662 100644
--- a/docs/markdown/IndepthTutorial.md
+++ b/docs/markdown/IndepthTutorial.md
@@ -79,12 +79,12 @@ With these four files we are done. To configure, build and run the test suite, w
```console
$ meson builddir && cd builddir
-$ ninja
-$ ninja test
+$ meson compile
+$ meson test
```
To then install the project you only need one command.
```console
-$ ninja install
+$ meson install
```
diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md
index 5abfdd4..9dc2ad4 100644
--- a/docs/markdown/Installing.md
+++ b/docs/markdown/Installing.md
@@ -4,6 +4,18 @@ short-description: Installing targets
# Installing
+Invoked via the [following command](Commands.md#install) *(available since 0.47.0)*:
+
+```sh
+meson install
+```
+
+or alternatively (on older meson versions with `ninja` backend):
+
+```sh
+ninja install
+```
+
By default Meson will not install anything. Build targets can be
installed by tagging them as installable in the definition.
@@ -97,15 +109,13 @@ packages. This is done with the `DESTDIR` environment variable and it
is used just like with other build systems:
```console
-$ DESTDIR=/path/to/staging/area ninja install
+$ DESTDIR=/path/to/staging/area meson install
```
## Custom install behaviour
-The default install target (executed via, e.g., `ninja install`) does
-installing with reasonable default options. More control over the
-install behaviour can be achieved with the `meson install` command,
-that has been available since 0.47.0.
+Installation behaviour can be further customized using
+additional arguments.
For example, if you wish to install the current setup without
rebuilding the code (which the default install target always does) and
diff --git a/docs/markdown/Kconfig-module.md b/docs/markdown/Keyval-module.md
index 5807f8d..afc48fa 100644
--- a/docs/markdown/Kconfig-module.md
+++ b/docs/markdown/Keyval-module.md
@@ -1,15 +1,15 @@
---
-short-description: Unstable kconfig module
+short-description: Keyval module
authors:
- name: Mark Schulte, Paolo Bonzini
years: [2017, 2019]
has-copyright: false
...
-# Unstable kconfig module
+# keyval module
-This module parses Kconfig output files to allow use of kconfig
-configurations in meson projects.
+This module parses files consisting of a series of `key=value` lines. One use
+of this module is to load kconfig configurations in meson projects.
**Note**: this does not provide kconfig frontend tooling to generate a
configuration. You still need something such as kconfig frontends (see
@@ -23,20 +23,23 @@ chosen the configuration options), output a ".config" file.
The module may be imported as follows:
``` meson
-kconfig = import('unstable-kconfig')
+keyval = import('keyval')
```
The following functions will then be available as methods on the object
-with the name `kconfig`. You can, of course, replace the name
-`kconfig` with anything else.
+with the name `keyval`. You can, of course, replace the name
+`keyval` with anything else.
-### kconfig.load()
+### keyval.load()
-This function loads a kconfig output file and returns a dictionary object.
+This function loads a file consisting of a series of `key=value` lines
+and returns a dictionary object.
-`kconfig.load()` makes no attempt at parsing the values in the
-file. Therefore, true boolean values will be represented as the string "y"
-and integer values will have to be converted with `.to_int()`.
+`keyval.load()` makes no attempt at parsing the values in the file.
+In particular boolean and integer values will be represented as strings,
+and strings will keep any quoting that is present in the input file. It
+can be useful to create a [`configuration_data()`](#configuration_data)
+object from the dictionary and use methods such as `get_unquoted()`.
Kconfig frontends usually have ".config" as the default name for the
configuration file. However, placing the configuration file in the source
diff --git a/docs/markdown/Localisation.md b/docs/markdown/Localisation.md
index ce9e3b6..ed63e13 100644
--- a/docs/markdown/Localisation.md
+++ b/docs/markdown/Localisation.md
@@ -48,7 +48,7 @@ Then we need to generate the main pot file. The potfile can have any name but is
Run the following command from your build folder to generate the pot file. It is recommended to inspect it manually afterwards and fill in e.g. proper copyright and contact information.
```console
-$ ninja intltest-pot
+$ meson compile intltest-pot
```
### generate .po files
@@ -56,5 +56,5 @@ $ ninja intltest-pot
For each language listed in the array above we need a corresponding `.po` file. Those can be generated by running the following command from your build folder.
```console
-$ ninja intltest-update-po
+$ meson compile intltest-update-po
```
diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md
index 404c3d2..5ac66a8 100644
--- a/docs/markdown/Machine-files.md
+++ b/docs/markdown/Machine-files.md
@@ -5,24 +5,132 @@ documentation on the common values used by both, for the specific values of
one or the other see the [cross compilation](Cross-compilation.md) and [native
environments](Native-environments.md).
+## Data Types
+
+There are four basic data types in a machine file:
+- strings
+- arrays
+- booleans
+- integers
+
+A string is specified single quoted:
+```ini
+[section]
+option1 = 'false'
+option2 = '2'
+```
+
+An array is enclosed in square brackets, and must consist of strings or booleans:
+```ini
+[section]
+option = ['value']
+```
+
+A boolean must be either `true` or `false`, and unquoted.
+```ini
+option = false
+```
+
+An integer must be an unquoted numeric constant:
+```ini
+option = 42
+```
+
## Sections
The following sections are allowed:
+- constants
- binaries
- paths
- properties
+- project options
+- built-in options
+
+### constants
+
+*Since 0.56.0*
+
+String and list concatenation is supported using the `+` operator, joining paths
+is supported using the `/` operator.
+Entries defined in the `[constants]` section can be used in any other section
+(they are always parsed first), entries in any other section can be used only
+within that same section and only after it has been defined.
+
+```ini
+[constants]
+toolchain = '/toolchain'
+common_flags = ['--sysroot=' + toolchain / 'sysroot']
+
+[properties]
+c_args = common_flags + ['-DSOMETHING']
+cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+[binaries]
+c = toolchain / 'gcc'
+```
+
+This can be useful with cross file composition as well. A generic cross file
+could be composed with a platform specific file where constants are defined:
+```ini
+# aarch64.ini
+[constants]
+arch = 'aarch64-linux-gnu'
+```
+
+```ini
+# cross.ini
+[binaries]
+c = arch + '-gcc'
+cpp = arch + '-g++'
+strip = arch + '-strip'
+pkgconfig = arch + '-pkg-config'
+...
+```
+
+This can be used as `meson setup --cross-file aarch64.ini --cross-file cross.ini builddir`.
+
+Note that file composition happens before the parsing of values. The example
+below results in `b` being `'HelloWorld'`:
+```ini
+# file1.ini:
+[constants]
+a = 'Foo'
+b = a + 'World'
+```
+
+```ini
+#file2.ini:
+[constants]
+a = 'Hello'
+```
+
+The example below results in an error when file1.ini is included before file2.ini
+because `b` would be defined before `a`:
+```ini
+# file1.ini:
+[constants]
+b = a + 'World'
+```
+
+```ini
+#file2.ini:
+[constants]
+a = 'Hello'
+```
### Binaries
The binaries section contains a list of binaries. These can be used
-internally by meson, or by the `find_program` function:
+internally by meson, or by the `find_program` function.
+
+These values must be either strings or an array of strings
Compilers and linkers are defined here using `<lang>` and `<lang>_ld`.
`<lang>_ld` is special because it is compiler specific. For compilers like
gcc and clang which are used to invoke the linker this is a value to pass to
their "choose the linker" argument (-fuse-ld= in this case). For compilers
like MSVC and Clang-Cl, this is the path to a linker for meson to invoke,
-such as `link.exe` or `lld-link.exe`. Support for ls is *new in 0.53.0*
+such as `link.exe` or `lld-link.exe`. Support for `ld` is *new in 0.53.0*
*changed in 0.53.1* the `ld` variable was replaced by `<lang>_ld`, because it
*regressed a large number of projects. in 0.53.0 the `ld` variable was used
@@ -40,8 +148,8 @@ llvm-config = '/usr/lib/llvm8/bin/llvm-config'
Cross example:
```ini
-c = '/usr/bin/i586-mingw32msvc-gcc'
-cpp = '/usr/bin/i586-mingw32msvc-g++'
+c = ['ccache', '/usr/bin/i586-mingw32msvc-gcc']
+cpp = ['ccache', '/usr/bin/i586-mingw32msvc-g++']
c_ld = 'gold'
cpp_ld = 'gold'
ar = '/usr/i586-mingw32msvc/bin/ar'
@@ -64,8 +172,10 @@ An incomplete list of internally used programs that can be overridden here is:
### Paths and Directories
+*Deprecated in 0.56.0* use the built-in section instead.
+
As of 0.50.0 paths and directories such as libdir can be defined in the native
-file in a paths section
+and cross files in a paths section. These should be strings.
```ini
[paths]
@@ -84,21 +194,79 @@ command line will override any options in the native file. For example, passing
In addition to special data that may be specified in cross files, this
section may contain random key value pairs accessed using the
-`meson.get_external_property()`
+`meson.get_external_property()`, or `meson.get_cross_property()`.
+
+*Changed in 0.56.0* putting `<lang>_args` and `<lang>_link_args` in the
+properties section has been deprecated, and should be put in the built-in
+options section.
+
+### Project specific options
+
+*New in 0.56.0*
+
+Path options are not allowed, those must be set in the `[paths]` section.
+
+Being able to set project specific options in a cross or native file can be
+done using the `[project options]` section of the specific file (if doing a
+cross build the options from the native file will be ignored)
+
+For setting options in subprojects use the `[<subproject>:project options]`
+section instead.
+
+```ini
+[project options]
+build-tests = true
+
+[zlib:project options]
+build-tests = false
+```
+
+### Meson built-in options
+
+Meson built-in options can be set the same way:
+
+```ini
+[built-in options]
+c_std = 'c99'
+```
+
+You can set some meson built-in options on a per-subproject basis, such as
+`default_library` and `werror`. The order of precedence is:
+1) Command line
+2) Machine file
+3) Build system definitions
+
+```ini
+[zlib:built-in options]
+default_library = 'static'
+werror = false
+```
+
+Options set on a per-subproject basis will inherit the
+option from the parent if the parent has a setting but the subproject
+doesn't, even when there is a default set in the Meson language.
+
+```ini
+[built-in options]
+default_library = 'static'
+```
-## Properties
+will make subprojects use default_library as static.
-*New for native files in 0.54.0*
+Some options can be set on a per-machine basis (in other words, the value of
+the build machine can be different than the host machine in a cross compile).
+In these cases the values from both a cross file and a native file are used.
-The properties section can contain any variable you like, and is accessed via
-`meson.get_external_property`, or `meson.get_cross_property`.
+An incomplete list of options is:
+- pkg_config_path
+- cmake_prefix_path
## Loading multiple machine files
Native files allow layering (cross files can be layered since meson 0.52.0).
-More than one native file can be loaded, with values from a previous file being
+More than one file can be loaded, with values from a previous file being
overridden by the next. The intention of this is not overriding, but to allow
-composing native files. This composition is done by passing the command line
+composing files. This composition is done by passing the command line
argument multiple times:
```console
diff --git a/docs/markdown/Meson-sample.md b/docs/markdown/Meson-sample.md
index 6f26f36..f98e022 100644
--- a/docs/markdown/Meson-sample.md
+++ b/docs/markdown/Meson-sample.md
@@ -50,7 +50,7 @@ exe = executable('myexe', src)
test('simple test', exe)
```
-Here we create a unit test called *simple test*, and which uses the built executable. When the tests are run with the `ninja test` command, the built executable is run. If it returns zero, the test passes. A non-zero return value indicates an error, which Meson will then report to the user.
+Here we create a unit test called *simple test*, and which uses the built executable. When the tests are run with the `meson test` command, the built executable is run. If it returns zero, the test passes. A non-zero return value indicates an error, which Meson will then report to the user.
A note to Visual Studio users
-----
diff --git a/docs/markdown/MesonCI.md b/docs/markdown/MesonCI.md
new file mode 100644
index 0000000..73b979b
--- /dev/null
+++ b/docs/markdown/MesonCI.md
@@ -0,0 +1,53 @@
+# Meson CI setup
+
+This document is aimed for Meson contributors and documents
+the CI setup used for testing Meson itself. The Meson
+project uses multiple CI platforms for covering a wide
+range of target systems.
+
+## Travis CI
+
+The travis configuration file is the `.travis.yml` in the
+project root. This platform tests cross compilation and
+unity builds on a [linux docker image](#docker-images) and
+on OSX.
+
+## GitHub actions
+
+The configuration files for GitHub actions are located in
+`.github/workflows`. Here, all [images](#docker-images)
+are tested with the full `run_tests.py` run. Additionally,
+some other, smaller, tests are run.
+
+## Docker images
+
+The Linux docker images are automatically built and
+uploaded by GitHub actions. An image rebuild is triggered
+when any of the image definition files are changed (in
+`ci/ciimage`) in the master branch. Additionally, the
+images are also updated weekly.
+
+Each docker image has one corresponding directory in
+`ci/ciimage` with an `image.json` and an `install.sh`.
+
+### Image generation
+
+There are no manual Dockerfiles. Instead the Dockerfile is
+automatically generated by the `build.py` script. This is
+done to ensure that all images have the same layout and can
+all be built and tested automatically.
+
+The Dockerfile is generated from the `image.json` file and
+basically only adds a few common files and runs the
+`install.sh` script which should contain all distribution
+specific setup steps. The `common.sh` can be sourced via
+`source /ci/common.sh` to access some shared functionality.
+
+To generate the image run `build.py -t build <image>`. A
+generated image can be tested with `build.py -t test <image>`.
+
+### Common image setup
+
+Each docker image has a `/ci` directory with an
+`env_vars.sh` script. This script has to be sourced before
+running the meson test suite.
diff --git a/docs/markdown/Precompiled-headers.md b/docs/markdown/Precompiled-headers.md
index d9ac7a4..05b50bc 100644
--- a/docs/markdown/Precompiled-headers.md
+++ b/docs/markdown/Precompiled-headers.md
@@ -51,7 +51,7 @@ Using precompiled headers with GCC and derivatives
--
Once you have a file to precompile, you can enable the use of pch for
-a give target with a *pch* keyword argument. As an example, let's assume
+a given target with a *pch* keyword argument. As an example, let's assume
you want to build a small C binary with precompiled headers.
Let's say the source files of the binary use the system headers `stdio.h`
and `string.h`. Then you create a header file `pch/myexe_pch.h` with this
diff --git a/docs/markdown/Project-templates.md b/docs/markdown/Project-templates.md
index 5f323bd..7ded318 100644
--- a/docs/markdown/Project-templates.md
+++ b/docs/markdown/Project-templates.md
@@ -16,15 +16,34 @@ $ meson init --language=c --name=myproject --version=0.1
```
This would create the build definitions for a helloworld type
-project. The result can be compiled as usual. For example compiling it
-with Ninja could be done like this:
+project. The result can be compiled as usual. For example it
+could be done like this:
```
-$ meson builddir
-$ ninja -C builddir
+$ meson setup builddir
+$ meson compile -C builddir
```
The generator has many different projects and settings. They can all
be listed by invoking the command `meson init --help`.
This feature is available since Meson version 0.45.0.
+
+# Generate a build script for an existing project
+
+With `meson init` you can generate a build script for an existing
+project with existing project files by running the command in the
+root directory of your project. Meson currently supports this
+feature for `executable`, and `jar` projects.
+
+# Build after generation of template
+
+It is possible to have Meson generate a build directory from the
+`meson init` command without running `meson setup`. This is done
+by passing `-b` or `--build` switch.
+
+```console
+$ mkdir project_name
+$ cd project_name
+$ meson init --language=c --name=myproject --version=0.1 --build
+``` \ No newline at end of file
diff --git a/docs/markdown/Qt5-module.md b/docs/markdown/Qt5-module.md
index f1c2f6c..0d9a6b6 100644
--- a/docs/markdown/Qt5-module.md
+++ b/docs/markdown/Qt5-module.md
@@ -21,7 +21,7 @@ This method generates the necessary targets to build translation files with lrel
- `ts_files`, the list of input translation files produced by Qt's lupdate tool.
- `install` when true, this target is installed during the install step (optional).
- `install_dir` directory to install to (optional).
- - `build_by_default` when set to true, to have this target be built by default, that is, when invoking plain ninja; the default value is false (optional).
+ - `build_by_default` when set to true, to have this target be built by default, that is, when invoking `meson compile`; the default value is false (optional).
## has_tools
diff --git a/docs/markdown/Quick-guide.md b/docs/markdown/Quick-guide.md
index 0bed683..74636e5 100644
--- a/docs/markdown/Quick-guide.md
+++ b/docs/markdown/Quick-guide.md
@@ -93,8 +93,8 @@ are working on. The steps to take are very simple.
```console
$ cd /path/to/source/root
$ meson builddir && cd builddir
-$ ninja
-$ ninja test
+$ meson compile
+$ meson test
```
The only thing to note is that you need to create a separate build
@@ -104,14 +104,14 @@ directory. This allows you to have multiple build trees with different
configurations at the same time. This way generated files are not
added into revision control by accident.
-To recompile after code changes, just type `ninja`. The build command
+To recompile after code changes, just type `meson compile`. The build command
is always the same. You can do arbitrary changes to source code and
build system files and Meson will detect those and will do the right
thing. If you want to build optimized binaries, just use the argument
`--buildtype=debugoptimized` when running Meson. It is recommended
that you keep one build directory for unoptimized builds and one for
optimized ones. To compile any given configuration, just go into the
-corresponding build directory and run `ninja`.
+corresponding build directory and run `meson compile`.
Meson will automatically add compiler flags to enable debug
information and compiler warnings (i.e. `-g` and `-Wall`). This means
@@ -128,9 +128,9 @@ build and install Meson projects are the following.
```console
$ cd /path/to/source/root
$ meson --prefix /usr --buildtype=plain builddir -Dc_args=... -Dcpp_args=... -Dc_link_args=... -Dcpp_link_args=...
-$ ninja -v -C builddir
-$ ninja -C builddir test
-$ DESTDIR=/path/to/staging/root ninja -C builddir install
+$ meson compile -C builddir
+$ meson test -C builddir
+$ DESTDIR=/path/to/staging/root meson install -C builddir
```
The command line switch `--buildtype=plain` tells Meson not to add its
@@ -139,7 +139,7 @@ on used flags.
This is very similar to other build systems. The only difference is
that the `DESTDIR` variable is passed as an environment variable
-rather than as an argument to `ninja install`.
+rather than as an argument to `meson install`.
As distro builds happen always from scratch, you might consider
enabling [unity builds](Unity-builds.md) on your packages because they
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index 5156b5b..080fe3e 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -15,19 +15,19 @@ afterwards](#returned-objects).
Adds the positional arguments to the compiler command line. This
function has two keyword arguments:
-- `language` specifies the language(s) that the arguments should be
+- `language`: specifies the language(s) that the arguments should be
applied to. If a list of languages is given, the arguments are added
to each of the corresponding compiler command lines. Note that there
is no way to remove an argument set in this way. If you have an
argument that is only used in a subset of targets, you have to specify
it in per-target flags.
-- `native` is a boolean specifying whether the arguments should be
+- `native` *(since 0.48.0)*: a boolean specifying whether the arguments should be
applied to the native or cross compilation. If `true` the arguments
will only be used for native compilations. If `false` the arguments
will only be used in cross compilations. If omitted, the flags are
added to native compilations if compiling natively and cross
- compilations (only) when cross compiling. Available since 0.48.0
+ compilations (only) when cross compiling.
The arguments are used in all compiler invocations with the exception
of compile tests, because you might need to run a compile test with
@@ -70,12 +70,12 @@ endif
Takes the following keyword arguments:
-- `required` defaults to `true`, which means that if any of the languages
-specified is not found, Meson will halt. Since *0.47.0* the value of a
+- `required`: defaults to `true`, which means that if any of the languages
+specified is not found, Meson will halt. *(since 0.47.0)* The value of a
[`feature`](Build-options.md#features) option can also be passed.
-- `native` if set to `true`, the language will be used to compile for the build
- machine, if `false`, for the host machine. Since *0.54.0*.
+- `native` *(since 0.54.0)*: if set to `true`, the language will be used to compile for the build
+ machine, if `false`, for the host machine.
Returns `true` if all languages specified were found and `false` otherwise.
@@ -113,16 +113,16 @@ Add a custom test setup that can be used to run the tests with a
custom setup, for example under Valgrind. The keyword arguments are
the following:
-- `env` environment variables to set, such as `['NAME1=value1',
+- `env`: environment variables to set, such as `['NAME1=value1',
'NAME2=value2']`, or an [`environment()`
object](#environment-object) which allows more sophisticated
- environment juggling. *Since 0.52.0* a dictionary is also accepted.
-- `exe_wrapper` a list containing the wrapper command or script followed by the arguments to it
-- `gdb` if `true`, the tests are also run under `gdb`
-- `timeout_multiplier` a number to multiply the test timeout with
-- `is_default` a bool to set whether this is the default test setup.
+ environment juggling. *(since 0.52.0)* A dictionary is also accepted.
+- `exe_wrapper`: a list containing the wrapper command or script followed by the arguments to it
+- `gdb`: if `true`, the tests are also run under `gdb`
+- `timeout_multiplier`: a number to multiply the test timeout with
+- `is_default` *(since 0.49.0)*: a bool to set whether this is the default test setup.
If `true`, the setup will be used whenever `meson test` is run
- without the `--setup` option. Since 0.49.0
+ without the `--setup` option.
To use the test setup, run `meson test --setup=*name*` inside the
build dir.
@@ -137,11 +137,11 @@ Note that all these options are also available while running the
runtarget alias_target(target_name, dep1, ...)
```
-Since *0.52.0*
+*(since 0.52.0)*
This function creates a new top-level target. Like all top-level targets, this
-integrates with the selected backend. For instance, with Ninja you can
-run it as `ninja target_name`. This is a dummy target that does not execute any
+integrates with the selected backend. For instance, you can
+run it as `meson compile target_name`. This is a dummy target that does not execute any
command, but ensures that all dependencies are built. Dependencies can be any
build target (e.g. return value of [executable()](#executable), custom_target(), etc)
@@ -153,7 +153,7 @@ build target (e.g. return value of [executable()](#executable), custom_target(),
Abort with an error message if `condition` evaluates to `false`.
-*Since 0.53.0* `message` argument is optional and defaults to print the condition
+*(since 0.53.0)* `message` argument is optional and defaults to print the condition
statement instead.
### benchmark()
@@ -169,7 +169,7 @@ run. The behavior of this function is identical to [`test()`](#test) except for:
* benchmark() does not automatically add the `MALLOC_PERTURB_` environment variable
*Note:* Prior to 0.52.0 benchmark would warn that `depends` and `priority`
-were unsupported, this is incorrect
+were unsupported, this is incorrect.
### both_libraries()
@@ -177,6 +177,8 @@ were unsupported, this is incorrect
buildtarget = both_libraries(library_name, list_of_sources, ...)
```
+*(since 0.46.0)*
+
Builds both a static and shared library with the given
sources. Positional and keyword arguments are otherwise the same as
for [`library`](#library). Source files will be compiled only once and
@@ -190,8 +192,6 @@ shared library. In addition it supports the following extra methods:
- `get_shared_lib()` returns the shared library build target
- `get_static_lib()` returns the static library build target
-*Added 0.46.0*
-
### build_target()
Creates a build target whose type can be set dynamically with the
@@ -234,7 +234,7 @@ Creates an empty configuration object. You should add your
configuration with [its method calls](#configuration-data-object) and
finally use it in a call to `configure_file`.
-Since *0.49.0* takes an optional dictionary as first argument. If
+*(since 0.49.0)* Takes an optional dictionary as first argument. If
provided, each key/value pair is added into the `configuration_data`
as if `set()` method was called for each of them.
@@ -251,7 +251,7 @@ When a [`configuration_data()`](#configuration_data) object is passed
to the `configuration:` keyword argument, it takes a template file as
the `input:` (optional) and produces the `output:` (required) by
substituting values from the configuration data as detailed in [the
-configuration file documentation](Configuration.md). Since *0.49.0* a
+configuration file documentation](Configuration.md). *(since 0.49.0)* A
dictionary can be passed instead of a
[`configuration_data()`](#configuration_data) object.
@@ -259,53 +259,53 @@ When a list of strings is passed to the `command:` keyword argument,
it takes any source or configured file as the `input:` and assumes
that the `output:` is produced when the specified command is run.
-Since *0.47.0*, when the `copy:` keyword argument is set to `true`,
+*(since 0.47.0)* When the `copy:` keyword argument is set to `true`,
this function will copy the file provided in `input:` to a file in the
build directory with the name `output:` in the current directory.
These are all the supported keyword arguments:
-- `capture` when this argument is set to true, Meson captures `stdout`
- of the `command` and writes it to the target file specified as
- `output`. Available since v0.41.0.
-- `command` as explained above, if specified, Meson does not create
+- `capture` *(since 0.41.0)*: when this argument is set to true,
+ Meson captures `stdout` of the `command` and writes it to the target
+ file specified as `output`.
+- `command`: as explained above, if specified, Meson does not create
the file itself but rather runs the specified command, which allows
- you to do fully custom file generation. Since *0.52.0* the command can contain
+ you to do fully custom file generation. *(since 0.52.0)* The command can contain
file objects and more than one file can be passed to the `input` keyword
argument, see [`custom_target()`](#custom_target) for details about string
substitutions.
-- `copy` *(added 0.47.0)* as explained above, if specified Meson only
+- `copy` *(since 0.47.0)*: as explained above, if specified Meson only
copies the file from input to output.
-- `depfile` *(added 0.52.0)* is a dependency file that the command can write listing
+- `depfile` *(since 0.52.0)*: a dependency file that the command can write listing
all the additional files this target depends on. A change
in any one of these files triggers a reconfiguration.
-- `format` *(added 0.46.0)* the format of defines. It defaults to `meson`, and so substitutes
+- `format` *(since 0.46.0)*: the format of defines. It defaults to `meson`, and so substitutes
`#mesondefine` statements and variables surrounded by `@` characters, you can also use `cmake`
to replace `#cmakedefine` statements and variables with the `${variable}` syntax. Finally you can use
`cmake@` in which case substitutions will apply on `#cmakedefine` statements and variables with
the `@variable@` syntax.
-- `input` the input file name. If it's not specified in configuration
+- `input`: the input file name. If it's not specified in configuration
mode, all the variables in the `configuration:` object (see above)
are written to the `output:` file.
-- `install` *(added 0.50.0)* When true, this generated file is installed during
+- `install` *(since 0.50.0)*: when true, this generated file is installed during
the install step, and `install_dir` must be set and not empty. When false, this
generated file is not installed regardless of the value of `install_dir`.
When omitted it defaults to true when `install_dir` is set and not empty,
false otherwise.
-- `install_dir` the subdirectory to install the generated file to
+- `install_dir`: the subdirectory to install the generated file to
(e.g. `share/myproject`), if omitted or given the value of empty
string, the file is not installed.
-- `install_mode` *(added 0.47.0)* specify the file mode in symbolic format
+- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format
and optionally the owner/uid and group/gid for the installed files.
-- `output` the output file name (since v0.41.0, may contain
- `@PLAINNAME@` or `@BASENAME@` substitutions). In configuration mode,
+- `output`: the output file name. *(since 0.41.0)* May contain
+ `@PLAINNAME@` or `@BASENAME@` substitutions. In configuration mode,
the permissions of the input file (if it is specified) are copied to
the output file.
-- `output_format` *(added 0.47.0)* the format of the output to generate when no input
+- `output_format` *(since 0.47.0)*: the format of the output to generate when no input
was specified. It defaults to `c`, in which case preprocessor directives
will be prefixed with `#`, you can also use `nasm`, in which case the
prefix will be `%`.
-- `encoding` *(added v0.47.0)* set the file encoding for the input and output file,
+- `encoding` *(since 0.47.0)*: set the file encoding for the input and output file,
defaults to utf-8. The supported encodings are those of python3, see
[standard-encodings](https://docs.python.org/3/library/codecs.html#standard-encodings).
@@ -319,34 +319,30 @@ Create a custom top level build target. The only positional argument
is the name of this target and the keyword arguments are the
following.
-- `build_by_default` *(added 0.38)* causes, when set to true, to
+- `build_by_default` *(since 0.38.0)*: causes, when set to true, to
have this target be built by default. This means it will be built when
- `ninja` is called without any arguments or asked to build a target
- like `ninja test` that depends on ninja's [default
- target](https://ninja-build.org/manual.html#_default_target_statements)
- set to `all` by meson. The same behavior applies for backends other
- than `ninja`. The default value is `false`.
- *(changed in 0.50)* if `build_by_default` is explicitly set to false, `install`
+ `meson compile` is called without any arguments. The default value is `false`.
+ *(since 0.50.0)* If `build_by_default` is explicitly set to false, `install`
will no longer override it. If `build_by_default` is not set, `install` will
still determine its default.
-- `build_always` (deprecated) if `true` this target is always considered out of
+- `build_always` **(deprecated)**: if `true` this target is always considered out of
date and is rebuilt every time. Equivalent to setting both
`build_always_stale` and `build_by_default` to true.
-- `build_always_stale` *(added 0.47)* if `true` the target is always considered out of date.
+- `build_always_stale` *(since 0.47.0)*: if `true` the target is always considered out of date.
Useful for things such as build timestamps or revision control tags.
The associated command is run even if the outputs are up to date.
-- `capture`, there are some compilers that can't be told to write
+- `capture`: there are some compilers that can't be told to write
their output to a file but instead write it to standard output. When
this argument is set to true, Meson captures `stdout` and writes it
to the target file. Note that your command argument list may not
contain `@OUTPUT@` when capture mode is active.
-- `console` *(added 0.48)* keyword argument conflicts with `capture`, and is meant
+- `console` *(since 0.48.0)*: keyword argument conflicts with `capture`, and is meant
for commands that are resource-intensive and take a long time to
finish. With the Ninja backend, setting this will add this target
to [Ninja's `console` pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool),
which has special properties such as not buffering stdout and
serializing all targets in this pool.
-- `command` command to run to create outputs from inputs. The command
+- `command`: command to run to create outputs from inputs. The command
may be strings or the return value of functions that return file-like
objects such as [`find_program()`](#find_program),
[`executable()`](#executable), [`configure_file()`](#configure_file),
@@ -356,48 +352,47 @@ following.
Note: always specify commands in array form `['commandname',
'-arg1', '-arg2']` rather than as a string `'commandname -arg1
-arg2'` as the latter will *not* work.
-- `depend_files` files ([`string`](#string-object),
+- `depend_files`: files ([`string`](#string-object),
[`files()`](#files), or [`configure_file()`](#configure_file)) that
this target depends on but are not listed in the `command` keyword
argument. Useful for adding regen dependencies.
-- `depends` specifies that this target depends on the specified
+- `depends`: specifies that this target depends on the specified
target(s), even though it does not take any of them as a command
line argument. This is meant for cases where you have a tool that
e.g. does globbing internally. Usually you should just put the
generated sources as inputs and Meson will set up all dependencies
automatically.
-- `depfile` is a dependency file that the command can write listing
+- `depfile`: a dependency file that the command can write listing
all the additional files this target depends on, for example a C
compiler would list all the header files it included, and a change
in any one of these files triggers a recompilation
-- `input` list of source files. As of 0.41.0 the list will be flattened.
-- `install` when true, this target is installed during the install step
-- `install_dir` directory to install to
-- `install_mode` *(added 0.47.0)* the file mode and optionally the
+- `input`: list of source files. *(since 0.41.0)* the list is flattened.
+- `install`: when true, this target is installed during the install step
+- `install_dir`: directory to install to
+- `install_mode` *(since 0.47.0)*: the file mode and optionally the
owner/uid and group/gid
-- `output` list of output files
+- `output`: list of output files
The list of strings passed to the `command` keyword argument accept
the following special string substitutions:
-- `@INPUT@` the full path to the input passed to `input`. If more than
+- `@INPUT@`: the full path to the input passed to `input`. If more than
one input is specified, all of them will be substituted as separate
arguments only if the command uses `'@INPUT@'` as a
standalone-argument. For instance, this would not work: `command :
['cp', './@INPUT@']`, but this would: `command : ['cp', '@INPUT@']`.
-- `@OUTPUT@` the full path to the output passed to `output`. If more
+- `@OUTPUT@`: the full path to the output passed to `output`. If more
than one outputs are specified, the behavior is the same as
`@INPUT@`.
-- `@INPUT0@` `@INPUT1@` `...` the full path to the input with the specified array index in `input`
-- `@OUTPUT0@` `@OUTPUT1@` `...` the full path to the output with the specified array index in `output`
-- `@OUTDIR@` the full path to the directory where the output(s) must be written
-- `@DEPFILE@` the full path to the dependency file passed to `depfile`
+- `@INPUT0@` `@INPUT1@` `...`: the full path to the input with the specified array index in `input`
+- `@OUTPUT0@` `@OUTPUT1@` `...`: the full path to the output with the specified array index in `output`
+- `@OUTDIR@`: the full path to the directory where the output(s) must be written
+- `@DEPFILE@`: the full path to the dependency file passed to `depfile`
- `@PLAINNAME@`: the input filename, without a path
- `@BASENAME@`: the input filename, with extension removed
-- `@PRIVATE_DIR@`: path to a directory where the custom target must store all its intermediate files, available since 0.50.1
+- `@PRIVATE_DIR@` *(since 0.50.1)*: path to a directory where the custom target must store all its intermediate files.
-The `depfile` keyword argument also accepts the `@BASENAME@` and `@PLAINNAME@`
-substitutions. *(since 0.47)*
+*(since 0.47.0)* The `depfile` keyword argument also accepts the `@BASENAME@` and `@PLAINNAME@` substitutions.
The returned object also has methods that are documented in the
[object methods section](#custom-target-object) below.
@@ -414,22 +409,21 @@ internal to the current build. The main use case for this is in
subprojects. This allows a subproject to easily specify how it should
be used. This makes it interchangeable with the same dependency that
is provided externally by the system. This function has the following
-keyword arguments.
-
- - `compile_args`, compile arguments to use
- - `dependencies`, other dependencies needed to use this dependency
- - `include_directories`, the directories to add to header search path,
- must be include_directories objects or, since 0.50.0, plain strings
- - `link_args`, link arguments to use
- - `link_with`, libraries to link against
- - `link_whole`, libraries to link fully, same as [`executable`](#executable)
- Since 0.46.0
- - `sources`, sources to add to targets (or generated header files
- that should be built before sources including them are built)
- - `version`, the version of this dependency, such as `1.2.3`
- - `variables`, a dictionary of arbitrary strings, this is meant to be used
- in subprojects where special variables would be provided via cmake or
- pkg-config. Since 0.54.0
+keyword arguments:
+
+- `compile_args`: compile arguments to use.
+- `dependencies`: other dependencies needed to use this dependency.
+- `include_directories`: the directories to add to header search path,
+ must be include_directories objects or *(since 0.50.0)* plain strings
+- `link_args`: link arguments to use.
+- `link_with`: libraries to link against.
+- `link_whole` *(since 0.46.0)*: libraries to link fully, same as [`executable`](#executable).
+- `sources`: sources to add to targets (or generated header files
+ that should be built before sources including them are built)
+- `version`: the version of this dependency, such as `1.2.3`
+- `variables` *(since 0.54.0)*: a dictionary of arbitrary strings, this is meant to be used
+ in subprojects where special variables would be provided via cmake or
+ pkg-config.
### dependency()
@@ -445,12 +439,12 @@ logic](Dependencies.md#dependencies-with-custom-lookup-functionality)
are also supported. This function supports the following keyword
arguments:
-- `default_options` *(added 0.37.0)* an array of default option values
+- `default_options` *(since 0.37.0)*: an array of default option values
that override those set in the subproject's `meson_options.txt`
(like `default_options` in [`project()`](#project), they only have
effect when Meson is run for the first time, and command line
arguments override any default options in build files)
-- `fallback` specifies a subproject fallback to use in case the
+- `fallback`: specifies a subproject fallback to use in case the
dependency is not found in the system. The value is an array
`['subproj_name', 'subproj_dep']` where the first value is the name
of the subproject and the second is the variable name in that
@@ -459,36 +453,42 @@ arguments:
[`dependency()`](#dependency), etc. Note that this means the
fallback dependency may be a not-found dependency, in which
case the value of the `required:` kwarg will be obeyed.
- *Since 0.54.0* `'subproj_dep'` argument can be omitted in the case the
+ *(since 0.54.0)* `'subproj_dep'` argument can be omitted in the case the
subproject used `meson.override_dependency('dependency_name', subproj_dep)`.
In that case, the `fallback` keyword argument can be a single string instead
- of a list of 2 strings.
-- `language` *(added 0.42.0)* defines what language-specific
+   of a list of 2 strings. *(since 0.55.0)* The `fallback` keyword argument can be
+ omitted when there is a wrap file or a directory with the same `dependency_name`,
+ and subproject registered the dependency using
+ `meson.override_dependency('dependency_name', subproj_dep)`, or when the wrap
+ file has `dependency_name` in its `[provide]` section.
+ See [Wrap documentation](Wrap-dependency-system-manual.md#provide-section)
+ for more details.
+- `language` *(since 0.42.0)*: defines what language-specific
dependency to find if it's available for multiple languages.
-- `method` defines the way the dependency is detected, the default is
+- `method`: defines the way the dependency is detected, the default is
`auto` but can be overridden to be e.g. `qmake` for Qt development,
and [different dependencies support different values](
Dependencies.md#dependencies-with-custom-lookup-functionality)
for this (though `auto` will work on all of them)
-- `native` if set to `true`, causes Meson to find the dependency on
+- `native`: if set to `true`, causes Meson to find the dependency on
the build machine system rather than the host system (i.e. where the
cross compiled binary will run on), usually only needed if you build
a tool to be used during compilation.
-- `not_found_message` *(added 0.50.0)* is an optional string that will
+- `not_found_message` *(since 0.50.0)*: an optional string that will
be printed as a `message()` if the dependency was not found.
-- `required`, when set to false, Meson will proceed with the build
- even if the dependency is not found. Since *0.47.0* the value of a
+- `required`: when set to false, Meson will proceed with the build
+ even if the dependency is not found. *(since 0.47.0)* The value of a
[`feature`](Build-options.md#features) option can also be passed.
-- `static` tells the dependency provider to try to get static
+- `static`: tells the dependency provider to try to get static
libraries instead of dynamic ones (note that this is not supported
by all dependency backends)
-- `version` specifies the required version, a string containing a
+- `version` *(since 0.37.0)*: specifies the required version, a string containing a
comparison operator followed by the version string, examples include
- `>1.0.0`, `<=2.3.5` or `3.1.4` for exact matching. *(Added 0.37.0)*
+ `>1.0.0`, `<=2.3.5` or `3.1.4` for exact matching.
You can also specify multiple restrictions by passing a list to this
keyword argument, such as: `['>=3.14.0', '<=4.1.0']`.
These requirements are never met if the version is unknown.
-- `include_type` *(added 0.52.0)* is an enum flag, marking how the dependency
+- `include_type` *(since 0.52.0)*: an enum flag, marking how the dependency
flags should be converted. Supported values are `'preserve'`, `'system'` and
`'non-system'`. System dependencies may be handled differently on some
platforms, for instance, using `-isystem` instead of `-I`, where possible.
@@ -499,9 +499,8 @@ arguments:
keywords may also be accepted (e.g. `modules` specifies submodules to use for
dependencies such as Qt5 or Boost. `components` allows the user to manually
add CMake `COMPONENTS` for the `find_package` lookup)
-- `disabler` if `true` and the dependency couldn't be found, return a
- [disabler object](#disabler-object) instead of a not-found dependency.
- *Since 0.49.0*
+- `disabler` *(since 0.49.0)*: if `true` and the dependency couldn't be found,
+ returns a [disabler object](#disabler-object) instead of a not-found dependency.
If dependency_name is `''`, the dependency is always not found. So
with `required: false`, this always returns a dependency object for
@@ -518,7 +517,9 @@ The returned object also has methods that are documented in the
### disabler()
-Returns a [disabler object](#disabler-object). Added in 0.44.0.
+*(since 0.44.0)*
+
+Returns a [disabler object](#disabler-object).
### error()
@@ -534,10 +535,11 @@ Print the argument string and halts the build process.
environment_object environment(...)
```
-Returns an empty [environment variable
-object](#environment-object). Added in 0.35.0.
+*(since 0.35.0)*
+
+Returns an empty [environment variable object](#environment-object).
-Since *0.52.0* takes an optional dictionary as first argument. If
+*(since 0.52.0)* Takes an optional dictionary as first argument. If
provided, each key/value pair is added into the `environment_object`
as if `set()` method was called for each of them.
@@ -577,96 +579,99 @@ Executable supports the following keyword arguments. Note that just
like the positional arguments above, these keyword arguments can also
be passed to [shared and static libraries](#library).
-- `<languagename>_pch` precompiled header file to use for the given language
-- `<languagename>_args` compiler flags to use for the given language;
+- `<languagename>_pch`: precompiled header file to use for the given language
+- `<languagename>_args`: compiler flags to use for the given language;
eg: `cpp_args` for C++
-- `build_by_default` causes, when set to true, to have this target be
- built by default, that is, when invoking plain `ninja`, the default
- value is true for all built target types, since 0.38.0
-- `build_rpath` a string to add to target's rpath definition in the
+- `build_by_default` *(since 0.38.0)*: causes, when set to true, to
+ have this target be built by default. This means it will be built when
+ `meson compile` is called without any arguments. The default value is
+ `true` for all built target types.
+- `build_rpath`: a string to add to target's rpath definition in the
build dir, but which will be removed on install
-- `dependencies` one or more objects created with
+- `dependencies`: one or more objects created with
[`dependency`](#dependency) or [`find_library`](#compiler-object)
(for external deps) or [`declare_dependency`](#declare_dependency)
(for deps built by the project)
-- `extra_files` are not used for the build itself but are shown as
+- `extra_files`: not used for the build itself but are shown as
source files in IDEs that group files by targets (such as Visual
Studio)
-- `gui_app` when set to true flags this target as a GUI application on
- platforms where this makes a difference (e.g. Windows)
-- `link_args` flags to use during linking. You can use UNIX-style
+- `gui_app`: when set to true flags this target as a GUI application on
+ platforms where this makes a difference (e.g. Windows).
+- `link_args`: flags to use during linking. You can use UNIX-style
flags here for all platforms.
-- `link_depends` strings, files, or custom targets the link step
+- `link_depends`: strings, files, or custom targets the link step
depends on such as a symbol visibility map. The purpose is to
automatically trigger a re-link (but not a re-compile) of the target
when this file changes.
-- `link_language` since 0.51.0 makes the linker for this target
- be for the specified language. This is helpful for multi-language targets.
-- `link_whole` links all contents of the given static libraries
- whether they are used by not, equivalent to the
- `-Wl,--whole-archive` argument flag of GCC, available since 0.40.0.
- As of 0.41.0 if passed a list that list will be flattened. Starting
- from version 0.51.0 this argument also accepts outputs produced by
+- `link_language` *(since 0.51.0)* *(broken until 0.55.0)*: makes the linker for this
+ target be for the specified language. It is generally unnecessary to set
+ this, as meson will detect the right linker to use in most cases. There are
+ only two cases where this is needed. One, your main function in an
+ executable is not in the language meson picked, or second you want to force
+ a library to use only one ABI.
+- `link_whole` *(since 0.40.0)*: links all contents of the given static libraries
+  whether they are used or not, equivalent to the `-Wl,--whole-archive` argument flag of GCC.
+ *(since 0.41.0)* If passed a list that list will be flattened.
+ *(since 0.51.0)* This argument also accepts outputs produced by
custom targets. The user must ensure that the output is a library in
the correct format.
-- `link_with`, one or more shared or static libraries (built by this
- project) that this target should be linked with, If passed a list
- this list will be flattened as of 0.41.0. Starting with version
- 0.51.0, the arguments can also be custom targets. In this case Meson
- will assume that merely adding the output file in the linker command
+- `link_with`: one or more shared or static libraries (built by this
+ project) that this target should be linked with. *(since 0.41.0)* If passed a
+ list this list will be flattened. *(since 0.51.0)* The arguments can also be custom targets.
+ In this case Meson will assume that merely adding the output file in the linker command
line is sufficient to make linking work. If this is not sufficient,
then the build system writer must write all other steps manually.
-- `export_dynamic` when set to true causes the target's symbols to be
+- `export_dynamic` *(since 0.45.0)*: when set to true causes the target's symbols to be
dynamically exported, allowing modules built using the
[`shared_module`](#shared_module) function to refer to functions,
variables and other symbols defined in the executable itself. Implies
- the `implib` argument. Since 0.45.0
-- `implib` when set to true, an import library is generated for the
+ the `implib` argument.
+- `implib` *(since 0.42.0)*: when set to true, an import library is generated for the
executable (the name of the import library is based on *exe_name*).
Alternatively, when set to a string, that gives the base name for
the import library. The import library is used when the returned
build target object appears in `link_with:` elsewhere. Only has any
effect on platforms where that is meaningful (e.g. Windows). Implies
- the `export_dynamic` argument. Since 0.42.0
-- `implicit_include_directories` is a boolean telling whether Meson
+ the `export_dynamic` argument.
+- `implicit_include_directories` *(since 0.42.0)*: a boolean telling whether Meson
adds the current source and build directories to the include path,
- defaults to `true`, since 0.42.0
-- `include_directories` one or more objects created with the
- `include_directories` function, or, since 0.50.0, strings, which
+ defaults to `true`.
+- `include_directories`: one or more objects created with the
+ `include_directories` function, or *(since 0.50.0)* strings, which
will be transparently expanded to include directory objects
-- `install`, when set to true, this executable should be installed, defaults to `false`
-- `install_dir` override install directory for this file. The value is
+- `install`: when set to true, this executable should be installed, defaults to `false`
+- `install_dir`: override install directory for this file. The value is
relative to the `prefix` specified. F.ex, if you want to install
plugins into a subdir, you'd use something like this: `install_dir :
get_option('libdir') / 'projectname-1.0'`.
-- `install_mode` *(added 0.47.0)* specify the file mode in symbolic format
+- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format
and optionally the owner/uid and group/gid for the installed files.
-- `install_rpath` a string to set the target's rpath to after install
+- `install_rpath`: a string to set the target's rpath to after install
(but *not* before that). On Windows, this argument has no effect.
-- `objects` list of prebuilt object files (usually for third party
+- `objects`: list of prebuilt object files (usually for third party
products you don't have source to) that should be linked in this
target, **never** use this for object files that you build yourself.
-- `name_suffix` the string that will be used as the extension for the
+- `name_suffix`: the string that will be used as the extension for the
target by overriding the default. By default on Windows this is
`exe` and on other platforms it is omitted. Set this to `[]`, or omit
the keyword argument for the default behaviour.
-- `override_options` takes an array of strings in the same format as
+- `override_options` *(since 0.40.0)*: takes an array of strings in the same format as
`project`'s `default_options` overriding the values of these options
- for this target only, since 0.40.0.
-- `gnu_symbol_visibility` specifies how symbols should be exported, see
+ for this target only.
+- `gnu_symbol_visibility` *(since 0.48.0)*: specifies how symbols should be exported, see
e.g [the GCC Wiki](https://gcc.gnu.org/wiki/Visibility) for more
information. This value can either be an empty string or one of
`default`, `internal`, `hidden`, `protected` or `inlineshidden`, which
is the same as `hidden` but also includes things like C++ implicit
constructors as specified in the GCC manual. Ignored on compilers that
- do not support GNU visibility arguments. Available since 0.48.0.
-- `d_import_dirs` list of directories to look in for string imports used
+ do not support GNU visibility arguments.
+- `d_import_dirs`: list of directories to look in for string imports used
in the D programming language
-- `d_unittest`, when set to true, the D modules are compiled in debug mode
-- `d_module_versions` list of module version identifiers set when compiling D sources
-- `d_debug` list of module debug identifiers set when compiling D sources
-- `pie` *(added 0.49.0)* build a position-independent executable
-- `native`, is a boolean controlling whether the target is compiled for the
+- `d_unittest`: when set to true, the D modules are compiled in debug mode
+- `d_module_versions`: list of module version identifiers set when compiling D sources
+- `d_debug`: list of module debug identifiers set when compiling D sources
+- `pie` *(since 0.49.0)*: build a position-independent executable
+- `native`: is a boolean controlling whether the target is compiled for the
build or host machines. Defaults to false, building for the host machine.
The list of `sources`, `objects`, and `dependencies` is always
@@ -678,7 +683,7 @@ The returned object also has methods that are documented in the
### find_library()
-This function is deprecated and in the 0.31.0 release it was moved to
+*(since 0.31.0)* **(deprecated)** Use `find_library()` method of
[the compiler object](#compiler-object) as obtained from
`meson.get_compiler(lang)`.
@@ -692,12 +697,11 @@ This function is deprecated and in the 0.31.0 release it was moved to
to be searched for in `PATH`, or a script in the current source
directory.
-`program_name2` and later positional arguments are used as fallback
+*(since 0.37.0)* `program_name2` and later positional arguments are used as fallback
strings to search for. This is meant to be used for cases where the
program may have many alternative names, such as `foo` and
`foo.py`. The function will check for the arguments one by one and the
-first one that is found is returned. Meson versions earlier than
-0.37.0 only accept one argument.
+first one that is found is returned.
Keyword arguments are the following:
@@ -705,21 +709,21 @@ Keyword arguments are the following:
abort if no program can be found. If `required` is set to `false`,
Meson continue even if none of the programs can be found. You can
then use the `.found()` method on the [returned object](#external-program-object) to check
- whether it was found or not. Since *0.47.0* the value of a
+ whether it was found or not. *(since 0.47.0)* The value of a
[`feature`](Build-options.md#features) option can also be passed to the
`required` keyword argument.
-- `native` *(since 0.43)* defines how this executable should be searched. By default
+- `native` *(since 0.43.0)*: defines how this executable should be searched. By default
it is set to `false`, which causes Meson to first look for the
executable in the cross file (when cross building) and if it is not
defined there, then from the system. If set to `true`, the cross
file is ignored and the program is only searched from the system.
-- `disabler` if `true` and the program couldn't be found, return a
+- `disabler` *(since 0.49.0)*: if `true` and the program couldn't be found, return a
[disabler object](#disabler-object) instead of a not-found object.
- *Since 0.49.0*
+
-- `version` *(since 0.52.0)* Specifies the required version, see
+- `version` *(since 0.52.0)*: specifies the required version, see
[`dependency()`](#dependency) for argument format. The version of the program
is determined by running `program_name --version` command. If stdout is empty
it fallbacks to stderr. If the output contains more text than simply a version
@@ -727,7 +731,7 @@ Keyword arguments are the following:
If the output is more complicated than that, the version checking will have to
be done manually using [`run_command()`](#run_command).
-- `dirs` *(since 0.53.0)* Extra list of absolute paths where to look for program
+- `dirs` *(since 0.53.0)*: extra list of absolute paths where to look for program
names.
Meson will also autodetect scripts with a shebang line and run them
@@ -795,22 +799,22 @@ argument is the executable to use. It can either be a self-built
executable or one returned by find_program. Keyword arguments are the
following:
-- `arguments` a list of template strings that will be the command line
+- `arguments`: a list of template strings that will be the command line
arguments passed to the executable
-- `depends` is an array of build targets that must be built before this
+- `depends` *(since 0.51.0)*: is an array of build targets that must be built before this
generator can be run. This is used if you have a generator that calls
- a second executable that is built in this project. Available since 0.51.0
-- `depfile` is a template string pointing to a dependency file that a
+ a second executable that is built in this project.
+- `depfile`: is a template string pointing to a dependency file that a
generator can write listing all the additional files this target
depends on, for example a C compiler would list all the header files
it included, and a change in any one of these files triggers a
recompilation
-- `output` a template string (or list of template strings) defining
+- `output`: a template string (or list of template strings) defining
how an output file name is (or multiple output names are) generated
from a single source file name
-- `capture` when this argument is set to true, Meson captures `stdout`
- of the `executable` and writes it to the target file specified as
- `output`. Available since v0.43.0.
+- `capture` *(since 0.43.0)*: when this argument is set to true, Meson
+ captures `stdout` of the `executable` and writes it to the target file
+ specified as `output`.
The returned object also has methods that are documented in the
[object methods section](#generator-object) below.
@@ -973,13 +977,13 @@ except Visual Studio).
Installs files from the source tree that are listed as positional
arguments. The following keyword arguments are supported:
-- `install_dir` the absolute or relative path to the installation
+- `install_dir`: the absolute or relative path to the installation
directory. If this is a relative path, it is assumed to be relative
to the prefix.
- If omitted, the directory defaults to `{datadir}/{projectname}` *(added 0.45.0)*.
+ If omitted, the directory defaults to `{datadir}/{projectname}` *(since 0.45.0)*.
-- `install_mode` specify the file mode in symbolic format and
+- `install_mode`: specify the file mode in symbolic format and
optionally the owner/uid and group/gid for the installed files. For
example:
@@ -991,10 +995,10 @@ arguments. The following keyword arguments are supported:
To leave any of these three as the default, specify `false`.
-- `rename` if specified renames each source file into corresponding
+- `rename` *(since 0.46.0)*: if specified renames each source file into corresponding
file from `rename` list. Nested paths are allowed and they are
joined with `install_dir`. Length of `rename` list must be equal to
- the number of sources. *(added 0.46.0)*
+ the number of sources.
See [Installing](Installing.md) for more examples.
@@ -1031,10 +1035,11 @@ This will install `common.h` and `kola.h` into `/{prefix}/cust/myproj`:
install_headers('common.h', 'proj/kola.h', install_dir : 'cust', subdir : 'myproj')
```
-The `install_mode` argument can be used to specify the file mode in symbolic
-format and optionally the owner/uid and group/gid for the installed files.
-An example value could be `['rwxr-sr-x', 'root', 'root']`.
-*(Added 0.47.0)*.
+Accepts the following keywords:
+
+- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic
+ format and optionally the owner/uid and group/gid for the installed files.
+ An example value could be `['rwxr-sr-x', 'root', 'root']`.
### install_man()
@@ -1047,12 +1052,13 @@ man directory during the install step. This directory can be
overridden by specifying it with the `install_dir` keyword
argument.
-The `install_mode` argument can be used to specify the file mode in symbolic
-format and optionally the owner/uid and group/gid for the installed files.
-An example value could be `['rwxr-sr-x', 'root', 'root']`.
-*(Added 0.47.0)*.
+Accepts the following keywords:
+
+- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic
+ format and optionally the owner/uid and group/gid for the installed files.
+ An example value could be `['rwxr-sr-x', 'root', 'root']`.
-Since 0.49.0, [manpages are no longer compressed implicitly][install_man_49].
+*(since 0.49.0)* [manpages are no longer compressed implicitly][install_man_49].
[install_man_49]: https://mesonbuild.com/Release-notes-for-0-49-0.html#manpages-are-no-longer-compressed-implicitly
@@ -1073,11 +1079,10 @@ The following keyword arguments are supported:
- `exclude_directories`: a list of directory names that should not be installed.
Names are interpreted as paths relative to the `subdir_name` location.
- `install_dir`: the location to place the installed subdirectory.
-- `install_mode`: the file mode in symbolic format and optionally
- the owner/uid and group/gid for the installed files. *(Added 0.47.0)*
-- `strip_directory`: install directory contents. `strip_directory=false` by default.
+- `install_mode` *(since 0.47.0)*: the file mode in symbolic format and optionally
+ the owner/uid and group/gid for the installed files.
+- `strip_directory` *(since 0.45.0)*: install directory contents. `strip_directory=false` by default.
If `strip_directory=true` only the last component of the source path is used.
- Since 0.45.0
For a given directory `foo`:
```text
@@ -1122,7 +1127,9 @@ share/
bool is_disabler(var)
```
-Returns true if a variable is a disabler and false otherwise. Added in 0.52.0.
+*(since 0.52.0)*
+
+Returns true if a variable is a disabler and false otherwise.
### is_variable()
@@ -1149,6 +1156,8 @@ the jar with `java -jar file.jar`.
string join_paths(string1, string2, ...)
```
+*(since 0.36.0)*
+
Joins the given strings into a file system path segment. For example
`join_paths('foo', 'bar')` results in `foo/bar`. If any one of the
individual segments is an absolute path, all segments before it are
@@ -1157,9 +1166,7 @@ dropped. That means that `join_paths('foo', '/bar')` returns `/bar`.
**Warning** Don't use `join_paths()` for sources in [`library`](#library) and
[`executable`](#executable), you should use [`files`](#files) instead.
-*Added 0.36.0*
-
-Since 0.49.0 using the`/` operator on strings is equivalent to calling
+*(since 0.49.0)* Using the `/` operator on strings is equivalent to calling
`join_paths`.
```meson
@@ -1189,12 +1196,12 @@ library basis using the [dependency()](#dependency)) `static` keyword.
The keyword arguments for this are the same as for
[`executable`](#executable) with the following additions:
-- `name_prefix` the string that will be used as the prefix for the
+- `name_prefix`: the string that will be used as the prefix for the
target output filename by overriding the default (only used for
libraries). By default this is `lib` on all platforms and compilers,
except for MSVC shared libraries where it is omitted to follow
convention, and Cygwin shared libraries where it is `cyg`.
-- `name_suffix` the string that will be used as the suffix for the
+- `name_suffix`: the string that will be used as the suffix for the
target output filename by overriding the default (see also:
[executable()](#executable)). By default, for shared libraries this
is `dylib` on macOS, `dll` on Windows, and `so` everywhere else.
@@ -1202,7 +1209,7 @@ The keyword arguments for this are the same as for
static libraries use the `lib` suffix, but we use `a` to avoid a
potential name clash with shared libraries which also generate
import libraries with a `lib` suffix.
-- `rust_crate_type` specifies the crate type for Rust
+- `rust_crate_type`: specifies the crate type for Rust
libraries. Defaults to `dylib` for shared libraries and `rlib` for
static libraries.
@@ -1220,7 +1227,7 @@ them for the default behaviour for each platform.
This function prints its argument to stdout.
-**Since 0.54.0** Can take more more than one argument that will be separated by
+*(since 0.54.0)* Can take more than one argument that will be separated by
space.
### warning()
@@ -1229,11 +1236,11 @@ space.
void warning(text)
```
-This function prints its argument to stdout prefixed with WARNING:.
+*(since 0.44.0)*
-*Added 0.44.0*
+This function prints its argument to stdout prefixed with WARNING:.
-**Since 0.54.0** Can take more more than one argument that will be separated by
+*(since 0.54.0)* Can take more than one argument that will be separated by
space.
### summary()
@@ -1243,6 +1250,8 @@ space.
void summary(dictionary)
```
+*(since 0.53.0)*
+
This function is used to summarize build configuration at the end of the build
process. This function provides a way for projects (and subprojects) to report
this information in a clear way.
@@ -1258,10 +1267,10 @@ pair doesn't appear twice. All sections will be collected and printed at
the end of the configuration in the same order as they have been called.
Keyword arguments:
-- `section` title to group a set of key/value pairs.
-- `bool_yn` if set to true, all boolean values will be replaced by green YES
+- `section`: title to group a set of key/value pairs.
+- `bool_yn`: if set to true, all boolean values will be replaced by green YES
or red NO.
-- `list_sep` *Since 0.54.0* string used to separate list values (e.g. `', '`).
+- `list_sep` *(since 0.54.0)*: string used to separate list values (e.g. `', '`).
Example:
```meson
@@ -1296,8 +1305,6 @@ My Project 1.0
True
```
-*Added 0.53.0*
-
### project()
``` meson
@@ -1308,7 +1315,7 @@ The first argument to this function must be a string defining the name
of this project. It is followed by programming languages that the
project uses. Supported values for languages are `c`, `cpp` (for
`C++`), `cuda`, `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`),
-`vala` and `rust`. Since version `0.40.0` the list of languages
+`vala` and `rust`. *(since 0.40.0)* The list of languages
is optional.
The project name can be any string you want, it's not used for
@@ -1320,40 +1327,40 @@ Library_.
Project supports the following keyword arguments.
- - `default_options` takes an array of strings. The strings are in the
- form `key=value` and have the same format as options to
- `meson configure`. For example to set the default project type you would
- set this: `default_options : ['buildtype=debugoptimized']`. Note
- that these settings are only used when running Meson for the first
- time. Global options such as `buildtype` can only be specified in
- the master project, settings in subprojects are ignored. Project
- specific options are used normally even in subprojects.
-
-
- - `license` takes a string or array of strings describing the
- license(s) the code is under. Usually this would be something like
- `license : 'GPL2+'`, but if the code has multiple licenses you can
- specify them as an array like this: `license : ['proprietary',
- 'GPL3']`. Note that the text is informal and is only written to
- the dependency manifest. Meson does not do any license validation,
- you are responsible for verifying that you abide by all licensing
- terms. You can access the value in your Meson build files with
- `meson.project_license()`.
-
- - `meson_version` takes a string describing which Meson version the
- project requires. Usually something like `>=0.28.0`.
-
- - `subproject_dir` specifies the top level directory name that holds
- Meson subprojects. This is only meant as a compatibility option
- for existing code bases that house their embedded source code in a
- custom directory. All new projects should not set this but instead
- use the default value. It should be noted that this keyword
- argument is ignored inside subprojects. There can be only one
- subproject dir and it is set in the top level Meson file.
-
- - `version`, which is a free form string describing the version of
- this project. You can access the value in your Meson build files
- with `meson.project_version()`.
+- `default_options`: takes an array of strings. The strings are in the
+ form `key=value` and have the same format as options to
+ `meson configure`. For example to set the default project type you would
+ set this: `default_options : ['buildtype=debugoptimized']`. Note
+ that these settings are only used when running Meson for the first
+ time. Global options such as `buildtype` can only be specified in
+ the master project, settings in subprojects are ignored. Project
+ specific options are used normally even in subprojects.
+
+
+- `license`: takes a string or array of strings describing the
+ license(s) the code is under. Usually this would be something like
+ `license : 'GPL2+'`, but if the code has multiple licenses you can
+ specify them as an array like this: `license : ['proprietary',
+ 'GPL3']`. Note that the text is informal and is only written to
+ the dependency manifest. Meson does not do any license validation,
+ you are responsible for verifying that you abide by all licensing
+ terms. You can access the value in your Meson build files with
+ `meson.project_license()`.
+
+- `meson_version`: takes a string describing which Meson version the
+ project requires. Usually something like `>=0.28.0`.
+
+- `subproject_dir`: specifies the top level directory name that holds
+ Meson subprojects. This is only meant as a compatibility option
+ for existing code bases that house their embedded source code in a
+ custom directory. All new projects should not set this but instead
+ use the default value. It should be noted that this keyword
+ argument is ignored inside subprojects. There can be only one
+ subproject dir and it is set in the top level Meson file.
+
+- `version`: a free form string describing the version of
+ this project. You can access the value in your Meson build files
+ with `meson.project_version()`.
### run_command()
@@ -1375,15 +1382,13 @@ respectively.
This function supports the following keyword arguments:
- - `check` takes a boolean. If `true`, the exit status code of the command will
+ - `check` *(since 0.47.0)*: takes a boolean. If `true`, the exit status code of the command will
be checked, and the configuration will fail if it is non-zero. The default is
`false`.
- Since 0.47.0
- - `env` environment variables to set, such as `['NAME1=value1',
+ - `env` *(since 0.50.0)*: environment variables to set, such as `['NAME1=value1',
'NAME2=value2']`, or an [`environment()`
object](#environment-object) which allows more sophisticated
- environment juggling. *Since 0.52.0* a dictionary is also accepted.
- Since 0.50.0
+ environment juggling. *(since 0.52.0)* A dictionary is also accepted.
See also [External commands](External-commands.md).
@@ -1395,8 +1400,8 @@ runtarget run_target(target_name, ...)
This function creates a new top-level target that runs a specified
command with the specified arguments. Like all top-level targets, this
-integrates with the selected backend. For instance, with Ninja you can
-run it as `ninja target_name`. Note that a run target produces no
+integrates with the selected backend. For instance, you can
+run it as `meson compile target_name`. Note that a run target produces no
output as far as Meson is concerned. It is only meant for tasks such
as running a code formatter or flashing an external device's firmware
with a built file.
@@ -1424,8 +1429,7 @@ and subdirectory the target was defined in, respectively.
Assigns a value to the given variable name. Calling
`set_variable('foo', bar)` is equivalent to `foo = bar`.
-**Note:** Prior to v0.46.1, the `value` parameter could not be an
-array type, due to flattening of the function parameters.
+*(since 0.46.1)* The `value` parameter can be an array type.
### shared_library()
@@ -1437,7 +1441,7 @@ Builds a shared library with the given sources. Positional and keyword
arguments are the same as for [`library`](#library) with the following
extra keyword arguments.
-- `soversion` a string specifying the soversion of this shared
+- `soversion`: a string specifying the soversion of this shared
library, such as `0`. On Linux and Windows this is used to set the
soversion (or equivalent) in the filename. For example, if
`soversion` is `4`, a Windows DLL will be called `foo-4.dll` and one
@@ -1445,19 +1449,19 @@ extra keyword arguments.
`libfoo.so.4`. If this is not specified, the first part of `version`
is used instead (see below). For example, if `version` is `3.6.0` and
`soversion` is not defined, it is set to `3`.
-- `version` a string specifying the version of this shared library,
+- `version`: a string specifying the version of this shared library,
such as `1.1.0`. On Linux and OS X, this is used to set the shared
library version in the filename, such as `libfoo.so.1.1.0` and
`libfoo.1.1.0.dylib`. If this is not specified, `soversion` is used
instead (see above).
-- `darwin_versions` *(added 0.48)* an integer, string, or a list of
+- `darwin_versions` *(since 0.48.0)*: an integer, string, or a list of
versions to use for setting dylib `compatibility version` and
`current version` on macOS. If a list is specified, it must be
either zero, one, or two elements. If only one element is specified
or if it's not a list, the specified value will be used for setting
both compatibility version and current version. If unspecified, the
`soversion` will be used as per the aforementioned rules.
-- `vs_module_defs` a string, a File object, or Custom Target for a
+- `vs_module_defs`: a string, a File object, or Custom Target for a
Microsoft module definition file for controlling symbol exports,
etc., on platforms where that is possible (e.g. Windows).
@@ -1467,6 +1471,8 @@ extra keyword arguments.
buildtarget shared_module(module_name, list_of_sources, ...)
```
+*(since 0.37.0)*
+
Builds a shared module with the given sources. Positional and keyword
arguments are the same as for [`library`](#library).
@@ -1481,7 +1487,7 @@ you will need to set the `export_dynamic` argument of the executable to
Supports the following extra keyword arguments:
-- `vs_module_defs`, *(Added 0.52.0)*, a string, a File object, or
+- `vs_module_defs` *(since 0.52.0)*: a string, a File object, or
Custom Target for a Microsoft module definition file for controlling
symbol exports, etc., on platforms where that is possible
(e.g. Windows).
@@ -1491,8 +1497,6 @@ platforms, notably OSX. Consider using a
[`shared_library`](#shared_library) instead, if you need to both
`dlopen()` and link with a library.
-*Added 0.37.0*
-
### static_library()
``` meson
@@ -1503,7 +1507,7 @@ Builds a static library with the given sources. Positional and keyword
arguments are otherwise the same as for [`library`](#library), but it
has one argument the others don't have:
- - `pic`, *(Added 0.36.0)* builds the library as positional
+ - `pic` *(since 0.36.0)*: builds the library as position
independent code (so it can be linked into a shared library). This
option has no effect on Windows and OS X since it doesn't make
sense on Windows and PIC cannot be disabled on OS X.
@@ -1526,7 +1530,7 @@ and must only be executed once.
This function has one keyword argument.
- - `if_found` takes one or several dependency objects and will only
+ - `if_found`: takes one or several dependency objects and will only
recurse in the subdir if they all return `true` when queried with
`.found()`
@@ -1571,16 +1575,15 @@ example a subproject called `foo` must be located in
`${MESON_SOURCE_ROOT}/subprojects/foo`. Supports the following keyword
arguments:
- - `default_options` *(added 0.37.0)* an array of default option values
+ - `default_options` *(since 0.37.0)*: an array of default option values
that override those set in the subproject's `meson_options.txt`
(like `default_options` in `project`, they only have effect when
Meson is run for the first time, and command line arguments override
- any default options in build files). *Since 0.54.0* `default_library`
+   any default options in build files). *(since 0.54.0)* The `default_library`
built-in option can also be overridden.
- - `version` keyword argument that works just like the one in
- `dependency`. It specifies what version the subproject should be,
- as an example `>=1.0.1`
- - `required` *(added 0.48.0)* By default, `required` is `true` and
+ - `version`: works just the same as in `dependency`.
+ It specifies what version the subproject should be, as an example `>=1.0.1`
+ - `required` *(since 0.48.0)*: By default, `required` is `true` and
Meson will abort if the subproject could not be setup. You can set
this to `false` and then use the `.found()` method on the [returned
object](#subproject-object). You may also pass the value of a
@@ -1609,6 +1612,13 @@ object](#build-target-object) returned by
object](#external-program-object) returned by
[`find_program()`](#find_program).
+*(since 0.55.0)* When cross compiling, if an exe_wrapper is needed and defined
+the environment variable `MESON_EXE_WRAPPER` will be set to the string value
+of that wrapper (implementation detail: using `mesonlib.join_args`). Test
+scripts may use this to run cross built binaries. If your test needs
+`MESON_EXE_WRAPPER` in cross build situations it is your responsibility to
+return code 77 to tell the harness to report "skip".
+
By default, environment variable
[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html)
is automatically set by `meson test` to a random value between 1..255.
@@ -1629,50 +1639,52 @@ test(..., env: nomalloc, ...)
#### test() Keyword arguments
-- `args` arguments to pass to the executable
+- `args`: arguments to pass to the executable
-- `env` environment variables to set, such as `['NAME1=value1',
+- `env`: environment variables to set, such as `['NAME1=value1',
'NAME2=value2']`, or an [`environment()`
object](#environment-object) which allows more sophisticated
- environment juggling. *Since 0.52.0* a dictionary is also accepted.
+ environment juggling. *(since 0.52.0)* A dictionary is also accepted.
-- `is_parallel` when false, specifies that no other test must be
+- `is_parallel`: when false, specifies that no other test must be
running at the same time as this test
-- `should_fail` when true the test is considered passed if the
+- `should_fail`: when true the test is considered passed if the
executable returns a non-zero return value (i.e. reports an error)
-- `suite` `'label'` (or list of labels `['label1', 'label2']`)
+- `suite`: `'label'` (or list of labels `['label1', 'label2']`)
attached to this test. The suite name is qualified by a (sub)project
name resulting in `(sub)project_name:label`. In the case of a list
of strings, the suite names will be `(sub)project_name:label1`,
`(sub)project_name:label2`, etc.
-- `timeout` the amount of seconds the test is allowed to run, a test
+- `timeout`: the number of seconds the test is allowed to run, a test
that exceeds its time limit is always considered failed, defaults to
30 seconds
-- `workdir` absolute path that will be used as the working directory
+- `workdir`: absolute path that will be used as the working directory
for the test
-- `depends` specifies that this test depends on the specified
+- `depends` *(since 0.46.0)*: specifies that this test depends on the specified
target(s), even though it does not take any of them as a command
line argument. This is meant for cases where test finds those
targets internally, e.g. plugins or globbing. Those targets are built
before test is executed even if they have `build_by_default : false`.
- Since 0.46.0
-- `protocol` specifies how the test results are parsed and can be one
- of `exitcode` (the executable's exit code is used by the test harness
- to record the outcome of the test) or `tap` ([Test Anything
- Protocol](https://www.testanything.org/)). For more on the Meson test
- harness protocol read [Unit Tests](Unit-tests.md). Since 0.50.0
+- `protocol` *(since 0.50.0)*: specifies how the test results are parsed and can
+ be one of `exitcode`, `tap`, or `gtest`. For more information about test
+ harness protocol read [Unit Tests](Unit-tests.md). The following values are
+ accepted:
+ - `exitcode`: the executable's exit code is used by the test harness
+    to record the outcome of the test.
+ - `tap`: [Test Anything Protocol](https://www.testanything.org/).
+ - `gtest` *(since 0.55.0)*: for Google Tests.
-- `priority` specifies the priority of a test. Tests with a
+- `priority` *(since 0.52.0)*: specifies the priority of a test. Tests with a
higher priority are *started* before tests with a lower priority.
The starting order of tests with identical priorities is
implementation-defined. The default priority is 0, negative numbers are
- permitted. Since 0.52.0
+ permitted.
Defined tests can be run in a backend-agnostic way by calling
`meson test` inside the build dir, or by using backend-specific
@@ -1688,15 +1700,15 @@ This command detects revision control commit information at build time
and places it in the specified output file. This file is guaranteed to
be up to date on every build. Keywords are similar to `custom_target`.
-- `command` string list with the command to execute, see
+- `command`: string list with the command to execute, see
[`custom_target`](#custom_target) for details on how this command
must be specified
-- `fallback` version number to use when no revision control
+- `fallback`: version number to use when no revision control
information is present, such as when building from a release tarball
(defaults to `meson.project_version()`)
-- `input` file to modify (e.g. `version.c.in`) (required)
-- `output` file to write the results to (e.g. `version.c`) (required)
-- `replace_string` string in the input file to substitute with the
+- `input`: file to modify (e.g. `version.c.in`) (required)
+- `output`: file to write the results to (e.g. `version.c`) (required)
+- `replace_string`: string in the input file to substitute with the
commit information (defaults to `@VCS_TAG@`)
Meson will read the contents of `input`, substitute the
@@ -1724,26 +1736,30 @@ The `meson` object allows you to introspect various properties of the
system. This object is always mapped in the `meson` variable. It has
the following methods.
-- `add_dist_script(script_name, arg1, arg, ...)` causes the script
+- `add_dist_script(script_name, arg1, arg2, ...)` *(since 0.48.0)*: causes the script
given as argument to run during `dist` operation after the
distribution source has been generated but before it is
archived. Note that this runs the script file that is in the
_staging_ directory, not the one in the source directory. If the
script file can not be found in the staging directory, it is a hard
error. This command can only invoked from the main project, calling
- it from a subproject is a hard error. Available since 0.48.0. Before
- 0.49.0, the function only accepted a single argument. Since 0.54.0
- the `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` environment variables
- are set when dist scripts are run.
+ it from a subproject is a hard error. *(since 0.49.0)* Accepts multiple arguments
+  for the script. *(since 0.54.0)* The `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT`
+ environment variables are set when dist scripts are run.
+  *(since 0.55.0)* The output of `configure_file`, `files`, and `find_program`
+  as well as strings are accepted as arguments.
-- `add_install_script(script_name, arg1, arg2, ...)` causes the script
+- `add_install_script(script_name, arg1, arg2, ...)`: causes the script
given as an argument to be run during the install step, this script
will have the environment variables `MESON_SOURCE_ROOT`,
`MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`,
`MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set.
All positional arguments are passed as parameters.
+  *(since 0.55.0)* The output of `configure_file`, `files`, `find_program`,
+  `custom_target`, indexes of `custom_target`, `executable`, `library`, and
+  other built targets as well as strings are accepted as arguments.
- *(added 0.54)* If `meson install` is called with the `--quiet` option, the
+ *(since 0.54.0)* If `meson install` is called with the `--quiet` option, the
environment variable `MESON_INSTALL_QUIET` will be set.
Meson uses the `DESTDIR` environment variable as set by the
@@ -1768,21 +1784,23 @@ the following methods.
shell would. If your script uses Python, `shlex.split()` is the
easiest correct way to do this.
-- `add_postconf_script(script_name, arg1, arg2, ...)` will run the
+- `add_postconf_script(script_name, arg1, arg2, ...)`: runs the
executable given as an argument after all project files have been
generated. This script will have the environment variables
`MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` set.
+  *(since 0.55.0)* The output of `configure_file`, `files`, and `find_program`
+  as well as strings are accepted as arguments.
-- `backend()` *(added 0.37.0)* returns a string representing the
+- `backend()` *(since 0.37.0)*: returns a string representing the
current backend: `ninja`, `vs2010`, `vs2015`, `vs2017`, `vs2019`,
or `xcode`.
-- `build_root()` returns a string with the absolute path to the build
+- `build_root()`: returns a string with the absolute path to the build
root directory. Note: this function will return the build root of
the parent project if called from a subproject, which is usually
not what you want. Try using `current_build_dir()`.
-- `source_root()` returns a string with the absolute path to the
+- `source_root()`: returns a string with the absolute path to the
source root directory. Note: you should use the `files()` function
to refer to files in the root source directory instead of
constructing paths manually with `meson.source_root()`. This
@@ -1790,17 +1808,17 @@ the following methods.
from a subproject, which is usually not what you want. Try using
`current_source_dir()`.
-- `current_build_dir()` returns a string with the absolute path to the
+- `current_build_dir()`: returns a string with the absolute path to the
current build directory.
-- `current_source_dir()` returns a string to the current source
+- `current_source_dir()`: returns a string to the current source
directory. Note: **you do not need to use this function** when
passing files from the current source directory to a function since
that is the default. Also, you can use the `files()` function to
refer to files in the current or any other source directory instead
of constructing paths manually with `meson.current_source_dir()`.
-- `get_compiler(language)` returns [an object describing a
+- `get_compiler(language)`: returns [an object describing a
compiler](#compiler-object), takes one positional argument which is
the language to use. It also accepts one keyword argument, `native`
which when set to true makes Meson return the compiler for the build
@@ -1809,49 +1827,52 @@ the following methods.
returns the "cross" compiler if we're currently cross-compiling and
the "native" compiler if we're not.
-- `get_cross_property(propname, fallback_value)`
- *Consider get_external_property() instead*. Returns the given
+- `get_cross_property(propname, fallback_value)`:
+ *Consider `get_external_property()` instead*. Returns the given
property from a cross file, the optional fallback_value is returned
if not cross compiling or the given property is not found.
- `get_external_property(propname, fallback_value, native: true/false)`
- *(added 0.54.0)* returns the given property from a native or cross file.
+ *(since 0.54.0)*: returns the given property from a native or cross file.
The optional fallback_value is returned if the given property is not found.
The optional `native: true` forces retrieving a variable from the
native file, even when cross-compiling.
If `native: false` or not specified, variable is retrieved from the
cross-file if cross-compiling, and from the native-file when not cross-compiling.
-- `has_exe_wrapper()` returns true when doing a cross build if there
- is a wrapper command that can be used to execute cross built
- binaries (for example when cross compiling from Linux to Windows,
- one can use `wine` as the wrapper).
+- `can_run_host_binaries()` *(since 0.55.0)*: returns true if the build machine can run
+ binaries compiled for the host. This returns true unless you are
+ cross compiling, need a helper to run host binaries, and don't have one.
+ For example when cross compiling from Linux to Windows, one can use `wine`
+ as the helper.
+
+- `has_exe_wrapper()`: *(since 0.55.0)* **(deprecated)**. Use `can_run_host_binaries` instead.
-- `install_dependency_manifest(output_name)` installs a manifest file
+- `install_dependency_manifest(output_name)`: installs a manifest file
containing a list of all subprojects, their versions and license
files to the file name given as the argument.
-- `is_cross_build()` returns `true` if the current build is a [cross
+- `is_cross_build()`: returns `true` if the current build is a [cross
build](Cross-compilation.md) and `false` otherwise.
-- `is_subproject()` returns `true` if the current project is being
+- `is_subproject()`: returns `true` if the current project is being
built as a subproject of some other project and `false` otherwise.
-- `is_unity()` returns `true` when doing a [unity
+- `is_unity()`: returns `true` when doing a [unity
build](Unity-builds.md) (multiple sources are combined before
compilation to reduce build time) and `false` otherwise.
-- `override_find_program(progname, program)` [*(Added
- 0.46.0)*](Release-notes-for-0.46.0.md#can-override-find_program)
+- `override_find_program(progname, program)` *(since 0.46.0)*:
specifies that whenever `find_program` is used to find a program
named `progname`, Meson should not look it up on the system but
instead return `program`, which may either be the result of
- `find_program`, `configure_file` or `executable`.
+ `find_program`, `configure_file` or `executable`. *(since 0.55.0)* If a version
+ check is passed to `find_program` for a program that has been overridden with
+ an executable, the current project version is used.
If `program` is an `executable`, it cannot be used during configure.
-- `override_dependency(name, dep_object)` [*(Added
- 0.54.0)*](Release-notes-for-0.54.0.md#override-dependency)
+- `override_dependency(name, dep_object)` *(since 0.54.0)*:
specifies that whenever `dependency(name, ...)` is used, Meson should not
look it up on the system but instead return `dep_object`, which may either be
the result of `dependency()` or `declare_dependency()`. It takes optional
@@ -1859,16 +1880,16 @@ the following methods.
project to retrieve the dependency without having to know the dependency
variable name: `dependency(name, fallback : subproject_name)`.
-- `project_version()` returns the version string specified in
+- `project_version()`: returns the version string specified in
`project` function call.
-- `project_license()` returns the array of licenses specified in
+- `project_license()`: returns the array of licenses specified in
`project` function call.
-- `project_name()` returns the project name specified in the `project`
+- `project_name()`: returns the project name specified in the `project`
function call.
-- `version()` return a string with the version of Meson.
+- `version()`: returns a string with the version of Meson.
### `build_machine` object
@@ -1877,19 +1898,19 @@ doing the actual compilation. See
[Cross-compilation](Cross-compilation.md). It has the following
methods:
-- `cpu_family()` returns the CPU family name. [This
+- `cpu_family()`: returns the CPU family name. [This
table](Reference-tables.md#cpu-families) contains all known CPU
families. These are guaranteed to continue working.
-- `cpu()` returns a more specific CPU name, such as `i686`, `amd64`,
+- `cpu()`: returns a more specific CPU name, such as `i686`, `amd64`,
etc.
-- `system()` returns the operating system name. [This
+- `system()`: returns the operating system name. [This
table](Reference-tables.md#operating-system-names) Lists all of
the currently known Operating System names, these are guaranteed to
continue working.
-- `endian()` returns `big` on big-endian systems and `little` on
+- `endian()`: returns `big` on big-endian systems and `little` on
little-endian systems.
Currently, these values are populated using
@@ -1937,58 +1958,61 @@ the cross-info file, `host_machine` values are returned instead.
All [strings](Syntax.md#strings) have the following methods. Strings
are immutable, all operations return their results as a new string.
-- `contains(string)` returns true if string contains the string
- specified as the argument
+- `contains(string)`: returns true if string contains the string
+ specified as the argument.
-- `endswith(string)` returns true if string ends with the string
- specified as the argument
+- `endswith(string)`: returns true if string ends with the string
+ specified as the argument.
-- `format()` formats text, see the [Syntax
- manual](Syntax.md#string-formatting) for usage info
+- `format()`: formats text, see the [Syntax
+ manual](Syntax.md#string-formatting) for usage info.
-- `join(list_of_strings)` is the opposite of split, for example
- `'.'.join(['a', 'b', 'c']` yields `'a.b.c'`
+- `join(list_of_strings)`: the opposite of split, for example
+  `'.'.join(['a', 'b', 'c'])` yields `'a.b.c'`.
-- `split(split_character)` splits the string at the specified
+- `split(split_character)`: splits the string at the specified
character (or whitespace if not set) and returns the parts in an
- array
+ array.
-- `startswith(string)` returns true if string starts with the string
+- `startswith(string)`: returns true if string starts with the string
specified as the argument
-- `strip()` removes whitespace at the beginning and end of the string
- *(added 0.43.0)* optionally can take one positional string argument,
- and all characters in that string will be stripped
+- `substring(start,end)` *(since 0.56.0)*: returns a substring specified from start to end.
+ Both `start` and `end` arguments are optional, so, for example, `'foobar'.substring()` will return `'foobar'`.
-- `to_int` returns the string converted to an integer (error if string
- is not a number)
+- `strip()`: removes whitespace at the beginning and end of the string.
+ *(since 0.43.0)* Optionally can take one positional string argument,
+ and all characters in that string will be stripped.
-- `to_lower()` creates a lower case version of the string
+- `to_int()`: returns the string converted to an integer (error if string
+ is not a number).
-- `to_upper()` creates an upper case version of the string
+- `to_lower()`: creates a lower case version of the string.
-- `underscorify()` creates a string where every non-alphabetical
- non-number character is replaced with `_`
+- `to_upper()`: creates an upper case version of the string.
-- `version_compare(comparison_string)` does semantic version
+- `underscorify()`: creates a string where every non-alphabetical
+ non-number character is replaced with `_`.
+
+- `version_compare(comparison_string)`: does semantic version
comparison, if `x = '1.2.3'` then `x.version_compare('>1.0.0')`
- returns `true`
+ returns `true`.
### `Number` object
[Numbers](Syntax.md#numbers) support these methods:
- - `is_even()` returns true if the number is even
- - `is_odd()` returns true if the number is odd
- - `to_string()` returns the value of the number as a string.
+- `is_even()`: returns true if the number is even
+- `is_odd()`: returns true if the number is odd
+- `to_string()`: returns the value of the number as a string.
### `boolean` object
A [boolean](Syntax.md#booleans) object has two simple methods:
-- `to_int()` as above, but returns either `1` or `0`
+- `to_int()`: returns either `1` or `0`.
-- `to_string()` returns the string `'true'` if the boolean is true or
+- `to_string()`: returns the string `'true'` if the boolean is true or
`'false'` otherwise. You can also pass it two strings as positional
arguments to specify what to return for true/false. For instance,
`bool.to_string('yes', 'no')` will return `yes` if the boolean is
@@ -1998,27 +2022,29 @@ A [boolean](Syntax.md#booleans) object has two simple methods:
The following methods are defined for all [arrays](Syntax.md#arrays):
-- `contains(item)`, returns `true` if the array contains the object
+- `contains(item)`: returns `true` if the array contains the object
given as argument, `false` otherwise
-- `get(index, fallback)`, returns the object at the given index,
+- `get(index, fallback)`: returns the object at the given index,
negative indices count from the back of the array, indexing out of
- bounds returns the `fallback` value *(added 0.38.0)* or, if it is
+ bounds returns the `fallback` value *(since 0.38.0)* or, if it is
not specified, causes a fatal error
-- `length()`, the size of the array
+- `length()`: the size of the array
You can also iterate over arrays with the [`foreach`
statement](Syntax.md#foreach-statements).
### `dictionary` object
+*(since 0.47.0)*
+
The following methods are defined for all [dictionaries](Syntax.md#dictionaries):
-- `has_key(key)` returns `true` if the dictionary contains the key
+- `has_key(key)`: returns `true` if the dictionary contains the key
given as argument, `false` otherwise
-- `get(key, fallback)`, returns the value for the key given as first
+- `get(key, fallback)`: returns the value for the key given as first
argument if it is present in the dictionary, or the optional
fallback value given as the second argument. If a single argument
was given and the key was not found, causes a fatal error
@@ -2026,9 +2052,7 @@ The following methods are defined for all [dictionaries](Syntax.md#dictionaries)
You can also iterate over dictionaries with the [`foreach`
statement](Syntax.md#foreach-statements).
-Dictionaries are available since 0.47.0.
-
-Since 0.48.0 dictionaries can be added (e.g. `d1 = d2 + d3` and `d1 += d2`).
+*(since 0.48.0)* Dictionaries can be added (e.g. `d1 = d2 + d3` and `d1 += d2`).
Values from the second dictionary overrides values from the first.
## Returned objects
@@ -2042,204 +2066,202 @@ This object is returned by
for a given language and allows you to query its properties. It has
the following methods:
-- `alignment(typename)` returns the alignment of the type specified in
+- `alignment(typename)`: returns the alignment of the type specified in
the positional argument, you can specify external dependencies to
use with `dependencies` keyword argument.
-- `cmd_array()` returns an array containing the command arguments for
+- `cmd_array()`: returns an array containing the command arguments for
the current compiler.
-- `compiles(code)` returns true if the code fragment given in the
+- `compiles(code)`: returns true if the code fragment given in the
positional argument compiles, you can specify external dependencies
to use with `dependencies` keyword argument, `code` can be either a
string containing source code or a `file` object pointing to the
source code.
-- `compute_int(expr, ...')` computes the value of the given expression
+- `compute_int(expr, ...')`: computes the value of the given expression
(as an example `1 + 2`). When cross compiling this is evaluated with
an iterative algorithm, you can specify keyword arguments `low`
(defaults to -1024), `high` (defaults to 1024) and `guess` to
specify max and min values for the search and the value to try
first.
-- `find_library(lib_name, ...)` tries to find the library specified in
+- `find_library(lib_name, ...)`: tries to find the library specified in
the positional argument. The [result
object](#external-library-object) can be used just like the return
value of `dependency`. If the keyword argument `required` is false,
Meson will proceed even if the library is not found. By default the
library is searched for in the system library directory
(e.g. /usr/lib). This can be overridden with the `dirs` keyword
- argument, which can be either a string or a list of strings. Since
- *0.47.0* the value of a [`feature`](Build-options.md#features)
+ argument, which can be either a string or a list of strings.
+ *(since 0.47.0)* The value of a [`feature`](Build-options.md#features)
option can also be passed to the `required` keyword argument.
- *Since 0.49.0* if the keyword argument `disabler` is `true` and the
+ *(since 0.49.0)* If the keyword argument `disabler` is `true` and the
dependency couldn't be found, return a [disabler object](#disabler-object)
- instead of a not-found dependency. *Since 0.50.0* the `has_headers` keyword
+ instead of a not-found dependency. *(since 0.50.0)* The `has_headers` keyword
argument can be a list of header files that must be found as well, using
`has_header()` method. All keyword arguments prefixed with `header_` will be
- passed down to `has_header()` method with the prefix removed. *Since 0.51.0*
- the `static` keyword (boolean) can be set to `true` to limit the search to
+ passed down to `has_header()` method with the prefix removed. *(since 0.51.0)*
+ The `static` keyword (boolean) can be set to `true` to limit the search to
static libraries and `false` for dynamic/shared.
-- `first_supported_argument(list_of_strings)`, given a list of
+- `first_supported_argument(list_of_strings)`: given a list of
strings, returns the first argument that passes the `has_argument`
test or an empty array if none pass.
-- `first_supported_link_argument(list_of_strings)` *(added 0.46.0)*,
+- `first_supported_link_argument(list_of_strings)` *(since 0.46.0)*:
given a list of strings, returns the first argument that passes the
`has_link_argument` test or an empty array if none pass.
-- `get_define(definename)` returns the given preprocessor symbol's
+- `get_define(definename)`: returns the given preprocessor symbol's
value as a string or empty string if it is not defined.
- Starting with 0.47.0, this method will concatenate string literals as
+ *(since 0.47.0)* This method will concatenate string literals as
the compiler would. E.g. `"a" "b"` will become `"ab"`.
-- `get_id()` returns a string identifying the compiler. For example,
+- `get_id()`: returns a string identifying the compiler. For example,
`gcc`, `msvc`, [and more](Reference-tables.md#compiler-ids).
-- `get_argument_syntax()` *(new in 0.49.0)* returns a string identifying the type
+- `get_argument_syntax()` *(since 0.49.0)*: returns a string identifying the type
of arguments the compiler takes. Can be one of `gcc`, `msvc`, or an undefined
string value. This method is useful for identifying compilers that are not
gcc or msvc, but use the same argument syntax as one of those two compilers
such as clang or icc, especially when they use different syntax on different
operating systems.
-- `get_linker_id()` *(added 0.53.0)* returns a string identifying the linker.
+- `get_linker_id()` *(since 0.53.0)*: returns a string identifying the linker.
For example, `ld.bfd`, `link`, [and more](Reference-tables.md#linker-ids).
-- `get_supported_arguments(list_of_string)` *(added 0.43.0)* returns
+- `get_supported_arguments(list_of_string)` *(since 0.43.0)*: returns
an array containing only the arguments supported by the compiler,
as if `has_argument` were called on them individually.
-- `get_supported_link_arguments(list_of_string)` *(added 0.46.0)* returns
+- `get_supported_link_arguments(list_of_string)` *(since 0.46.0)*: returns
an array containing only the arguments supported by the linker,
as if `has_link_argument` were called on them individually.
-- `has_argument(argument_name)` returns true if the compiler accepts
+- `has_argument(argument_name)`: returns true if the compiler accepts
the specified command line argument, that is, can compile code
without erroring out or printing a warning about an unknown flag.
-- `has_link_argument(argument_name)` *(added 0.46.0)* returns true if
+- `has_link_argument(argument_name)` *(since 0.46.0)*: returns true if
the linker accepts the specified command line argument, that is, can
compile and link code without erroring out or printing a warning
about an unknown flag. Link arguments will be passed to the
compiler, so should usually have the `-Wl,` prefix. On VisualStudio
a `/link` argument will be prepended.
-- `has_function(funcname)` returns true if the given function is
+- `has_function(funcname)`: returns true if the given function is
provided by the standard library or a library passed in with the
`args` keyword, you can specify external dependencies to use with
`dependencies` keyword argument.
-- `check_header` *(added 0.47.0)* returns true if the specified header is *usable* with
+- `check_header` *(since 0.47.0)*: returns true if the specified header is *usable* with
the specified prefix, dependencies, and arguments.
You can specify external dependencies to use with `dependencies`
keyword argument and extra code to put above the header test with
the `prefix` keyword. In order to look for headers in a specific
directory you can use `args : '-I/extra/include/dir`, but this
should only be used in exceptional cases for includes that can't be
- detected via pkg-config and passed via `dependencies`. Since *0.50.0* the
+ detected via pkg-config and passed via `dependencies`. *(since 0.50.0)* The
`required` keyword argument can be used to abort if the header cannot be
found.
-- `has_header` returns true if the specified header *exists*, and is
+- `has_header`: returns true if the specified header *exists*, and is
faster than `check_header()` since it only does a pre-processor check.
You can specify external dependencies to use with `dependencies`
keyword argument and extra code to put above the header test with
the `prefix` keyword. In order to look for headers in a specific
directory you can use `args : '-I/extra/include/dir`, but this
should only be used in exceptional cases for includes that can't be
- detected via pkg-config and passed via `dependencies`. Since *0.50.0* the
+ detected via pkg-config and passed via `dependencies`. *(since 0.50.0)* The
`required` keyword argument can be used to abort if the header cannot be
found.
-- `has_header_symbol(headername, symbolname)` allows one to detect
+- `has_header_symbol(headername, symbolname)`: detects
whether a particular symbol (function, variable, #define, type
definition, etc) is declared in the specified header, you can
specify external dependencies to use with `dependencies` keyword
- argument. Since *0.50.0* the `required` keyword argument can be used to abort
- if the symbol cannot be found.
+ argument. *(since 0.50.0)* The `required` keyword argument can be
+ used to abort if the symbol cannot be found.
-- `has_member(typename, membername)` takes two arguments, type name
+- `has_member(typename, membername)`: takes two arguments, type name
and member name and returns true if the type has the specified
member, you can specify external dependencies to use with
`dependencies` keyword argument.
-- `has_members(typename, membername1, membername2, ...)` takes at
+- `has_members(typename, membername1, membername2, ...)`: takes at
least two arguments, type name and one or more member names, returns
true if the type has all the specified members, you can specify
external dependencies to use with `dependencies` keyword argument.
-- `has_multi_arguments(arg1, arg2, arg3, ...)` is the same as
+- `has_multi_arguments(arg1, arg2, arg3, ...)` *(since 0.37.0)*: the same as
`has_argument` but takes multiple arguments and uses them all in a
- single compiler invocation, available since 0.37.0.
+ single compiler invocation.
-- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(added 0.46.0)*
- is the same as `has_link_argument` but takes multiple arguments and
+- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(since 0.46.0)*:
+ the same as `has_link_argument` but takes multiple arguments and
uses them all in a single compiler invocation.
-- `has_type(typename)` returns true if the specified token is a type,
+- `has_type(typename)`: returns true if the specified token is a type,
you can specify external dependencies to use with `dependencies`
keyword argument.
-- `links(code)` returns true if the code fragment given in the
+- `links(code)`: returns true if the code fragment given in the
positional argument compiles and links, you can specify external
dependencies to use with `dependencies` keyword argument, `code` can
be either a string containing source code or a `file` object
pointing to the source code.
-- `run(code)` attempts to compile and execute the given code fragment,
+- `run(code)`: attempts to compile and execute the given code fragment,
returns a run result object, you can specify external dependencies
to use with `dependencies` keyword argument, `code` can be either a
string containing source code or a `file` object pointing to the
source code.
-- `symbols_have_underscore_prefix()` returns `true` if the C symbol
- mangling is one underscore (`_`) prefixed to the symbol, available
- since 0.37.0.
+- `symbols_have_underscore_prefix()` *(since 0.37.0)*: returns `true`
+ if the C symbol mangling is one underscore (`_`) prefixed to the symbol.
-- `sizeof(typename, ...)` returns the size of the given type
+- `sizeof(typename, ...)`: returns the size of the given type
(e.g. `'int'`) or -1 if the type is unknown, to add includes set
them in the `prefix` keyword argument, you can specify external
dependencies to use with `dependencies` keyword argument.
-- `version()` returns the compiler's version number as a string.
+- `version()`: returns the compiler's version number as a string.
-- `has_function_attribute(name)` *(added in 0.48.0)* returns `true` if the
+- `has_function_attribute(name)` *(since 0.48.0)*: returns `true` if the
compiler supports the GNU style (`__attribute__(...)`) `name`. This is
preferable to manual compile checks as it may be optimized for compilers that
do not support such attributes.
[This table](Reference-tables.md#gcc-__attribute__) lists all of the
supported attributes.
-- `get_supported_function_attributes(list_of_names)` *(added in 0.48.0)*
+- `get_supported_function_attributes(list_of_names)` *(since 0.48.0)*:
returns an array containing any names that are supported GCC style
  attributes, as if `has_function_attribute` were called on each of them
  individually.
The following keyword arguments can be used:
-- `args` can be used to pass a list of compiler arguments that are
+- `args`: used to pass a list of compiler arguments that are
required to find the header or symbol. For example, you might need
to pass the include path `-Isome/path/to/header` if a header is not
- in the default include path. In versions newer than 0.38.0 you
- should use the `include_directories` keyword described below. You
- may also want to pass a library name `-lfoo` for `has_function` to
- check for a function. Supported by all methods except `get_id`,
- `version`, and `find_library`.
+   in the default include path. *(since 0.38.0)* You should use the
+ `include_directories` keyword described below. You may also want to
+ pass a library name `-lfoo` for `has_function` to check for a function.
+ Supported by all methods except `get_id`, `version`, and `find_library`.
-- `include_directories` specifies extra directories for header
- searches. *(added 0.38.0)*
+- `include_directories` *(since 0.38.0)*: specifies extra directories for
+ header searches.
-- `name` the name to use for printing a message about the compiler
+- `name`: the name to use for printing a message about the compiler
check. Supported by the methods `compiles()`, `links()`, and
`run()`. If this keyword argument is not passed to those methods, no
message will be printed about the check.
-- `no_builtin_args` when set to true, the compiler arguments controlled
+- `no_builtin_args`: when set to true, the compiler arguments controlled
by built-in configuration options are not added.
-- `prefix` can be used to add #includes and other things that are
+- `prefix`: adds #includes and other things that are
required for the symbol to be declared. System definitions should be
passed via compiler args (eg: `_GNU_SOURCE` is often required for
some symbols to be exposed on Linux, and it should be passed via
@@ -2270,15 +2292,15 @@ A build target is either an [executable](#executable),
[both shared and static library](#both_libraries) or
[shared module](#shared_module).
-- `extract_all_objects()` is same as `extract_objects` but returns all
- object files generated by this target. Since 0.46.0 keyword argument
+- `extract_all_objects()`: the same as `extract_objects` but returns all
+  object files generated by this target. *(since 0.46.0)* The keyword argument
`recursive` must be set to `true` to also return objects passed to
the `object` argument of this target. By default only objects built
for this target are returned to maintain backward compatibility with
previous versions. The default will eventually be changed to `true`
in a future version.
-- `extract_objects(source1, source2, ...)` takes as its arguments
+- `extract_objects(source1, source2, ...)`: takes as its arguments
a number of source files as [`string`](#string-object) or
[`files()`](#files) and returns an opaque value representing the
object files generated for those source files. This is typically used
@@ -2286,17 +2308,17 @@ A build target is either an [executable](#executable),
some source files with custom flags. To use the object file(s)
in another build target, use the `objects:` keyword argument.
-- `full_path()` returns a full path pointing to the result target file.
+- `full_path()`: returns a full path pointing to the result target file.
NOTE: In most cases using the object itself will do the same job as
this and will also allow Meson to setup inter-target dependencies
correctly. Please file a bug if that doesn't work for you.
-- `private_dir_include()` returns a opaque value that works like
+- `private_dir_include()`: returns an opaque value that works like
`include_directories` but points to the private directory of this
  target, usually only needed if another target needs to access
some generated internal headers of this target
-- `name()` *Since 0.54.0*, returns the target name.
+- `name()` *(since 0.54.0)*: returns the target name.
### `configuration` data object
@@ -2307,20 +2329,19 @@ configuration values to be used for generating configuration files. A
more in-depth description can be found in the [the configuration wiki
page](Configuration.md) It has three methods:
-- `get(varname, default_value)` returns the value of `varname`, if the
+- `get(varname, default_value)`: returns the value of `varname`, if the
value has not been set returns `default_value` if it is defined
- *(added 0.38.0)* and errors out if not
+ *(since 0.38.0)* and errors out if not
-- `get_unquoted(varname, default_value)` returns the value of `varname`
- but without surrounding double quotes (`"`). If the value has not been
- set returns `default_value` if it is defined and errors out if not.
- Available since 0.44.0
+- `get_unquoted(varname, default_value)` *(since 0.44.0)*: returns the value
+ of `varname` but without surrounding double quotes (`"`). If the value has
+ not been set returns `default_value` if it is defined and errors out if not.
-- `has(varname)`, returns `true` if the specified variable is set
+- `has(varname)`: returns `true` if the specified variable is set
-- `merge_from(other)` takes as argument a different configuration data
- object and copies all entries from that object to the current
- object, available since 0.42.0
+- `merge_from(other)` *(since 0.42.0)*: takes as argument a different
+ configuration data object and copies all entries from that object to
+  the current object.
- `set(varname, value)`, sets a variable to a given value
@@ -2342,20 +2363,20 @@ cause a syntax error.
This object is returned by [`custom_target`](#custom_target) and
contains a target with the following methods:
-- `full_path()` returns a full path pointing to the result target file
+- `full_path()`: returns a full path pointing to the result target file
NOTE: In most cases using the object itself will do the same job as
this and will also allow Meson to setup inter-target dependencies
correctly. Please file a bug if that doesn't work for you.
- *Since 0.54.0* it can be also called on indexes objects:
+ *(since 0.54.0)* It can be also called on indexes objects:
`custom_targets[i].full_path()`.
-- `[index]` returns an opaque object that references this target, and
+- `[index]`: returns an opaque object that references this target, and
can be used as a source in other targets. When it is used as such it
will make that target depend on this custom target, but the only
source added will be the one that corresponds to the index of the
custom target's output argument.
-- `to_list()` *Since 0.54.0*, returns a list of opaque objects that references
+- `to_list()` *(since 0.54.0)*: returns a list of opaque objects that reference
this target, and can be used as a source in other targets. This can be used to
iterate outputs with `foreach` loop.
@@ -2364,48 +2385,48 @@ contains a target with the following methods:
This object is returned by [`dependency()`](#dependency) and contains
an external dependency with the following methods:
- - `found()` which returns whether the dependency was found
+ - `found()`: returns whether the dependency was found.
- - `name()` *(Added 0.48.0)* returns the name of the dependency that was
+ - `name()` *(since 0.48.0)*: returns the name of the dependency that was
searched. Returns `internal` for dependencies created with
`declare_dependency()`.
- - `get_pkgconfig_variable(varname)` *(Added 0.36.0)* will get the
+ - `get_pkgconfig_variable(varname)` *(since 0.36.0)*: gets the
pkg-config variable specified, or, if invoked on a non pkg-config
- dependency, error out. *(Added 0.44.0)* You can also redefine a
+ dependency, error out. *(since 0.44.0)* You can also redefine a
variable by passing a list to the `define_variable` parameter
that can affect the retrieved variable: `['prefix', '/'])`.
- *(Added 0.45.0)* A warning is issued if the variable is not defined,
+ *(since 0.45.0)* A warning is issued if the variable is not defined,
unless a `default` parameter is specified.
- - `get_configtool_variable(varname)` *(Added 0.44.0)* will get the
+ - `get_configtool_variable(varname)` *(since 0.44.0)*: gets the
command line argument from the config tool (with `--` prepended), or,
if invoked on a non config-tool dependency, error out.
- - `type_name()` which returns a string describing the type of the
+ - `type_name()`: returns a string describing the type of the
dependency, the most common values are `internal` for deps created
with `declare_dependency()` and `pkgconfig` for system dependencies
obtained with Pkg-config.
- - `version()` is the version number as a string, for example `1.2.8`.
+ - `version()`: the version number as a string, for example `1.2.8`.
`unknown` if the dependency provider doesn't support determining the
version.
- - `include_type()` returns whether the value set by the `include_type` kwarg
  - `include_type()`: returns the value set by the `include_type` kwarg.
- - `as_system(value)` returns a copy of the dependency object, which has changed
+ - `as_system(value)`: returns a copy of the dependency object, which has changed
the value of `include_type` to `value`. The `value` argument is optional and
defaults to `'preserve'`.
- `partial_dependency(compile_args : false, link_args : false, links
- : false, includes : false, source : false)` *(Added 0.46.0)* returns
+ : false, includes : false, sources : false)` *(since 0.46.0)*: returns
a new dependency object with the same name, version, found status,
type name, and methods as the object that called it. This new
object will only inherit other attributes from its parent as
controlled by keyword arguments.
If the parent has any dependencies, those will be applied to the new
- partial dependency with the same rules. So , given:
+ partial dependency with the same rules. So, given:
```meson
dep1 = declare_dependency(compile_args : '-Werror=foo', link_with : 'libfoo')
@@ -2430,14 +2451,14 @@ an external dependency with the following methods:
- `get_variable(cmake : str, pkgconfig : str, configtool : str,
internal: str, default_value : str, pkgconfig_define : [str, str])`
- *(Added in 0.51.0)* A generic variable getter method, which replaces the
+ *(since 0.51.0)*: a generic variable getter method, which replaces the
get_*type*_variable methods. This allows one to get the variable
from a dependency without knowing specifically how that dependency
was found. If default_value is set and the value cannot be gotten
from the object then default_value is returned, if it is not set
then an error is raised.
- *New in 0.54.0, the `internal` keyword*
+ *(since 0.54.0)* added `internal` keyword.
### `disabler` object
@@ -2447,7 +2468,7 @@ statement (function call, logical op, etc) they will cause the
statement evaluation to immediately short circuit to return a disabler
object. A disabler object has one method:
- - `found()`, always returns `false`
+- `found()`: always returns `false`.
### `external program` object
@@ -2455,11 +2476,16 @@ This object is returned by [`find_program()`](#find_program) and
contains an external (i.e. not built as part of this project) program
and has the following methods:
-- `found()` which returns whether the executable was found
+- `found()`: returns whether the executable was found.
-- `path()` which returns a string pointing to the script or executable
+- `path()`: *(since 0.55.0)* **(deprecated)** use `full_path()` instead.
+ Returns a string pointing to the script or executable
**NOTE:** You should not need to use this method. Passing the object
- itself should work in all cases. For example: `run_command(obj, arg1, arg2)`
+ itself should work in all cases. For example: `run_command(obj, arg1, arg2)`.
+
+- `full_path()` (*since 0.55.0*): which returns a string pointing to the script or
+ executable **NOTE:** You should not need to use this method. Passing the object
+ itself should work in all cases. For example: `run_command(obj, arg1, arg2)`.
### `environment` object
@@ -2468,7 +2494,7 @@ detailed information about how environment variables should be set
during tests. It should be passed as the `env` keyword argument to
tests and other functions. It has the following methods.
-- `append(varname, value1, value2, ...)` appends the given values to
+- `append(varname, value1, value2, ...)`: appends the given values to
the old value of the environment variable, e.g. `env.append('FOO',
'BAR', 'BAZ', separator : ';')` produces `BOB;BAR;BAZ` if `FOO` had
the value `BOB` and plain `BAR;BAZ` if the value was not defined. If
@@ -2476,10 +2502,10 @@ tests and other functions. It has the following methods.
separator for the host operating system will be used, i.e. ';' for
Windows and ':' for UNIX/POSIX systems.
-- `prepend(varname, value1, value2, ...)` is the same as `append`
- except that it writes to the beginning of the variable
+- `prepend(varname, value1, value2, ...)`: same as `append`
+ except that it writes to the beginning of the variable.
-- `set(varname, value1, value2)` sets the environment variable
+- `set(varname, value1, value2)`: sets the environment variable
specified in the first argument to the values in the second argument
joined by the separator, e.g. `env.set('FOO', 'BAR'),` sets envvar
`FOO` to value `BAR`. See `append()` above for how separators work.
@@ -2493,27 +2519,27 @@ This object is returned by [`find_library()`](#find_library) and
contains an external (i.e. not built as part of this project)
library. This object has the following methods:
- - `found()` which returns whether the library was found.
+- `found()`: returns whether the library was found.
- - `type_name()` *(added 0.48.0)* which returns a string describing
- the type of the dependency, which will be `library` in this case.
+- `type_name()` *(since 0.48.0)*: returns a string describing
+ the type of the dependency, which will be `library` in this case.
- - `partial_dependency(compile_args : false, link_args : false, links
- : false, includes : false, source : false)` *(Added 0.46.0)* returns
- a new dependency object with the same name, version, found status,
- type name, and methods as the object that called it. This new
- object will only inherit other attributes from its parent as
- controlled by keyword arguments.
+- `partial_dependency(compile_args : false, link_args : false, links
+ : false, includes : false, source : false)` *(since 0.46.0)*: returns
+ a new dependency object with the same name, version, found status,
+ type name, and methods as the object that called it. This new
+ object will only inherit other attributes from its parent as
+ controlled by keyword arguments.
### Feature option object
-The following methods are defined for all [`feature` options](Build-options.md#features):
+*(since 0.47.0)*
-- `enabled()` returns whether the feature was set to `'enabled'`
-- `disabled()` returns whether the feature was set to `'disabled'`
-- `auto()` returns whether the feature was set to `'auto'`
+The following methods are defined for all [`feature` options](Build-options.md#features):
-Feature options are available since 0.47.0.
+- `enabled()`: returns whether the feature was set to `'enabled'`
+- `disabled()`: returns whether the feature was set to `'disabled'`
+- `auto()`: returns whether the feature was set to `'auto'`
### `generator` object
@@ -2521,7 +2547,7 @@ This object is returned by [`generator()`](#generator) and contains a
generator that is used to transform files from one type to another by
an executable (e.g. `idl` files into source code and headers).
-* `process(list_of_files, ...)` takes a list of files, causes them to
+- `process(list_of_files, ...)`: takes a list of files, causes them to
be processed and returns an object containing the result which can
then, for example, be passed into a build target definition. The
keyword argument `extra_args`, if specified, will be used to replace
@@ -2539,10 +2565,10 @@ an executable (e.g. `idl` files into source code and headers).
This object is returned by [`subproject()`](#subproject) and is an
opaque object representing it.
-- `found()` *(added 0.48.0)* which returns whether the subproject was
+- `found()` *(since 0.48.0)*: returns whether the subproject was
successfully setup
-- `get_variable(name, fallback)` fetches the specified variable from
+- `get_variable(name, fallback)`: fetches the specified variable from
inside the subproject. This is useful to, for instance, get a
[declared dependency](#declare_dependency) from the
[subproject](Subprojects.md).
@@ -2557,9 +2583,9 @@ This object encapsulates the result of trying to compile and run a
sample piece of code with [`compiler.run()`](#compiler-object) or
[`run_command()`](#run_command). It has the following methods:
-- `compiled()` if true, the compilation succeeded, if false it did not
+- `compiled()`: if true, the compilation succeeded, if false it did not
and the other methods return unspecified data. This is only available
for `compiler.run()` results.
-- `returncode()` the return code of executing the compiled binary
-- `stderr()` the standard error produced when the command was run
-- `stdout()` the standard out produced when the command was run
+- `returncode()`: the return code of executing the compiled binary
+- `stderr()`: the standard error produced when the command was run
+- `stdout()`: the standard out produced when the command was run
diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md
index dfae339..3be129f 100644
--- a/docs/markdown/Reference-tables.md
+++ b/docs/markdown/Reference-tables.md
@@ -81,14 +81,17 @@ set in the cross file.
| alpha | DEC Alpha processor |
| arc | 32 bit ARC processor |
| arm | 32 bit ARM processor |
-| e2k | MCST Elbrus processor |
+| avr | Atmel AVR processor |
| c2000 | 32 bit C2000 processor |
+| dspic | 16 bit Microchip dsPIC |
+| e2k | MCST Elbrus processor |
| ia64 | Itanium processor |
| m68k | Motorola 68000 processor |
| microblaze | MicroBlaze processor |
| mips | 32 bit MIPS processor |
| mips64 | 64 bit MIPS processor |
| parisc | HP PA-RISC processor |
+| pic24 | 16 bit Microchip PIC24 |
| ppc | 32 bit PPC processors |
| ppc64 | 64 bit PPC processors |
| riscv32 | 32 bit RISC-V Open ISA |
@@ -97,12 +100,11 @@ set in the cross file.
| rx | Renesas RX 32 bit MCU |
| s390 | IBM zSystem s390 |
| s390x | IBM zSystem s390x |
+| sh4 | SuperH SH-4 |
| sparc | 32 bit SPARC |
| sparc64 | SPARC v9 processor |
| wasm32 | 32 bit Webassembly |
| wasm64 | 64 bit Webassembly |
-| pic24 | 16 bit Microchip PIC24 |
-| dspic | 16 bit Microchip dsPIC |
| x86 | 32 bit x86 processor |
| x86_64 | 64 bit x86 processor |
@@ -120,6 +122,7 @@ These are provided by the `.system()` method call.
| Value | Comment |
| ----- | ------- |
+| android | By convention only, subject to change |
| cygwin | The Cygwin environment for Windows |
| darwin | Either OSX or iOS |
| dragonfly | DragonFly BSD |
@@ -153,6 +156,10 @@ These are the parameter names for passing language specific arguments to your bu
| Rust | rust_args | rust_link_args |
| Vala | vala_args | vala_link_args |
+All these `<lang>_*` options are specified per machine. See [specifying
+options per machine](Builtin-options.md#Specifying-options-per-machine) for
+how to do this in cross builds.
+
## Compiler and linker flag environment variables
These environment variables will be used to modify the compiler and
@@ -175,6 +182,10 @@ instead.
| RUSTFLAGS | Flags for the Rust compiler |
| LDFLAGS | The linker flags, used for all languages |
+N.B. these settings are specified per machine, and so the environment variables
+actually come in pairs. See the [environment variables per
+machine](#Environment-variables-per-machine) section for details.
+
## Function Attributes
These are the parameters names that are supported using
@@ -187,49 +198,50 @@ These values are supported using the GCC style `__attribute__` annotations,
which are supported by GCC, Clang, and other compilers.
-| Name |
-|----------------------|
-| alias |
-| aligned |
-| alloc_size |
-| always_inline |
-| artificial |
-| cold |
-| const |
-| constructor |
-| constructor_priority |
-| deprecated |
-| destructor |
-| error |
-| externally_visible |
-| fallthrough |
-| flatten |
-| format |
-| format_arg |
-| gnu_inline |
-| hot |
-| ifunc |
-| malloc |
-| noclone |
-| noinline |
-| nonnull |
-| noreturn |
-| nothrow |
-| optimize |
-| packed |
-| pure |
-| returns_nonnull |
-| unused |
-| used |
-| visibility* |
-| visibility:default† |
-| visibility:hidden† |
-| visibility:internal† |
-| visibility:protected†|
-| warning |
-| warn_unused_result |
-| weak |
-| weakreaf |
+| Name |
+|--------------------------|
+| alias |
+| aligned |
+| alloc_size |
+| always_inline |
+| artificial |
+| cold |
+| const |
+| constructor |
+| constructor_priority |
+| deprecated |
+| destructor |
+| error |
+| externally_visible |
+| fallthrough |
+| flatten |
+| format |
+| format_arg |
+| force_align_arg_pointer³ |
+| gnu_inline |
+| hot |
+| ifunc |
+| malloc |
+| noclone |
+| noinline |
+| nonnull |
+| noreturn |
+| nothrow |
+| optimize |
+| packed |
+| pure |
+| returns_nonnull |
+| unused |
+| used |
+| visibility* |
+| visibility:default† |
+| visibility:hidden† |
+| visibility:internal† |
+| visibility:protected† |
+| warning |
+| warn_unused_result |
+| weak |
+| weakref                  |
\* *Changed in 0.52.0* the "visibility" target no longer includes
"protected", which is not present in Apple's clang.
@@ -237,6 +249,8 @@ which are supported by GCC, Clang, and other compilers.
† *New in 0.52.0* These split visibility attributes are preferred to the plain
"visibility" as they provide narrower checks.
+³ *New in 0.55.0*
+
### MSVC __declspec
These values are supported using the MSVC style `__declspec` annotation,
@@ -265,6 +279,10 @@ These are the values that can be passed to `dependency` function's
## Compiler and Linker selection variables
+N.B. these settings are specified per machine, and so the environment variables
+actually come in pairs. See the [environment variables per
+machine](#Environment-variables-per-machine) section for details.
+
| Language | Compiler | Linker | Note |
|---------------|----------|-----------|---------------------------------------------|
| C | CC | CC_LD | |
@@ -278,5 +296,28 @@ These are the values that can be passed to `dependency` function's
| C# | CSC | CSC | The linker is the compiler |
*The old environment variales are still supported, but are deprecated and will
-be removed in a future version of meson.
+be removed in a future version of meson.*
+
+## Environment variables per machine
+
+Since *0.54.0*, Following Autotool and other legacy build systems, environment
+variables that affect machine-specific settings come in pairs: for every bare
+environment variable `FOO`, there is a suffixed `FOO_FOR_BUILD`, where `FOO`
+just affects the host machine configuration, while `FOO_FOR_BUILD` just affects
+the build machine configuration. For example:
+
+ - `PKG_CONFIG_PATH_FOR_BUILD` controls the paths pkg-config will search for
+ just `native: true` dependencies (build machine).
+
+ - `PKG_CONFIG_PATH` controls the paths pkg-config will search for just
+ `native: false` dependencies (host machine).
+
+This mirrors the `build.` prefix used for (built-in) meson options, which has
+the same meaning.
+
+This is useful for cross builds. In the native builds, build = host, and the
+unsuffixed environment variables alone will suffice.
+Prior to *0.54.0*, there were no `_FOR_BUILD`-suffixed variables, and most
+environment variables only affected native machine configurations, though this
+wasn't consistent (e.g. `PKG_CONFIG_PATH` still affected cross builds).
diff --git a/docs/markdown/Release-notes-for-0.54.0.md b/docs/markdown/Release-notes-for-0.54.0.md
index 2c8880c..2f215de 100644
--- a/docs/markdown/Release-notes-for-0.54.0.md
+++ b/docs/markdown/Release-notes-for-0.54.0.md
@@ -14,7 +14,7 @@ If it set to 0 then the PTHREAD_POOL_SIZE option will not be passed.
## Introduce dataonly for the pkgconfig module
This allows users to disable writing out the inbuilt variables to
-the pkg-config file as they might actualy not be required.
+the pkg-config file as they might actually not be required.
One reason to have this is for architecture-independent pkg-config
files in projects which also have architecture-dependent outputs.
@@ -359,3 +359,8 @@ target that has eight source files, Meson will generate two unity
files each of which includes four source files. The old behaviour can
be replicated by setting `unity_size` to a large value, such as 10000.
+## Verbose mode for `meson compile`
+
+The new option `--verbose` has been added to `meson compile` that will enable
+more verbose compilation logs. Note that for VS backend it means that logs will
+be less verbose by default (without `--verbose` option).
diff --git a/docs/markdown/Release-notes-for-0.55.0.md b/docs/markdown/Release-notes-for-0.55.0.md
new file mode 100644
index 0000000..534c452
--- /dev/null
+++ b/docs/markdown/Release-notes-for-0.55.0.md
@@ -0,0 +1,307 @@
+---
+title: Release 0.55.0
+short-description: Release notes for 0.55.0
+...
+
+# New features
+
+## rpath removal now more careful
+
+On Linux-like systems, meson adds rpath entries to allow running apps
+in the build tree, and then removes those build-time-only
+rpath entries when installing. Rpath entries may also come
+in via LDFLAGS and via .pc files. Meson used to remove those
+latter rpath entries by accident, but is now more careful.
+
+## Added ability to specify targets in `meson compile`
+
+It's now possible to specify targets in `meson compile`, which will result in building only the requested targets.
+
+Usage: `meson compile [TARGET [TARGET...]]`
+`TARGET` has the following syntax: `[PATH/]NAME[:TYPE]`.
+`NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`).
+`PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`.
+`TYPE`: type of the target (e.g. `shared_library`, `executable` and etc)
+
+`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`.
+
+For example targets from the following code:
+```meson
+shared_library('foo', ...)
+static_library('foo', ...)
+executable('bar', ...)
+```
+can be invoked with `meson compile foo:shared_library foo:static_library bar`.
+
+## Test protocol for gtest
+
+Due to the popularity of Gtest (google test) among C and C++ developers meson
+now supports a special protocol for gtest. With this protocol meson injects
+arguments to gtests to output JUnit, reads that JUnit, and adds the output to
+the JUnit it generates.
+
+## meson.add_*_script methods accept new types
+
+All three (`add_install_script`, `add_dist_script`, and
+`add_postconf_script`) now accept ExternalPrograms (as returned by
+`find_program`), Files, and the output of `configure_file`. The dist and
+postconf methods cannot accept other types because of when they are run.
+While dist could, in theory, take other dependencies, it would require more
+extensive changes, particularly to the backend.
+
+```meson
+meson.add_install_script(find_program('foo'), files('bar'))
+meson.add_dist_script(find_program('foo'), files('bar'))
+meson.add_postconf_script(find_program('foo'), files('bar'))
+```
+
+The install script variant is also able to accept custom_targets,
+custom_target indexes, and build targets (executables, libraries), and can
+use built executables as the script to run:
+
+```meson
+installer = executable('installer', ...)
+meson.add_install_script(installer, ...)
+meson.add_install_script('foo.py', installer)
+```
+
+## Machine file constants
+
+Native and cross files now support string and list concatenation using the `+`
+operator, and joining paths using the `/` operator.
+Entries defined in the `[constants]` section can be used in any other section.
+An entry defined in any other section can be used only within that same section and only
+after it has been defined.
+
+```ini
+[constants]
+toolchain = '/toolchain'
+common_flags = ['--sysroot=' + toolchain + '/sysroot']
+
+[properties]
+c_args = common_flags + ['-DSOMETHING']
+cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+[binaries]
+c = toolchain + '/gcc'
+```
+
+## Configure CMake subprojects with meson.subproject_options
+
+Meson now supports passing configuration options to CMake and overriding
+certain build details extracted from the CMake subproject.
+
+The new CMake configuration options object is very similar to the
+[configuration data object](Reference-manual.md#configuration-data-object) object
+returned by [`configuration_data`](Reference-manual.md#configuration_data). It
+is generated by the `subproject_options` function
+
+All configuration options have to be set *before* the subproject is configured
+and must be passed to the `subproject` method via the `options` key. Altering
+the configuration object won't have any effect on previous `cmake.subproject`
+calls.
+
+**Note:** The `cmake_options` kwarg for the `subproject` function is now
+deprecated since it is replaced by the new `options` system.
+
+## find_program: Fixes when the program has been overridden by executable
+
+When a program has been overridden by an executable, the returned object of
+find_program() had some issues:
+
+```meson
+# In a subproject:
+exe = executable('foo', ...)
+meson.override_find_program('foo', exe)
+
+# In main project:
+# The version check was crashing meson.
+prog = find_program('foo', version : '>=1.0')
+
+# This was crashing meson.
+message(prog.path())
+
+# New method to be consistent with built objects.
+message(prog.full_path())
+```
+
+## Response files enabled on Linux, reined in on Windows
+
+Meson used to always use response files on Windows,
+but never on Linux.
+
+It now strikes a happier balance, using them on both platforms,
+but only when needed to avoid command line length limits.
+
+## `unstable-kconfig` module renamed to `unstable-keyval`
+
+The `unstable-kconfig` module is now renamed to `unstable-keyval`.
+We expect this module to become stable once it has some usage experience,
+specifically in the next or the following release.
+
+
+## Fatal warnings in `gnome.generate_gir()`
+
+`gnome.generate_gir()` now has `fatal_warnings` keyword argument to abort when
+a warning is produced. This is useful for example in CI environment where it's
+important to catch potential issues.
+
+## b_ndebug support for D language compilers
+
+D Language compilers will now set -release/--release/-frelease (depending on
+the compiler) when the b_ndebug flag is set.
+
+## Meson test now produces JUnit xml from results
+
+Meson will now generate a JUnit compatible XML file from test results. It
+will be in the meson-logs directory and is called testlog.junit.xml.
+
+## Config tool based dependencies no longer search PATH for cross compiling
+
+Before 0.55.0 config tool based dependencies (llvm-config, cups-config, etc),
+would search system $PATH if they weren't defined in the cross file. This has
+been a source of bugs and has been deprecated. It is now removed, config tool
+binaries must be specified in the cross file now or the dependency will not
+be found.
+
+## Rename has_exe_wrapper -> can_run_host_binaries
+
+The old name was confusing as it didn't really match the behavior of the
+function. The old name remains as an alias (the behavior hasn't changed), but
+is now deprecated.
+
+## String concatenation in meson_options.txt
+
+It is now possible to use string concatenation (with the `+` operator) in the
+meson_options.txt file. This allows splitting long option descriptions.
+
+```meson
+option(
+ 'testoption',
+ type : 'string',
+ value : 'optval',
+ description : 'An option with a very long description' +
+ 'that does something in a specific context'
+)
+```
+
+## Wrap fallback URL
+
+Wrap files can now define `source_fallback_url` and `patch_fallback_url` to be
+used in case the main server is temporarily down.
+
+## Clang coverage support
+
+llvm-cov is now used to generate coverage information when clang is used as
+the compiler.
+
+## Local wrap source and patch files
+
+It is now possible to use the `patch_filename` and `source_filename` value in a
+`.wrap` file without `*_url` to specify a local source / patch file. All local
+files must be located in the `subprojects/packagefiles` directory. The `*_hash`
+entries are optional with this setup.
+
+## Local wrap patch directory
+
+Wrap files can now specify `patch_directory` instead of `patch_filename` in the
+case overlay files are local. Every file in that directory, and in subdirectories,
+will be copied to the subproject directory. This can be used for example to add
+`meson.build` files to a project not using Meson build system upstream.
+The patch directory must be placed in `subprojects/packagefiles` directory.
+
+## Patch on all wrap types
+
+`patch_*` keys are not limited to `wrap-file` any more, they can be specified for
+all wrap types.
+
+## link_language argument added to all targets
+
+Previously the `link_language` argument was only supposed to be allowed in
+executables, because the linker used needs to be the linker for the language
+that implements the main function. Unfortunately it didn't work in that case,
+and, even worse, if it had been implemented properly it would have worked for
+*all* targets. In 0.55.0 this restriction has been removed, and the bug fixed.
+It now is valid for `executable` and all derivative of `library`.
+
+## meson dist --no-tests
+
+`meson dist` has a new option `--no-tests` to skip build and tests of generated
+packages. It can be used to not waste time for example when done in CI that
+already does its own testing.
+
+## Force fallback for
+
+A newly-added `--force-fallback-for` command line option can now be used to
+force fallback for specific subprojects.
+
+Example:
+
+```
+meson build --force-fallback-for=foo,bar
+```
+
+## Implicit dependency fallback
+
+`dependency('foo')` now automatically falls back if the dependency is not found on
+the system but a subproject wrap file or directory exists with the same name.
+
+That means that simply adding `subprojects/foo.wrap` is enough to add fallback
+to any `dependency('foo')` call. It however requires that the subproject call
+`meson.override_dependency('foo', foo_dep)` to specify which dependency object
+should be used for `foo`.
+
+## Wrap file `provide` section
+
+Wrap files can define the dependencies it provides in the `[provide]` section.
+When `foo.wrap` provides the dependency `foo-1.0`, any call to `dependency('foo-1.0')`
+will automatically fallback to that subproject even if no `fallback` keyword
+argument is given. See [Wrap documentation](Wrap-dependency-system-manual.md#provide_section).
+
+## `find_program()` fallback
+
+When a program cannot be found on the system but a wrap file has its name in the
+`[provide]` section, that subproject will be used as fallback.
+
+## Test scripts are given the exe wrapper if needed
+
+Meson will now set the `MESON_EXE_WRAPPER` as the properly wrapped and joined
+representation. For Unix-like OSes this means python's shlex.join, on Windows
+an implementation that attempts to properly quote Windows arguments is used.
+This allows wrapper scripts to run test binaries, instead of just skipping.
+
+For example, if the wrapper is `['emulator', '--script']`, it will be passed
+as `MESON_EXE_WRAPPER="emulator --script"`.
+
+## Added ability to specify backend arguments in `meson compile`
+
+It's now possible to specify backend specific arguments in `meson compile`.
+
+Usage: `meson compile [--vs-args=args] [--ninja-args=args]`
+
+```
+ --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied only on `ninja` backend).
+ --vs-args VS_ARGS Arguments to pass to `msbuild` (applied only on `vs` backend).
+```
+
+These arguments use the following syntax:
+
+If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command:
+
+```
+$ meson compile --ninja-args=-n,-d,explain
+```
+
+would add `-n`, `-d` and `explain` arguments to ninja invocation.
+
+If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this:
+
+```
+$ meson compile "--ninja-args=['a,b', 'c d']"
+```
+
+## Introspection API changes
+
+dumping the AST (--ast): **new in 0.55.0**
+- prints the AST of a meson.build as JSON
+
diff --git a/docs/markdown/Run-targets.md b/docs/markdown/Run-targets.md
index 38129a6..b584bf7 100644
--- a/docs/markdown/Run-targets.md
+++ b/docs/markdown/Run-targets.md
@@ -29,7 +29,7 @@ run_target('inspector',
Run targets are not run by default. To run it run the following command.
```console
-$ ninja inspector
+$ meson compile inspector
```
All additional entries in `run_target`'s `command` array are passed unchanged to the inspector script, so you can do things like this:
diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md
index 910513c..326ecb9 100644
--- a/docs/markdown/Running-Meson.md
+++ b/docs/markdown/Running-Meson.md
@@ -9,13 +9,12 @@ from the source tree with the command `/path/to/source/meson.py`. Meson may
also be installed in which case the command is simply `meson`. In this manual
we only use the latter format for simplicity.
-Additionally, the invocation can pass options to meson. The list of options is
-documented [here](Builtin-options.md).
-
At the time of writing only a command line version of Meson is available. This
means that Meson must be invoked using the terminal. If you wish to use the
MSVC compiler, you need to run Meson under "Visual Studio command prompt".
+All available meson commands are listed on the [commands reference page](Commands.md).
+
## Configuring the build directory
Let us assume that we have a source tree that has a Meson build system. This
@@ -41,6 +40,9 @@ build backend in the build directory. By default Meson generates a *debug
build*, which turns on basic warnings and debug information and disables
compiler optimizations.
+Additionally, the invocation can pass options to meson. The list of options is
+documented [here](Builtin-options.md).
+
You can specify a different type of build with the `--buildtype` command line
argument. It can have one of the following values.
@@ -83,7 +85,7 @@ during configuration time. As an example, here is how you would use Meson to
generate a Visual studio solution.
```sh
-meson setup <build dir> --backend=vs2010
+meson setup <build dir> --backend=vs
```
You can then open the generated solution with Visual Studio and compile it in
@@ -105,9 +107,18 @@ linker arguments needed.
## Building from the source
-If you are not using an IDE, Meson uses the [Ninja build
-system](https://ninja-build.org/) to actually build the code. To start the
-build, simply type the following command.
+To start the build, simply type the following command.
+
+```sh
+meson compile -C builddir
+```
+
+See [`meson compile` description](Commands.md#compile) for more info.
+
+### Building directly with ninja
+
+By default Meson uses the [Ninja build system](https://ninja-build.org/) to
+actually build the code. To start the build, simply type the following command.
```sh
ninja -C builddir
@@ -133,20 +144,29 @@ Meson provides native support for running tests. The command to do that is
simple.
```sh
-ninja -C builddir test
+meson test -C builddir
```
+See [`meson test` description](Commands.md#test) for more info.
+
Meson does not force the use of any particular testing framework. You are free
to use GTest, Boost Test, Check or even custom executables.
+Note: it can be also invoked directly with ninja with the following command:
+```sh
+ninja -C builddir test
+```
+
## Installing
Installing the built software is just as simple.
```sh
-ninja -C builddir install
+meson install -C builddir
```
+See [`meson install` description](Commands.md#install) for more info.
+
Note that Meson will only install build targets explicitly tagged as
installable, as detailed in the [installing targets
documentation](Installing.md).
@@ -157,7 +177,12 @@ Meson also supports the `DESTDIR` variable used in e.g. building packages. It
is used like this:
```sh
-DESTDIR=/path/to/staging ninja -C builddir install
+DESTDIR=/path/to/staging meson install -C builddir
+```
+
+Note: it can be also invoked directly with ninja with the following command:
+```sh
+ninja -C builddir install
```
## Command line help
diff --git a/docs/markdown/Style-guide.md b/docs/markdown/Style-guide.md
index ee2ecfe..960e60c 100644
--- a/docs/markdown/Style-guide.md
+++ b/docs/markdown/Style-guide.md
@@ -11,6 +11,12 @@ Meson build files.
Always spaces.
+## Naming Variable
+
+The most consistent naming convention is snake case. Let's say you would
+like to refer to your executable; something like `my_exe` would work, or
+just `exe`.
+
## Naming options
There are two ways of naming project options. As an example for
diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md
index 8232da9..02a83e6 100644
--- a/docs/markdown/Subprojects.md
+++ b/docs/markdown/Subprojects.md
@@ -212,6 +212,9 @@ the following command-line options:
calls, and those are meant to be used for sources that cannot be
provided by the system, such as copylibs.
+ This option may be overridden by `--force-fallback-for` for specific
+ dependencies.
+
* **--wrap-mode=forcefallback**
Meson will not look at the system for any dependencies which have
@@ -220,6 +223,41 @@ the following command-line options:
want to specifically build against the library sources provided by
your subprojects.
+* **--force-fallback-for=list,of,dependencies**
+
+ Meson will not look at the system for any dependencies listed there,
+ provided a fallback was supplied when the dependency was declared.
+
+ This option takes precedence over `--wrap-mode=nofallback`, and when
+ used in combination with `--wrap-mode=nodownload` will only work
+ if the dependency has already been downloaded.
+
+ This is useful when your project has many fallback dependencies,
+ but you only want to build against the library sources for a few
+ of them.
+
+ **Warning**: This could lead to mixing system and subproject versions of the
+ same library in the same process. Take this case as example:
+ - Libraries `glib-2.0` and `gstreamer-1.0` are installed on your system.
+ - `gstreamer-1.0` depends on `glib-2.0`, pkg-config file `gstreamer-1.0.pc`
+ has `Requires: glib-2.0`.
+ - In your application build definition you do:
+ ```meson
+ executable('app', ...,
+ dependencies: [
+ dependency('glib-2.0', fallback: 'glib'),
+ dependency('gstreamer-1.0', fallback: 'gstreamer')],
+ )
+ ```
+ - You configure with `--force-fallback-for=glib`.
+ This results in linking to two different versions of library `glib-2.0`
+ because `dependency('glib-2.0', fallback: 'glib')` will return the
+ subproject dependency, but `dependency('gstreamer-1.0', fallback: 'gstreamer')`
+ will not fallback and return the system dependency, including `glib-2.0`
+ library. To avoid that situation, every dependency that itself depends on
+ `glib-2.0` must also be forced to fallback, in this case with
+ `--force-fallback-for=glib,gstreamer`.
+
## Download subprojects
*Since 0.49.0*
diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md
index cf0516c..bbe3dbb 100644
--- a/docs/markdown/Syntax.md
+++ b/docs/markdown/Syntax.md
@@ -16,12 +16,12 @@ statements* and *includes*.
Usually one Meson statement takes just one line. There is no way to
have multiple statements on one line as in e.g. *C*. Function and
method calls' argument lists can be split over multiple lines. Meson
-will autodetect this case and do the right thing. In other cases you
-can get multi-line statements by ending the line with a `\`. Apart
-from line ending whitespace has no syntactic meaning.
+will autodetect this case and do the right thing.
-Variables
---
+In other cases, *(added 0.50)* you can get multi-line statements by ending the
+line with a `\`. Apart from line endings, whitespace has no syntactic meaning.
+
+## Variables
Variables in Meson work just like in other high level programming
languages. A variable can contain a value of any type, such as an
@@ -46,8 +46,7 @@ var2 += [4]
# var1 is still [1, 2, 3]
```
-Numbers
---
+## Numbers
Meson supports only integer numbers. They are declared simply by
writing them out. Basic arithmetic operations are supported.
@@ -85,8 +84,7 @@ int_var = 42
string_var = int_var.to_string()
```
-Booleans
---
+## Booleans
A boolean is either `true` or `false`.
@@ -94,8 +92,7 @@ A boolean is either `true` or `false`.
truth = true
```
-Strings
---
+## Strings
Strings in Meson are declared with single quotes. To enter a literal
single quote do it like this:
@@ -126,7 +123,7 @@ As in python and C, up to three octal digits are accepted in `\ooo`.
Unrecognized escape sequences are left in the string unchanged, i.e., the
backslash is left in the string.
-#### String concatenation
+### String concatenation
Strings can be concatenated to form a new string using the `+` symbol.
@@ -136,7 +133,25 @@ str2 = 'xyz'
combined = str1 + '_' + str2 # combined is now abc_xyz
```
-#### Strings running over multiple lines
+### String path building
+
+*(Added 0.49)*
+
+You can concatenate any two strings using `/` as an operator to build paths.
+This will always use `/` as the path separator on all platforms.
+
+```meson
+joined = '/usr/share' / 'projectname' # => /usr/share/projectname
+joined = '/usr/local' / '/etc/name' # => /etc/name
+
+joined = 'C:\\foo\\bar' / 'builddir' # => C:/foo/bar/builddir
+joined = 'C:\\foo\\bar' / 'D:\\builddir' # => D:/builddir
+```
+
+Note that this is equivalent to using [`join_paths()`](Reference-manual.md#join_paths),
+which was obsoleted by this operator.
+
+### Strings running over multiple lines
Strings running over multiple lines can be declared with three single
quotes, like this:
@@ -152,7 +167,7 @@ These are raw strings that do not support the escape sequences listed
above. These strings can also be combined with the string formatting
functionality described below.
-#### String formatting
+### String formatting
Strings can be built using the string formatting functionality.
@@ -165,12 +180,12 @@ res = template.format('text', 1, true)
As can be seen, the formatting works by replacing placeholders of type
`@number@` with the corresponding argument.
-#### String methods
+### String methods
Strings also support a number of other methods that return transformed
copies.
-**.strip()**
+#### .strip()
```meson
# Similar to the Python str.strip(). Removes leading/ending spaces and newlines
@@ -179,7 +194,7 @@ stripped_define = define.strip()
# 'stripped_define' now has the value '-Dsomedefine'
```
-**.to_upper()**, **.to_lower()**
+#### .to_upper(), .to_lower()
```meson
target = 'x86_FreeBSD'
@@ -187,7 +202,7 @@ upper = target.to_upper() # t now has the value 'X86_FREEBSD'
lower = target.to_lower() # t now has the value 'x86_freebsd'
```
-**.to_int()**
+#### .to_int()
```meson
version = '1'
@@ -195,7 +210,7 @@ version = '1'
ver_int = version.to_int()
```
-**.contains()**, **.startswith()**, **.endswith()**
+#### .contains(), .startswith(), .endswith()
```meson
target = 'x86_FreeBSD'
@@ -205,7 +220,27 @@ is_x86 = target.startswith('x86') # boolean value 'true'
is_bsd = target.to_lower().endswith('bsd') # boolean value 'true'
```
-**.split()**, **.join()**
+#### .substring()
+
+Since 0.56.0, you can extract a substring from a string.
+
+```meson
+# Similar to the Python str[start:end] syntax
+target = 'x86_FreeBSD'
+platform = target.substring(0, 3) # prefix string value 'x86'
+system = target.substring(4) # suffix string value 'FreeBSD'
+```
+
+The method accepts negative values, where a negative `start` is relative to
+the end of the string, i.e. `len(string) + start`, as is a negative `end`.
+
+```meson
+string = 'foobar'
+string.substring(-5, -3) # => 'oo'
+string.substring(1, -1) # => 'ooba'
+```
+
+#### .split(), .join()
```meson
# Similar to the Python str.split()
@@ -246,7 +281,7 @@ api_version = '@0@.@1@'.format(version_array[0], version_array[1])
# api_version now (again) has the value '0.2'
```
-**.underscorify()**
+#### .underscorify()
```meson
name = 'Meson Docs.txt#Reference-manual'
@@ -256,7 +291,7 @@ underscored = name.underscorify()
# underscored now has the value 'Meson_Docs_txt_Reference_manual'
```
-**.version_compare()**
+#### .version_compare()
```meson
version = '1.2.3'
@@ -266,8 +301,15 @@ is_new = version.version_compare('>=2.0')
# Supports the following operators: '>', '<', '>=', '<=', '!=', '==', '='
```
-Arrays
---
+Meson version comparison conventions include:
+
+```meson
+'3.6'.version_compare('>=3.6.0') == false
+```
+
+It is best to be unambiguous and specify the full revision level to compare.
+
+## Arrays
Arrays are delimited by brackets. An array can contain an arbitrary number of objects of any type.
@@ -302,6 +344,7 @@ assign it to `my_array` instead of modifying the original since all
objects in Meson are immutable.
Since 0.49.0, you can check if an array contains an element like this:
+
```meson
my_array = [1, 2]
if 1 in my_array
@@ -312,7 +355,7 @@ if 1 not in my_array
endif
```
-#### Array methods
+### Array methods
The following methods are defined for all arrays:
@@ -320,8 +363,7 @@ The following methods are defined for all arrays:
- `contains`, returns `true` if the array contains the object given as argument, `false` otherwise
- `get`, returns the object at the given index, negative indices count from the back of the array, indexing out of bounds is a fatal error. Provided for backwards-compatibility, it is identical to array indexing.
-Dictionaries
---
+## Dictionaries
Dictionaries are delimited by curly braces. A dictionary can contain an
arbitrary number of key value pairs. Keys are required to be strings, values can
@@ -346,6 +388,7 @@ Visit the [Reference Manual](Reference-manual.md#dictionary-object) to read
about the methods exposed by dictionaries.
Since 0.49.0, you can check if a dictionary contains a key like this:
+
```meson
my_dict = {'foo': 42, 'bar': 43}
if 'foo' in my_dict
@@ -361,14 +404,14 @@ endif
*Since 0.53.0* Keys can be any expression evaluating to a string value, not limited
to string literals any more.
+
```meson
d = {'a' + 'b' : 42}
k = 'cd'
d += {k : 43}
```
-Function calls
---
+## Function calls
Meson provides a set of usable functions. The most common use case is
creating build objects.
@@ -413,8 +456,7 @@ executable('progname', 'prog.c',
Attempting to do this causes Meson to immediately exit with an error.
-Method calls
---
+## Method calls
Objects can have methods, which are called with the dot operator. The
exact methods it provides depends on the object.
@@ -424,8 +466,7 @@ myobj = some_function()
myobj.do_something('now')
```
-If statements
---
+## If statements
If statements work just like in other languages.
@@ -446,8 +487,7 @@ if opt != 'foo'
endif
```
-Logical operations
---
+## Logical operations
Meson has the standard range of logical operations which can be used in
`if` statements.
@@ -537,8 +577,7 @@ endforeach
# result is ['a', 'b']
```
-Comments
---
+## Comments
A comment starts with the `#` character and extends until the end of the line.
@@ -547,8 +586,7 @@ some_function() # This is a comment
some_other_function()
```
-Ternary operator
---
+## Ternary operator
The ternary operator works just like in other languages.
@@ -560,8 +598,7 @@ The only exception is that nested ternary operators are forbidden to
improve legibility. If your branching needs are more complex than this
you need to write an `if/else` construct.
-Includes
---
+## Includes
Most source trees have multiple subdirectories to process. These can
be handled by Meson's `subdir` command. It changes to the given
@@ -576,8 +613,7 @@ test_data_dir = 'data'
subdir('tests')
```
-User-defined functions and methods
---
+## User-defined functions and methods
Meson does not currently support user-defined functions or
methods. The addition of user-defined functions would make Meson
@@ -588,3 +624,71 @@ FAQ](FAQ.md#why-is-meson-not-just-a-python-module-so-i-could-code-my-build-setup
because of this limitation you find yourself copying and pasting code
a lot you may be able to use a [`foreach` loop
instead](#foreach-statements).
+
+## Stability Promises
+
+Meson is very actively developed and continuously improved. There is a
+possibility that future enhancements to the Meson build system will require
+changes to the syntax. Such changes might be the addition of new reserved
+keywords, changing the meaning of existing keywords or additions around the
+basic building blocks like statements and fundamental types. It is planned
+to stabilize the syntax with the 1.0 release.
+
+## Grammar
+
+This is the full Meson grammar, as it is used to parse Meson build definition files:
+
+```
+additive_expression: multiplicative_expression | (additive_expression additive_operator multiplicative_expression)
+additive_operator: "+" | "-"
+argument_list: positional_arguments ["," keyword_arguments] | keyword_arguments
+array_literal: "[" [expression_list] "]"
+assignment_expression: conditional_expression | (logical_or_expression assignment_operator assignment_expression)
+assignment_operator: "=" | "*=" | "/=" | "%=" | "+=" | "-="
+boolean_literal: "true" | "false"
+build_definition: (NEWLINE | statement)*
+condition: expression
+conditional_expression: logical_or_expression | (logical_or_expression "?" expression ":" assignment_expression)
+decimal_literal: DECIMAL_NUMBER
+DECIMAL_NUMBER: /[1-9][0-9]*/
+dictionary_literal: "{" [key_value_list] "}"
+equality_expression: relational_expression | (equality_expression equality_operator relational_expression)
+equality_operator: "==" | "!="
+expression: assignment_expression
+expression_list: expression ("," expression)*
+expression_statement: expression
+function_expression: id_expression "(" [argument_list] ")"
+hex_literal: "0x" HEX_NUMBER
+HEX_NUMBER: /[a-fA-F0-9]+/
+id_expression: IDENTIFIER
+IDENTIFIER: /[a-zA-Z_][a-zA-Z_0-9]*/
+identifier_list: id_expression ("," id_expression)*
+integer_literal: decimal_literal | octal_literal | hex_literal
+iteration_statement: "foreach" identifier_list ":" id_expression NEWLINE (statement | jump_statement)* "endforeach"
+jump_statement: ("break" | "continue") NEWLINE
+key_value_item: expression ":" expression
+key_value_list: key_value_item ("," key_value_item)*
+keyword_item: id_expression ":" expression
+keyword_arguments: keyword_item ("," keyword_item)*
+literal: integer_literal | string_literal | boolean_literal | array_literal | dictionary_literal
+logical_and_expression: equality_expression | (logical_and_expression "and" equality_expression)
+logical_or_expression: logical_and_expression | (logical_or_expression "or" logical_and_expression)
+method_expression: postfix_expression "." function_expression
+multiplicative_expression: unary_expression | (multiplicative_expression multiplicative_operator unary_expression)
+multiplicative_operator: "*" | "/" | "%"
+octal_literal: "0o" OCTAL_NUMBER
+OCTAL_NUMBER: /[0-7]+/
+positional_arguments: expression ("," expression)*
+postfix_expression: primary_expression | subscript_expression | function_expression | method_expression
+primary_expression: literal | ("(" expression ")") | id_expression
+relational_expression: additive_expression | (relational_expression relational_operator additive_expression)
+relational_operator: ">" | "<" | ">=" | "<=" | "in" | ("not" "in")
+selection_statement: "if" condition NEWLINE (statement)* ("elif" condition NEWLINE (statement)*)* ["else" (statement)*] "endif"
+statement: (expression_statement | selection_statement | iteration_statement) NEWLINE
+string_literal: ("'" STRING_SIMPLE_VALUE "'") | ("'''" STRING_MULTILINE_VALUE "'''")
+STRING_MULTILINE_VALUE: \.*?(''')\
+STRING_SIMPLE_VALUE: \.*?(?<!\\)(\\\\)*?'\
+subscript_expression: postfix_expression "[" expression "]"
+unary_expression: postfix_expression | (unary_operator unary_expression)
+unary_operator: "not" | "+" | "-"
+```
diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md
index 6b248b5..c5a4e6b 100644
--- a/docs/markdown/Tutorial.md
+++ b/docs/markdown/Tutorial.md
@@ -74,7 +74,7 @@ Now we are ready to build our code.
```
$ cd builddir
-$ ninja
+$ meson compile
```
Once that is done we can run the resulting binary.
@@ -124,12 +124,12 @@ or the like. Instead we just type the exact same command as if we were
rebuilding our code without any build system changes.
```
-$ ninja
+$ meson compile
```
Once you have set up your build directory the first time, you don't
ever need to run the `meson` command again. You always just run
-`ninja`. Meson will automatically detect when you have done changes to
+`meson compile`. Meson will automatically detect when you have done changes to
build definitions and will take care of everything so users don't have
to care. In this case the following output is produced.
diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md
index 0785549..60fcad2 100644
--- a/docs/markdown/Unit-tests.md
+++ b/docs/markdown/Unit-tests.md
@@ -4,20 +4,24 @@ short-description: Meson's own unit-test system
# Unit tests
-Meson comes with a fully functional unit test system. To use it simply build an executable and then use it in a test.
+Meson comes with a fully functional unit test system. To use it simply build
+an executable and then use it in a test.
```meson
e = executable('prog', 'testprog.c')
test('name of test', e)
```
-You can add as many tests as you want. They are run with the command `ninja test`.
+You can add as many tests as you want. They are run with the command `meson
+test`.
-Meson captures the output of all tests and writes it in the log file `meson-logs/testlog.txt`.
+Meson captures the output of all tests and writes it in the log file
+`meson-logs/testlog.txt`.
## Test parameters
-Some tests require the use of command line arguments or environment variables. These are simple to define.
+Some tests require the use of command line arguments or environment
+variables. These are simple to define.
```meson
test('command line test', exe, args : ['first', 'second'])
@@ -29,48 +33,59 @@ Note how you need to specify multiple values as an array.
### MALLOC_PERTURB_
By default, environment variable
-[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html)
-is set to a random value between 1..255. This can help find memory
-leaks on configurations using glibc, including with non-GCC compilers.
-This feature can be disabled as discussed in [test()](Reference-manual.md#test).
+[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) is
+set to a random value between 1..255. This can help find memory leaks on
+configurations using glibc, including with non-GCC compilers. This feature
+can be disabled as discussed in [test()](Reference-manual.md#test).
## Coverage
If you enable coverage measurements by giving Meson the command line flag
-`-Db_coverage=true`, you can generate coverage reports after running the tests
-(running the tests is required to gather the list of functions that get
-called). Meson will autodetect what coverage generator tools you have installed
-and will generate the corresponding targets. These targets are `coverage-xml`
-and `coverage-text` which are both provided by [Gcovr](http://gcovr.com)
-(version 3.3 or higher) and `coverage-html`, which requires
-[Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and
-[GenHTML](https://linux.die.net/man/1/genhtml) or
-[Gcovr](http://gcovr.com). As a convenience, a high-level `coverage` target is
-also generated which will produce all 3 coverage report types, if possible.
-
-The output of these commands is written to the log directory `meson-logs` in your build directory.
+`-Db_coverage=true`, you can generate coverage reports after running the
+tests (running the tests is required to gather the list of functions that get
+called). Meson will autodetect what coverage generator tools you have
+installed and will generate the corresponding targets. These targets are
+`coverage-xml` and `coverage-text` which are both provided by
+[Gcovr](http://gcovr.com) (version 3.3 or higher) and `coverage-html`, which
+requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and
+[GenHTML](https://linux.die.net/man/1/genhtml) or [Gcovr](http://gcovr.com).
+As a convenience, a high-level `coverage` target is also generated which will
+produce all 3 coverage report types, if possible.
+
+The output of these commands is written to the log directory `meson-logs` in
+your build directory.
## Parallelism
-To reduce test times, Meson will by default run multiple unit tests in parallel. It is common to have some tests which can not be run in parallel because they require unique hold on some resource such as a file or a D-Bus name. You have to specify these tests with a keyword argument.
+To reduce test times, Meson will by default run multiple unit tests in
+parallel. It is common to have some tests which can not be run in parallel
+because they require unique hold on some resource such as a file or a D-Bus
+name. You have to specify these tests with a keyword argument.
```meson
test('unique test', t, is_parallel : false)
```
-Meson will then make sure that no other unit test is running at the same time. Non-parallel tests take longer to run so it is recommended that you write your unit tests to be parallel executable whenever possible.
+Meson will then make sure that no other unit test is running at the same
+time. Non-parallel tests take longer to run so it is recommended that you
+write your unit tests to be parallel executable whenever possible.
-By default Meson uses as many concurrent processes as there are cores on the test machine. You can override this with the environment variable `MESON_TESTTHREADS` like this.
+By default Meson uses as many concurrent processes as there are cores on the
+test machine. You can override this with the environment variable
+`MESON_TESTTHREADS` like this.
```console
-$ MESON_TESTTHREADS=5 ninja test
+$ MESON_TESTTHREADS=5 meson test
```
## Priorities
*(added in version 0.52.0)*
-Tests can be assigned a priority that determines when a test is *started*. Tests with higher priority are started first, tests with lower priority started later. The default priority is 0, meson makes no guarantee on the ordering of tests with identical priority.
+Tests can be assigned a priority that determines when a test is *started*.
+Tests with higher priority are started first, tests with lower priority
+started later. The default priority is 0, meson makes no guarantee on the
+ordering of tests with identical priority.
```meson
test('started second', t, priority : 0)
@@ -78,23 +93,36 @@ test('started third', t, priority : -50)
test('started first', t, priority : 1000)
```
-Note that the test priority only affects the starting order of tests and subsequent tests are affected by how long it takes previous tests to complete. It is thus possible that a higher-priority test is still running when lower-priority tests with a shorter runtime have completed.
+Note that the test priority only affects the starting order of tests and
+subsequent tests are affected by how long it takes previous tests to
+complete. It is thus possible that a higher-priority test is still running
+when lower-priority tests with a shorter runtime have completed.
## Skipped tests and hard errors
Sometimes a test can only determine at runtime that it can not be run.
-For the default `exitcode` testing protocol, the GNU standard approach in this case is to exit the program with error code 77. Meson will detect this and report these tests as skipped rather than failed. This behavior was added in version 0.37.0.
+For the default `exitcode` testing protocol, the GNU standard approach in
+this case is to exit the program with error code 77. Meson will detect this
+and report these tests as skipped rather than failed. This behavior was added
+in version 0.37.0.
-For TAP-based tests, skipped tests should print a single line starting with `1..0 # SKIP`.
+For TAP-based tests, skipped tests should print a single line starting with
+`1..0 # SKIP`.
-In addition, sometimes a test fails set up so that it should fail even if it is marked as an expected failure. The GNU standard approach in this case is to exit the program with error code 99. Again, Meson will detect this and report these tests as `ERROR`, ignoring the setting of `should_fail`. This behavior was added in version 0.50.0.
+In addition, sometimes a test fails set up so that it should fail even if it
+is marked as an expected failure. The GNU standard approach in this case is
+to exit the program with error code 99. Again, Meson will detect this and
+report these tests as `ERROR`, ignoring the setting of `should_fail`. This
+behavior was added in version 0.50.0.
## Testing tool
-The goal of the meson test tool is to provide a simple way to run tests in a variety of different ways. The tool is designed to be run in the build directory.
+The goal of the meson test tool is to provide a simple way to run tests in a
+variety of different ways. The tool is designed to be run in the build
+directory.
-The simplest thing to do is just to run all tests, which is equivalent to running `ninja test`.
+The simplest thing to do is just to run all tests.
```console
$ meson test
@@ -107,7 +135,7 @@ For clarity, consider the meson.build containing:
```meson
test('A', ..., suite: 'foo')
-test('B', ..., suite: 'foo')
+test('B', ..., suite: ['foo', 'bar'])
test('C', ..., suite: 'bar')
test('D', ..., suite: 'baz')
@@ -125,7 +153,8 @@ Tests belonging to a suite `suite` can be run as follows
$ meson test --suite (sub)project_name:suite
```
-Since version *0.46*, `(sub)project_name` can be omitted if it is the top-level project.
+Since version *0.46*, `(sub)project_name` can be omitted if it is the
+top-level project.
Multiple suites are specified like:
@@ -145,7 +174,8 @@ Sometimes you need to run the tests multiple times, which is done like this:
$ meson test --repeat=10
```
-Invoking tests via a helper executable such as Valgrind can be done with the `--wrap` argument
+Invoking tests via a helper executable such as Valgrind can be done with the
+`--wrap` argument
```console
$ meson test --wrap=valgrind testname
@@ -163,17 +193,25 @@ Meson also supports running the tests under GDB. Just doing this:
$ meson test --gdb testname
```
-Meson will launch `gdb` all set up to run the test. Just type `run` in the GDB command prompt to start the program.
+Meson will launch `gdb` all set up to run the test. Just type `run` in the
+GDB command prompt to start the program.
-The second use case is a test that segfaults only rarely. In this case you can invoke the following command:
+The second use case is a test that segfaults only rarely. In this case you
+can invoke the following command:
```console
$ meson test --gdb --repeat=10000 testname
```
-This runs the test up to 10 000 times under GDB automatically. If the program crashes, GDB will halt and the user can debug the application. Note that testing timeouts are disabled in this case so `meson test` will not kill `gdb` while the developer is still debugging it. The downside is that if the test binary freezes, the test runner will wait forever.
+This runs the test up to 10 000 times under GDB automatically. If the program
+crashes, GDB will halt and the user can debug the application. Note that
+testing timeouts are disabled in this case so `meson test` will not kill
+`gdb` while the developer is still debugging it. The downside is that if the
+test binary freezes, the test runner will wait forever.
-Sometimes, the GDB binary is not in the PATH variable or the user wants to use a GDB replacement. Therefore, the invoked GDB program can be specified *(added 0.52.0)*:
+Sometimes, the GDB binary is not in the PATH variable or the user wants to
+use a GDB replacement. Therefore, the invoked GDB program can be specified
+*(added 0.52.0)*:
```console
$ meson test --gdb --gdb-path /path/to/gdb testname
@@ -183,12 +221,41 @@ $ meson test --gdb --gdb-path /path/to/gdb testname
$ meson test --print-errorlogs
```
-Meson will report the output produced by the failing tests along with other useful information as the environmental variables. This is useful, for example, when you run the tests on Travis-CI, Jenkins and the like.
+Meson will report the output produced by the failing tests along with other
+useful information as the environmental variables. This is useful, for
+example, when you run the tests on Travis-CI, Jenkins and the like.
-For further information see the command line help of Meson by running `meson test -h`.
+For further information see the command line help of Meson by running `meson
+test -h`.
## Legacy notes
-If `meson test` does not work for you, you likely have a old version of Meson.
-In that case you should call `mesontest` instead. If `mesontest` doesn't work
-either you have a very old version prior to 0.37.0 and should upgrade.
+If `meson test` does not work for you, you likely have an old version of
+Meson. In that case you should call `mesontest` instead. If `mesontest`
+doesn't work either you have a very old version prior to 0.37.0 and should
+upgrade.
+
+## Test outputs
+
+Meson will write several different files with detailed results of running
+tests. These will be written into $builddir/meson-logs/
+
+### testlog.json
+
+This is not a proper JSON file; rather, it is a file containing one valid
+JSON object per line. This file is designed so that each line is streamed out
+as each test is run, so it can be read as a stream while the test harness is
+running.
+
+### testlog.junit.xml
+
+This is a valid JUnit XML description of all tests run. It is not streamed
+out, and is written only once all tests complete running.
+
+When tests use the `tap` protocol each test will be recorded as a testsuite
+container, with each case named by the number of the result.
+
+When tests use the `gtest` protocol meson will inject arguments to the test
+to generate its own JUnit XML, which meson will include as part of this XML
+file.
+
+*New in 0.55.0*
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md
index bfc8a7a..50cd27a 100644
--- a/docs/markdown/Users.md
+++ b/docs/markdown/Users.md
@@ -56,6 +56,7 @@ topic](https://github.com/topics/meson).
- [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D
- [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO
- [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux
+ - [HarfBuzz](https://github.com/harfbuzz/harfbuzz), a text shaping engine
- [HelenOS](http://helenos.org), a portable microkernel-based multiserver operating system
- [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C
- [IGT](https://gitlab.freedesktop.org/drm/igt-gpu-tools), Linux kernel graphics driver test suite
@@ -70,8 +71,10 @@ topic](https://github.com/topics/meson).
- [Knot Resolver](https://gitlab.labs.nic.cz/knot/knot-resolver), Full caching DNS resolver implementation
- [Ksh](https://github.com/att/ast), a Korn Shell
- [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network
+ - [Le](https://github.com/kirushyk/le), machine learning framework
- [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android
- [Libdrm](https://gitlab.freedesktop.org/mesa/drm), a library for abstracting DRM kernel interfaces
+ - [libeconf](https://github.com/openSUSE/libeconf), Enhanced config file parsing library, which merges config files placed in several locations into one
- [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management
- [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface
- [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2
@@ -115,6 +118,7 @@ format files
- [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock)
- [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP
- [Sequeler](https://github.com/Alecaddd/sequeler), a friendly SQL client for Linux, built with Vala and Gtk
+ - [Siril](https://gitlab.com/free-astro/siril), an image processing software for amateur astronomy
- [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP
- [sway](https://github.com/swaywm/sway), i3-compatible Wayland compositor
- [Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool
@@ -124,6 +128,7 @@ format files
- [Terminology](https://github.com/billiob/terminology), a terminal emulator based on the Enlightenment Foundation Libraries
- [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3
- [Tizonia](https://github.com/tizonia/tizonia-openmax-il), a command-line cloud music player for Linux with support for Spotify, Google Play Music, YouTube, SoundCloud, TuneIn, Plex servers and Chromecast devices
+ - [Vala Language Server](https://github.com/benwaffle/vala-language-server), code intelligence engine for the Vala and Genie programming languages
- [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala
- [Venom](https://github.com/naxuroqa/Venom), a modern Tox client for the GNU/Linux desktop
- [VMAF](https://github.com/Netflix/vmaf) (by Netflix), a perceptual video quality assessment based on multi-method fusion
diff --git a/docs/markdown/Using-multiple-build-directories.md b/docs/markdown/Using-multiple-build-directories.md
index 2455640..ab6cf3c 100644
--- a/docs/markdown/Using-multiple-build-directories.md
+++ b/docs/markdown/Using-multiple-build-directories.md
@@ -32,9 +32,9 @@ You can add cross builds, too. As an example, let's set up a Linux -> Windows cr
mkdir buildwine
meson --cross-file=mingw-cross.txt buildwine
-The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `ninja test`.
+The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `meson test`.
-To compile any of these build types, just cd into the corresponding build directory and run `ninja` or instruct your IDE to do the same. Note that once you have set up your build directory once, you can just run Ninja and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories.
+To compile any of these build types, just cd into the corresponding build directory and run `meson compile` or instruct your IDE to do the same. Note that once you have set up your build directory once, you can just run Ninja and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories.
## Specialized uses
diff --git a/docs/markdown/Vala.md b/docs/markdown/Vala.md
index cbb58a9..0a29847 100644
--- a/docs/markdown/Vala.md
+++ b/docs/markdown/Vala.md
@@ -237,7 +237,7 @@ dependencies = [
dependency('glib-2.0'),
dependency('gobject-2.0'),
meson.get_compiler('c').find_library('foo'),
- meson.get_compiler('vala').find_library('foo', dir: vapi_dir),
+ meson.get_compiler('vala').find_library('foo', dirs: vapi_dir),
]
sources = files('app.vala')
diff --git a/docs/markdown/Vs-External.md b/docs/markdown/Vs-External.md
index add089e..ab3d191 100644
--- a/docs/markdown/Vs-External.md
+++ b/docs/markdown/Vs-External.md
@@ -23,9 +23,9 @@ as follows:
| entry | value |
| ----- | ----- |
-|build | `ninja -C $(Configuration)` |
-|clean | `ninja -C $(Configuration) clean` |
-|rebuild| `ninja -C $(Configuration) clean all|
+|build | `meson compile -C $(Configuration)` |
+|clean | `meson compile -C $(Configuration) --clean` |
+|rebuild| `meson compile -C $(Configuration) --clean && meson compile -C $(Configuration)` |
|Output | `$(Configuration)\name_of_your_executable.exe|
diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md
index 6e47d58..8e6282e 100644
--- a/docs/markdown/Wrap-dependency-system-manual.md
+++ b/docs/markdown/Wrap-dependency-system-manual.md
@@ -28,16 +28,16 @@ itself in a way that makes it easy to use (usually this means as a
static library).
To use this kind of a project as a dependency you could just copy and
-extract it inside your project's `subprojects` directory.
+extract it inside your project's `subprojects` directory.
However there is a simpler way. You can specify a Wrap file that tells Meson
-how to download it for you. If you then use this subproject in your build,
+how to download it for you. If you then use this subproject in your build,
Meson will automatically download and extract it during build. This makes
subproject embedding extremely easy.
All wrap files must have a name of `<project_name>.wrap` form and be in `subprojects` dir.
-Currently Meson has four kinds of wraps:
+Currently Meson has four kinds of wraps:
- wrap-file
- wrap-git
- wrap-hg
@@ -70,19 +70,31 @@ revision = head
## Accepted configuration properties for wraps
- `directory` - name of the subproject root directory, defaults to the name of the wrap.
+Since *0.55.0* those can be used in all wrap types, they were previously reserved to `wrap-file`:
+
+- `patch_url` - download url to retrieve an optional overlay archive
+- `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0*
+- `patch_filename` - filename of the downloaded overlay archive
+- `patch_hash` - sha256 checksum of the downloaded overlay archive
+- `patch_directory` - *Since 0.55.0* Overlay directory, alternative to `patch_filename` in the case
+ files are local instead of a downloaded archive. The directory must be placed in
+ `subprojects/packagefiles`.
+
### Specific to wrap-file
- `source_url` - download url to retrieve the wrap-file source archive
- `source_fallback_url` - fallback URL to be used when download from `source_url` fails *Since: 0.55.0*
- `source_filename` - filename of the downloaded source archive
- `source_hash` - sha256 checksum of the downloaded source archive
-- `patch_url` - download url to retrieve an optional overlay archive
-- `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0*
-- `patch_filename` - filename of the downloaded overlay archive
-- `patch_hash` - sha256 checksum of the downloaded overlay archive
- `lead_directory_missing` - for `wrap-file` create the leading
directory name. Needed when the source file does not have a leading
directory.
+Since *0.55.0* it is possible to use only the `source_filename` and
+`patch_filename` value in a .wrap file (without `source_url` and `patch_url`) to
+specify a local archive in the `subprojects/packagefiles` directory. The `*_hash`
+entries are optional when using this method. This method should be preferred over
+the old `packagecache` approach described below.
+
Since *0.49.0* if `source_filename` or `patch_filename` is found in the
project's `subprojects/packagecache` directory, it will be used instead
of downloading the file, even if `--wrap-mode` option is set to
@@ -94,7 +106,7 @@ of downloading the file, even if `--wrap-mode` option is set to
valid value (such as a git tag) for the VCS's `checkout` command, or
(for git) `head` to track upstream's default branch. Required.
-## Specific to wrap-git
+### Specific to wrap-git
- `depth` - shallowly clone the repository to X number of commits. Note
that git always allow shallowly cloning branches, but in order to
clone commit ids shallowly, the server must support
@@ -124,19 +136,106 @@ thousands of lines of code. Once you have a working build definition,
just zip up the Meson build files (and others you have changed) and
put them somewhere where you can download them.
-Meson build patches are only supported for wrap-file mode. When using
-wrap-git, the repository must contain all Meson build definitions.
+Prior to *0.55.0* Meson build patches were only supported for wrap-file mode.
+When using wrap-git, the repository must contain all Meson build definitions.
+Since *0.55.0* Meson build patches are supported for any wrap modes, including
+wrap-git.
+
+## `provide` section
+
+*Since 0.55.0*
+
+Wrap files can define the dependencies it provides in the `[provide]` section.
+
+```ini
+[provide]
+dependency_names = foo-1.0
+```
+
+When a wrap file provides the dependency `foo-1.0`, as above, any call to
+`dependency('foo-1.0')` will automatically fallback to that subproject even if
+no `fallback` keyword argument is given. A wrap file named `foo.wrap` implicitly
+provides the dependency name `foo` even when the `[provide]` section is missing.
+
+Optional dependencies, like `dependency('foo-1.0', required: get_option('foo_opt'))`
+where `foo_opt` is a feature option set to `auto`, will not fallback to the
+subproject defined in the wrap file, for 2 reasons:
+- It allows for looking up the dependency in other ways first, for example using
+ `cc.find_library('foo')`, and only fallback if that fails:
+
+```meson
+# this won't use fallback defined in foo.wrap
+foo_dep = dependency('foo-1.0', required: false)
+if not foo_dep.found()
+ foo_dep = cc.find_library('foo', has_headers: 'foo.h', required: false)
+ if not foo_dep.found()
+ # This will use the fallback
+ foo_dep = dependency('foo-1.0')
+ # or
+ foo_dep = dependency('foo-1.0', required: false, fallback: 'foo')
+ endif
+endif
+```
+
+- Sometimes not-found dependency is preferable to a fallback when the feature is
+ not explicitly requested by the user. In that case
+ `dependency('foo-1.0', required: get_option('foo_opt'))` will only fallback
+ when the user sets `foo_opt` to `enabled` instead of `auto`.
+
+If it is desired to fallback for an optional dependency, the `fallback` keyword
+argument must be passed explicitly. For example
+`dependency('foo-1.0', required: get_option('foo_opt'), fallback: 'foo')` will
+use the fallback even when `foo_opt` is set to `auto`.
+
+This mechanism assumes the subproject calls `meson.override_dependency('foo-1.0', foo_dep)`
+so Meson knows which dependency object should be used as fallback. Since that
+method was introduced in version *0.54.0*, as a transitional aid for projects
+that do not yet make use of it the variable name can be provided in the wrap file
+with entries in the format `foo-1.0 = foo_dep`.
+
+For example when using a recent enough version of glib that uses
+`meson.override_dependency()` to override `glib-2.0`, `gobject-2.0` and `gio-2.0`,
+a wrap file would look like:
+```ini
+[wrap-git]
+url=https://gitlab.gnome.org/GNOME/glib.git
+revision=glib-2-62
+
+[provide]
+dependency_names = glib-2.0, gobject-2.0, gio-2.0
+```
+
+With older version of glib dependency variable names need to be specified:
+```ini
+[wrap-git]
+url=https://gitlab.gnome.org/GNOME/glib.git
+revision=glib-2-62
+
+[provide]
+glib-2.0=glib_dep
+gobject-2.0=gobject_dep
+gio-2.0=gio_dep
+```
+
+Programs can also be provided by wrap files, with the `program_names` key:
+```ini
+[provide]
+program_names = myprog, otherprog
+```
+
+With such wrap file, `find_program('myprog')` will automatically fallback to use
+the subproject, assuming it uses `meson.override_find_program('myprog')`.
## Using wrapped projects
-Wraps provide a convenient way of obtaining a project into your subproject directory.
+Wraps provide a convenient way of obtaining a project into your subproject directory.
Then you use it as a regular subproject (see [subprojects](Subprojects.md)).
## Getting wraps
-Usually you don't want to write your wraps by hand.
+Usually you don't want to write your wraps by hand.
-There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides
+There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides
many dependencies ready to use. You can read more about WrapDB [here](Using-the-WrapDB.md).
There is also a Meson subcommand to get and manage wraps (see [using wraptool](Using-wraptool.md)).
diff --git a/docs/markdown/Wrap-maintainer-tools.md b/docs/markdown/Wrap-maintainer-tools.md
new file mode 100644
index 0000000..717d0d2
--- /dev/null
+++ b/docs/markdown/Wrap-maintainer-tools.md
@@ -0,0 +1,17 @@
+# Wrap maintainer tools
+
+The [mesonwrap repository](https://github.com/mesonbuild/mesonwrap) provides tools
+to maintain the WrapDB. Read-only features can be used by anyone without Meson admin rights.
+
+## Personal access token
+
+Some tools require access to the Github API.
+A [personal access token](https://github.com/settings/tokens) may be required
+if the freebie Github API quota is exhausted. `public_repo` scope is required
+for write operations.
+
+```
+$ cat ~/.config/mesonwrap.ini
+[mesonwrap]
+github_token = <github token>
+```
diff --git a/docs/markdown/Wrap-review-guidelines.md b/docs/markdown/Wrap-review-guidelines.md
index 512353c..3e41a8d 100644
--- a/docs/markdown/Wrap-review-guidelines.md
+++ b/docs/markdown/Wrap-review-guidelines.md
@@ -7,18 +7,30 @@ package is rejected. What should be done will be determined on a
case-by-case basis. Similarly meeting all these requirements does not
guarantee that the package will get accepted. Use common sense.
-## Checklist
-
-Reviewer: copy-paste this to MR discussion box and tick all boxes that apply.
-
- - [ ] project() has version string
- - [ ] project() has license string
- - [ ] if new project, master has tagged commit as only commit
- - [ ] if new branch, it is branched from master
- - [ ] contains a readme.txt
- - [ ] contains an upstream.wrap file
- - [ ] download link points to authoritative upstream location
- - [ ] wrap repository contains only build system files
- - [ ] merge request is pointed to correct target branch (not master)
- - [ ] wrap works
- - [ ] repo does not have useless top level directory (i.e. libfoobar-1.0.0)
+The review process is partially automated by the [mesonwrap](Wrap-maintainer-tools.md)
+`review` tool.
+
+```
+mesonwrap review zlib --pull-request=1 [--approve]
+```
+
+Since not every check can be automated please pay attention to the following during the review:
+
+- Download link points to an authoritative upstream location.
+- Version branch is created from master.
+- Except for the existing code, `LICENSE.build` is mandatory.
+- `project()` has a version and it matches the source version.
+- `project()` has a license.
+- Complex `configure_file()` inputs are documented.
+ If the file is a copy of a project file make sure it is clear what was changed.
+- Unit tests are enabled if the project provides them.
+- There are no guidelines on whether `install()` is a good or a bad thing in wraps.
+- If the project can't be tested on the host platform consider using the `--cross-file` flag.
+ See [the issue](https://github.com/mesonbuild/mesonwrap/issues/125).
+
+Encourage wrap readability. Use your own judgement.
+
+## Approval
+
+If the code looks good use the `--approve` flag to merge it.
+The tool automatically creates a release.
diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index 8231d3d..0d1a2a2 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -25,7 +25,7 @@ for the host platform in cross builds can only be specified with a cross file.
There is a table of all environment variables supported [Here](Reference-tables.md#compiler-and-linker-selection-variables)
-## Set dynamic linker
+## Set linker
*New in 0.53.0*
@@ -148,15 +148,14 @@ $ meson <other flags> -Db_coverage=true
Then issue the following commands.
```console
-$ ninja
-$ ninja test
-$ ninja coverage-html (or coverage-xml)
+$ meson compile
+$ meson test
+$ meson compile coverage-html (or coverage-xml)
```
The coverage report can be found in the meson-logs subdirectory.
-Note: Currently, Meson does not support generating coverage reports
-with Clang.
+*New in 0.55.0* llvm-cov support for use with clang
## Add some optimization to debug builds
@@ -191,14 +190,14 @@ test failures.
Install scan-build and configure your project. Then do this:
```console
-$ ninja scan-build
+$ meson compile scan-build
```
You can use the `SCANBUILD` environment variable to choose the
scan-build executable.
```console
-$ SCANBUILD=<your exe> ninja scan-build
+$ SCANBUILD=<your exe> meson compile scan-build
```
@@ -209,8 +208,8 @@ operation. First we set up the project with profile measurements
enabled and compile it.
```console
-$ meson <Meson options, such as --buildtype=debugoptimized> -Db_pgo=generate
-$ ninja -C builddir
+$ meson setup <Meson options, such as --buildtype=debugoptimized> -Db_pgo=generate
+$ meson compile -C builddir
```
Then we need to run the program with some representative input. This
@@ -221,7 +220,7 @@ information and rebuild.
```console
$ meson configure -Db_pgo=use
-$ ninja
+$ meson compile
```
After these steps the resulting binary is fully optimized.
@@ -260,3 +259,28 @@ The `cmake_module_path` property is only needed for custom CMake scripts. System
wide CMake scripts are found automatically.
More information can be found [here](Dependencies.md#cmake)
+
+## Get a default not-found dependency?
+
+```meson
+null_dep = dependency('', required : false)
+```
+
+This can be used in cases where you want a default value, but might override it
+later.
+
+```meson
+# Not needed on Windows!
+my_dep = dependency('', required : false)
+if host_machine.system() in ['freebsd', 'netbsd', 'openbsd', 'dragonfly']
+ my_dep = dependency('some dep', required : false)
+elif host_machine.system() == 'linux'
+ my_dep = dependency('some other dep', required : false)
+endif
+
+executable(
+ 'myexe',
+ my_sources,
+  dependencies : [my_dep]
+)
+```
diff --git a/docs/markdown/legal.md b/docs/markdown/legal.md
index bd86ff4..a14b7b9 100644
--- a/docs/markdown/legal.md
+++ b/docs/markdown/legal.md
@@ -12,7 +12,7 @@ specific permission. It is not licensed under the same terms as the
rest of the project.
If you are a third party and want to use the Meson logo, you must
-first must obtain written permission from Jussi Pakkanen.
+first obtain written permission from Jussi Pakkanen.
## Website licensing
diff --git a/docs/markdown/snippets/dist_not_tests.md b/docs/markdown/snippets/dist_not_tests.md
deleted file mode 100644
index f9c971e..0000000
--- a/docs/markdown/snippets/dist_not_tests.md
+++ /dev/null
@@ -1,5 +0,0 @@
-## meson dist --no-tests
-
-`meson dist` has a new option `--no-tests` to skip build and tests of generated
-packages. It can be used to not waste time for example when done in CI that
-already does its own testing.
diff --git a/docs/markdown/snippets/keyval.md b/docs/markdown/snippets/keyval.md
new file mode 100644
index 0000000..895de9b
--- /dev/null
+++ b/docs/markdown/snippets/keyval.md
@@ -0,0 +1,7 @@
+## `unstable-keyval` is now stable `keyval`
+
+The `unstable-keyval` has been renamed to `keyval` and now promises stability
+guarantees.
+
+Meson will print a warning when you load an `unstable-` module that has been
+stabilised (so `unstable-keyval` is still accepted for example).
diff --git a/docs/markdown/snippets/per_subproject.md b/docs/markdown/snippets/per_subproject.md
new file mode 100644
index 0000000..6de6068
--- /dev/null
+++ b/docs/markdown/snippets/per_subproject.md
@@ -0,0 +1,4 @@
+## Per subproject `warning_level` option
+
+`warning_level` can now be defined per subproject, in the same way as
+`default_library` and `werror`.
diff --git a/docs/markdown/snippets/project_options_in_machine_files.md b/docs/markdown/snippets/project_options_in_machine_files.md
new file mode 100644
index 0000000..8dab951
--- /dev/null
+++ b/docs/markdown/snippets/project_options_in_machine_files.md
@@ -0,0 +1,52 @@
+## Project and built-in options can be set in native or cross files
+
+A new set of sections has been added to the cross and native files, `[project
+options]` and `[<subproject_name>:project options]`, where `subproject_name`
+is the name of a subproject. Any options that are allowed in the project can
+be set from this section. They have the lowest precedent, and will be
+overwritten by command line arguments.
+
+
+```meson
+option('foo', type : 'string', value : 'foo')
+```
+
+```ini
+[project options]
+foo = 'other val'
+```
+
+```console
+meson build --native-file my.ini
+```
+
+Will result in the option foo having the value `other val`.
+
+```console
+meson build --native-file my.ini -Dfoo='different val'
+```
+
+Will result in the option foo having the value `different val`.
+
+
+Subproject options are assigned like this:
+
+```ini
+[zlib:project options]
+foo = 'some val'
+```
+
+Additionally meson level options can be set in the same way, using the
+`[built-in options]` section.
+
+```ini
+[built-in options]
+c_std = 'c99'
+```
+
+These options can also be set on a per-subproject basis, although only
+`default_library` and `werror` can currently be set:
+```ini
+[zlib:built-in options]
+default_library = 'static'
+```
diff --git a/docs/markdown/snippets/wrap_fallback.md b/docs/markdown/snippets/wrap_fallback.md
deleted file mode 100644
index 9b71143..0000000
--- a/docs/markdown/snippets/wrap_fallback.md
+++ /dev/null
@@ -1,4 +0,0 @@
-## Wrap fallback URL
-
-Wrap files can now define `source_fallback_url` and `patch_fallback_url` to be
-used in case the main server is temporaly down.
diff --git a/docs/markdown_dynamic/Commands.md b/docs/markdown_dynamic/Commands.md
new file mode 100644
index 0000000..a35b4da
--- /dev/null
+++ b/docs/markdown_dynamic/Commands.md
@@ -0,0 +1,296 @@
+# Command-line commands
+
+There are two different ways of invoking Meson. First, you can run it directly
+from the source tree with the command `/path/to/source/meson.py`. Meson may
+also be installed in which case the command is simply `meson`. In this manual
+we only use the latter format for simplicity.
+
+Meson is invoked using the following syntax:
+`meson [COMMAND] [COMMAND_OPTIONS]`
+
+This section describes all available commands and some of their optional arguments.
+The most common workflow is to run [`setup`](#setup), followed by [`compile`](#compile), and then [`install`](#install).
+
+For the full list of all available options for a specific command use the following syntax:
+`meson COMMAND --help`
+
+### configure
+
+```
+{{ cmd_help['configure']['usage'] }}
+```
+
+Changes options of a configured meson project.
+
+```
+{{ cmd_help['configure']['arguments'] }}
+```
+
+Most arguments are the same as in [`setup`](#setup).
+
+Note: reconfiguring project will not reset options to their default values (even if they were changed in `meson.build`).
+
+#### Examples:
+
+List all available options:
+```
+meson configure builddir
+```
+
+Change value of a single option:
+```
+meson configure builddir -Doption=new_value
+```
+
+### compile
+
+*(since 0.54.0)*
+
+```
+{{ cmd_help['compile']['usage'] }}
+```
+
+Builds a default or a specified target of a configured meson project.
+
+```
+{{ cmd_help['compile']['arguments'] }}
+```
+
+`--verbose` argument is available since 0.55.0.
+
+#### Targets
+
+*(since 0.55.0)*
+
+`TARGET` has the following syntax `[PATH/]NAME[:TYPE]`, where:
+- `NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`).
+- `PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`.
+- `TYPE`: type of the target. Can be one of the following: 'executable', 'static_library', 'shared_library', 'shared_module', 'custom', 'run', 'jar'.
+
+`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`.
+
+#### Backend specific arguments
+
+*(since 0.55.0)*
+
+`BACKEND-args` use the following syntax:
+
+If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command:
+
+```
+$ meson compile --ninja-args=-n,-d,explain
+```
+
+would add `-n`, `-d` and `explain` arguments to ninja invocation.
+
+If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this:
+
+```
+$ meson compile "--ninja-args=['a,b', 'c d']"
+```
+
+#### Examples:
+
+Build the project:
+```
+meson compile -C builddir
+```
+
+Execute a dry run on ninja backend with additional debug info:
+```
+meson compile --ninja-args=-n,-d,explain
+```
+
+Build three targets: two targets that have the same `foo` name, but different type, and a `bar` target:
+```
+meson compile foo:shared_library foo:static_library bar
+```
+
+Produce a coverage html report (if available):
+```
+meson compile coverage-html
+```
+
+### dist
+
+*(since 0.52.0)*
+
+```
+{{ cmd_help['dist']['usage'] }}
+```
+
+Generates a release archive from the current source tree.
+
+```
+{{ cmd_help['dist']['arguments'] }}
+```
+
+See [notes about creating releases](Creating-releases.md) for more info.
+
+#### Examples:
+
+Create a release archive:
+```
+meson dist -C builddir
+```
+
+### init
+
+*(since 0.45.0)*
+
+```
+{{ cmd_help['init']['usage'] }}
+```
+
+Creates a basic set of build files based on a template.
+
+```
+{{ cmd_help['init']['arguments'] }}
+```
+
+#### Examples:
+
+Create a project in `sourcedir`:
+```
+meson init -C sourcedir
+```
+
+### introspect
+
+```
+{{ cmd_help['introspect']['usage'] }}
+```
+
+Displays information about a configured meson project.
+
+```
+{{ cmd_help['introspect']['arguments'] }}
+```
+
+#### Examples:
+
+Display basic information about a configured project in `builddir`:
+```
+meson introspect builddir
+```
+
+### install
+
+*(since 0.47.0)*
+
+```
+{{ cmd_help['install']['usage'] }}
+```
+
+Installs the project to the prefix specified in [`setup`](#setup).
+
+```
+{{ cmd_help['install']['arguments'] }}
+```
+
+See [the installation documentation](Installing.md) for more info.
+
+#### Examples:
+
+Install project to `prefix`:
+```
+meson install -C builddir
+```
+
+Install project to `$DESTDIR/prefix`:
+```
+DESTDIR=/path/to/staging/area meson install -C builddir
+```
+
+### rewrite
+
+*(since 0.50.0)*
+
+```
+{{ cmd_help['rewrite']['usage'] }}
+```
+
+Modifies the meson project.
+
+```
+{{ cmd_help['rewrite']['arguments'] }}
+```
+
+See [the meson file rewriter documentation](Rewriter.md) for more info.
+
+### setup
+
+```
+{{ cmd_help['setup']['usage'] }}
+```
+
+Configures a build directory for the meson project.
+
+This is the default meson command (invoked if there was no COMMAND supplied).
+
+```
+{{ cmd_help['setup']['arguments'] }}
+```
+
+See [meson introduction page](Running-Meson.md#configuring-the-build-directory) for more info.
+
+#### Examples:
+
+Configures `builddir` with default values:
+```
+meson setup builddir
+```
+
+### subprojects
+
+*(since 0.49.0)*
+
+```
+{{ cmd_help['subprojects']['usage'] }}
+```
+
+Manages subprojects of the meson project.
+
+```
+{{ cmd_help['subprojects']['arguments'] }}
+```
+
+### test
+
+```
+{{ cmd_help['test']['usage'] }}
+```
+
+Run tests for the configured meson project.
+
+```
+{{ cmd_help['test']['arguments'] }}
+```
+
+See [the unit test documentation](Unit-tests.md) for more info.
+
+#### Examples:
+
+Run tests for the project:
+```
+meson test -C builddir
+```
+
+Run only `specific_test_1` and `specific_test_2`:
+```
+meson test -C builddir specific_test_1 specific_test_2
+```
+
+### wrap
+
+```
+{{ cmd_help['wrap']['usage'] }}
+```
+
+A utility to manage WrapDB dependencies.
+
+```
+{{ cmd_help['wrap']['arguments'] }}
+```
+
+See [the WrapDB tool documentation](Using-wraptool.md) for more info.
diff --git a/docs/meson.build b/docs/meson.build
index 32aab07..c07a200 100644
--- a/docs/meson.build
+++ b/docs/meson.build
@@ -1,16 +1,40 @@
project('Meson documentation', version: '1.0')
+cur_bdir = meson.current_build_dir()
+
+# Copy all files to build dir, since HotDoc uses relative paths
+run_command(
+ files('../tools/copy_files.py'),
+ '-C', meson.current_source_dir(),
+ '--output-dir', cur_bdir,
+ 'markdown', 'theme', 'sitemap.txt',
+ check: true)
+
+# Only the script knows which files are being generated
+docs_gen = custom_target(
+ 'gen_docs',
+ input: files('markdown/index.md'),
+ output: 'gen_docs.dummy',
+ command: [
+ files('../tools/regenerate_docs.py'),
+ '--output-dir', join_paths(cur_bdir, 'markdown'),
+ '--dummy-output-file', '@OUTPUT@',
+ ],
+ build_by_default: true,
+ install: false)
+
hotdoc = import('hotdoc')
documentation = hotdoc.generate_doc(meson.project_name(),
project_version: meson.project_version(),
- sitemap: 'sitemap.txt',
+ sitemap: join_paths(cur_bdir, 'sitemap.txt'),
build_by_default: true,
- index: 'markdown/index.md',
+ depends: docs_gen,
+ index: join_paths(cur_bdir, 'markdown/index.md'),
install: false,
extra_assets: ['images/'],
- include_paths: ['markdown'],
+ include_paths: [join_paths(cur_bdir, 'markdown')],
default_license: 'CC-BY-SAv4.0',
- html_extra_theme: join_paths('theme', 'extra'),
+ html_extra_theme: join_paths(cur_bdir, 'theme', 'extra'),
git_upload_repository: 'git@github.com:jpakkane/jpakkane.github.io.git',
edit_on_github_repository: 'https://github.com/mesonbuild/meson/',
syntax_highlighting_activate: true,
diff --git a/docs/sitemap.txt b/docs/sitemap.txt
index 3ac138e..ac74870 100644
--- a/docs/sitemap.txt
+++ b/docs/sitemap.txt
@@ -5,6 +5,7 @@ index.md
Manual.md
Overview.md
Running-Meson.md
+ Commands.md
Builtin-options.md
Using-with-Visual-Studio.md
Meson-sample.md
@@ -48,7 +49,7 @@ index.md
SourceSet-module.md
Windows-module.md
Cuda-module.md
- Kconfig-module.md
+ Keyval-module.md
Java.md
Vala.md
D.md
@@ -72,11 +73,13 @@ index.md
Adding-new-projects-to-wrapdb.md
Using-the-WrapDB.md
Using-wraptool.md
+ Wrap-maintainer-tools.md
Wrap-best-practices-and-tips.md
Wrap-review-guidelines.md
Shipping-prebuilt-binaries-as-wraps.md
fallback-wraptool.md
Release-notes.md
+ Release-notes-for-0.55.0.md
Release-notes-for-0.54.0.md
Release-notes-for-0.53.0.md
Release-notes-for-0.52.0.md
@@ -116,5 +119,6 @@ index.md
Using-multiple-build-directories.md
Vs-External.md
Contributing.md
+ MesonCI.md
legal.md
Videos.md
diff --git a/docs/theme/extra/templates/navbar_links.html b/docs/theme/extra/templates/navbar_links.html
index 6980f81..832bd2c 100644
--- a/docs/theme/extra/templates/navbar_links.html
+++ b/docs/theme/extra/templates/navbar_links.html
@@ -14,7 +14,7 @@
("Hotdoc-module.html","Hotdoc"), \
("i18n-module.html","i18n"), \
("Icestorm-module.html","Icestorm"), \
- ("Kconfig-module.html","kconfig"), \
+ ("Keyval-module.html","Keyval"), \
("Pkgconfig-module.html","Pkgconfig"), \
("Python-module.html","Python"), \
("Python-3-module.html","Python 3"), \
diff --git a/man/meson.1 b/man/meson.1
index eb3fec2..2054afa 100644
--- a/man/meson.1
+++ b/man/meson.1
@@ -1,4 +1,4 @@
-.TH MESON "1" "March 2020" "meson 0.54.0" "User Commands"
+.TH MESON "1" "July 2020" "meson 0.55.0" "User Commands"
.SH NAME
meson - a high productivity build system
.SH DESCRIPTION
diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py
new file mode 100644
index 0000000..fd4de96
--- /dev/null
+++ b/mesonbuild/arglist.py
@@ -0,0 +1,331 @@
+# Copyright 2012-2020 The Meson development team
+# Copyright © 2020 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import lru_cache
+import collections
+import enum
+import os
+import re
+import typing as T
+
+from . import mesonlib
+
+if T.TYPE_CHECKING:
+ from .linkers import StaticLinker
+ from .compilers import Compiler
+
+UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str]
+# execinfo is a compiler lib on FreeBSD and NetBSD
+if mesonlib.is_freebsd() or mesonlib.is_netbsd():
+ UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo')
+
+
+class Dedup(enum.Enum):
+
+ """What kind of deduplication can be done to compiler args.
+
+ OVERRIDEN - Whether an argument can be 'overridden' by a later argument.
+ For example, -DFOO defines FOO and -UFOO undefines FOO. In this case,
+ we can safely remove the previous occurrence and add a new one. The
+ same is true for include paths and library paths with -I and -L.
+ UNIQUE - Arguments that once specified cannot be undone, such as `-c` or
+ `-pipe`. New instances of these can be completely skipped.
+ NO_DEDUP - Whether it matters where or how many times on the command-line
+ a particular argument is present. This can matter for symbol
+ resolution in static or shared libraries, so we cannot de-dup or
+ reorder them.
+ """
+
+ NO_DEDUP = 0
+ UNIQUE = 1
+ OVERRIDEN = 2
+
+
+class CompilerArgs(collections.abc.MutableSequence):
+ '''
+ List-like class that manages a list of compiler arguments. Should be used
+ while constructing compiler arguments from various sources. Can be
+ operated with ordinary lists, so this does not need to be used
+ everywhere.
+
+ All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
+    and can be converted to the native type of each compiler by using the
+ .to_native() method to which you must pass an instance of the compiler or
+ the compiler class.
+
+ New arguments added to this class (either with .append(), .extend(), or +=)
+ are added in a way that ensures that they override previous arguments.
+ For example:
+
+ >>> a = ['-Lfoo', '-lbar']
+ >>> a += ['-Lpho', '-lbaz']
+ >>> print(a)
+ ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
+
+ Arguments will also be de-duped if they can be de-duped safely.
+
+ Note that because of all this, this class is not commutative and does not
+    preserve the order of arguments when it is safe not to. For example:
+ >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
+ ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
+ >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
+ ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
+
+ '''
+ # Arg prefixes that override by prepending instead of appending
+ prepend_prefixes = () # type: T.Tuple[str, ...]
+
+    # Arg prefixes and args that must be de-duped as Dedup.OVERRIDEN
+ dedup2_prefixes = () # type: T.Tuple[str, ...]
+ dedup2_suffixes = () # type: T.Tuple[str, ...]
+ dedup2_args = () # type: T.Tuple[str, ...]
+
+    # Arg prefixes and args that must be de-duped as Dedup.UNIQUE
+ #
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = () # type: T.Tuple[str, ...]
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...]
+ # Match a .so of the form path/to/libfoo.so.0.1.0
+ # Only UNIX shared libraries require this. Others have a fixed extension.
+ dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+ dedup1_args = () # type: T.Tuple[str, ...]
+ # In generate_link() we add external libs without de-dup, but we must
+ # *always* de-dup these because they're special arguments to the linker
+ # TODO: these should probably move too
+    always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type: T.Tuple[str, ...]
+
+ def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'],
+ iterable: T.Optional[T.Iterable[str]] = None):
+ self.compiler = compiler
+ self._container = list(iterable) if iterable is not None else [] # type: T.List[str]
+ self.pre = collections.deque() # type: T.Deque[str]
+ self.post = collections.deque() # type: T.Deque[str]
+
+ # Flush the saved pre and post list into the _container list
+ #
+ # This correctly deduplicates the entries after _can_dedup definition
+    # Note: This function is designed to work without delete operations, as deletions significantly worsen performance.
+ def flush_pre_post(self) -> None:
+ pre_flush = collections.deque() # type: T.Deque[str]
+ pre_flush_set = set() # type: T.Set[str]
+ post_flush = collections.deque() # type: T.Deque[str]
+ post_flush_set = set() # type: T.Set[str]
+
+        # The two lists are walked from front to back so that deduplication needs no removals
+ for a in self.pre:
+ dedup = self._can_dedup(a)
+ if a not in pre_flush_set:
+ pre_flush.append(a)
+ if dedup is Dedup.OVERRIDEN:
+ pre_flush_set.add(a)
+ for a in reversed(self.post):
+ dedup = self._can_dedup(a)
+ if a not in post_flush_set:
+ post_flush.appendleft(a)
+ if dedup is Dedup.OVERRIDEN:
+ post_flush_set.add(a)
+
+        # pre and post will overwrite every matching element that is in the container,
+        # so only copy over args that are in _container but not in either flush set
+
+ for a in self._container:
+ if a not in post_flush_set and a not in pre_flush_set:
+ pre_flush.append(a)
+
+ self._container = list(pre_flush) + list(post_flush)
+ self.pre.clear()
+ self.post.clear()
+
+ def __iter__(self) -> T.Iterator[str]:
+ self.flush_pre_post()
+ return iter(self._container)
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: int) -> str: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811
+ pass
+
+ def __getitem__(self, index): # noqa: F811
+ self.flush_pre_post()
+ return self._container[index]
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: int, value: str) -> None: # noqa: F811
+ pass
+
+ @T.overload # noqa: F811
+ def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811
+ pass
+
+ def __setitem__(self, index, value) -> None: # noqa: F811
+ self.flush_pre_post()
+ self._container[index] = value
+
+ def __delitem__(self, index: T.Union[int, slice]) -> None:
+ self.flush_pre_post()
+ del self._container[index]
+
+ def __len__(self) -> int:
+ return len(self._container) + len(self.pre) + len(self.post)
+
+ def insert(self, index: int, value: str) -> None:
+ self.flush_pre_post()
+ self._container.insert(index, value)
+
+ def copy(self) -> 'CompilerArgs':
+ self.flush_pre_post()
+ return type(self)(self.compiler, self._container.copy())
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _can_dedup(cls, arg: str) -> Dedup:
+ """Returns whether the argument can be safely de-duped.
+
+ In addition to these, we handle library arguments specially.
+        With GNU ld, we surround library arguments with -Wl,--start/end-group
+ to recursively search for symbols in the libraries. This is not needed
+ with other linkers.
+ """
+
+ # A standalone argument must never be deduplicated because it is
+ # defined by what comes _after_ it. Thus dedupping this:
+ # -D FOO -D BAR
+ # would yield either
+ # -D FOO BAR
+ # or
+ # FOO -D BAR
+ # both of which are invalid.
+ if arg in cls.dedup2_prefixes:
+ return Dedup.NO_DEDUP
+ if arg in cls.dedup2_args or \
+ arg.startswith(cls.dedup2_prefixes) or \
+ arg.endswith(cls.dedup2_suffixes):
+ return Dedup.OVERRIDEN
+ if arg in cls.dedup1_args or \
+ arg.startswith(cls.dedup1_prefixes) or \
+ arg.endswith(cls.dedup1_suffixes) or \
+ re.search(cls.dedup1_regex, arg):
+ return Dedup.UNIQUE
+ return Dedup.NO_DEDUP
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def _should_prepend(cls, arg: str) -> bool:
+ return arg.startswith(cls.prepend_prefixes)
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+ return self.compiler.unix_args_to_native(new._container)
+
+ def append_direct(self, arg: str) -> None:
+ '''
+ Append the specified argument without any reordering or de-dup except
+ for absolute paths to libraries, etc, which can always be de-duped
+ safely.
+ '''
+ self.flush_pre_post()
+ if os.path.isabs(arg):
+ self.append(arg)
+ else:
+ self._container.append(arg)
+
+ def extend_direct(self, iterable: T.Iterable[str]) -> None:
+ '''
+ Extend using the elements in the specified iterable without any
+ reordering or de-dup except for absolute paths where the order of
+ include search directories is not relevant
+ '''
+ self.flush_pre_post()
+ for elem in iterable:
+ self.append_direct(elem)
+
+ def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
+ normal_flags = []
+ lflags = []
+ for i in iterable:
+ if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
+ lflags.append(i)
+ else:
+ normal_flags.append(i)
+ self.extend(normal_flags)
+ self.extend_direct(lflags)
+
+ def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = self.copy()
+ new += args
+ return new
+
+ def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ '''
+ Add two CompilerArgs while taking into account overriding of arguments
+ and while preserving the order of arguments as much as possible
+ '''
+ tmp_pre = collections.deque() # type: T.Deque[str]
+ if not isinstance(args, collections.abc.Iterable):
+ raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args))
+ for arg in args:
+ # If the argument can be de-duped, do it either by removing the
+ # previous occurrence of it and adding a new one, or not adding the
+ # new occurrence.
+ dedup = self._can_dedup(arg)
+ if dedup is Dedup.UNIQUE:
+ # Argument already exists and adding a new instance is useless
+ if arg in self._container or arg in self.pre or arg in self.post:
+ continue
+ if self._should_prepend(arg):
+ tmp_pre.appendleft(arg)
+ else:
+ self.post.append(arg)
+ self.pre.extendleft(tmp_pre)
+        # pre and post are merged into _container later, before an iter call
+ return self
+
+ def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
+ self.flush_pre_post()
+ new = type(self)(self.compiler, args)
+ new += self
+ return new
+
+ def __eq__(self, other: T.Any) -> T.Union[bool]:
+ self.flush_pre_post()
+ # Only allow equality checks against other CompilerArgs and lists instances
+ if isinstance(other, CompilerArgs):
+ return self.compiler == other.compiler and self._container == other._container
+ elif isinstance(other, list):
+ return self._container == other
+ return NotImplemented
+
+ def append(self, arg: str) -> None:
+ self.__iadd__([arg])
+
+ def extend(self, args: T.Iterable[str]) -> None:
+ self.__iadd__(args)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self._container)
diff --git a/mesonbuild/ast/__init__.py b/mesonbuild/ast/__init__.py
index 48de523..4fb56cb 100644
--- a/mesonbuild/ast/__init__.py
+++ b/mesonbuild/ast/__init__.py
@@ -20,6 +20,7 @@ __all__ = [
'AstInterpreter',
'AstIDGenerator',
'AstIndentationGenerator',
+ 'AstJSONPrinter',
'AstVisitor',
'AstPrinter',
'IntrospectionInterpreter',
@@ -30,4 +31,4 @@ from .interpreter import AstInterpreter
from .introspection import IntrospectionInterpreter, build_target_functions
from .visitor import AstVisitor
from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator
-from .printer import AstPrinter
+from .printer import AstPrinter, AstJSONPrinter
diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py
index cc5c94c..6a826ef 100644
--- a/mesonbuild/ast/interpreter.py
+++ b/mesonbuild/ast/interpreter.py
@@ -297,6 +297,11 @@ class AstInterpreter(interpreterbase.InterpreterBase):
elif isinstance(node, ElementaryNode):
result = node.value
+ elif isinstance(node, NotNode):
+ result = self.resolve_node(node.value, include_unknown_args, id_loop_detect)
+ if isinstance(result, bool):
+ result = not result
+
elif isinstance(node, ArrayNode):
result = [x for x in node.args.arguments]
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 142c219..6e6927f 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -120,7 +120,7 @@ class IntrospectionInterpreter(AstInterpreter):
self.do_subproject(i)
self.coredata.init_backend_options(self.backend)
- options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
+ options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')}
self.coredata.set_options(options)
self.func_add_languages(None, proj_langs, None)
diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py
index 39e2cca..a57ba20 100644
--- a/mesonbuild/ast/printer.py
+++ b/mesonbuild/ast/printer.py
@@ -18,6 +18,7 @@
from .. import mparser
from . import AstVisitor
import re
+import typing as T
arithmic_map = {
'add': '+',
@@ -155,7 +156,7 @@ class AstPrinter(AstVisitor):
self.append_padded(prefix + 'if', node)
prefix = 'el'
i.accept(self)
- if node.elseblock:
+ if not isinstance(node.elseblock, mparser.EmptyNode):
self.append('else', node)
node.elseblock.accept(self)
self.append('endif', node)
@@ -199,3 +200,160 @@ class AstPrinter(AstVisitor):
self.result = re.sub(r', \n$', '\n', self.result)
else:
self.result = re.sub(r', $', '', self.result)
+
+class AstJSONPrinter(AstVisitor):
+ def __init__(self) -> None:
+ self.result = {} # type: T.Dict[str, T.Any]
+ self.current = self.result
+
+ def _accept(self, key: str, node: mparser.BaseNode) -> None:
+ old = self.current
+ data = {} # type: T.Dict[str, T.Any]
+ self.current = data
+ node.accept(self)
+ self.current = old
+ self.current[key] = data
+
+ def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None:
+ old = self.current
+ datalist = [] # type: T.List[T.Dict[str, T.Any]]
+ for i in nodes:
+ self.current = {}
+ i.accept(self)
+ datalist += [self.current]
+ self.current = old
+ self.current[key] = datalist
+
+ def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None:
+ old = self.current
+ self.current = data
+ node.accept(self)
+ self.current = old
+
+ def setbase(self, node: mparser.BaseNode) -> None:
+ self.current['node'] = type(node).__name__
+ self.current['lineno'] = node.lineno
+ self.current['colno'] = node.colno
+ self.current['end_lineno'] = node.end_lineno
+ self.current['end_colno'] = node.end_colno
+
+ def visit_default_func(self, node: mparser.BaseNode) -> None:
+ self.setbase(node)
+
+ def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None:
+ self.current['value'] = node.value
+ self.setbase(node)
+
+ def visit_BooleanNode(self, node: mparser.BooleanNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_IdNode(self, node: mparser.IdNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_NumberNode(self, node: mparser.NumberNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_StringNode(self, node: mparser.StringNode) -> None:
+ self.gen_ElementaryNode(node)
+
+ def visit_ArrayNode(self, node: mparser.ArrayNode) -> None:
+ self._accept('args', node.args)
+ self.setbase(node)
+
+ def visit_DictNode(self, node: mparser.DictNode) -> None:
+ self._accept('args', node.args)
+ self.setbase(node)
+
+ def visit_OrNode(self, node: mparser.OrNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.setbase(node)
+
+ def visit_AndNode(self, node: mparser.AndNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.setbase(node)
+
+ def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.current['ctype'] = node.ctype
+ self.setbase(node)
+
+ def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None:
+ self._accept('left', node.left)
+ self._accept('right', node.right)
+ self.current['op'] = arithmic_map[node.operation]
+ self.setbase(node)
+
+ def visit_NotNode(self, node: mparser.NotNode) -> None:
+ self._accept('right', node.value)
+ self.setbase(node)
+
+ def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None:
+ self._accept_list('lines', node.lines)
+ self.setbase(node)
+
+ def visit_IndexNode(self, node: mparser.IndexNode) -> None:
+ self._accept('object', node.iobject)
+ self._accept('index', node.index)
+ self.setbase(node)
+
+ def visit_MethodNode(self, node: mparser.MethodNode) -> None:
+ self._accept('object', node.source_object)
+ self._accept('args', node.args)
+ self.current['name'] = node.name
+ self.setbase(node)
+
+ def visit_FunctionNode(self, node: mparser.FunctionNode) -> None:
+ self._accept('args', node.args)
+ self.current['name'] = node.func_name
+ self.setbase(node)
+
+ def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None:
+ self._accept('value', node.value)
+ self.current['var_name'] = node.var_name
+ self.setbase(node)
+
+ def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None:
+ self._accept('value', node.value)
+ self.current['var_name'] = node.var_name
+ self.setbase(node)
+
+ def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None:
+ self._accept('items', node.items)
+ self._accept('block', node.block)
+ self.current['varnames'] = node.varnames
+ self.setbase(node)
+
+ def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None:
+ self._accept_list('ifs', node.ifs)
+ self._accept('else', node.elseblock)
+ self.setbase(node)
+
+ def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
+ self._accept('right', node.value)
+ self.setbase(node)
+
+ def visit_IfNode(self, node: mparser.IfNode) -> None:
+ self._accept('condition', node.condition)
+ self._accept('block', node.block)
+ self.setbase(node)
+
+ def visit_TernaryNode(self, node: mparser.TernaryNode) -> None:
+ self._accept('condition', node.condition)
+ self._accept('true', node.trueblock)
+ self._accept('false', node.falseblock)
+ self.setbase(node)
+
+ def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None:
+ self._accept_list('positional', node.arguments)
+ kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]]
+ for key, val in node.kwargs.items():
+ key_res = {} # type: T.Dict[str, T.Any]
+ val_res = {} # type: T.Dict[str, T.Any]
+ self._raw_accept(key, key_res)
+ self._raw_accept(val, val_res)
+ kwargs_list += [{'key': key_res, 'val': val_res}]
+ self.current['kwargs'] = kwargs_list
+ self.setbase(node)
diff --git a/mesonbuild/ast/visitor.py b/mesonbuild/ast/visitor.py
index 37be463..451020d 100644
--- a/mesonbuild/ast/visitor.py
+++ b/mesonbuild/ast/visitor.py
@@ -113,8 +113,7 @@ class AstVisitor:
self.visit_default_func(node)
for i in node.ifs:
i.accept(self)
- if node.elseblock:
- node.elseblock.accept(self)
+ node.elseblock.accept(self)
def visit_UMinusNode(self, node: mparser.UMinusNode) -> None:
self.visit_default_func(node)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 926a07d..86d20f7 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -12,24 +12,54 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os, pickle, re
+from collections import OrderedDict
+from functools import lru_cache
+from pathlib import Path
+import enum
+import json
+import os
+import pickle
+import re
+import shlex
+import subprocess
import textwrap
+import typing as T
+
from .. import build
from .. import dependencies
from .. import mesonlib
from .. import mlog
-import json
-import subprocess
from ..mesonlib import (
File, Language, MachineChoice, MesonException, OrderedSet,
OptionOverrideProxy, classify_unity_sources, unholder,
)
-from ..compilers import CompilerArgs, VisualStudioLikeCompiler
-from ..interpreter import Interpreter
-from collections import OrderedDict
-import shlex
-from functools import lru_cache
-import typing as T
+
+if T.TYPE_CHECKING:
+ from ..interpreter import Interpreter
+
+
+class TestProtocol(enum.Enum):
+
+ EXITCODE = 0
+ TAP = 1
+ GTEST = 2
+
+ @classmethod
+ def from_str(cls, string: str) -> 'TestProtocol':
+ if string == 'exitcode':
+ return cls.EXITCODE
+ elif string == 'tap':
+ return cls.TAP
+ elif string == 'gtest':
+ return cls.GTEST
+ raise MesonException('unknown test format {}'.format(string))
+
+ def __str__(self) -> str:
+ if self is self.EXITCODE:
+ return 'exitcode'
+ elif self is self.GTEST:
+ return 'gtest'
+ return 'tap'
class CleanTrees:
@@ -60,12 +90,13 @@ class InstallData:
self.mesonintrospect = mesonintrospect
class TargetInstallData:
- def __init__(self, fname, outdir, aliases, strip, install_name_mappings, install_rpath, install_mode, optional=False):
+ def __init__(self, fname, outdir, aliases, strip, install_name_mappings, rpath_dirs_to_remove, install_rpath, install_mode, optional=False):
self.fname = fname
self.outdir = outdir
self.aliases = aliases
self.strip = strip
self.install_name_mappings = install_name_mappings
+ self.rpath_dirs_to_remove = rpath_dirs_to_remove
self.install_rpath = install_rpath
self.install_mode = install_mode
self.optional = optional
@@ -84,11 +115,12 @@ class ExecutableSerialisation:
class TestSerialisation:
def __init__(self, name: str, project: str, suite: str, fname: T.List[str],
- is_cross_built: bool, exe_wrapper: T.Optional[build.Executable],
+ is_cross_built: bool, exe_wrapper: T.Optional[dependencies.ExternalProgram],
needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str],
env: build.EnvironmentVariables, should_fail: bool,
timeout: T.Optional[int], workdir: T.Optional[str],
- extra_paths: T.List[str], protocol: str, priority: int):
+ extra_paths: T.List[str], protocol: TestProtocol, priority: int,
+ cmd_is_built: bool):
self.name = name
self.project_name = project
self.suite = suite
@@ -107,8 +139,10 @@ class TestSerialisation:
self.protocol = protocol
self.priority = priority
self.needs_exe_wrapper = needs_exe_wrapper
+ self.cmd_is_built = cmd_is_built
+
-def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional[Interpreter] = None) -> T.Optional['Backend']:
+def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']:
if backend == 'ninja':
from . import ninjabackend
return ninjabackend.NinjaBackend(build, interpreter)
@@ -135,7 +169,7 @@ def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, i
# This class contains the basic functionality that is needed by all backends.
# Feel free to move stuff in and out of it as you see fit.
class Backend:
- def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']):
# Make it possible to construct a dummy backend
# This is used for introspection without a build directory
if build is None:
@@ -150,9 +184,9 @@ class Backend:
self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(),
self.environment.get_build_dir())
- def get_target_filename(self, t):
+ def get_target_filename(self, t, *, warn_multi_output: bool = True):
if isinstance(t, build.CustomTarget):
- if len(t.get_outputs()) != 1:
+ if warn_multi_output and len(t.get_outputs()) != 1:
mlog.warning('custom_target {!r} has more than one output! '
'Using the first one.'.format(t.name))
filename = t.get_outputs()[0]
@@ -197,7 +231,7 @@ class Backend:
return os.path.join(self.get_target_dir(target), target.get_filename())
elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)):
if not target.is_linkable_target():
- raise MesonException('Tried to link against custom target "%s", which is not linkable.' % target.name)
+ raise MesonException('Tried to link against custom target "{}", which is not linkable.'.format(target.name))
return os.path.join(self.get_target_dir(target), target.get_filename())
elif isinstance(target, build.Executable):
if target.import_filename:
@@ -228,7 +262,7 @@ class Backend:
return self.build_to_src
def get_target_private_dir(self, target):
- return os.path.join(self.get_target_dir(target), target.get_id())
+ return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p')
def get_target_private_dir_abs(self, target):
return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))
@@ -283,7 +317,7 @@ class Backend:
ofile = init_language_file(comp.get_default_suffix(), unity_file_number)
unity_file_number += 1
files_in_current = 0
- ofile.write('#include<%s>\n' % src)
+ ofile.write('#include<{}>\n'.format(src))
files_in_current += 1
if ofile:
ofile.close()
@@ -413,6 +447,46 @@ class Backend:
return True
return False
+ def get_external_rpath_dirs(self, target):
+ dirs = set()
+ args = []
+ # FIXME: is there a better way?
+ for lang in ['c', 'cpp']:
+ try:
+ args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang))
+ except Exception:
+ pass
+ # Match rpath formats:
+ # -Wl,-rpath=
+ # -Wl,-rpath,
+ rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
+ # Match solaris style compat runpath formats:
+ # -Wl,-R
+ # -Wl,-R,
+ runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
+ # Match symbols formats:
+ # -Wl,--just-symbols=
+ # -Wl,--just-symbols,
+ symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
+ for arg in args:
+ rpath_match = rpath_regex.match(arg)
+ if rpath_match:
+ for dir in rpath_match.group(1).split(':'):
+ dirs.add(dir)
+ runpath_match = runpath_regex.match(arg)
+ if runpath_match:
+ for dir in runpath_match.group(1).split(':'):
+ # The symbols arg is an rpath if the path is a directory
+ if Path(dir).is_dir():
+ dirs.add(dir)
+ symbols_match = symbols_regex.match(arg)
+ if symbols_match:
+ for dir in symbols_match.group(1).split(':'):
+ # Prevent usage of --just-symbols to specify rpath
+ if Path(dir).is_dir():
+ raise MesonException('Invalid arg for --just-symbols, {} is a directory.'.format(dir))
+ return dirs
+
def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True):
paths = []
for dep in target.external_deps:
@@ -427,6 +501,9 @@ class Backend:
if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
# No point in adding system paths.
continue
+ # Don't remove rpaths specified in LDFLAGS.
+ if libdir in self.get_external_rpath_dirs(target):
+ continue
# Windows doesn't support rpaths, but we use this function to
# emulate rpaths by setting PATH, so also accept DLLs here
if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']:
@@ -446,8 +523,15 @@ class Backend:
result = OrderedSet()
result.add('meson-out')
result.update(self.rpaths_for_bundled_shared_libraries(target))
+ target.rpath_dirs_to_remove.update([d.encode('utf8') for d in result])
return tuple(result)
+ @staticmethod
+ def canonicalize_filename(fname):
+ for ch in ('/', '\\', ':'):
+ fname = fname.replace(ch, '_')
+ return fname
+
def object_filename_from_source(self, target, source):
assert isinstance(source, mesonlib.File)
build_dir = self.environment.get_build_dir()
@@ -478,7 +562,7 @@ class Backend:
source = os.path.relpath(os.path.join(build_dir, rel_src),
os.path.join(self.environment.get_source_dir(), target.get_subdir()))
machine = self.environment.machines[target.for_machine]
- return source.replace('/', '_').replace('\\', '_') + '.' + machine.get_object_suffix()
+ return self.canonicalize_filename(source) + '.' + machine.get_object_suffix()
def determine_ext_objs(self, extobj, proj_dir_to_build_root):
result = []
@@ -538,14 +622,14 @@ class Backend:
def create_msvc_pch_implementation(self, target, lang, pch_header):
# We have to include the language in the file name, otherwise
# pch.c and pch.cpp will both end up as pch.obj in VS backends.
- impl_name = 'meson_pch-%s.%s' % (lang, lang)
+ impl_name = 'meson_pch-{}.{}'.format(lang, lang)
pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name)
# Make sure to prepend the build dir, since the working directory is
# not defined. Otherwise, we might create the file in the wrong path.
pch_file = os.path.join(self.build_dir, pch_rel_to_build)
os.makedirs(os.path.dirname(pch_file), exist_ok=True)
- content = '#include "%s"' % os.path.basename(pch_header)
+ content = '#include "{}"'.format(os.path.basename(pch_header))
pch_file_tmp = pch_file + '.tmp'
with open(pch_file_tmp, 'w') as f:
f.write(content)
@@ -554,36 +638,20 @@ class Backend:
@staticmethod
def escape_extra_args(compiler, args):
- # No extra escaping/quoting needed when not running on Windows
- if not mesonlib.is_windows():
- return args
+ # all backslashes in defines are doubly-escaped
extra_args = []
- # Compiler-specific escaping is needed for -D args but not for any others
- if isinstance(compiler, VisualStudioLikeCompiler):
- # MSVC needs escaping when a -D argument ends in \ or \"
- for arg in args:
- if arg.startswith('-D') or arg.startswith('/D'):
- # Without extra escaping for these two, the next character
- # gets eaten
- if arg.endswith('\\'):
- arg += '\\'
- elif arg.endswith('\\"'):
- arg = arg[:-2] + '\\\\"'
- extra_args.append(arg)
- else:
- # MinGW GCC needs all backslashes in defines to be doubly-escaped
- # FIXME: Not sure about Cygwin or Clang
- for arg in args:
- if arg.startswith('-D') or arg.startswith('/D'):
- arg = arg.replace('\\', '\\\\')
- extra_args.append(arg)
+ for arg in args:
+ if arg.startswith('-D') or arg.startswith('/D'):
+ arg = arg.replace('\\', '\\\\')
+ extra_args.append(arg)
+
return extra_args
def generate_basic_compiler_args(self, target, compiler, no_warn_args=False):
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
# starting from hard-coded defaults followed by build options and so on.
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
copt_proxy = self.get_compiler_options_for_target(target)[compiler.language]
# First, the trivial ones that are impossible to override.
@@ -665,7 +733,7 @@ class Backend:
args = []
for d in deps:
if not (d.is_linkable_target()):
- raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename())
+ raise RuntimeError('Tried to link with a non-library target "{}".'.format(d.get_basename()))
arg = self.get_target_filename_for_linking(d)
if not arg:
continue
@@ -706,6 +774,7 @@ class Backend:
for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False):
result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
for bdep in extra_bdeps:
+ prospectives.add(bdep)
prospectives.update(bdep.get_transitive_link_deps())
# Internal deps
for ld in prospectives:
@@ -738,7 +807,16 @@ class Backend:
# E.g. an external verifier or simulator program run on a generated executable.
# Can always be run without a wrapper.
test_for_machine = MachineChoice.BUILD
- is_cross = not self.environment.machines.matches_build_machine(test_for_machine)
+
+ # we allow passing compiled executables to tests, which may be cross built.
+ # We need to consider these as well when considering whether the target is cross or not.
+ for a in t.cmd_args:
+ if isinstance(a, build.BuildTarget):
+ if a.for_machine is MachineChoice.HOST:
+ test_for_machine = MachineChoice.HOST
+ break
+
+ is_cross = self.environment.is_cross_build(test_for_machine)
if is_cross and self.environment.need_exe_wrapper():
exe_wrapper = self.environment.get_exe_wrapper()
else:
@@ -751,6 +829,7 @@ class Backend:
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
else:
extra_paths = []
+
cmd_args = []
for a in unholder(t.cmd_args):
if isinstance(a, build.BuildTarget):
@@ -760,6 +839,11 @@ class Backend:
cmd_args.append(a)
elif isinstance(a, str):
cmd_args.append(a)
+ elif isinstance(a, build.Executable):
+ p = self.construct_target_rel_path(a, t.workdir)
+ if p == a.get_filename():
+ p = './' + p
+ cmd_args.append(p)
elif isinstance(a, build.Target):
cmd_args.append(self.construct_target_rel_path(a, t.workdir))
else:
@@ -768,7 +852,8 @@ class Backend:
exe_wrapper, self.environment.need_exe_wrapper(),
t.is_parallel, cmd_args, t.env,
t.should_fail, t.timeout, t.workdir,
- extra_paths, t.protocol, t.priority)
+ extra_paths, t.protocol, t.priority,
+ isinstance(exe, build.Executable))
arr.append(ts)
return arr
@@ -854,7 +939,7 @@ class Backend:
m = regex.search(arg)
while m is not None:
index = int(m.group(1))
- src = '@OUTPUT%d@' % index
+ src = '@OUTPUT{}@'.format(index)
arg = arg.replace(src, os.path.join(private_dir, output_list[index]))
m = regex.search(arg)
newargs.append(arg)
@@ -981,35 +1066,36 @@ class Backend:
elif not isinstance(i, str):
err_msg = 'Argument {0} is of unknown type {1}'
raise RuntimeError(err_msg.format(str(i), str(type(i))))
- elif '@SOURCE_ROOT@' in i:
- i = i.replace('@SOURCE_ROOT@', source_root)
- elif '@BUILD_ROOT@' in i:
- i = i.replace('@BUILD_ROOT@', build_root)
- elif '@DEPFILE@' in i:
- if target.depfile is None:
- msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
- 'keyword argument.'.format(target.name)
- raise MesonException(msg)
- dfilename = os.path.join(outdir, target.depfile)
- i = i.replace('@DEPFILE@', dfilename)
- elif '@PRIVATE_DIR@' in i:
- if target.absolute_paths:
- pdir = self.get_target_private_dir_abs(target)
- else:
- pdir = self.get_target_private_dir(target)
- i = i.replace('@PRIVATE_DIR@', pdir)
- elif '@PRIVATE_OUTDIR_' in i:
- match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
- if not match:
- msg = 'Custom target {!r} has an invalid argument {!r}' \
- ''.format(target.name, i)
- raise MesonException(msg)
- source = match.group(0)
- if match.group(1) is None and not target.absolute_paths:
- lead_dir = ''
- else:
- lead_dir = self.environment.get_build_dir()
- i = i.replace(source, os.path.join(lead_dir, outdir))
+ else:
+ if '@SOURCE_ROOT@' in i:
+ i = i.replace('@SOURCE_ROOT@', source_root)
+ if '@BUILD_ROOT@' in i:
+ i = i.replace('@BUILD_ROOT@', build_root)
+ if '@DEPFILE@' in i:
+ if target.depfile is None:
+ msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
+ 'keyword argument.'.format(target.name)
+ raise MesonException(msg)
+ dfilename = os.path.join(outdir, target.depfile)
+ i = i.replace('@DEPFILE@', dfilename)
+ if '@PRIVATE_DIR@' in i:
+ if target.absolute_paths:
+ pdir = self.get_target_private_dir_abs(target)
+ else:
+ pdir = self.get_target_private_dir(target)
+ i = i.replace('@PRIVATE_DIR@', pdir)
+ if '@PRIVATE_OUTDIR_' in i:
+ match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
+ if not match:
+ msg = 'Custom target {!r} has an invalid argument {!r}' \
+ ''.format(target.name, i)
+ raise MesonException(msg)
+ source = match.group(0)
+ if match.group(1) is None and not target.absolute_paths:
+ lead_dir = ''
+ else:
+ lead_dir = self.environment.get_build_dir()
+ i = i.replace(source, os.path.join(lead_dir, outdir))
cmd.append(i)
# Substitute the rest of the template strings
values = mesonlib.get_filenames_templates_dict(inputs, outputs)
@@ -1110,6 +1196,7 @@ class Backend:
mappings = t.get_link_deps_mapping(d.prefix, self.environment)
i = TargetInstallData(self.get_target_filename(t), outdirs[0],
t.get_aliases(), should_strip, mappings,
+ t.rpath_dirs_to_remove,
t.install_rpath, install_mode)
d.targets.append(i)
@@ -1127,14 +1214,14 @@ class Backend:
implib_install_dir = self.environment.get_import_lib_dir()
# Install the import library; may not exist for shared modules
i = TargetInstallData(self.get_target_filename_for_linking(t),
- implib_install_dir, {}, False, {}, '', install_mode,
+ implib_install_dir, {}, False, {}, set(), '', install_mode,
optional=isinstance(t, build.SharedModule))
d.targets.append(i)
if not should_strip and t.get_debug_filename():
debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename())
i = TargetInstallData(debug_file, outdirs[0],
- {}, False, {}, '',
+ {}, False, {}, set(), '',
install_mode, optional=True)
d.targets.append(i)
# Install secondary outputs. Only used for Vala right now.
@@ -1144,7 +1231,7 @@ class Backend:
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
+ i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode)
d.targets.append(i)
elif isinstance(t, build.CustomTarget):
# If only one install_dir is specified, assume that all
@@ -1157,7 +1244,7 @@ class Backend:
if num_outdirs == 1 and num_out > 1:
for output in t.get_outputs():
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode,
+ i = TargetInstallData(f, outdirs[0], {}, False, {}, set(), None, install_mode,
optional=not t.build_by_default)
d.targets.append(i)
else:
@@ -1166,7 +1253,7 @@ class Backend:
if outdir is False:
continue
f = os.path.join(self.get_target_dir(t), output)
- i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode,
+ i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode,
optional=not t.build_by_default)
d.targets.append(i)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index ef9b809..968ad7c 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -15,8 +15,10 @@ import typing as T
import os
import re
import pickle
+import shlex
import subprocess
from collections import OrderedDict
+from enum import Enum, unique
import itertools
from pathlib import PurePath, Path
from functools import lru_cache
@@ -28,9 +30,15 @@ from .. import build
from .. import mlog
from .. import dependencies
from .. import compilers
-from ..compilers import (Compiler, CompilerArgs, CCompiler, FortranCompiler,
- PGICCompiler, VisualStudioLikeCompiler)
-from ..linkers import ArLinker
+from ..arglist import CompilerArgs
+from ..compilers import (
+ Compiler, CCompiler,
+ DmdDCompiler,
+ FortranCompiler, PGICCompiler,
+ VisualStudioCsCompiler,
+ VisualStudioLikeCompiler,
+)
+from ..linkers import ArLinker, VisualStudioLinker
from ..mesonlib import (
File, LibType, Language, MachineChoice, MesonException, OrderedSet, PerMachine,
ProgressBar, quote_arg, unholder,
@@ -45,18 +53,67 @@ FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
+def cmd_quote(s):
+ # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks
+
+ # backslash escape any existing double quotes
+ # any existing backslashes preceding a quote are doubled
+ s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s)
+ # any terminal backslashes likewise need doubling
+ s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s)
+ # and double quote
+ s = '"{}"'.format(s)
+
+ return s
+
+def gcc_rsp_quote(s):
+ # see: the function buildargv() in libiberty
+ #
+ # this differs from sh-quoting in that a backslash *always* escapes the
+ # following character, even inside single quotes.
+
+ s = s.replace('\\', '\\\\')
+
+ return shlex.quote(s)
+
+# How ninja executes command lines differs between Unix and Windows
+# (see https://ninja-build.org/manual.html#ref_rule_command)
if mesonlib.is_windows():
- # FIXME: can't use quote_arg on Windows just yet; there are a number of existing workarounds
- # throughout the codebase that cumulatively make the current code work (see, e.g. Backend.escape_extra_args
- # and NinjaBuildElement.write below) and need to be properly untangled before attempting this
- quote_func = lambda s: '"{}"'.format(s)
- execute_wrapper = ['cmd', '/c']
+ quote_func = cmd_quote
+ execute_wrapper = ['cmd', '/c'] # unused
rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&']
else:
quote_func = quote_arg
execute_wrapper = []
rmfile_prefix = ['rm', '-f', '{}', '&&']
+def get_rsp_threshold():
+ '''Return a conservative estimate of the commandline size in bytes
+ above which a response file should be used. May be overridden for
+ debugging by setting environment variable MESON_RSP_THRESHOLD.'''
+
+ if mesonlib.is_windows():
+ # Usually 32k, but some projects might use cmd.exe,
+ # and that has a limit of 8k.
+ limit = 8192
+ else:
+ # On Linux, ninja always passes the commandline as a single
+ # big string to /bin/sh, and the kernel limits the size of a
+ # single argument; see MAX_ARG_STRLEN
+ limit = 131072
+ # Be conservative
+ limit = limit / 2
+ return int(os.environ.get('MESON_RSP_THRESHOLD', limit))
+
+# a conservative estimate of the command-line length limit
+rsp_threshold = get_rsp_threshold()
+
+# ninja variables whose value should remain unquoted. The value of these ninja
+# variables (or variables we use them in) is interpreted directly by ninja
+# (e.g. the value of the depfile variable is a pathname that ninja will read
+# from, etc.), so it must not be shell quoted.
+raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep'}
+
def ninja_quote(text, is_build_line=False):
if is_build_line:
qcs = ('$', ' ', ':')
@@ -67,12 +124,31 @@ def ninja_quote(text, is_build_line=False):
if '\n' in text:
errmsg = '''Ninja does not support newlines in rules. The content was:
-%s
+{}
-Please report this error with a test case to the Meson bug tracker.''' % text
+Please report this error with a test case to the Meson bug tracker.'''.format(text)
raise MesonException(errmsg)
return text
+@unique
+class Quoting(Enum):
+ both = 0
+ notShell = 1
+ notNinja = 2
+ none = 3
+
+class NinjaCommandArg:
+ def __init__(self, s, quoting = Quoting.both):
+ self.s = s
+ self.quoting = quoting
+
+ def __str__(self):
+ return self.s
+
+ @staticmethod
+ def list(l, q):
+ return [NinjaCommandArg(i, q) for i in l]
+
class NinjaComment:
def __init__(self, comment):
self.comment = comment
@@ -86,49 +162,127 @@ class NinjaComment:
class NinjaRule:
def __init__(self, rule, command, args, description,
- rspable = False, deps = None, depfile = None, extra = None):
+ rspable = False, deps = None, depfile = None, extra = None,
+ rspfile_quote_style = 'gcc'):
+
+ def strToCommandArg(c):
+ if isinstance(c, NinjaCommandArg):
+ return c
+
+ # deal with common cases here, so we don't have to explicitly
+ # annotate the required quoting everywhere
+ if c == '&&':
+ # shell constructs shouldn't be shell quoted
+ return NinjaCommandArg(c, Quoting.notShell)
+ if c.startswith('$'):
+ var = re.search(r'\$\{?(\w*)\}?', c).group(1)
+ if var not in raw_names:
+ # ninja variables shouldn't be ninja quoted, and their value
+ # is already shell quoted
+ return NinjaCommandArg(c, Quoting.none)
+ else:
+ # shell quote the use of ninja variables whose value must
+                # not be shell quoted (as it is also used by ninja)
+ return NinjaCommandArg(c, Quoting.notNinja)
+
+ return NinjaCommandArg(c)
+
self.name = rule
- self.command = command # includes args which never go into a rspfile
- self.args = args # args which will go into a rspfile, if used
+ self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile
+ self.args = list(map(strToCommandArg, args)) # args which will go into a rspfile, if used
self.description = description
self.deps = deps # depstyle 'gcc' or 'msvc'
self.depfile = depfile
self.extra = extra
self.rspable = rspable # if a rspfile can be used
self.refcount = 0
+ self.rsprefcount = 0
+ self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'gcc' or 'cl'
- def write(self, outfile):
- if not self.refcount:
- return
+ if self.depfile == '$DEPFILE':
+ self.depfile += '_UNQUOTED'
+
+ @staticmethod
+ def _quoter(x, qf = quote_func):
+ if isinstance(x, NinjaCommandArg):
+ if x.quoting == Quoting.none:
+ return x.s
+ elif x.quoting == Quoting.notNinja:
+ return qf(x.s)
+ elif x.quoting == Quoting.notShell:
+ return ninja_quote(x.s)
+ # fallthrough
+ return ninja_quote(qf(str(x)))
- outfile.write('rule %s\n' % self.name)
- if self.rspable:
- outfile.write(' command = %s @$out.rsp\n' % ' '.join(self.command))
- outfile.write(' rspfile = $out.rsp\n')
- outfile.write(' rspfile_content = %s\n' % ' '.join(self.args))
+ def write(self, outfile):
+ if self.rspfile_quote_style == 'cl':
+ rspfile_quote_func = cmd_quote
else:
- outfile.write(' command = %s\n' % ' '.join(self.command + self.args))
- if self.deps:
- outfile.write(' deps = %s\n' % self.deps)
- if self.depfile:
- outfile.write(' depfile = %s\n' % self.depfile)
- outfile.write(' description = %s\n' % self.description)
- if self.extra:
- for l in self.extra.split('\n'):
- outfile.write(' ')
- outfile.write(l)
- outfile.write('\n')
- outfile.write('\n')
+ rspfile_quote_func = gcc_rsp_quote
+
+ def rule_iter():
+ if self.refcount:
+ yield ''
+ if self.rsprefcount:
+ yield '_RSP'
+
+ for rsp in rule_iter():
+ outfile.write('rule {}{}\n'.format(self.name, rsp))
+ if rsp == '_RSP':
+ outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command])))
+ outfile.write(' rspfile = $out.rsp\n')
+ outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args])))
+ else:
+ outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)])))
+ if self.deps:
+ outfile.write(' deps = {}\n'.format(self.deps))
+ if self.depfile:
+ outfile.write(' depfile = {}\n'.format(self.depfile))
+ outfile.write(' description = {}\n'.format(self.description))
+ if self.extra:
+ for l in self.extra.split('\n'):
+ outfile.write(' ')
+ outfile.write(l)
+ outfile.write('\n')
+ outfile.write('\n')
+
+ def length_estimate(self, infiles, outfiles, elems):
+ # determine variables
+ # this order of actions only approximates ninja's scoping rules, as
+ # documented at: https://ninja-build.org/manual.html#ref_scope
+ ninja_vars = {}
+ for e in elems:
+ (name, value) = e
+ ninja_vars[name] = value
+ ninja_vars['deps'] = self.deps
+ ninja_vars['depfile'] = self.depfile
+ ninja_vars['in'] = infiles
+ ninja_vars['out'] = outfiles
+
+ # expand variables in command
+ command = ' '.join([self._quoter(x) for x in self.command + self.args])
+ expanded_command = ''
+ for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command):
+ chunk = m.group()
+ if chunk.startswith('$'):
+ chunk = chunk[1:]
+ chunk = re.sub(r'{(.*)}', r'\1', chunk)
+ chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty
+ chunk = ' '.join(chunk)
+ expanded_command += chunk
+
+ # determine command length
+ return len(expanded_command)
class NinjaBuildElement:
- def __init__(self, all_outputs, outfilenames, rule, infilenames, implicit_outs=None):
+ def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None):
self.implicit_outfilenames = implicit_outs or []
if isinstance(outfilenames, str):
self.outfilenames = [outfilenames]
else:
self.outfilenames = outfilenames
- assert(isinstance(rule, str))
- self.rule = rule
+ assert(isinstance(rulename, str))
+ self.rulename = rulename
if isinstance(infilenames, str):
self.infilenames = [infilenames]
else:
@@ -151,10 +305,39 @@ class NinjaBuildElement:
self.orderdeps.add(dep)
def add_item(self, name, elems):
+ # Always convert from GCC-style argument naming to the naming used by the
+ # current compiler. Also filter system include paths, deduplicate, etc.
+ if isinstance(elems, CompilerArgs):
+ elems = elems.to_native()
if isinstance(elems, str):
elems = [elems]
self.elems.append((name, elems))
+ if name == 'DEPFILE':
+ self.elems.append((name + '_UNQUOTED', elems))
+
+ def _should_use_rspfile(self):
+ # 'phony' is a rule built-in to ninja
+ if self.rulename == 'phony':
+ return False
+
+ if not self.rule.rspable:
+ return False
+
+ infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames])
+ outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames])
+
+ return self.rule.length_estimate(infilenames,
+ outfilenames,
+ self.elems) >= rsp_threshold
+
+ def count_rule_references(self):
+ if self.rulename != 'phony':
+ if self._should_use_rspfile():
+ self.rule.rsprefcount += 1
+ else:
+ self.rule.refcount += 1
+
def write(self, outfile):
self.check_outputs()
ins = ' '.join([ninja_quote(i, True) for i in self.infilenames])
@@ -162,7 +345,13 @@ class NinjaBuildElement:
implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames])
if implicit_outs:
implicit_outs = ' | ' + implicit_outs
- line = 'build {}{}: {} {}'.format(outs, implicit_outs, self.rule, ins)
+ use_rspfile = self._should_use_rspfile()
+ if use_rspfile:
+ rulename = self.rulename + '_RSP'
+ mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames)
+ else:
+ rulename = self.rulename
+ line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins)
if len(self.deps) > 0:
line += ' | ' + ' '.join([ninja_quote(x, True) for x in self.deps])
if len(self.orderdeps) > 0:
@@ -176,25 +365,24 @@ class NinjaBuildElement:
line = line.replace('\\', '/')
outfile.write(line)
- # ninja variables whose value should remain unquoted. The value of these
- # ninja variables (or variables we use them in) is interpreted directly
- # by ninja (e.g. the value of the depfile variable is a pathname that
- # ninja will read from, etc.), so it must not be shell quoted.
- raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'}
+ if use_rspfile:
+ if self.rule.rspfile_quote_style == 'cl':
+ qf = cmd_quote
+ else:
+ qf = gcc_rsp_quote
+ else:
+ qf = quote_func
for e in self.elems:
(name, elems) = e
should_quote = name not in raw_names
- line = ' %s = ' % name
+ line = ' {} = '.format(name)
newelems = []
for i in elems:
if not should_quote or i == '&&': # Hackety hack hack
quoter = ninja_quote
else:
- quoter = lambda x: ninja_quote(quote_func(x))
- i = i.replace('\\', '\\\\')
- if quote_func('') == '""':
- i = i.replace('"', '\\"')
+ quoter = lambda x: ninja_quote(qf(x))
newelems.append(quoter(i))
line += ' '.join(newelems)
line += '\n'
@@ -204,7 +392,7 @@ class NinjaBuildElement:
def check_outputs(self):
for n in self.outfilenames:
if n in self.all_outputs:
- raise MesonException('Multiple producers for Ninja target "%s". Please rename your targets.' % n)
+ raise MesonException('Multiple producers for Ninja target "{}". Please rename your targets.'.format(n))
self.all_outputs[n] = True
class NinjaBackend(backends.Backend):
@@ -271,7 +459,7 @@ int dummy;
# different locales have different messages with a different
            # number of colons. Match up to the drive name 'd:\'.
# When used in cross compilation, the path separator is a
- # backslash rather than a forward slash so handle both.
+ # forward slash rather than a backslash so handle both.
matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|\/).*stdio.h$")
def detect_prefix(out):
@@ -299,8 +487,7 @@ int dummy;
outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
tempfilename = outfilename + '~'
with open(tempfilename, 'w', encoding='utf-8') as outfile:
- outfile.write('# This is the build file for project "%s"\n' %
- self.build.get_project())
+ outfile.write('# This is the build file for project "{}"\n'.format(self.build.get_project()))
outfile.write('# It is autogenerated by the Meson build system.\n')
outfile.write('# Do not edit by hand.\n\n')
outfile.write('ninja_required_version = 1.7.1\n\n')
@@ -308,9 +495,9 @@ int dummy;
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
if num_pools > 0:
outfile.write('''pool link_pool
- depth = %d
+ depth = {}
-''' % num_pools)
+'''.format(num_pools))
with self.detect_vs_dep_prefix(tempfilename) as outfile:
self.generate_rules()
@@ -347,10 +534,14 @@ int dummy;
# http://clang.llvm.org/docs/JSONCompilationDatabase.html
def generate_compdb(self):
rules = []
+ # TODO: Rather than an explicit list here, rules could be marked in the
+ # rule store as being wanted in compdb
for for_machine in MachineChoice:
for lang in self.environment.coredata.compilers[for_machine]:
- rules += [self.get_compiler_rule_name(lang, for_machine)]
- rules += [self.get_pch_rule_name(lang, for_machine)]
+ rules += [ "%s%s" % (rule, ext) for rule in [self.get_compiler_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
+ rules += [ "%s%s" % (rule, ext) for rule in [self.get_pch_rule_name(lang, for_machine)]
+ for ext in ['', '_RSP']]
compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else []
ninja_compdb = [self.ninja_command, '-t', 'compdb'] + compdb_options + rules
builddir = self.environment.get_build_dir()
@@ -571,7 +762,7 @@ int dummy;
generated_source_files.append(raw_src)
elif self.environment.is_object(rel_src):
obj_list.append(rel_src)
- elif self.environment.is_library(rel_src):
+ elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src):
pass
else:
# Assume anything not specifically a source file is a header. This is because
@@ -586,7 +777,7 @@ int dummy;
o = self.generate_llvm_ir_compile(target, src)
else:
o = self.generate_single_compile(target, src, True,
- header_deps=header_deps)
+ order_deps=header_deps)
obj_list.append(o)
use_pch = self.environment.coredata.base_options.get('b_pch', False)
@@ -765,7 +956,7 @@ int dummy;
target_name = 'meson-{}'.format(self.build_run_target_name(target))
elem = NinjaBuildElement(self.all_outputs, target_name, 'CUSTOM_COMMAND', [])
elem.add_item('COMMAND', cmd)
- elem.add_item('description', 'Running external command %s' % target.name)
+ elem.add_item('description', 'Running external command {}'.format(target.name))
elem.add_item('pool', 'console')
# Alias that runs the target defined above with the name the user specified
self.create_target_alias(target_name)
@@ -778,6 +969,15 @@ int dummy;
self.processed_targets[target.get_id()] = True
def generate_coverage_command(self, elem, outputs):
+ targets = self.build.get_targets().values()
+ use_llvm_cov = False
+ for target in targets:
+ if not hasattr(target, 'compilers'):
+ continue
+ for compiler in target.compilers.values():
+ if compiler.get_id() == 'clang' and not compiler.info.is_darwin():
+ use_llvm_cov = True
+ break
elem.add_item('COMMAND', self.environment.get_build_command() +
['--internal', 'coverage'] +
outputs +
@@ -785,7 +985,8 @@ int dummy;
os.path.join(self.environment.get_source_dir(),
self.build.get_subproject_dir()),
self.environment.get_build_dir(),
- self.environment.get_log_dir()])
+ self.environment.get_log_dir()] +
+ ['--use_llvm_cov'] if use_llvm_cov else [])
def generate_coverage_rules(self):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
@@ -874,13 +1075,15 @@ int dummy;
deps='gcc', depfile='$DEPFILE',
extra='restat = 1'))
- c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
+ c = self.environment.get_build_command() + \
['--internal',
'regenerate',
- ninja_quote(quote_func(self.environment.get_source_dir())),
- ninja_quote(quote_func(self.environment.get_build_dir()))]
+ self.environment.get_source_dir(),
+ self.environment.get_build_dir(),
+ '--backend',
+ 'ninja']
self.add_rule(NinjaRule('REGENERATE_BUILD',
- c + ['--backend', 'ninja'], [],
+ c, [],
'Regenerating build files.',
extra='generator = 1'))
@@ -897,11 +1100,15 @@ int dummy;
def add_build(self, build):
self.build_elements.append(build)
- # increment rule refcount
- if build.rule != 'phony':
- self.ruledict[build.rule].refcount += 1
+ if build.rulename != 'phony':
+ # reference rule
+ build.rule = self.ruledict[build.rulename]
def write_rules(self, outfile):
+ for b in self.build_elements:
+ if isinstance(b, NinjaBuildElement):
+ b.count_rule_references()
+
for r in self.rules:
r.write(outfile)
@@ -980,12 +1187,12 @@ int dummy;
ofilename = os.path.join(self.get_target_private_dir(target), ofilebase)
elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile)
elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename])
- elem.add_item('DESC', 'Compiling resource %s' % rel_sourcefile)
+ elem.add_item('DESC', 'Compiling resource {}'.format(rel_sourcefile))
self.add_build(elem)
deps.append(ofilename)
a = '-resource:' + ofilename
else:
- raise InvalidArguments('Unknown resource file %s.' % r)
+ raise InvalidArguments('Unknown resource file {}.'.format(r))
args.append(a)
return args, deps
@@ -997,7 +1204,7 @@ int dummy;
compiler = target.compilers[Language.CS]
rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list]
deps = []
- commands = CompilerArgs(compiler, target.extra_args.get(Language.CS, []))
+ commands = compiler.compiler_args(target.extra_args.get(Language.CS, []))
commands += compiler.get_buildtype_args(buildtype)
commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target))
commands += compiler.get_debug_args(self.get_option_for_target('debug', target))
@@ -1278,7 +1485,7 @@ int dummy;
main_rust_file = None
for i in target.get_sources():
if not rustc.can_compile(i):
- raise InvalidArguments('Rust target %s contains a non-rust source file.' % target.get_basename())
+ raise InvalidArguments('Rust target {} contains a non-rust source file.'.format(target.get_basename()))
if main_rust_file is None:
main_rust_file = i.rel_to_builddir(self.build_to_src)
if main_rust_file is None:
@@ -1349,7 +1556,8 @@ int dummy;
self.get_target_dir(target))
else:
target_slashname_workaround_dir = self.get_target_dir(target)
- rpath_args = rustc.build_rpath_args(self.environment,
+ (rpath_args, target.rpath_dirs_to_remove) = \
+ rustc.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
self.determine_rpath_dirs(target),
@@ -1376,12 +1584,12 @@ int dummy;
return PerMachine('_FOR_BUILD', '')[for_machine]
@classmethod
- def get_compiler_rule_name(cls, lang: Language, for_machine: MachineChoice) -> str:
- return '%s_COMPILER%s' % (lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
+ def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
+ return '{}_COMPILER{}'.format(lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
@classmethod
- def get_pch_rule_name(cls, lang: Language, for_machine: MachineChoice) -> str:
- return '%s_PCH%s' % (lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
+ def get_pch_rule_name(cls, lang: str, for_machine: MachineChoice) -> str:
+ return '{}_PCH{}'.format(lang.get_lower_case_name(), cls.get_rule_suffix(for_machine))
@classmethod
def compiler_to_rule_name(cls, compiler: Compiler) -> str:
@@ -1453,7 +1661,7 @@ int dummy;
abs_headers.append(absh)
header_imports += swiftc.get_header_import_args(absh)
else:
- raise InvalidArguments('Swift target %s contains a non-swift source file.' % target.get_basename())
+ raise InvalidArguments('Swift target {} contains a non-swift source file.'.format(target.get_basename()))
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
compile_args = swiftc.get_compile_only_args()
compile_args += swiftc.get_optimization_args(self.get_option_for_target('optimization', target))
@@ -1540,7 +1748,7 @@ int dummy;
static_linker = self.build.static_linker[for_machine]
if static_linker is None:
return
- rule = 'STATIC_LINKER%s' % self.get_rule_suffix(for_machine)
+ rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine))
cmdlist = []
args = ['$in']
# FIXME: Must normalize file names with pathlib.Path before writing
@@ -1554,7 +1762,7 @@ int dummy;
cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix]
cmdlist += static_linker.get_exelist()
cmdlist += ['$LINK_ARGS']
- cmdlist += static_linker.get_output_args('$out')
+ cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none)
description = 'Linking static target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@@ -1562,6 +1770,7 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, cmdlist, args, description,
rspable=static_linker.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'gcc',
extra=pool))
def generate_dynamic_link_rules(self):
@@ -1574,9 +1783,9 @@ int dummy;
or langname == Language.RUST \
or langname == Language.CS:
continue
- rule = '%s_LINKER%s' % (langname.get_lower_case_name(), self.get_rule_suffix(for_machine))
+ rule = '{}_LINKER{}'.format(langname.get_lower_case_name(), self.get_rule_suffix(for_machine))
command = compiler.get_linker_exelist()
- args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS']
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS']
description = 'Linking target $out'
if num_pools > 0:
pool = 'pool = link_pool'
@@ -1584,12 +1793,14 @@ int dummy;
pool = None
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
+ isinstance(compiler, DmdDCompiler)) else 'gcc',
extra=pool))
- args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
+ args = self.environment.get_build_command() + \
['--internal',
'symbolextractor',
- ninja_quote(quote_func(self.environment.get_build_dir())),
+ self.environment.get_build_dir(),
'$in',
'$IMPLIB',
'$out']
@@ -1601,31 +1812,28 @@ int dummy;
def generate_java_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Java object $in'
self.add_rule(NinjaRule(rule, command, [], description))
def generate_cs_compile_rule(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc
+ command = compiler.get_exelist()
args = ['$ARGS', '$in']
description = 'Compiling C Sharp target $out'
self.add_rule(NinjaRule(rule, command, args, description,
- rspable=mesonlib.is_windows()))
+ rspable=mesonlib.is_windows(),
+ rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'gcc'))
def generate_vala_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Vala source $in'
self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
def generate_rust_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- invoc = [ninja_quote(i) for i in compiler.get_exelist()]
- command = invoc + ['$ARGS', '$in']
+ command = compiler.get_exelist() + ['$ARGS', '$in']
description = 'Compiling Rust source $in'
depfile = '$targetdep'
depstyle = 'gcc'
@@ -1634,18 +1842,18 @@ int dummy;
def generate_swift_compile_rules(self, compiler):
rule = self.compiler_to_rule_name(compiler)
- full_exe = [ninja_quote(x) for x in self.environment.get_build_command()] + [
+ full_exe = self.environment.get_build_command() + [
'--internal',
'dirchanger',
'$RUNDIR',
]
- invoc = full_exe + [ninja_quote(i) for i in compiler.get_exelist()]
+ invoc = full_exe + compiler.get_exelist()
command = invoc + ['$ARGS', '$in']
description = 'Compiling Swift source $in'
self.add_rule(NinjaRule(rule, command, [], description))
def generate_fortran_dep_hack(self, crstr):
- rule = 'FORTRAN_DEP_HACK%s' % (crstr)
+ rule = 'FORTRAN_DEP_HACK{}'.format(crstr)
if mesonlib.is_windows():
cmd = ['cmd', '/C']
else:
@@ -1659,8 +1867,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if self.created_llvm_ir_rule[compiler.for_machine]:
return
rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
- command = [ninja_quote(i) for i in compiler.get_exelist()]
- args = ['$ARGS'] + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
description = 'Compiling LLVM IR object $in'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp()))
@@ -1689,16 +1897,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if langname == Language.FORTRAN:
self.generate_fortran_dep_hack(crstr)
rule = self.get_compiler_rule_name(langname, compiler.for_machine)
- depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
- quoted_depargs = []
- for d in depargs:
- if d != '$out' and d != '$in':
- d = quote_func(d)
- quoted_depargs.append(d)
-
- command = [ninja_quote(i) for i in compiler.get_exelist()]
- args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
- description = 'Compiling %s object $out' % compiler.get_display_language()
+ depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none)
+ command = compiler.get_exelist()
+ args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in']
+ description = 'Compiling {} object $out'.format(compiler.get_display_language())
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
depfile = None
@@ -1707,6 +1909,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
depfile = '$DEPFILE'
self.add_rule(NinjaRule(rule, command, args, description,
rspable=compiler.can_linker_accept_rsp(),
+ rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or
+ isinstance(compiler, DmdDCompiler)) else 'gcc',
deps=deps, depfile=depfile))
def generate_pch_rule_for(self, langname, compiler):
@@ -1715,16 +1919,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
rule = self.compiler_to_pch_rule_name(compiler)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
- quoted_depargs = []
- for d in depargs:
- if d != '$out' and d != '$in':
- d = quote_func(d)
- quoted_depargs.append(d)
if isinstance(compiler, VisualStudioLikeCompiler):
output = []
else:
- output = compiler.get_output_args('$out')
- command = compiler.get_exelist() + ['$ARGS'] + quoted_depargs + output + compiler.get_compile_only_args() + ['$in']
+ output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none)
+ command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in']
description = 'Precompiling header $in'
if isinstance(compiler, VisualStudioLikeCompiler):
deps = 'msvc'
@@ -1859,9 +2058,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
modname = modmatch.group(1).lower()
if modname in module_files:
raise InvalidArguments(
- 'Namespace collision: module %s defined in '
- 'two files %s and %s.' %
- (modname, module_files[modname], s))
+ 'Namespace collision: module {} defined in '
+ 'two files {} and {}.'.format(modname, module_files[modname], s))
module_files[modname] = s
else:
submodmatch = submodre.match(line)
@@ -1872,9 +2070,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
if submodname in submodule_files:
raise InvalidArguments(
- 'Namespace collision: submodule %s defined in '
- 'two files %s and %s.' %
- (submodname, submodule_files[submodname], s))
+ 'Namespace collision: submodule {} defined in '
+ 'two files {} and {}.'.format(submodname, submodule_files[submodname], s))
submodule_files[submodname] = s
self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files}
@@ -1960,11 +2157,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
return linker.get_link_debugfile_args(outname)
def generate_llvm_ir_compile(self, target, src):
+ base_proxy = self.get_base_options_for_target(target)
compiler = get_compiler_for_source(target.compilers.values(), src)
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
# Compiler args for compiling this target
- commands += compilers.get_base_compile_args(self.environment.coredata.base_options,
- compiler)
+ commands += compilers.get_base_compile_args(base_proxy, compiler)
if isinstance(src, File):
if src.is_built:
src_filename = os.path.join(src.subdir, src.fname)
@@ -1974,7 +2171,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
src_filename = os.path.basename(src)
else:
src_filename = src
- obj_basename = src_filename.replace('/', '_').replace('\\', '_')
+ obj_basename = self.canonicalize_filename(src_filename)
rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename)
rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix()
commands += self.get_compile_debugfile_args(compiler, target, rel_obj)
@@ -1987,9 +2184,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# Write the Ninja build command
compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine)
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
- # Convert from GCC-style link argument naming to the naming used by the
- # current compiler.
- commands = commands.to_native()
element.add_item('ARGS', commands)
self.add_build(element)
return rel_obj
@@ -2005,6 +2199,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
curdir = '.'
return compiler.get_include_args(curdir, False)
+ @lru_cache(maxsize=None)
+ def get_normpath_target(self, source) -> str:
+ return os.path.normpath(source)
+
def get_custom_target_dir_include_args(self, target, compiler):
custom_target_include_dirs = []
for i in target.get_generated_sources():
@@ -2013,7 +2211,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# own target build dir.
if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
continue
- idir = os.path.normpath(self.get_target_dir(i))
+ idir = self.get_normpath_target(self.get_target_dir(i))
if not idir:
idir = '.'
if idir not in custom_target_include_dirs:
@@ -2049,7 +2247,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
base_proxy = self.get_base_options_for_target(target)
# Create an empty commands list, and start adding arguments from
# various sources in the order in which they must override each other
- commands = CompilerArgs(compiler)
+ commands = compiler.compiler_args()
# Start with symbol visibility.
commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility)
# Add compiler args for compiling this target derived from 'base' build
@@ -2129,7 +2327,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = self._generate_single_compile(target, compiler, is_generated)
- commands = CompilerArgs(commands.compiler, commands)
+ commands = commands.compiler.compiler_args(commands)
# Create introspection information
if is_generated is False:
@@ -2206,9 +2404,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
d = os.path.join(self.get_target_private_dir(target), d)
element.add_orderdep(d)
element.add_dep(pch_dep)
- # Convert from GCC-style link argument naming to the naming used by the
- # current compiler.
- commands = commands.to_native()
for i in self.get_fortran_orderdeps(target, compiler):
element.add_orderdep(i)
element.add_item('DEPFILE', dep_file)
@@ -2481,7 +2676,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
#
# Once all the linker options have been passed, we will start passing
# libraries and library paths from internal and external sources.
- commands = CompilerArgs(linker)
+ commands = linker.compiler_args()
# First, the trivial ones that are impossible to override.
#
# Add linker args for linking this target derived from 'base' build
@@ -2583,20 +2778,19 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
self.get_target_dir(target))
else:
target_slashname_workaround_dir = self.get_target_dir(target)
- commands += linker.build_rpath_args(self.environment,
+ (rpath_args, target.rpath_dirs_to_remove) = \
+ linker.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
self.determine_rpath_dirs(target),
target.build_rpath,
target.install_rpath)
+ commands += rpath_args
# Add libraries generated by custom targets
custom_target_libraries = self.get_custom_target_provided_libraries(target)
commands += extra_args
commands += custom_target_libraries
commands += stdlib_args # Standard library arguments go last, because they never depend on anything.
- # Convert from GCC-style link argument naming to the naming used by the
- # current compiler.
- commands = commands.to_native()
dep_targets.extend([self.get_dependency_filename(t) for t in dependencies])
dep_targets.extend([self.get_dependency_filename(t)
for t in target.link_depends])
@@ -2647,18 +2841,14 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
def generate_gcov_clean(self):
gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY')
- script_root = self.environment.get_script_dir()
- clean_script = os.path.join(script_root, 'delwithsuffix.py')
- gcno_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcno'])
+ gcno_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcno'])
gcno_elem.add_item('description', 'Deleting gcno files')
self.add_build(gcno_elem)
# Alias that runs the target defined above
self.create_target_alias('meson-clean-gcno')
gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY')
- script_root = self.environment.get_script_dir()
- clean_script = os.path.join(script_root, 'delwithsuffix.py')
- gcda_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcda'])
+ gcda_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcda'])
gcda_elem.add_item('description', 'Deleting gcda files')
self.add_build(gcda_elem)
# Alias that runs the target defined above
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index b776d7a..7e28cfb 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -26,7 +26,6 @@ from .. import build
from .. import dependencies
from .. import mlog
from .. import compilers
-from ..compilers import CompilerArgs
from ..interpreter import Interpreter
from ..mesonlib import (
MesonException, File, python_command, replace_if_different
@@ -98,6 +97,9 @@ class Vs2010Backend(backends.Backend):
self.subdirs = {}
self.handled_target_deps = {}
+ def get_target_private_dir(self, target):
+ return os.path.join(self.get_target_dir(target), target.get_id())
+
def generate_custom_generator_commands(self, target, parent_node):
generator_output_files = []
custom_target_include_dirs = []
@@ -591,10 +593,8 @@ class Vs2010Backend(backends.Backend):
raise MesonException('Could not guess language from source file %s.' % src)
def add_pch(self, pch_sources, lang, inc_cl):
- if len(pch_sources) <= 1:
- # We only need per file precompiled headers if we have more than 1 language.
- return
- self.use_pch(pch_sources, lang, inc_cl)
+ if lang in pch_sources:
+ self.use_pch(pch_sources, lang, inc_cl)
def create_pch(self, pch_sources, lang, inc_cl):
pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
@@ -602,6 +602,8 @@ class Vs2010Backend(backends.Backend):
self.add_pch_files(pch_sources, lang, inc_cl)
def use_pch(self, pch_sources, lang, inc_cl):
+ pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
+ pch.text = 'Use'
header = self.add_pch_files(pch_sources, lang, inc_cl)
pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
pch_include.text = header + ';%(ForcedIncludeFiles)'
@@ -821,12 +823,12 @@ class Vs2010Backend(backends.Backend):
clconf = ET.SubElement(compiles, 'ClCompile')
# CRT type; debug or release
if vscrt_type.value == 'from_buildtype':
- if self.buildtype == 'debug' or self.buildtype == 'debugoptimized':
+ if self.buildtype == 'debug':
ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
else:
ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
- ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded'
+ ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL'
elif vscrt_type.value == 'mdd':
ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
@@ -855,6 +857,18 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
elif '/RTCs' in buildtype_args:
ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
+ # Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise
+ # cl will give warning D9025: overriding '/Ehs' with cpp_eh value
+ if 'cpp' in target.compilers:
+ eh = self.environment.coredata.compiler_options[target.for_machine]['cpp']['eh']
+ if eh.value == 'a':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
+ elif eh.value == 's':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow'
+ elif eh.value == 'none':
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'false'
+ else: # 'sc' or 'default'
+ ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync'
# End configuration
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root)
@@ -884,9 +898,9 @@ class Vs2010Backend(backends.Backend):
#
# file_args is also later split out into defines and include_dirs in
# case someone passed those in there
- file_args = dict((lang, CompilerArgs(comp)) for lang, comp in target.compilers.items())
- file_defines = dict((lang, []) for lang in target.compilers)
- file_inc_dirs = dict((lang, []) for lang in target.compilers)
+ file_args = {l: c.compiler_args() for l, c in target.compilers.items()}
+ file_defines = {l: [] for l in target.compilers}
+ file_inc_dirs = {l: [] for l in target.compilers}
# The order in which these compile args are added must match
# generate_single_compile() and generate_basic_compiler_args()
for l, comp in target.compilers.items():
@@ -989,23 +1003,23 @@ class Vs2010Backend(backends.Backend):
# Cflags required by external deps might have UNIX-specific flags,
# so filter them out if needed
if isinstance(d, dependencies.OpenMPDependency):
- d_compile_args = compiler.openmp_flags()
+ ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
- for arg in d_compile_args:
- if arg.startswith(('-D', '/D')):
- define = arg[2:]
- # De-dup
- if define in target_defines:
- target_defines.remove(define)
- target_defines.append(define)
- elif arg.startswith(('-I', '/I')):
- inc_dir = arg[2:]
- # De-dup
- if inc_dir not in target_inc_dirs:
- target_inc_dirs.append(inc_dir)
- else:
- target_args.append(arg)
+ for arg in d_compile_args:
+ if arg.startswith(('-D', '/D')):
+ define = arg[2:]
+ # De-dup
+ if define in target_defines:
+ target_defines.remove(define)
+ target_defines.append(define)
+ elif arg.startswith(('-I', '/I')):
+ inc_dir = arg[2:]
+ # De-dup
+ if inc_dir not in target_inc_dirs:
+ target_inc_dirs.append(inc_dir)
+ else:
+ target_args.append(arg)
languages += gen_langs
if len(target_args) > 0:
@@ -1046,12 +1060,10 @@ class Vs2010Backend(backends.Backend):
# Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
pch_sources = {}
if self.environment.coredata.base_options.get('b_pch', False):
- pch_node = ET.SubElement(clconf, 'PrecompiledHeader')
for lang in [Language.C, Language.CPP]:
pch = target.get_pch(lang)
if not pch:
continue
- pch_node.text = 'Use'
if compiler.id == 'msvc':
if len(pch) == 1:
# Auto generate PCH.
@@ -1065,17 +1077,13 @@ class Vs2010Backend(backends.Backend):
# I don't know whether its relevant but let's handle other compilers
# used with a vs backend
pch_sources[lang] = [pch[0], None, lang, None]
- if len(pch_sources) == 1:
- # If there is only 1 language with precompiled headers, we can use it for the entire project, which
- # is cleaner than specifying it for each source file.
- self.use_pch(pch_sources, list(pch_sources)[0], clconf)
resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
ET.SubElement(resourcecompile, 'PreprocessorDefinitions')
# Linker options
link = ET.SubElement(compiles, 'Link')
- extra_link_args = CompilerArgs(compiler)
+ extra_link_args = compiler.compiler_args()
# FIXME: Can these buildtype linker args be added as tags in the
# vcxproj file (similar to buildtype compiler args) instead of in
# AdditionalOptions?
@@ -1103,14 +1111,14 @@ class Vs2010Backend(backends.Backend):
# Extend without reordering or de-dup to preserve `-L -l` sets
# https://github.com/mesonbuild/meson/issues/1718
if isinstance(dep, dependencies.OpenMPDependency):
- extra_link_args.extend_direct(compiler.openmp_flags())
+                    ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
for d in target.get_dependencies():
if isinstance(d, build.StaticLibrary):
for dep in d.get_external_deps():
if isinstance(dep, dependencies.OpenMPDependency):
- extra_link_args.extend_direct(compiler.openmp_flags())
+                        ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
else:
extra_link_args.extend_direct(dep.get_link_args())
# Add link args for c_* or cpp_* build options. Currently this only
@@ -1198,7 +1206,8 @@ class Vs2010Backend(backends.Backend):
# /nologo
ET.SubElement(link, 'SuppressStartupBanner').text = 'true'
# /release
- ET.SubElement(link, 'SetChecksum').text = 'true'
+ if not self.environment.coredata.get_builtin_option('debug'):
+ ET.SubElement(link, 'SetChecksum').text = 'true'
meson_file_group = ET.SubElement(root, 'ItemGroup')
ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename))
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index ddecb6e..e3b67de 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -12,12 +12,14 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import copy, os, re
from collections import OrderedDict, defaultdict
-import itertools, pathlib
+from functools import lru_cache
+import copy
import hashlib
+import itertools, pathlib
+import os
import pickle
-from functools import lru_cache
+import re
import typing as T
from . import environment
@@ -82,6 +84,7 @@ buildtarget_kwargs = set([
'override_options',
'sources',
'gnu_symbol_visibility',
+ 'link_language',
])
known_build_target_kwargs = (
@@ -92,7 +95,7 @@ known_build_target_kwargs = (
rust_kwargs |
cs_kwargs)
-known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'link_language', 'pie'}
+known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'}
known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'}
known_stlib_kwargs = known_build_target_kwargs | {'pic'}
@@ -495,6 +498,7 @@ class BuildTarget(Target):
self.link_targets = []
self.link_whole_targets = []
self.link_depends = []
+ self.added_deps = set()
self.name_prefix_set = False
self.name_suffix_set = False
self.filename = 'no_name'
@@ -509,6 +513,8 @@ class BuildTarget(Target):
self.d_features = {}
self.pic = False
self.pie = False
+ # Track build_rpath entries so we can remove them at install time
+ self.rpath_dirs_to_remove = set()
# Sources can be:
# 1. Pre-existing source files in the source tree
# 2. Pre-existing sources generated by configure_file in the build tree
@@ -532,6 +538,9 @@ class BuildTarget(Target):
repr_str = "<{0} {1}: {2}>"
return repr_str.format(self.__class__.__name__, self.get_id(), self.filename)
+ def __str__(self):
+ return "{}".format(self.name)
+
def validate_install(self, environment):
if self.for_machine is MachineChoice.BUILD and self.need_install:
if environment.is_cross_build():
@@ -729,7 +738,7 @@ class BuildTarget(Target):
File.from_source_file(environment.source_dir, self.subdir, s))
elif hasattr(s, 'get_outputs'):
self.link_depends.extend(
- [File.from_built_file(s.subdir, p) for p in s.get_outputs()])
+ [File.from_built_file(s.get_subdir(), p) for p in s.get_outputs()])
else:
raise InvalidArguments(
'Link_depends arguments must be strings, Files, '
@@ -772,7 +781,7 @@ class BuildTarget(Target):
if isinstance(src, str):
src = File(False, self.subdir, src)
elif isinstance(src, File):
- FeatureNew('File argument for extract_objects', '0.50.0').use(self.subproject)
+ FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject)
else:
raise MesonException('Object extraction arguments must be strings or Files.')
# FIXME: It could be a generated source
@@ -813,7 +822,8 @@ class BuildTarget(Target):
def get_link_dep_subdirs(self):
result = OrderedSet()
for i in self.link_targets:
- result.add(i.get_subdir())
+ if not isinstance(i, StaticLibrary):
+ result.add(i.get_subdir())
result.update(i.get_link_dep_subdirs())
return result
@@ -1012,23 +1022,16 @@ This will become a hard error in a future Meson release.''')
def get_extra_args(self, language):
return self.extra_args.get(language, [])
- def get_dependencies(self, exclude=None, for_pkgconfig=False):
+ def get_dependencies(self, exclude=None):
transitive_deps = []
if exclude is None:
exclude = []
for t in itertools.chain(self.link_targets, self.link_whole_targets):
if t in transitive_deps or t in exclude:
continue
- # When generating `Libs:` and `Libs.private:` lists in pkg-config
- # files we don't want to include static libraries that we link_whole
- # or are uninstalled (they're implicitly promoted to link_whole).
- # But we still need to include their transitive dependencies,
- # a static library we link_whole would itself link to a shared
- # library or an installed static library.
- if not for_pkgconfig or (not t.is_internal() and t not in self.link_whole_targets):
- transitive_deps.append(t)
+ transitive_deps.append(t)
if isinstance(t, StaticLibrary):
- transitive_deps += t.get_dependencies(transitive_deps + exclude, for_pkgconfig)
+ transitive_deps += t.get_dependencies(transitive_deps + exclude)
return transitive_deps
def get_source_subdir(self):
@@ -1061,6 +1064,8 @@ This will become a hard error in a future Meson release.''')
def add_deps(self, deps):
deps = listify(deps)
for dep in unholder(deps):
+ if dep in self.added_deps:
+ continue
if isinstance(dep, dependencies.InternalDependency):
# Those parts that are internal.
self.process_sourcelist(dep.sources)
@@ -1099,6 +1104,7 @@ You probably should put it in link_with instead.''')
'either an external dependency (returned by find_library() or '
'dependency()) or an internal dependency (returned by '
'declare_dependency()).'.format(type(dep).__name__))
+ self.added_deps.add(dep)
def get_external_deps(self):
return self.external_deps
@@ -1115,7 +1121,7 @@ You probably should put it in link_with instead.''')
if not isinstance(t, (Target, CustomTargetIndex)):
raise InvalidArguments('{!r} is not a target.'.format(t))
if not t.is_linkable_target():
- raise InvalidArguments('Link target {!r} is not linkable.'.format(t))
+ raise InvalidArguments("Link target '{!s}' is not linkable.".format(t))
if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name)
msg += "Use the 'pic' option to static_library to build with PIC."
@@ -1228,11 +1234,7 @@ You probably should put it in link_with instead.''')
See: https://github.com/mesonbuild/meson/issues/1653
'''
- langs = []
-
- # User specified link_language of target (for multi-language targets)
- if self.link_language:
- return [self.link_language]
+ langs = [] # type: T.List[str]
# Check if any of the external libraries were written in this language
for dep in self.external_deps:
@@ -1264,6 +1266,12 @@ You probably should put it in link_with instead.''')
# Populate list of all compilers, not just those being used to compile
# sources in this target
all_compilers = self.environment.coredata.compilers[self.for_machine]
+
+ # If the user set the link_language, just return that.
+ if self.link_language:
+ comp = all_compilers[self.link_language]
+ return comp, comp.language_stdlib_only_link_flags()
+
# Languages used by dependencies
dep_langs = self.get_langs_used_by_deps()
# Pick a compiler based on the language priority-order
@@ -2159,7 +2167,7 @@ class CustomTarget(Target):
'when installing a target')
if isinstance(kwargs['install_dir'], list):
- FeatureNew('multiple install_dir for custom_target', '0.40.0').use(self.subproject)
+ FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject)
# If an item in this list is False, the output corresponding to
# the list index of that item will not be installed
self.install_dir = typeslistify(kwargs['install_dir'], (str, bool))
@@ -2171,7 +2179,6 @@ class CustomTarget(Target):
if 'build_always' in kwargs and 'build_always_stale' in kwargs:
raise InvalidArguments('build_always and build_always_stale are mutually exclusive. Combine build_by_default and build_always_stale.')
elif 'build_always' in kwargs:
- mlog.deprecation('build_always is deprecated. Combine build_by_default and build_always_stale instead.')
if 'build_by_default' not in kwargs:
self.build_by_default = kwargs['build_always']
self.build_always_stale = kwargs['build_always']
diff --git a/mesonbuild/cmake/__init__.py b/mesonbuild/cmake/__init__.py
index 01cc3f9..db7aefd 100644
--- a/mesonbuild/cmake/__init__.py
+++ b/mesonbuild/cmake/__init__.py
@@ -24,11 +24,14 @@ __all__ = [
'CMakeTarget',
'CMakeTraceLine',
'CMakeTraceParser',
+ 'SingleTargetOptions',
+ 'TargetOptions',
'parse_generator_expressions',
'language_map',
+ 'cmake_defines_to_args',
]
-from .common import CMakeException
+from .common import CMakeException, SingleTargetOptions, TargetOptions, cmake_defines_to_args
from .client import CMakeClient
from .executor import CMakeExecutor
from .fileapi import CMakeFileAPI
diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py
index e7da0d7..4510b5d 100644
--- a/mesonbuild/cmake/common.py
+++ b/mesonbuild/cmake/common.py
@@ -60,6 +60,26 @@ def _flags_to_list(raw: str) -> T.List[str]:
res = list(filter(lambda x: len(x) > 0, res))
return res
+def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]:
+ res = [] # type: T.List[str]
+ if not isinstance(raw, list):
+ raw = [raw]
+
+ for i in raw:
+ if not isinstance(i, dict):
+ raise MesonException('Invalid CMake defines. Expected a dict, but got a {}'.format(type(i).__name__))
+ for key, val in i.items():
+ assert isinstance(key, str)
+ if isinstance(val, (str, int, float)):
+ res += ['-D{}={}'.format(key, val)]
+ elif isinstance(val, bool):
+ val_str = 'ON' if val else 'OFF'
+ res += ['-D{}={}'.format(key, val_str)]
+ else:
+                raise MesonException('Type "{}" of "{}" is not supported as a CMake define value'.format(type(val).__name__, key))
+
+ return res
+
class CMakeFileGroup:
def __init__(self, data: dict):
self.defines = data.get('defines', '')
@@ -163,3 +183,78 @@ class CMakeConfiguration:
mlog.log('Project {}:'.format(idx))
with mlog.nested():
i.log()
+
+class SingleTargetOptions:
+ def __init__(self) -> None:
+ self.opts = {} # type: T.Dict[str, str]
+ self.lang_args = {} # type: T.Dict[str, T.List[str]]
+ self.link_args = [] # type: T.List[str]
+ self.install = 'preserve'
+
+ def set_opt(self, opt: str, val: str) -> None:
+ self.opts[opt] = val
+
+ def append_args(self, lang: str, args: T.List[str]) -> None:
+ if lang not in self.lang_args:
+ self.lang_args[lang] = []
+ self.lang_args[lang] += args
+
+ def append_link_args(self, args: T.List[str]) -> None:
+ self.link_args += args
+
+ def set_install(self, install: bool) -> None:
+ self.install = 'true' if install else 'false'
+
+ def get_override_options(self, initial: T.List[str]) -> T.List[str]:
+ res = [] # type: T.List[str]
+ for i in initial:
+ opt = i[:i.find('=')]
+ if opt not in self.opts:
+ res += [i]
+ res += ['{}={}'.format(k, v) for k, v in self.opts.items()]
+ return res
+
+ def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]:
+ if lang in self.lang_args:
+ return initial + self.lang_args[lang]
+ return initial
+
+ def get_link_args(self, initial: T.List[str]) -> T.List[str]:
+ return initial + self.link_args
+
+ def get_install(self, initial: bool) -> bool:
+ return {'preserve': initial, 'true': True, 'false': False}[self.install]
+
+class TargetOptions:
+ def __init__(self) -> None:
+ self.global_options = SingleTargetOptions()
+ self.target_options = {} # type: T.Dict[str, SingleTargetOptions]
+
+ def __getitem__(self, tgt: str) -> SingleTargetOptions:
+ if tgt not in self.target_options:
+ self.target_options[tgt] = SingleTargetOptions()
+ return self.target_options[tgt]
+
+ def get_override_options(self, tgt: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_override_options(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_override_options(initial)
+ return initial
+
+ def get_compile_args(self, tgt: str, lang: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_compile_args(lang, initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_compile_args(lang, initial)
+ return initial
+
+ def get_link_args(self, tgt: str, initial: T.List[str]) -> T.List[str]:
+ initial = self.global_options.get_link_args(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_link_args(initial)
+ return initial
+
+ def get_install(self, tgt: str, initial: bool) -> bool:
+ initial = self.global_options.get_install(initial)
+ if tgt in self.target_options:
+ initial = self.target_options[tgt].get_install(initial)
+ return initial
diff --git a/mesonbuild/cmake/data/run_ctgt.py b/mesonbuild/cmake/data/run_ctgt.py
deleted file mode 100755
index 9d5d437..0000000
--- a/mesonbuild/cmake/data/run_ctgt.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#!/usr/bin/env python3
-
-import argparse
-import subprocess
-import shutil
-import os
-import sys
-from pathlib import Path
-
-commands = [[]]
-SEPARATOR = ';;;'
-
-# Generate CMD parameters
-parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
-parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
-parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
-parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
-parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" seperated list of commands'.format(SEPARATOR))
-
-# Parse
-args = parser.parse_args()
-
-dummy_target = None
-if len(args.outputs) == 1 and len(args.original_outputs) == 0:
- dummy_target = args.outputs[0]
-elif len(args.outputs) != len(args.original_outputs):
- print('Length of output list and original output list differ')
- sys.exit(1)
-
-for i in args.commands:
- if i == SEPARATOR:
- commands += [[]]
- continue
-
- i = i.replace('"', '') # Remove lefover quotes
- commands[-1] += [i]
-
-# Execute
-for i in commands:
- # Skip empty lists
- if not i:
- continue
-
- cmd = []
- stdout = None
- stderr = None
- capture_file = ''
-
- for j in i:
- if j in ['>', '>>']:
- stdout = subprocess.PIPE
- continue
- elif j in ['&>', '&>>']:
- stdout = subprocess.PIPE
- stderr = subprocess.STDOUT
- continue
-
- if stdout is not None or stderr is not None:
- capture_file += j
- else:
- cmd += [j]
-
- try:
- os.makedirs(args.directory, exist_ok=True)
-
- res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
- if capture_file:
- out_file = Path(args.directory) / capture_file
- out_file.write_bytes(res.stdout)
- except subprocess.CalledProcessError:
- exit(1)
-
-if dummy_target:
- with open(dummy_target, 'a'):
- os.utime(dummy_target, None)
- exit(0)
-
-# Copy outputs
-zipped_outputs = zip(args.outputs, args.original_outputs)
-for expected, generated in zipped_outputs:
- do_copy = False
- if not os.path.exists(expected):
- if not os.path.exists(generated):
- print('Unable to find generated file. This can cause the build to fail:')
- print(generated)
- do_copy = False
- else:
- do_copy = True
- elif os.path.exists(generated):
- if os.path.getmtime(generated) > os.path.getmtime(expected):
- do_copy = True
-
- if do_copy:
- if os.path.exists(expected):
- os.remove(expected)
- shutil.copyfile(generated, expected)
diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py
index 349c8ec..d41cd22 100644
--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -28,6 +28,7 @@ import textwrap
from .. import mlog, mesonlib
from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice
from ..environment import Environment
+from ..envconfig import get_env_var
if T.TYPE_CHECKING:
from ..dependencies.base import ExternalProgram
@@ -48,6 +49,8 @@ class CMakeExecutor:
self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent)
self.always_capture_stderr = True
self.print_cmout = False
+ self.prefix_paths = [] # type: T.List[str]
+ self.extra_cmake_args = [] # type: T.List[str]
if self.cmakebin is False:
self.cmakebin = None
return
@@ -60,26 +63,23 @@ class CMakeExecutor:
self.cmakebin = None
return
+ self.prefix_paths = self.environment.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value
+ env_pref_path = get_env_var(
+ self.for_machine,
+ self.environment.is_cross_build(),
+ 'CMAKE_PREFIX_PATH')
+ if env_pref_path is not None:
+ env_pref_path = re.split(r':|;', env_pref_path)
+ env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings
+ if not self.prefix_paths:
+ self.prefix_paths = []
+ self.prefix_paths += env_pref_path
+
+ if self.prefix_paths:
+ self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))]
+
def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple['ExternalProgram', str]:
- from ..dependencies.base import ExternalProgram
-
- # Create an iterator of options
- def search():
- # Lookup in cross or machine file.
- potential_cmakepath = environment.lookup_binary_entry(self.for_machine, 'cmake')
- if potential_cmakepath is not None:
- mlog.debug('CMake binary for %s specified from cross file, native file, or env var as %s.', self.for_machine, potential_cmakepath)
- yield ExternalProgram.from_entry('cmake', potential_cmakepath)
- # We never fallback if the user-specified option is no good, so
- # stop returning options.
- return
- mlog.debug('CMake binary missing from cross or native file, or env var undefined.')
- # Fallback on hard-coded defaults.
- # TODO prefix this for the cross case instead of ignoring thing.
- if environment.machines.matches_build_machine(self.for_machine):
- for potential_cmakepath in environment.default_cmake:
- mlog.debug('Trying a default CMake fallback at', potential_cmakepath)
- yield ExternalProgram(potential_cmakepath, silent=True)
+ from ..dependencies.base import find_external_program
# Only search for CMake the first time and store the result in the class
# definition
@@ -89,10 +89,11 @@ class CMakeExecutor:
mlog.debug('CMake binary for %s is cached.' % self.for_machine)
else:
assert CMakeExecutor.class_cmakebin[self.for_machine] is None
+
mlog.debug('CMake binary for %s is not cached' % self.for_machine)
- for potential_cmakebin in search():
- mlog.debug('Trying CMake binary {} for machine {} at {}'
- .format(potential_cmakebin.name, self.for_machine, potential_cmakebin.command))
+ for potential_cmakebin in find_external_program(
+ environment, self.for_machine, 'cmake', 'CMake',
+ environment.default_cmake, allow_default_for_cross=False):
version_if_ok = self.check_cmake(potential_cmakebin)
if not version_if_ok:
continue
@@ -132,7 +133,7 @@ class CMakeExecutor:
msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
mlog.warning(msg)
return None
- cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip()
+ cmvers = re.search(r'(cmake|cmake3)\s*version\s*([\d.]+)', out).group(2)
return cmvers
def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None:
@@ -226,6 +227,7 @@ class CMakeExecutor:
if env is None:
env = os.environ
+ args = args + self.extra_cmake_args
if disable_cache:
return self._call_impl(args, build_dir, env)
@@ -362,5 +364,8 @@ class CMakeExecutor:
def get_command(self) -> T.List[str]:
return self.cmakebin.get_command()
+ def get_cmake_prefix_paths(self) -> T.List[str]:
+ return self.prefix_paths
+
def machine_choice(self) -> MachineChoice:
return self.for_machine
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index 6208696..91700c7 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -15,16 +15,15 @@
# This class contains the basic functionality needed to run any interpreter
# or an interpreter-based tool.
-import pkg_resources
-
-from .common import CMakeException, CMakeTarget
+from .common import CMakeException, CMakeTarget, TargetOptions
from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel
from .fileapi import CMakeFileAPI
from .executor import CMakeExecutor
from .traceparser import CMakeTraceParser, CMakeGeneratorTarget
-from .. import mlog
+from .. import mlog, mesonlib
from ..environment import Environment
from ..mesonlib import Language, MachineChoice, OrderedSet, version_compare
+from ..mesondata import mesondata
from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header
from enum import Enum
from functools import lru_cache
@@ -289,7 +288,17 @@ class ConverterTarget:
for j in self.compile_opts[i]:
m = ConverterTarget.std_regex.match(j)
if m:
- self.override_options += ['{}_std={}'.format(i, m.group(2))]
+ std = m.group(2)
+ supported = self._all_lang_stds(i)
+ if std not in supported:
+ mlog.warning(
+ 'Unknown {0}_std "{1}" -> Ignoring. Try setting the project-'
+ 'level {0}_std if build errors occur. Known '
+ '{0}_stds are: {2}'.format(i, std, ' '.join(supported)),
+ once=True
+ )
+ continue
+ self.override_options += ['{}_std={}'.format(i, std)]
elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']:
self.pie = True
elif j in blacklist_compiler_flags:
@@ -307,13 +316,6 @@ class ConverterTarget:
tgt = trace.targets.get(self.cmake_name)
if tgt:
self.depends_raw = trace.targets[self.cmake_name].depends
- if self.type.upper() == 'INTERFACE_LIBRARY':
- props = tgt.properties
-
- self.includes += props.get('INTERFACE_INCLUDE_DIRECTORIES', [])
- self.public_compile_opts += props.get('INTERFACE_COMPILE_DEFINITIONS', [])
- self.public_compile_opts += props.get('INTERFACE_COMPILE_OPTIONS', [])
- self.link_flags += props.get('INTERFACE_LINK_OPTIONS', [])
# TODO refactor this copy paste from CMakeDependency for future releases
reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$')
@@ -332,6 +334,12 @@ class ConverterTarget:
libraries = []
mlog.debug(tgt)
+ if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties:
+ self.includes += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x]
+
+ if 'INTERFACE_LINK_OPTIONS' in tgt.properties:
+ self.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x]
+
if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties:
self.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x]
@@ -346,8 +354,15 @@ class ConverterTarget:
cfgs += [x for x in tgt.properties['CONFIGURATIONS'] if x]
cfg = cfgs[0]
- if 'RELEASE' in cfgs:
- cfg = 'RELEASE'
+ is_debug = self.env.coredata.get_builtin_option('debug');
+ if is_debug:
+ if 'DEBUG' in cfgs:
+ cfg = 'DEBUG'
+ elif 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+ else:
+ if 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties:
libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x]
@@ -539,6 +554,13 @@ class ConverterTarget:
suffixes += [x for x in exts]
return suffixes
+ @lru_cache(maxsize=None)
+ def _all_lang_stds(self, lang: str) -> T.List[str]:
+ lang_opts = self.env.coredata.compiler_options.build.get(lang, None)
+ if not lang_opts or 'std' not in lang_opts:
+ return []
+ return lang_opts['std'].choices
+
def process_inter_target_dependencies(self):
# Move the dependencies from all transfer_dependencies_from to the target
to_process = list(self.depends)
@@ -791,7 +813,7 @@ class CMakeInterpreter:
raise CMakeException('Unable to find CMake')
self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True)
- preload_file = pkg_resources.resource_filename('mesonbuild', 'cmake/data/preload.cmake')
+ preload_file = mesondata['cmake/data/preload.cmake'].write_to_private(self.env)
# Prefere CMAKE_PROJECT_INCLUDE over CMAKE_TOOLCHAIN_FILE if possible,
# since CMAKE_PROJECT_INCLUDE was actually designed for code injection.
@@ -970,7 +992,7 @@ class CMakeInterpreter:
mlog.log('CMake project', mlog.bold(self.project_name), 'has', mlog.bold(str(len(self.targets) + len(self.custom_targets))), 'build targets.')
- def pretend_to_be_meson(self) -> CodeBlockNode:
+ def pretend_to_be_meson(self, options: TargetOptions) -> CodeBlockNode:
if not self.project_name:
raise CMakeException('CMakeInterpreter was not analysed')
@@ -1036,9 +1058,6 @@ class CMakeInterpreter:
root_cb.lines += [function('project', [self.project_name] + self.languages)]
# Add the run script for custom commands
- run_script = pkg_resources.resource_filename('mesonbuild', 'cmake/data/run_ctgt.py')
- run_script_var = 'ctgt_run_script'
- root_cb.lines += [assign(run_script_var, function('find_program', [[run_script]], {'required': True}))]
# Add the targets
processing = []
@@ -1134,21 +1153,26 @@ class CMakeInterpreter:
dep_var = '{}_dep'.format(tgt.name)
tgt_var = tgt.name
+ install_tgt = options.get_install(tgt.cmake_name, tgt.install)
+
# Generate target kwargs
tgt_kwargs = {
- 'build_by_default': tgt.install,
- 'link_args': tgt.link_flags + tgt.link_libraries,
+ 'build_by_default': install_tgt,
+ 'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries),
'link_with': link_with,
'include_directories': id_node(inc_var),
- 'install': tgt.install,
- 'install_dir': tgt.install_dir,
- 'override_options': tgt.override_options,
+ 'install': install_tgt,
+ 'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options),
'objects': [method(x, 'extract_all_objects') for x in objec_libs],
}
+ # Only set if installed and only override if it is set
+ if install_tgt and tgt.install_dir:
+ tgt_kwargs['install_dir'] = tgt.install_dir
+
# Handle compiler args
for key, val in tgt.compile_opts.items():
- tgt_kwargs['{}_args'.format(key)] = val
+ tgt_kwargs['{}_args'.format(key)] = options.get_compile_args(tgt.cmake_name, key, val)
# Handle -fPCI, etc
if tgt_func == 'executable':
@@ -1220,7 +1244,8 @@ class CMakeInterpreter:
# Generate the command list
command = []
- command += [id_node(run_script_var)]
+ command += mesonlib.meson_command
+ command += ['--internal', 'cmake_run_ctgt']
command += ['-o', '@OUTPUT@']
if tgt.original_outputs:
command += ['-O'] + tgt.original_outputs
diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py
index 432cd21..a241360 100644
--- a/mesonbuild/cmake/traceparser.py
+++ b/mesonbuild/cmake/traceparser.py
@@ -64,6 +64,7 @@ class CMakeTarget:
return
for key, val in self.properties.items():
self.properties[key] = [x.strip() for x in val]
+ assert all([';' not in x for x in self.properties[key]])
class CMakeGeneratorTarget(CMakeTarget):
def __init__(self, name):
@@ -138,7 +139,7 @@ class CMakeTraceParser:
if not self.requires_stderr():
if not self.trace_file_path.exists and not self.trace_file_path.is_file():
raise CMakeException('CMake: Trace file "{}" not found'.format(str(self.trace_file_path)))
- trace = self.trace_file_path.read_text()
+ trace = self.trace_file_path.read_text(errors='ignore')
if not trace:
raise CMakeException('CMake: The CMake trace was not provided or is empty')
@@ -574,10 +575,10 @@ class CMakeTraceParser:
continue
if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']:
- interface += [i]
+ interface += i.split(';')
if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']:
- private += [i]
+ private += i.split(';')
if paths:
interface = self._guess_files(interface)
@@ -655,30 +656,45 @@ class CMakeTraceParser:
# Try joining file paths that contain spaces
- reg_start = re.compile(r'^([A-Za-z]:)?/.*/[^./]+$')
+ reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$')
reg_end = re.compile(r'^.*\.[a-zA-Z]+$')
fixed_list = [] # type: T.List[str]
curr_str = None # type: T.Optional[str]
+ path_found = False # type: bool
for i in broken_list:
if curr_str is None:
curr_str = i
+ path_found = False
elif os.path.isfile(curr_str):
# Abort concatenation if curr_str is an existing file
fixed_list += [curr_str]
curr_str = i
+ path_found = False
elif not reg_start.match(curr_str):
# Abort concatenation if curr_str no longer matches the regex
fixed_list += [curr_str]
curr_str = i
- elif reg_end.match(i) or os.path.exists('{} {}'.format(curr_str, i)):
+ path_found = False
+ elif reg_end.match(i):
# File detected
curr_str = '{} {}'.format(curr_str, i)
fixed_list += [curr_str]
curr_str = None
+ path_found = False
+ elif os.path.exists('{} {}'.format(curr_str, i)):
+ # Path detected
+ curr_str = '{} {}'.format(curr_str, i)
+ path_found = True
+ elif path_found:
+ # Add path to fixed_list after ensuring the whole path is in curr_str
+ fixed_list += [curr_str]
+ curr_str = i
+ path_found = False
else:
curr_str = '{} {}'.format(curr_str, i)
+ path_found = False
if curr_str:
fixed_list += [curr_str]
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index af7e519..fd47545 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -48,7 +48,6 @@ __all__ = [
'ClangObjCPPCompiler',
'ClangClCCompiler',
'ClangClCPPCompiler',
- 'CompilerArgs',
'CPPCompiler',
'DCompiler',
'DmdDCompiler',
@@ -123,7 +122,6 @@ from .compilers import (
is_known_suffix,
lang_suffixes,
sort_clink,
- CompilerArgs,
)
from .c import (
CCompiler,
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index 52a5157..feae8ac 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -86,9 +86,10 @@ class ClangCCompiler(ClangCompiler, CCompiler):
_C18_VERSION = '>=8.0.0'
def __init__(self, exelist, version, for_machine: MachineChoice,
- is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs):
+ is_cross, info: 'MachineInfo', exe_wrapper=None,
+ defines: T.Optional[T.List[str]] = None, **kwargs):
CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, defines)
default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
diff --git a/mesonbuild/compilers/c_function_attributes.py b/mesonbuild/compilers/c_function_attributes.py
index e5de485..f31229e 100644
--- a/mesonbuild/compilers/c_function_attributes.py
+++ b/mesonbuild/compilers/c_function_attributes.py
@@ -56,6 +56,8 @@ C_FUNC_ATTRIBUTES = {
'int foo(const char * p, ...) __attribute__((format(printf, 1, 2)));',
'format_arg':
'char * foo(const char * p) __attribute__((format_arg(1)));',
+ 'force_align_arg_pointer':
+ '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }',
'gnu_inline':
'inline __attribute__((gnu_inline)) int foo(void) { return 0; }',
'hot':
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index ecdb70d..c31439c 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -12,19 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import abc
import contextlib, os.path, re, tempfile
-import collections.abc
import itertools
import typing as T
from functools import lru_cache
-from ..linkers import (
- GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker,
- StaticLinker,
-)
from .. import coredata
from .. import mlog
from .. import mesonlib
+from ..linkers import LinkerEnvVarsMixin
from ..mesonlib import (
EnvironmentException, Language, MachineChoice, MesonException,
Popen_safe, split_args
@@ -32,6 +29,7 @@ from ..mesonlib import (
from ..envconfig import (
Properties, get_env_var
)
+from ..arglist import CompilerArgs
if T.TYPE_CHECKING:
from ..coredata import OptionDictType
@@ -52,7 +50,7 @@ lib_suffixes = ('a', 'lib', 'dll', 'dll.a', 'dylib', 'so')
# This means we can't include .h headers here since they could be C, C++, ObjC, etc.
lang_suffixes = {
Language.C: ('c',),
- Language.CPP: ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'),
+ Language.CPP: ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino'),
Language.CUDA: ('cu',),
# f90, f95, f03, f08 are for free-form fortran ('f90' recommended)
# f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended)
@@ -113,11 +111,6 @@ cflags_mapping = {
Language.RUST: 'RUSTFLAGS',
}
-unixy_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt')
-# execinfo is a compiler lib on FreeBSD and NetBSD
-if mesonlib.is_freebsd() or mesonlib.is_netbsd():
- unixy_compiler_internal_libs += ('execinfo',)
-
# All these are only for C-linkable languages; see `clink_langs` above.
def sort_clink(lang):
@@ -153,11 +146,15 @@ def is_llvm_ir(fname):
fname = fname.fname
return fname.split('.')[-1] == 'll'
+@lru_cache(maxsize=None)
+def cached_by_name(fname):
+ suffix = fname.split('.')[-1]
+ return suffix in obj_suffixes
+
def is_object(fname):
if hasattr(fname, 'fname'):
fname = fname.fname
- suffix = fname.split('.')[-1]
- return suffix in obj_suffixes
+ return cached_by_name(fname)
def is_library(fname):
if hasattr(fname, 'fname'):
@@ -201,7 +198,7 @@ rust_buildtype_args = {'plain': [],
d_gdc_buildtype_args = {'plain': [],
'debug': [],
'debugoptimized': ['-finline-functions'],
- 'release': ['-frelease', '-finline-functions'],
+ 'release': ['-finline-functions'],
'minsize': [],
'custom': [],
}
@@ -209,7 +206,7 @@ d_gdc_buildtype_args = {'plain': [],
d_ldc_buildtype_args = {'plain': [],
'debug': [],
'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'],
- 'release': ['-release', '-enable-inlining', '-Hkeep-all-bodies'],
+ 'release': ['-enable-inlining', '-Hkeep-all-bodies'],
'minsize': [],
'custom': [],
}
@@ -217,7 +214,7 @@ d_ldc_buildtype_args = {'plain': [],
d_dmd_buildtype_args = {'plain': [],
'debug': [],
'debugoptimized': ['-inline'],
- 'release': ['-release', '-inline'],
+ 'release': ['-inline'],
'minsize': [],
'custom': [],
}
@@ -335,7 +332,7 @@ def get_base_compile_args(options, compiler):
if (options['b_ndebug'].value == 'true' or
(options['b_ndebug'].value == 'if-release' and
options['buildtype'].value in {'release', 'plain'})):
- args += ['-DNDEBUG']
+ args += compiler.get_disable_assert_args()
except KeyError:
pass
# This does not need a try...except
@@ -387,9 +384,10 @@ def get_base_link_args(options, linker, is_shared_module):
# -Wl,-dead_strip_dylibs is incompatible with bitcode
args.extend(linker.get_asneeded_args())
- # Apple's ld (the only one that supports bitcode) does not like any
- # -undefined arguments at all, so don't pass these when using bitcode
+ # Apple's ld (the only one that supports bitcode) does not like -undefined
+ # arguments or -headerpad_max_install_names when bitcode is enabled
if not bitcode:
+ args.extend(linker.headerpad_args())
if (not is_shared_module and
option_enabled(linker.base_options, options, 'b_lundef')):
args.extend(linker.no_undefined_link_args())
@@ -418,334 +416,8 @@ class RunResult:
self.stdout = stdout
self.stderr = stderr
-class CompilerArgs(collections.abc.MutableSequence):
- '''
- List-like class that manages a list of compiler arguments. Should be used
- while constructing compiler arguments from various sources. Can be
- operated with ordinary lists, so this does not need to be used
- everywhere.
-
- All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc)
- and can converted to the native type of each compiler by using the
- .to_native() method to which you must pass an instance of the compiler or
- the compiler class.
-
- New arguments added to this class (either with .append(), .extend(), or +=)
- are added in a way that ensures that they override previous arguments.
- For example:
-
- >>> a = ['-Lfoo', '-lbar']
- >>> a += ['-Lpho', '-lbaz']
- >>> print(a)
- ['-Lpho', '-Lfoo', '-lbar', '-lbaz']
-
- Arguments will also be de-duped if they can be de-duped safely.
-
- Note that because of all this, this class is not commutative and does not
- preserve the order of arguments if it is safe to not. For example:
- >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror']
- ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror']
- >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar']
- ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror']
-
- '''
- # NOTE: currently this class is only for C-like compilers, but it can be
- # extended to other languages easily. Just move the following to the
- # compiler class and initialize when self.compiler is set.
-
- # Arg prefixes that override by prepending instead of appending
- prepend_prefixes = ('-I', '-L')
- # Arg prefixes and args that must be de-duped by returning 2
- dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
- dedup2_suffixes = ()
- dedup2_args = ()
- # Arg prefixes and args that must be de-duped by returning 1
- #
- # NOTE: not thorough. A list of potential corner cases can be found in
- # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
- dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
- dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
- # Match a .so of the form path/to/libfoo.so.0.1.0
- # Only UNIX shared libraries require this. Others have a fixed extension.
- dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
- dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
- # In generate_link() we add external libs without de-dup, but we must
- # *always* de-dup these because they're special arguments to the linker
- always_dedup_args = tuple('-l' + lib for lib in unixy_compiler_internal_libs)
-
- def __init__(self, compiler: T.Union['Compiler', StaticLinker],
- iterable: T.Optional[T.Iterable[str]] = None):
- self.compiler = compiler
- self.__container = list(iterable) if iterable is not None else [] # type: T.List[str]
- self.__seen_args = set()
- for arg in self.__container:
- self.__seen_args.add(arg)
-
- @T.overload # noqa: F811
- def __getitem__(self, index: int) -> str: # noqa: F811
- pass
-
- @T.overload # noqa: F811
- def __getitem__(self, index: slice) -> T.List[str]: # noqa: F811
- pass
-
- def __getitem__(self, index): # noqa: F811
- return self.__container[index]
-
- @T.overload # noqa: F811
- def __setitem__(self, index: int, value: str) -> None: # noqa: F811
- pass
-
- @T.overload # noqa: F811
- def __setitem__(self, index: slice, value: T.List[str]) -> None: # noqa: F811
- pass
-
- def __setitem__(self, index, value) -> None: # noqa: F811
- self.__container[index] = value
- for v in value:
- self.__seen_args.add(v)
-
- def __delitem__(self, index: T.Union[int, slice]) -> None:
- value = self.__container[index]
- del self.__container[index]
- if value in self.__seen_args and value in self.__container: # this is also honoring that you can have duplicated entries
- self.__seen_args.remove(value)
- def __len__(self) -> int:
- return len(self.__container)
-
- def insert(self, index: int, value: str) -> None:
- self.__container.insert(index, value)
- self.__seen_args.add(value)
-
- def copy(self) -> 'CompilerArgs':
- return CompilerArgs(self.compiler, self.__container.copy())
-
- @classmethod
- @lru_cache(maxsize=None)
- def _can_dedup(cls, arg):
- '''
- Returns whether the argument can be safely de-duped. This is dependent
- on three things:
-
- a) Whether an argument can be 'overridden' by a later argument. For
- example, -DFOO defines FOO and -UFOO undefines FOO. In this case, we
- can safely remove the previous occurrence and add a new one. The same
- is true for include paths and library paths with -I and -L. For
- these we return `2`. See `dedup2_prefixes` and `dedup2_args`.
- b) Arguments that once specified cannot be undone, such as `-c` or
- `-pipe`. New instances of these can be completely skipped. For these
- we return `1`. See `dedup1_prefixes` and `dedup1_args`.
- c) Whether it matters where or how many times on the command-line
- a particular argument is present. This can matter for symbol
- resolution in static or shared libraries, so we cannot de-dup or
- reorder them. For these we return `0`. This is the default.
-
- In addition to these, we handle library arguments specially.
- With GNU ld, we surround library arguments with -Wl,--start/end-group
- to recursively search for symbols in the libraries. This is not needed
- with other linkers.
- '''
- # A standalone argument must never be deduplicated because it is
- # defined by what comes _after_ it. Thus dedupping this:
- # -D FOO -D BAR
- # would yield either
- # -D FOO BAR
- # or
- # FOO -D BAR
- # both of which are invalid.
- if arg in cls.dedup2_prefixes:
- return 0
- if arg.startswith('-L='):
- # DMD and LDC proxy all linker arguments using -L=; in conjunction
- # with ld64 on macOS this can lead to command line arguments such
- # as: `-L=-compatibility_version -L=0 -L=current_version -L=0`.
- # These cannot be combined, ld64 insists they must be passed with
- # spaces and quoting does not work. if we deduplicate these then
- # one of the -L=0 arguments will be removed and the version
- # argument will consume the next argument instead.
- return 0
- if arg in cls.dedup2_args or \
- arg.startswith(cls.dedup2_prefixes) or \
- arg.endswith(cls.dedup2_suffixes):
- return 2
- if arg in cls.dedup1_args or \
- arg.startswith(cls.dedup1_prefixes) or \
- arg.endswith(cls.dedup1_suffixes) or \
- re.search(cls.dedup1_regex, arg):
- return 1
- return 0
-
- @classmethod
- @lru_cache(maxsize=None)
- def _should_prepend(cls, arg):
- if arg.startswith(cls.prepend_prefixes):
- return True
- return False
-
- def need_to_split_linker_args(self):
- return isinstance(self.compiler, Compiler) and self.compiler.get_language() == Language.D
-
- def to_native(self, copy: bool = False) -> T.List[str]:
- # Check if we need to add --start/end-group for circular dependencies
- # between static libraries, and for recursively searching for symbols
- # needed by static libraries that are provided by object files or
- # shared libraries.
- if copy:
- new = self.copy()
- else:
- new = self
- # To proxy these arguments with D you need to split the
- # arguments, thus you get `-L=-soname -L=lib.so` we don't
- # want to put the lib in a link -roup
- split_linker_args = self.need_to_split_linker_args()
- # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which
- # all act like (or are) gnu ld
- # TODO: this could probably be added to the DynamicLinker instead
- if (isinstance(self.compiler, Compiler) and
- self.compiler.linker is not None and
- isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))):
- group_start = -1
- group_end = -1
- is_soname = False
- for i, each in enumerate(new):
- if is_soname:
- is_soname = False
- continue
- elif split_linker_args and '-soname' in each:
- is_soname = True
- continue
- if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \
- not soregex.match(each):
- continue
- group_end = i
- if group_start < 0:
- # First occurrence of a library
- group_start = i
- if group_start >= 0:
- # Last occurrence of a library
- new.insert(group_end + 1, '-Wl,--end-group')
- new.insert(group_start, '-Wl,--start-group')
- # Remove system/default include paths added with -isystem
- if hasattr(self.compiler, 'get_default_include_dirs'):
- default_dirs = self.compiler.get_default_include_dirs()
- bad_idx_list = [] # type: T.List[int]
- for i, each in enumerate(new):
- # Remove the -isystem and the path if the path is a default path
- if (each == '-isystem' and
- i < (len(new) - 1) and
- new[i + 1] in default_dirs):
- bad_idx_list += [i, i + 1]
- elif each.startswith('-isystem=') and each[9:] in default_dirs:
- bad_idx_list += [i]
- elif each.startswith('-isystem') and each[8:] in default_dirs:
- bad_idx_list += [i]
- for i in reversed(bad_idx_list):
- new.pop(i)
- return self.compiler.unix_args_to_native(new.__container)
-
- def append_direct(self, arg: str) -> None:
- '''
- Append the specified argument without any reordering or de-dup except
- for absolute paths to libraries, etc, which can always be de-duped
- safely.
- '''
- if os.path.isabs(arg):
- self.append(arg)
- else:
- self.__container.append(arg)
- self.__seen_args.add(arg)
-
- def extend_direct(self, iterable: T.Iterable[str]) -> None:
- '''
- Extend using the elements in the specified iterable without any
- reordering or de-dup except for absolute paths where the order of
- include search directories is not relevant
- '''
- for elem in iterable:
- self.append_direct(elem)
-
- def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None:
- normal_flags = []
- lflags = []
- for i in iterable:
- if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')):
- lflags.append(i)
- else:
- normal_flags.append(i)
- self.extend(normal_flags)
- self.extend_direct(lflags)
-
- def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs':
- new = self.copy()
- new += args
- return new
-
- def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs':
- '''
- Add two CompilerArgs while taking into account overriding of arguments
- and while preserving the order of arguments as much as possible
- '''
- this_round_added = set() # a dict that contains a value, when the value was added this round
- pre = [] # type: T.List[str]
- post = [] # type: T.List[str]
- if not isinstance(args, collections.abc.Iterable):
- raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args))
- for arg in args:
- # If the argument can be de-duped, do it either by removing the
- # previous occurrence of it and adding a new one, or not adding the
- # new occurrence.
- dedup = self._can_dedup(arg)
- if dedup == 1:
- # Argument already exists and adding a new instance is useless
- if arg in self.__seen_args or arg in pre or arg in post:
- continue
- should_prepend = self._should_prepend(arg)
- if dedup == 2:
- # Remove all previous occurrences of the arg and add it anew
- if arg in self.__seen_args and arg not in this_round_added: #if __seen_args contains arg as well as this_round_added, then its not yet part in self.
- self.remove(arg)
- if should_prepend:
- if arg in pre:
- pre.remove(arg)
- else:
- if arg in post:
- post.remove(arg)
- if should_prepend:
- pre.append(arg)
- else:
- post.append(arg)
- self.__seen_args.add(arg)
- this_round_added.add(arg)
- # Insert at the beginning
- self[:0] = pre
- # Append to the end
- self.__container += post
- return self
-
- def __radd__(self, args: T.Iterable[str]):
- new = CompilerArgs(self.compiler, args)
- new += self
- return new
-
- def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]:
- # Only allow equality checks against other CompilerArgs and lists instances
- if isinstance(other, CompilerArgs):
- return self.compiler == other.compiler and self.__container == other.__container
- elif isinstance(other, list):
- return self.__container == other
- return NotImplemented
-
- def append(self, arg: str) -> None:
- self.__iadd__([arg])
-
- def extend(self, args: T.Iterable[str]) -> None:
- self.__iadd__(args)
-
- def __repr__(self) -> str:
- return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container)
-
-class Compiler:
+class Compiler(metaclass=abc.ABCMeta):
# Libraries to ignore in find_library() since they are provided by the
# compiler or the C library. Currently only used for MSVC.
ignore_libs = ()
@@ -968,8 +640,12 @@ class Compiler:
args += self.get_preprocess_only_args()
return args
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ """Return an appropriate CompilerArgs instance for this class."""
+ return CompilerArgs(self, args)
+
@contextlib.contextmanager
- def compile(self, code, extra_args=None, *, mode='link', want_output=False, temp_dir=None):
+ def compile(self, code: str, extra_args: list = None, *, mode: str = 'link', want_output: bool = False, temp_dir: str = None):
if extra_args is None:
extra_args = []
try:
@@ -986,7 +662,7 @@ class Compiler:
srcname = code.fname
# Construct the compiler command-line
- commands = CompilerArgs(self)
+ commands = self.compiler_args()
commands.append(srcname)
# Preprocess mode outputs to stdout, so no output args
if mode != 'preprocess':
@@ -1092,7 +768,7 @@ class Compiler:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
return self.linker.build_rpath_args(
env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
@@ -1102,6 +778,9 @@ class Compiler:
def openmp_flags(self):
raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language())
+ def openmp_link_flags(self):
+ return self.openmp_flags()
+
def language_stdlib_only_link_flags(self):
return []
@@ -1151,7 +830,7 @@ class Compiler:
def remove_linkerlike_args(self, args):
rm_exact = ('-headerpad_max_install_names',)
rm_prefixes = ('-Wl,', '-L',)
- rm_next = ('-L',)
+ rm_next = ('-L', '-framework',)
ret = []
iargs = iter(args)
for arg in iargs:
@@ -1184,12 +863,12 @@ class Compiler:
def get_asneeded_args(self) -> T.List[str]:
return self.linker.get_asneeded_args()
+ def headerpad_args(self) -> T.List[str]:
+ return self.linker.headerpad_args()
+
def bitcode_args(self) -> T.List[str]:
return self.linker.bitcode_args()
- def get_linker_debug_crt_args(self) -> T.List[str]:
- return self.linker.get_debug_crt_args()
-
def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
return self.linker.get_buildtype_args(buildtype)
@@ -1219,15 +898,18 @@ class Compiler:
def get_coverage_link_args(self) -> T.List[str]:
return self.linker.get_coverage_args()
+ def get_disable_assert_args(self) -> T.List[str]:
+ return []
+
def get_largefile_args(compiler):
'''
Enable transparent large-file-support for 32-bit UNIX systems
'''
- if not (compiler.info.is_windows() or compiler.info.is_darwin()):
+ if not (compiler.get_argument_syntax() == 'msvc' or compiler.info.is_darwin()):
# Enable large-file support unconditionally on all platforms other
- # than macOS and Windows. macOS is now 64-bit-only so it doesn't
- # need anything special, and Windows doesn't have automatic LFS.
+ # than macOS and MSVC. macOS is now 64-bit-only so it doesn't
+ # need anything special, and MSVC doesn't have automatic LFS.
# You must use the 64-bit counterparts explicitly.
# glibc, musl, and uclibc, and all BSD libcs support this. On Android,
# support for transparent LFS is available depending on the version of
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index c4c7da5..71a300a 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -155,10 +155,11 @@ class CPPCompiler(CLikeCompiler, Compiler):
class ClangCPPCompiler(ClangCompiler, CPPCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
- is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs):
+ is_cross, info: 'MachineInfo', exe_wrapper=None,
+ defines : T.Optional[T.List[str]] = None, **kwargs):
CPPCompiler.__init__(self, exelist, version, for_machine, is_cross,
info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, defines)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
self.warn_args = {'0': [],
'1': default_warn_args,
@@ -238,7 +239,9 @@ class EmscriptenCPPCompiler(EmscriptenMixin, LinkerEnvVarsMixin, ClangCPPCompile
class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs):
- CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, exe_wrapper, **kwargs)
+ CPPCompiler.__init__(self, exelist=exelist, version=version,
+ for_machine=for_machine, is_cross=is_cross,
+ info=info, exe_wrapper=exe_wrapper, **kwargs)
ArmclangCompiler.__init__(self)
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
self.warn_args = {'0': [],
@@ -574,7 +577,7 @@ class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixi
is_cross: bool, info: 'MachineInfo', exe_wrap, target, **kwargs):
CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrap, **kwargs)
MSVCCompiler.__init__(self, target)
- self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like
+ self.base_options = ['b_pch', 'b_vscrt', 'b_ndebug'] # FIXME add lto, pgo and the like
self.id = 'msvc'
def get_options(self):
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
index 00233b0..b109572 100644
--- a/mesonbuild/compilers/cuda.py
+++ b/mesonbuild/compilers/cuda.py
@@ -268,17 +268,15 @@ class CudaCompiler(Compiler):
def get_depfile_suffix(self):
return 'd'
- def get_linker_debug_crt_args(self) -> T.List[str]:
- return self._cook_link_args(self.host_compiler.get_linker_debug_crt_args())
-
def get_buildtype_linker_args(self, buildtype):
return self._cook_link_args(self.host_compiler.get_buildtype_linker_args(buildtype))
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return self._cook_link_args(self.host_compiler.build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath))
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ (rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ return (self._cook_link_args(rpath_args), rpath_dirs_to_remove)
def linker_to_compiler_args(self, args):
return args
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index caa8e44..d233713 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -25,7 +25,6 @@ from .compilers import (
d_ldc_buildtype_args,
clike_debug_args,
Compiler,
- CompilerArgs,
)
from .mixins.gnu import GnuCompiler
@@ -220,7 +219,7 @@ class DmdLikeCompilerMixin:
def build_rpath_args(self, env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
if self.info.is_windows():
- return []
+ return ([], set())
# GNU ld, solaris ld, and lld acting like GNU ld
if self.linker.id.startswith('ld'):
@@ -228,15 +227,16 @@ class DmdLikeCompilerMixin:
# do directly, each argument -rpath and the value to rpath, need to be
# split into two separate arguments both prefaced with the -L=.
args = []
- for r in super().build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath):
+ (rpath_args, rpath_dirs_to_remove) = super().build_rpath_args(
+ env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ for r in rpath_args:
if ',' in r:
a, b = r.split(',', maxsplit=1)
args.append(a)
args.append(self.LINKER_PREFIX + b)
else:
args.append(r)
- return args
+ return (args, rpath_dirs_to_remove)
return super().build_rpath_args(
env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
@@ -581,7 +581,7 @@ class DCompiler(Compiler):
elif not isinstance(dependencies, list):
dependencies = [dependencies]
# Collect compiler arguments
- args = CompilerArgs(self)
+ args = self.compiler_args()
for d in dependencies:
# Add compile flags needed by dependencies
args += d.get_compile_args()
@@ -645,7 +645,8 @@ class GnuDCompiler(GnuCompiler, DCompiler):
'1': default_warn_args,
'2': default_warn_args + ['-Wextra'],
'3': default_warn_args + ['-Wextra', '-Wpedantic']}
- self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt', 'b_coverage']
+ self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic',
+ 'b_vscrt', 'b_coverage', 'b_pgo', 'b_ndebug']
self._has_color_support = version_compare(self.version, '>=4.9')
# dependencies were implemented before, but broken - support was fixed in GCC 7.1+
@@ -684,6 +685,9 @@ class GnuDCompiler(GnuCompiler, DCompiler):
return args
return args + ['-shared-libphobos']
+ def get_disable_assert_args(self):
+ return ['-frelease']
+
class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
@@ -691,7 +695,7 @@ class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
info: 'MachineInfo', arch, **kwargs):
DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs)
self.id = 'llvm'
- self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt']
+ self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']
def get_colorout_args(self, colortype):
if colortype == 'always':
@@ -733,6 +737,9 @@ class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler):
return args
return args + ['-link-defaultlib-shared']
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['--release']
+
class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
@@ -740,7 +747,7 @@ class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
info: 'MachineInfo', arch, **kwargs):
DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs)
self.id = 'dmd'
- self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt']
+ self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']
def get_colorout_args(self, colortype):
if colortype == 'always':
@@ -803,3 +810,6 @@ class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
if self.info.is_windows():
return args
return args + ['-defaultlib=phobos2', '-debuglib=phobos2']
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['-release']
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index 2b20aa4..31b8558 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -214,6 +214,18 @@ class GnuFortranCompiler(GnuCompiler, FortranCompiler):
def language_stdlib_only_link_flags(self) -> T.List[str]:
return ['-lgfortran', '-lm']
+ def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None, disable_cache=False):
+ '''
+ Derived from mixins/clike.py:has_header, but without C-style usage of
+ __has_include which breaks with GCC-Fortran 10:
+ https://github.com/mesonbuild/meson/issues/7017
+ '''
+ fargs = {'prefix': prefix, 'header': hname}
+ code = '{prefix}\n#include <{header}>'
+ return self.compiles(code.format(**fargs), env, extra_args=extra_args,
+ dependencies=dependencies, mode='preprocess', disable_cache=disable_cache)
+
+
class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler):
def __init__(self, exelist, version, for_machine: MachineChoice,
is_cross, info: 'MachineInfo', exe_wrapper=None,
@@ -412,7 +424,7 @@ class FlangFortranCompiler(ClangCompiler, FortranCompiler):
**kwargs):
FortranCompiler.__init__(self, exelist, version, for_machine,
is_cross, info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, [])
self.id = 'flang'
default_warn_args = ['-Minform=inform']
self.warn_args = {'0': [],
diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py
index aa5d15d..b331d8f 100644
--- a/mesonbuild/compilers/mixins/arm.py
+++ b/mesonbuild/compilers/mixins/arm.py
@@ -27,10 +27,10 @@ if T.TYPE_CHECKING:
arm_buildtype_args = {
'plain': [],
- 'debug': ['-O0', '--debug'],
- 'debugoptimized': ['-O1', '--debug'],
- 'release': ['-O3', '-Otime'],
- 'minsize': ['-O3', '-Ospace'],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
'custom': [],
} # type: T.Dict[str, T.List[str]]
@@ -38,27 +38,27 @@ arm_optimization_args = {
'0': ['-O0'],
'g': ['-g'],
'1': ['-O1'],
- '2': ['-O2'],
- '3': ['-O3'],
- 's': [],
+ '2': [], # Compiler defaults to -O2
+ '3': ['-O3', '-Otime'],
+ 's': ['-O3'], # Compiler defaults to -Ospace
} # type: T.Dict[str, T.List[str]]
armclang_buildtype_args = {
'plain': [],
- 'debug': ['-O0', '-g'],
- 'debugoptimized': ['-O1', '-g'],
- 'release': ['-Os'],
- 'minsize': ['-Oz'],
+ 'debug': [],
+ 'debugoptimized': [],
+ 'release': [],
+ 'minsize': [],
'custom': [],
} # type: T.Dict[str, T.List[str]]
armclang_optimization_args = {
- '0': ['-O0'],
+ '0': [], # Compiler defaults to -O0
'g': ['-g'],
'1': ['-O1'],
'2': ['-O2'],
'3': ['-O3'],
- 's': ['-Os']
+ 's': ['-Oz']
} # type: T.Dict[str, T.List[str]]
@@ -181,7 +181,7 @@ class ArmclangCompiler:
# Override CCompiler.get_dependency_gen_args
def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
- return []
+ return ['-MD', '-MT', outtarget, '-MF', outfile]
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return armclang_optimization_args[optimization_level]
diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py
index 1c0ee45..7525c12 100644
--- a/mesonbuild/compilers/mixins/clang.py
+++ b/mesonbuild/compilers/mixins/clang.py
@@ -42,9 +42,10 @@ clang_optimization_args = {
} # type: T.Dict[str, T.List[str]]
class ClangCompiler(GnuLikeCompiler):
- def __init__(self):
+ def __init__(self, defines: T.Optional[T.Dict[str, str]]):
super().__init__()
self.id = 'clang'
+ self.defines = defines or {}
self.base_options.append('b_colorout')
# TODO: this really should be part of the linker base_options, but
# linkers don't have base_options.
@@ -56,6 +57,12 @@ class ClangCompiler(GnuLikeCompiler):
def get_colorout_args(self, colortype: str) -> T.List[str]:
return clang_color_args[colortype][:]
+ def has_builtin_define(self, define: str) -> bool:
+ return define in self.defines
+
+ def get_builtin_define(self, define: str) -> T.Optional[str]:
+ return self.defines.get(define)
+
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return clang_optimization_args[optimization_level]
@@ -106,6 +113,11 @@ class ClangCompiler(GnuLikeCompiler):
# (and other gcc-like compilers) cannot. This is becuse clang (being
# llvm based) is retargetable, while GCC is not.
#
+
+ # qcld: Qualcomm Snapdragon linker, based on LLVM
+ if linker == 'qcld':
+ return ['-fuse-ld=qcld']
+
if shutil.which(linker):
if not shutil.which(linker):
raise mesonlib.MesonException(
@@ -117,3 +129,6 @@ class ClangCompiler(GnuLikeCompiler):
# Clang only warns about unknown or ignored attributes, so force an
# error.
return ['-Werror=attributes']
+
+ def get_coverage_link_args(self) -> T.List[str]:
+ return ['--coverage']
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index 260342e..a42b050 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -29,15 +29,79 @@ import subprocess
import typing as T
from pathlib import Path
+from ... import arglist
from ... import mesonlib
-from ...mesonlib import LibType
from ... import mlog
+from ...linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker
+from ...mesonlib import LibType
from .. import compilers
from .visualstudio import VisualStudioLikeCompiler
if T.TYPE_CHECKING:
from ...environment import Environment
+SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$')
+
+class CLikeCompilerArgs(arglist.CompilerArgs):
+ prepend_prefixes = ('-I', '-L')
+ dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U')
+
+ # NOTE: not thorough. A list of potential corner cases can be found in
+ # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038
+ dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic')
+ dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a')
+ dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread')
+
+ def to_native(self, copy: bool = False) -> T.List[str]:
+ # Check if we need to add --start/end-group for circular dependencies
+ # between static libraries, and for recursively searching for symbols
+ # needed by static libraries that are provided by object files or
+ # shared libraries.
+ self.flush_pre_post()
+ if copy:
+ new = self.copy()
+ else:
+ new = self
+ # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which
+ # all act like (or are) gnu ld
+ # TODO: this could probably be added to the DynamicLinker instead
+ if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker)):
+ group_start = -1
+ group_end = -1
+ for i, each in enumerate(new):
+ if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \
+ not SOREGEX.match(each):
+ continue
+ group_end = i
+ if group_start < 0:
+ # First occurrence of a library
+ group_start = i
+ if group_start >= 0:
+ # Last occurrence of a library
+ new.insert(group_end + 1, '-Wl,--end-group')
+ new.insert(group_start, '-Wl,--start-group')
+ # Remove system/default include paths added with -isystem
+ if hasattr(self.compiler, 'get_default_include_dirs'):
+ default_dirs = self.compiler.get_default_include_dirs()
+ bad_idx_list = [] # type: T.List[int]
+ for i, each in enumerate(new):
+ # Remove the -isystem and the path if the path is a default path
+ if (each == '-isystem' and
+ i < (len(new) - 1) and
+ new[i + 1] in default_dirs):
+ bad_idx_list += [i, i + 1]
+ elif each.startswith('-isystem=') and each[9:] in default_dirs:
+ bad_idx_list += [i]
+ elif each.startswith('-isystem') and each[8:] in default_dirs:
+ bad_idx_list += [i]
+ for i in reversed(bad_idx_list):
+ new.pop(i)
+ return self.compiler.unix_args_to_native(new._container)
+
+ def __repr__(self) -> str:
+ self.flush_pre_post()
+ return 'CLikeCompilerArgs({!r}, {!r})'.format(self.compiler, self._container)
+
class CLikeCompiler:
@@ -48,7 +112,7 @@ class CLikeCompiler:
program_dirs_cache = {}
find_library_cache = {}
find_framework_cache = {}
- internal_libs = compilers.unixy_compiler_internal_libs
+ internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS
def __init__(self, is_cross: bool, exe_wrapper: T.Optional[str] = None):
# If a child ObjC or CPP class has already set it, don't set it ourselves
@@ -61,6 +125,9 @@ class CLikeCompiler:
else:
self.exe_wrapper = exe_wrapper.get_command()
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs:
+ return CLikeCompilerArgs(self, args)
+
def needs_static_linker(self):
return True # When compiling static libraries, so yes.
@@ -152,15 +219,24 @@ class CLikeCompiler:
if not files:
retval.append(d)
continue
- file_to_check = os.path.join(d, files[0])
- with open(file_to_check, 'rb') as fd:
- header = fd.read(5)
- # if file is not an ELF file, it's weird, but accept dir
- # if it is elf, and the class matches, accept dir
- if header[1:4] != b'ELF' or int(header[4]) == elf_class:
- retval.append(d)
- # at this point, it's an ELF file which doesn't match the
- # appropriate elf_class, so skip this one
+
+ for f in files:
+ file_to_check = os.path.join(d, f)
+ try:
+ with open(file_to_check, 'rb') as fd:
+ header = fd.read(5)
+ # if file is not an ELF file, it's weird, but accept dir
+ # if it is elf, and the class matches, accept dir
+ if header[1:4] != b'ELF' or int(header[4]) == elf_class:
+ retval.append(d)
+ # at this point, it's an ELF file which doesn't match the
+ # appropriate elf_class, so skip this one
+                    # stop scanning after the first successful read
+ break
+ except OSError:
+ # Skip the file if we can't read it
+ pass
+
return tuple(retval)
@functools.lru_cache()
@@ -254,14 +330,14 @@ class CLikeCompiler:
code = 'int main(void) { int class=0; return class; }\n'
return self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code)
- def check_header(self, hname, prefix, env, *, extra_args=None, dependencies=None):
+ def check_header(self, hname: str, prefix: str, env, *, extra_args=None, dependencies=None):
fargs = {'prefix': prefix, 'header': hname}
code = '''{prefix}
#include <{header}>'''
return self.compiles(code.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies)
- def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None, disable_cache=False):
+ def has_header(self, hname: str, prefix: str, env, *, extra_args=None, dependencies=None, disable_cache: bool = False):
fargs = {'prefix': prefix, 'header': hname}
code = '''{prefix}
#ifdef __has_include
@@ -274,7 +350,7 @@ class CLikeCompiler:
return self.compiles(code.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies, mode='preprocess', disable_cache=disable_cache)
- def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None):
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str, env, *, extra_args=None, dependencies=None):
fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
t = '''{prefix}
#include <{header}>
@@ -288,11 +364,19 @@ class CLikeCompiler:
return self.compiles(t.format(**fargs), env, extra_args=extra_args,
dependencies=dependencies)
- def _get_basic_compiler_args(self, env, mode):
+ def _get_basic_compiler_args(self, env, mode: str):
cargs, largs = [], []
- # Select a CRT if needed since we're linking
if mode == 'link':
- cargs += self.get_linker_debug_crt_args()
+ # Sometimes we need to manually select the CRT to use with MSVC.
+ # One example is when trying to do a compiler check that involves
+ # linking with static libraries since MSVC won't select a CRT for
+ # us in that case and will error out asking us to pick one.
+ try:
+ crt_val = env.coredata.base_options['b_vscrt'].value
+ buildtype = env.coredata.base_options['buildtype'].value
+ cargs += self.get_crt_compile_args(crt_val, buildtype)
+ except (KeyError, AttributeError):
+ pass
# Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env
sys_args = env.coredata.get_external_args(self.for_machine, self.language)
@@ -329,7 +413,7 @@ class CLikeCompiler:
elif not isinstance(dependencies, list):
dependencies = [dependencies]
# Collect compiler arguments
- cargs = compilers.CompilerArgs(self)
+ cargs = self.compiler_args()
largs = []
for d in dependencies:
# Add compile flags needed by dependencies
@@ -354,11 +438,11 @@ class CLikeCompiler:
def compiles(self, code: str, env, *,
extra_args: T.Sequence[T.Union[T.Sequence[str], str]] = None,
- dependencies=None, mode: str = 'compile', disable_cache=False) -> T.Tuple[bool, bool]:
+ dependencies=None, mode: str = 'compile', disable_cache: bool = False) -> T.Tuple[bool, bool]:
with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p:
return p.returncode == 0, p.cached
- def _build_wrapper(self, code: str, env, extra_args, dependencies=None, mode: str = 'compile', want_output: bool = False, disable_cache: bool = False, temp_dir=None) -> T.Tuple[bool, bool]:
+ def _build_wrapper(self, code: str, env, extra_args, dependencies=None, mode: str = 'compile', want_output: bool = False, disable_cache: bool = False, temp_dir: str = None) -> T.Tuple[bool, bool]:
args = self._get_compiler_check_args(env, extra_args, dependencies, mode)
if disable_cache or want_output:
return self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir)
@@ -369,7 +453,8 @@ class CLikeCompiler:
dependencies=dependencies, mode='link', disable_cache=disable_cache)
def run(self, code: str, env, *, extra_args=None, dependencies=None):
- if self.is_cross and self.exe_wrapper is None:
+ need_exe_wrapper = env.need_exe_wrapper(self.for_machine)
+ if need_exe_wrapper and self.exe_wrapper is None:
raise compilers.CrossNoRunException('Can not run test applications in this cross environment.')
with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p:
if p.returncode != 0:
@@ -377,7 +462,7 @@ class CLikeCompiler:
p.input_name,
p.returncode))
return compilers.RunResult(False)
- if self.is_cross:
+ if need_exe_wrapper:
cmdlist = self.exe_wrapper + [p.output_name]
else:
cmdlist = p.output_name
@@ -658,7 +743,7 @@ class CLikeCompiler:
# is not run so we don't care what the return value is.
main = '''\nint main(void) {{
void *a = (void*) &{func};
- long b = (long) a;
+ long long b = (long long) a;
return (int) b;
}}'''
return head, main
@@ -727,24 +812,29 @@ class CLikeCompiler:
# need to look for them differently. On nice compilers like clang, we
# can just directly use the __has_builtin() macro.
fargs['no_includes'] = '#include' not in prefix
- fargs['__builtin_'] = '' if funcname.startswith('__builtin_') else '__builtin_'
+ is_builtin = funcname.startswith('__builtin_')
+ fargs['is_builtin'] = is_builtin
+ fargs['__builtin_'] = '' if is_builtin else '__builtin_'
t = '''{prefix}
int main(void) {{
+
+ /* With some toolchains (MSYS2/mingw for example) the compiler
+ * provides various builtins which are not really implemented and
+ * fall back to the stdlib where they aren't provided and fail at
+ * build/link time. In case the user provides a header, including
+ * the header didn't lead to the function being defined, and the
+ * function we are checking isn't a builtin itself we assume the
+ * builtin is not functional and we just error out. */
+ #if !{no_includes:d} && !defined({func}) && !{is_builtin:d}
+ #error "No definition for {__builtin_}{func} found in the prefix"
+ #endif
+
#ifdef __has_builtin
#if !__has_builtin({__builtin_}{func})
#error "{__builtin_}{func} not found"
#endif
#elif ! defined({func})
- /* Check for {__builtin_}{func} only if no includes were added to the
- * prefix above, which means no definition of {func} can be found.
- * We would always check for this, but we get false positives on
- * MSYS2 if we do. Their toolchain is broken, but we can at least
- * give them a workaround. */
- #if {no_includes:d}
- {__builtin_}{func};
- #else
- #error "No definition for {__builtin_}{func} found in the prefix"
- #endif
+ {__builtin_}{func};
#endif
return 0;
}}'''
@@ -910,21 +1000,21 @@ class CLikeCompiler:
architecture.
'''
# If not building on macOS for Darwin, do a simple file check
- files = [Path(f) for f in files]
+ paths = [Path(f) for f in files]
if not env.machines.host.is_darwin() or not env.machines.build.is_darwin():
- for f in files:
- if f.is_file():
- return f
+ for p in paths:
+ if p.is_file():
+ return p
# Run `lipo` and check if the library supports the arch we want
- for f in files:
- if not f.is_file():
+ for p in paths:
+ if not p.is_file():
continue
- archs = mesonlib.darwin_get_object_archs(f)
+ archs = mesonlib.darwin_get_object_archs(str(p))
if archs and env.machines.host.cpu_family in archs:
- return f
+ return p
else:
mlog.debug('Rejected {}, supports {} but need {}'
- .format(f, archs, env.machines.host.cpu_family))
+ .format(p, archs, env.machines.host.cpu_family))
return None
@functools.lru_cache()
@@ -993,7 +1083,7 @@ class CLikeCompiler:
return value[:]
def find_library(self, libname, env, extra_dirs, libtype: LibType = LibType.PREFER_SHARED):
- code = 'int main(void) { return 0; }'
+ code = 'int main(void) { return 0; }\n'
return self.find_library_impl(libname, env, extra_dirs, code, libtype)
def find_framework_paths(self, env):
@@ -1093,7 +1183,7 @@ class CLikeCompiler:
'the compiler you are using. has_link_argument or '
'other similar method can be used instead.'
.format(arg))
- code = 'int i;\n'
+ code = 'extern int i;\nint i;\n'
return self.has_arguments(args, env, code, mode='compile')
def has_multi_link_arguments(self, args, env):
@@ -1102,7 +1192,7 @@ class CLikeCompiler:
# false positive.
args = self.linker.fatal_warnings() + args
args = self.linker_to_compiler_args(args)
- code = 'int main(void) { return 0; }'
+ code = 'int main(void) { return 0; }\n'
return self.has_arguments(args, env, code, mode='link')
@staticmethod
@@ -1131,3 +1221,6 @@ class CLikeCompiler:
return self.compiles(self.attribute_check_func(name), env,
extra_args=self.get_has_func_attribute_extra_args(name))
+
+ def get_disable_assert_args(self) -> T.List[str]:
+ return ['-DNDEBUG']
diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py
index 7cfb2c2..f5fd7ef 100644
--- a/mesonbuild/compilers/mixins/gnu.py
+++ b/mesonbuild/compilers/mixins/gnu.py
@@ -363,7 +363,7 @@ class GnuCompiler(GnuLikeCompiler):
# For some compiler command line arguments, the GNU compilers will
# emit a warning on stderr indicating that an option is valid for a
# another language, but still complete with exit_success
- with self._build_wrapper(code, env, args, None, mode, disable_cache=False, want_output=True) as p:
+ with self._build_wrapper(code, env, args, None, mode) as p:
result = p.returncode == 0
if self.language in {Language.CPP, Language.OBJCPP} and 'is valid for C/ObjC' in p.stde:
result = False
diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py
index 681c816..a9967d6 100644
--- a/mesonbuild/compilers/mixins/islinker.py
+++ b/mesonbuild/compilers/mixins/islinker.py
@@ -107,11 +107,8 @@ class BasicLinkerIsCompilerMixin:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
-
- def get_linker_debug_crt_args(self) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
def get_asneeded_args(self) -> T.List[str]:
return []
diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py
index d0004ce..93101b5 100644
--- a/mesonbuild/compilers/mixins/visualstudio.py
+++ b/mesonbuild/compilers/mixins/visualstudio.py
@@ -114,7 +114,7 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta):
# See: https://ninja-build.org/manual.html#_deps
always_args = ['/nologo', '/showIncludes']
warn_args = {
- '0': ['/W1'],
+ '0': [],
'1': ['/W2'],
'2': ['/W3'],
'3': ['/W4'],
@@ -208,6 +208,9 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta):
def openmp_flags(self) -> T.List[str]:
return ['/openmp']
+ def openmp_link_flags(self) -> T.List[str]:
+ return []
+
# FIXME, no idea what these should be.
def thread_flags(self, env: 'Environment') -> T.List[str]:
return []
diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py
index b042bc5..f642a1f 100644
--- a/mesonbuild/compilers/objc.py
+++ b/mesonbuild/compilers/objc.py
@@ -86,7 +86,7 @@ class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
**kwargs):
ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross,
info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, [])
default_warn_args = ['-Wall', '-Winvalid-pch']
self.warn_args = {'0': [],
'1': default_warn_args,
diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py
index 4cfb1ab..871a0a6 100644
--- a/mesonbuild/compilers/objcpp.py
+++ b/mesonbuild/compilers/objcpp.py
@@ -84,7 +84,7 @@ class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
is_cross, info: 'MachineInfo', exe_wrapper=None,
**kwargs):
ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs)
- ClangCompiler.__init__(self)
+ ClangCompiler.__init__(self, [])
default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor']
self.warn_args = {'0': [],
'1': default_warn_args,
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 06f07f3..f2f092b 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -12,17 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from . import mlog
+from . import mlog, mparser
import pickle, os, uuid
import sys
from itertools import chain
from pathlib import PurePath
from collections import OrderedDict, defaultdict
from .mesonlib import (
- Language, MesonException, MachineChoice, PerMachine, OrderedSet,
+ Language, EnvironmentException, MesonException, MachineChoice, PerMachine, OrderedSet,
default_libdir, default_libexecdir, default_prefix, split_args
)
-from .envconfig import get_env_var_pair
from .wrap import WrapMode
import ast
import argparse
@@ -38,7 +37,7 @@ if T.TYPE_CHECKING:
OptionDictType = T.Dict[str, 'UserOption[T.Any]']
-version = '0.54.999'
+version = '0.55.999'
backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode']
default_yielding = False
@@ -99,16 +98,16 @@ class UserBooleanOption(UserOption[bool]):
class UserIntegerOption(UserOption[int]):
def __init__(self, description, value, yielding=None):
min_value, max_value, default_value = value
- super().__init__(description, [True, False], yielding)
self.min_value = min_value
self.max_value = max_value
- self.set_value(default_value)
c = []
if min_value is not None:
c.append('>=' + str(min_value))
if max_value is not None:
c.append('<=' + str(max_value))
- self.choices = ', '.join(c)
+ choices = ', '.join(c)
+ super().__init__(description, choices, yielding)
+ self.set_value(default_value)
def validate_value(self, value) -> int:
if isinstance(value, str):
@@ -160,8 +159,16 @@ class UserComboOption(UserOption[str]):
def validate_value(self, value):
if value not in self.choices:
+ if isinstance(value, bool):
+ _type = 'boolean'
+ elif isinstance(value, (int, float)):
+ _type = 'number'
+ else:
+ _type = 'string'
optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices])
- raise MesonException('Value "%s" for combo option is not one of the choices. Possible choices are: %s.' % (value, optionsstring))
+ raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.'
+ ' Possible choices are (as string): {}.'.format(
+ value, _type, self.description, optionsstring))
return value
class UserArrayOption(UserOption[T.List[str]]):
@@ -227,14 +234,6 @@ class UserFeatureOption(UserComboOption):
def is_auto(self):
return self.value == 'auto'
-
-def load_configs(filenames: T.List[str]) -> configparser.ConfigParser:
- """Load configuration files from a named subdirectory."""
- config = configparser.ConfigParser(interpolation=None)
- config.read(filenames)
- return config
-
-
if T.TYPE_CHECKING:
CacheKeyType = T.Tuple[T.Tuple[T.Any, ...], ...]
SubCacheKeyType = T.Tuple[T.Any, ...]
@@ -366,14 +365,14 @@ class CoreData:
self.install_guid = str(uuid.uuid4()).upper()
self.target_guids = {}
self.version = version
- self.builtins = {} # : OptionDictType
+ self.builtins = {} # type: OptionDictType
self.builtins_per_machine = PerMachine({}, {})
- self.backend_options = {} # : OptionDictType
- self.user_options = {} # : OptionDictType
+ self.backend_options = {} # type: OptionDictType
+ self.user_options = {} # type: OptionDictType
self.compiler_options = PerMachine(
defaultdict(dict),
defaultdict(dict),
- ) # : PerMachine[T.defaultdict[Language, OptionDictType]]
+ ) # type: PerMachine[T.defaultdict[Language, OptionDictType]]
self.base_options = {} # : OptionDictType
self.cross_files = self.__load_config_files(options, scratch_dir, 'cross')
self.compilers = PerMachine(OrderedDict(), OrderedDict())
@@ -382,8 +381,10 @@ class CoreData:
host_cache = DependencyCache(self.builtins_per_machine, MachineChoice.BUILD)
self.deps = PerMachine(build_cache, host_cache) # type: PerMachine[DependencyCache]
self.compiler_check_cache = OrderedDict()
+
# Only to print a warning if it changes between Meson invocations.
self.config_files = self.__load_config_files(options, scratch_dir, 'native')
+ self.builtin_options_libdir_cross_fixup()
self.init_builtins('')
@staticmethod
@@ -444,12 +445,12 @@ class CoreData:
raise MesonException('Cannot find specified {} file: {}'.format(ftype, f))
return real
- def libdir_cross_fixup(self):
+ def builtin_options_libdir_cross_fixup(self):
# By default set libdir to "lib" when cross compiling since
# getting the "system default" is always wrong on multiarch
# platforms as it gets a value like lib/x86_64-linux-gnu.
if self.cross_files:
- self.builtins['libdir'].value = 'lib'
+ builtin_options['libdir'].default = 'lib'
def sanitize_prefix(self, prefix):
prefix = os.path.expanduser(prefix)
@@ -490,7 +491,7 @@ class CoreData:
# commonpath will always return a path in the native format, so we
# must use pathlib.PurePath to do the same conversion before
# comparing.
- msg = ('The value of the {!r} option is {!r} which must be a '
+ msg = ('The value of the {!r} option is \'{!s}\' which must be a '
'subdir of the prefix {!r}.\nNote that if you pass a '
'relative path, it is assumed to be a subdir of prefix.')
# os.path.commonpath doesn't understand case-insensitive filesystems,
@@ -510,7 +511,6 @@ class CoreData:
for for_machine in iter(MachineChoice):
for key, opt in builtin_options_per_machine.items():
self.add_builtin_option(self.builtins_per_machine[for_machine], key, opt, subproject)
- self.libdir_cross_fixup()
def add_builtin_option(self, opts_map, key, opt, subproject):
if subproject:
@@ -694,7 +694,9 @@ class CoreData:
if type(oldval) != type(value):
self.user_options[name] = value
- def is_cross_build(self) -> bool:
+ def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+ if when_building_for == MachineChoice.BUILD:
+ return False
return len(self.cross_files) > 0
def strip_build_option_names(self, options):
@@ -749,87 +751,54 @@ class CoreData:
if not self.is_cross_build():
self.copy_build_options_from_regular_ones()
- def set_default_options(self, default_options, subproject, env):
- # Warn if the user is using two different ways of setting build-type
- # options that override each other
- if 'buildtype' in env.cmd_line_options and \
- ('optimization' in env.cmd_line_options or 'debug' in env.cmd_line_options):
- mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
- 'Using both is redundant since they override each other. '
- 'See: https://mesonbuild.com/Builtin-options.html#build-type-options')
- cmd_line_options = OrderedDict()
- # Set project default_options as if they were passed to the cmdline.
- # Subprojects can only define default for user options and not yielding
- # builtin option.
- from . import optinterpreter
- for k, v in default_options.items():
+ def set_default_options(self, default_options: 'T.OrderedDict[str, str]', subproject: str, env: 'Environment') -> None:
+ def make_key(key: str) -> str:
if subproject:
- if (k not in builtin_options or builtin_options[k].yielding) \
- and optinterpreter.is_invalid_name(k, log=False):
- continue
- k = subproject + ':' + k
- cmd_line_options[k] = v
-
- # Override project default_options using conf files (cross or native)
- for k, v in env.paths.host:
- if v is not None:
- cmd_line_options[k] = v
-
- # Override all the above defaults using the command-line arguments
- # actually passed to us
- cmd_line_options.update(env.cmd_line_options)
- env.cmd_line_options = cmd_line_options
-
- # Create a subset of cmd_line_options, keeping only options for this
- # subproject. Also take builtin options if it's the main project.
- # Language and backend specific options will be set later when adding
- # languages and setting the backend (builtin options must be set first
- # to know which backend we'll use).
+ return '{}:{}'.format(subproject, key)
+ return key
+
options = OrderedDict()
- # Some options default to environment variables if they are
- # unset, set those now. These will either be overwritten
- # below, or they won't. These should only be set on the first run.
- for for_machine in MachineChoice:
- p_env_pair = get_env_var_pair(for_machine, self.is_cross_build(), 'PKG_CONFIG_PATH')
- if p_env_pair is not None:
- p_env_var, p_env = p_env_pair
-
- # PKG_CONFIG_PATH may contain duplicates, which must be
- # removed, else a duplicates-in-array-option warning arises.
- p_list = list(OrderedSet(p_env.split(':')))
-
- key = 'pkg_config_path'
- if for_machine == MachineChoice.BUILD:
- key = 'build.' + key
-
- if env.first_invocation:
- options[key] = p_list
- elif options.get(key, []) != p_list:
- mlog.warning(
- p_env_var +
- ' environment variable has changed '
- 'between configurations, meson ignores this. '
- 'Use -Dpkg_config_path to change pkg-config search '
- 'path instead.'
- )
-
- def remove_prefix(text, prefix):
- if text.startswith(prefix):
- return text[len(prefix):]
- return text
-
- for k, v in env.cmd_line_options.items():
- if subproject:
- if not k.startswith(subproject + ':'):
- continue
- elif k not in builtin_options.keys() \
- and remove_prefix(k, 'build.') not in builtin_options_per_machine.keys():
- if ':' in k:
- continue
- if optinterpreter.is_invalid_name(k, log=False):
+ # TODO: validate these
+ from .compilers import all_languages, base_options
+ lang_prefixes = tuple('{}_'.format(l) for l in all_languages)
+ # split arguments that can be set now, and those that cannot so they
+ # can be set later, when they've been initialized.
+ for k, v in default_options.items():
+ if k.startswith(lang_prefixes):
+ lang, key = k.split('_', 1)
+ for machine in MachineChoice:
+ if key not in env.compiler_options[machine][lang]:
+ env.compiler_options[machine][lang][key] = v
+ elif k in base_options:
+ if not subproject and k not in env.base_options:
+ env.base_options[k] = v
+ else:
+ options[make_key(k)] = v
+
+ for k, v in chain(env.meson_options.host.get('', {}).items(),
+ env.meson_options.host.get(subproject, {}).items()):
+ options[make_key(k)] = v
+
+ for k, v in chain(env.meson_options.build.get('', {}).items(),
+ env.meson_options.build.get(subproject, {}).items()):
+ if k in builtin_options_per_machine:
+ options[make_key('build.{}'.format(k))] = v
+
+ options.update({make_key(k): v for k, v in env.user_options.get(subproject, {}).items()})
+
+ # Some options (namely the compiler options) are not present in
+ # coredata until the compiler is fully initialized. As such, we need to
+ # put those options into env.meson_options, only if they're not already
+ # in there, as the machine files and command line have precedence.
+ for k, v in default_options.items():
+ if k in builtin_options and not builtin_options[k].yielding:
+ continue
+ for machine in MachineChoice:
+ if machine is MachineChoice.BUILD and not self.is_cross_build():
continue
- options[k] = v
+ if k not in env.meson_options[machine][subproject]:
+ env.meson_options[machine][subproject][k] = v
self.set_options(options, subproject=subproject)
@@ -845,24 +814,19 @@ class CoreData:
env.is_cross_build(),
env.properties[for_machine]).items():
# prefixed compiler options affect just this machine
- opt_prefix = for_machine.get_prefix()
- user_k = opt_prefix + lang.get_lower_case_name() + '_' + k
- if user_k in env.cmd_line_options:
- o.set_value(env.cmd_line_options[user_k])
+ if k in env.compiler_options[for_machine].get(lang, {}):
+ o.set_value(env.compiler_options[for_machine][lang][k])
self.compiler_options[for_machine][lang].setdefault(k, o)
- def process_new_compiler(self, lang: Language, comp: T.Type['Compiler'], env: 'Environment') -> None:
+ def process_new_compiler(self, lang: Language, comp: 'Compiler', env: 'Environment') -> None:
from . import compilers
self.compilers[comp.for_machine][lang] = comp
- enabled_opts = []
for k, o in comp.get_options().items():
# prefixed compiler options affect just this machine
- opt_prefix = comp.for_machine.get_prefix()
- user_k = opt_prefix + lang.get_lower_case_name() + '_' + k
- if user_k in env.cmd_line_options:
- o.set_value(env.cmd_line_options[user_k])
+ if k in env.compiler_options[comp.for_machine].get(lang, {}):
+ o.set_value(env.compiler_options[comp.for_machine][lang][k])
self.compiler_options[comp.for_machine][lang].setdefault(k, o)
enabled_opts = []
@@ -870,16 +834,16 @@ class CoreData:
if optname in self.base_options:
continue
oobj = compilers.base_options[optname]
- if optname in env.cmd_line_options:
- oobj.set_value(env.cmd_line_options[optname])
+ if optname in env.base_options:
+ oobj.set_value(env.base_options[optname])
enabled_opts.append(optname)
self.base_options[optname] = oobj
self.emit_base_options_warnings(enabled_opts)
def emit_base_options_warnings(self, enabled_opts: list):
if 'b_bitcode' in enabled_opts:
- mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.')
- mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.')
+ mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
+ mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)
class CmdLineFileParser(configparser.ConfigParser):
def __init__(self):
@@ -887,6 +851,69 @@ class CmdLineFileParser(configparser.ConfigParser):
# storing subproject options like "subproject:option=value"
super().__init__(delimiters=['='], interpolation=None)
+class MachineFileParser():
+ def __init__(self, filenames: T.List[str]):
+ self.parser = CmdLineFileParser()
+ self.constants = {'True': True, 'False': False}
+ self.sections = {}
+
+ self.parser.read(filenames)
+
+ # Parse [constants] first so they can be used in other sections
+ if self.parser.has_section('constants'):
+ self.constants.update(self._parse_section('constants'))
+
+ for s in self.parser.sections():
+ if s == 'constants':
+ continue
+ self.sections[s] = self._parse_section(s)
+
+ def _parse_section(self, s):
+ self.scope = self.constants.copy()
+ section = {}
+ for entry, value in self.parser.items(s):
+ if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+ raise EnvironmentException('Malformed variable name {!r} in machine file.'.format(entry))
+ # Windows paths...
+ value = value.replace('\\', '\\\\')
+ try:
+ ast = mparser.Parser(value, 'machinefile').parse()
+ res = self._evaluate_statement(ast.lines[0])
+ except MesonException:
+ raise EnvironmentException('Malformed value in machine file variable {!r}.'.format(entry))
+ except KeyError as e:
+ raise EnvironmentException('Undefined constant {!r} in machine file variable {!r}.'.format(e.args[0], entry))
+ section[entry] = res
+ self.scope[entry] = res
+ return section
+
+ def _evaluate_statement(self, node):
+ if isinstance(node, (mparser.StringNode)):
+ return node.value
+ elif isinstance(node, mparser.BooleanNode):
+ return node.value
+ elif isinstance(node, mparser.NumberNode):
+ return node.value
+ elif isinstance(node, mparser.ArrayNode):
+ return [self._evaluate_statement(arg) for arg in node.args.arguments]
+ elif isinstance(node, mparser.IdNode):
+ return self.scope[node.value]
+ elif isinstance(node, mparser.ArithmeticNode):
+ l = self._evaluate_statement(node.left)
+ r = self._evaluate_statement(node.right)
+ if node.operation == 'add':
+ if (isinstance(l, str) and isinstance(r, str)) or \
+ (isinstance(l, list) and isinstance(r, list)):
+ return l + r
+ elif node.operation == 'div':
+ if isinstance(l, str) and isinstance(r, str):
+ return os.path.join(l, r)
+ raise EnvironmentException('Unsupported node type')
+
+def parse_machine_files(filenames):
+ parser = MachineFileParser(filenames)
+ return parser.sections
+
def get_cmd_line_file(build_dir):
return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
@@ -1102,23 +1129,25 @@ class BuiltinOption(T.Generic[_T, _U]):
cmdline_name = self.argparse_name_to_arg(prefix + name)
parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
+
# Update `docs/markdown/Builtin-options.md` after changing the options below
-builtin_options = OrderedDict([
- # Directories
- ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
- ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
- ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')),
- ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')),
- ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
- ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
- ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
- ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
+BUILTIN_DIR_OPTIONS = OrderedDict([
+ ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
+ ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
+ ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')),
+ ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')),
+ ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')),
+ ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
+ ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
+ ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')),
('localstatedir', BuiltinOption(UserStringOption, 'Localstate data directory', 'var')),
('mandir', BuiltinOption(UserStringOption, 'Manual page directory', 'share/man')),
('sbindir', BuiltinOption(UserStringOption, 'System executable directory', 'sbin')),
('sharedstatedir', BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')),
('sysconfdir', BuiltinOption(UserStringOption, 'Sysconf data directory', 'etc')),
- # Core options
+]) # type: OptionDictType
+
+BUILTIN_CORE_OPTIONS = OrderedDict([
('auto_features', BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')),
('backend', BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist)),
('buildtype', BuiltinOption(UserComboOption, 'Build type to use', 'debug',
@@ -1134,10 +1163,13 @@ builtin_options = OrderedDict([
('strip', BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
('unity', BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
('unity_size', BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
- ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'])),
+ ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)),
('werror', BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
('wrap_mode', BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])),
-])
+ ('force_fallback_for', BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
+]) # type: OptionDictType
+
+builtin_options = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
builtin_options_per_machine = OrderedDict([
('pkg_config_path', BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
@@ -1173,3 +1205,4 @@ forbidden_target_names = {'clean': None,
'dist': None,
'distcheck': None,
}
+
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index 8cee491..3c204b9 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -22,14 +22,13 @@ import json
import shlex
import shutil
import stat
+import sys
import textwrap
import platform
import typing as T
from enum import Enum
from pathlib import Path, PurePath
-import pkg_resources
-
from .. import mlog
from .. import mesonlib
from ..compilers import clib_langs
@@ -41,6 +40,7 @@ from ..mesonlib import (
Popen_safe, version_compare_many, version_compare, listify, stringlistify, extract_as_list, split_args,
Version, LibType,
)
+from ..mesondata import mesondata
if T.TYPE_CHECKING:
from ..compilers.compilers import CompilerType # noqa: F401
@@ -78,6 +78,30 @@ class DependencyMethods(Enum):
DUB = 'dub'
+def find_external_program(env: Environment, for_machine: MachineChoice, name: str,
+ display_name: str, default_names: T.List[str],
+ allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+ """Find an external program, chcking the cross file plus any default options."""
+ # Lookup in cross or machine file.
+ potential_path = env.lookup_binary_entry(for_machine, name)
+ if potential_path is not None:
+ mlog.debug('{} binary for {} specified from cross file, native file, '
+ 'or env var as {}'.format(display_name, for_machine, potential_path))
+ yield ExternalProgram.from_entry(name, potential_path)
+ # We never fallback if the user-specified option is no good, so
+ # stop returning options.
+ return
+ mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name))
+ # Fallback on hard-coded defaults, if a default binary is allowed for use
+ # with cross targets, or if this is not a cross target
+ if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
+ for potential_path in default_names:
+ mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path)
+ yield ExternalProgram(potential_path, silent=True)
+ else:
+ mlog.debug('Default target is not allowed for cross use')
+
+
class Dependency:
@classmethod
@@ -229,6 +253,16 @@ class InternalDependency(Dependency):
self.ext_deps = ext_deps
self.variables = variables
+ def __deepcopy__(self, memo: dict) -> 'InternalDependency':
+ result = self.__class__.__new__(self.__class__)
+ memo[id(self)] = result
+ for k, v in self.__dict__.items():
+ if k in ['libraries', 'whole_libraries']:
+ setattr(result, k, copy.copy(v))
+ else:
+ setattr(result, k, copy.deepcopy(v, memo))
+ return result
+
def get_pkgconfig_variable(self, variable_name, kwargs):
raise DependencyException('Method "get_pkgconfig_variable()" is '
'invalid for an internal dependency')
@@ -354,25 +388,6 @@ class ExternalDependency(Dependency, HasNativeKwarg):
raise DependencyException(m.format(self.name, not_found, self.version))
return
- # Create an iterator of options
- def search_tool(self, name, display_name, default_names):
- # Lookup in cross or machine file.
- potential_path = self.env.lookup_binary_entry(self.for_machine, name)
- if potential_path is not None:
- mlog.debug('{} binary for {} specified from cross file, native file, '
- 'or env var as {}'.format(display_name, self.for_machine, potential_path))
- yield ExternalProgram.from_entry(name, potential_path)
- # We never fallback if the user-specified option is no good, so
- # stop returning options.
- return
- mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name))
- # Fallback on hard-coded defaults.
- # TODO prefix this for the cross case instead of ignoring thing.
- if self.env.machines.matches_build_machine(self.for_machine):
- for potential_path in default_names:
- mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path)
- yield ExternalProgram(potential_path, silent=True)
-
class NotFoundDependency(Dependency):
def __init__(self, environment):
@@ -421,8 +436,6 @@ class ConfigToolDependency(ExternalDependency):
self.config = None
return
self.version = version
- if getattr(self, 'finish_init', None):
- self.finish_init(self)
def _sanitize_version(self, version):
"""Remove any non-numeric, non-point version suffixes."""
@@ -433,34 +446,20 @@ class ConfigToolDependency(ExternalDependency):
return m.group(0).rstrip('.')
return version
- def find_config(self, versions=None, returncode: int = 0):
+ def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) \
+ -> T.Tuple[T.Optional[str], T.Optional[str]]:
"""Helper method that searches for config tool binaries in PATH and
returns the one that best matches the given version requirements.
"""
if not isinstance(versions, list) and versions is not None:
versions = listify(versions)
-
- tool = self.env.lookup_binary_entry(self.for_machine, self.tool_name)
- if tool is not None:
- tools = [tool]
- else:
- if not self.env.machines.matches_build_machine(self.for_machine):
- mlog.deprecation('No entry for {0} specified in your cross file. '
- 'Falling back to searching PATH. This may find a '
- 'native version of {0}! This will become a hard '
- 'error in a future version of meson'.format(self.tool_name))
- tools = [[t] for t in self.tools]
-
- best_match = (None, None)
- for tool in tools:
- if len(tool) == 1:
- # In some situations the command can't be directly executed.
- # For example Shell scripts need to be called through sh on
- # Windows (see issue #1423).
- potential_bin = ExternalProgram(tool[0], silent=True)
- if not potential_bin.found():
- continue
- tool = potential_bin.get_command()
+ best_match = (None, None) # type: T.Tuple[T.Optional[str], T.Optional[str]]
+ for potential_bin in find_external_program(
+ self.env, self.for_machine, self.tool_name,
+ self.tool_name, self.tools, allow_default_for_cross=False):
+ if not potential_bin.found():
+ continue
+ tool = potential_bin.get_command()
try:
p, out = Popen_safe(tool + [self.version_arg])[:2]
except (FileNotFoundError, PermissionError):
@@ -581,9 +580,9 @@ class PkgConfigDependency(ExternalDependency):
else:
assert PkgConfigDependency.class_pkgbin[self.for_machine] is None
mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine)
- for potential_pkgbin in self.search_tool('pkgconfig', 'Pkg-config', environment.default_pkgconfig):
- mlog.debug('Trying pkg-config binary {} for machine {} at {}'
- .format(potential_pkgbin.name, self.for_machine, potential_pkgbin.command))
+ for potential_pkgbin in find_external_program(
+ self.env, self.for_machine, 'pkgconfig', 'Pkg-config',
+ environment.default_pkgconfig, allow_default_for_cross=False):
version_if_ok = self.check_pkgconfig(potential_pkgbin)
if not version_if_ok:
continue
@@ -1090,8 +1089,9 @@ class CMakeDependency(ExternalDependency):
# Setup the trace parser
self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir())
+ cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
- CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info()
+ CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
if self.cmakeinfo is None:
raise self._gen_exception('Unable to obtain CMake system information')
@@ -1101,25 +1101,9 @@ class CMakeDependency(ExternalDependency):
modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
- cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
if cm_path:
cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
-
- pref_path = self.env.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value
- env_pref_path = get_env_var(
- self.for_machine,
- self.env.is_cross_build(),
- 'CMAKE_PREFIX_PATH')
- if env_pref_path is not None:
- env_pref_path = env_pref_path.split(os.pathsep)
- env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings
- if not pref_path:
- pref_path = []
- pref_path += env_pref_path
- if pref_path:
- cm_args.append('-DCMAKE_PREFIX_PATH={}'.format(';'.join(pref_path)))
-
- if not self._preliminary_find_check(name, cm_path, pref_path, environment.machines[self.for_machine]):
+ if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
mlog.debug('Preliminary CMake check failed. Aborting.')
return
self._detect_dep(name, modules, components, cm_args)
@@ -1129,7 +1113,7 @@ class CMakeDependency(ExternalDependency):
return s.format(self.__class__.__name__, self.name, self.is_found,
self.version_reqs)
- def _get_cmake_info(self):
+ def _get_cmake_info(self, cm_args):
mlog.debug("Extracting basic cmake information")
res = {}
@@ -1148,6 +1132,7 @@ class CMakeDependency(ExternalDependency):
# Prepare options
cmake_opts = temp_parser.trace_args() + ['.']
+ cmake_opts += cm_args
if len(i) > 0:
cmake_opts = ['-G', i] + cmake_opts
@@ -1171,12 +1156,17 @@ class CMakeDependency(ExternalDependency):
except MesonException:
return None
+ def process_paths(l: T.List[str]) -> T.Set[str]:
+ l = [x.split(':') for x in l]
+ l = [x for sublist in l for x in sublist]
+ return set(l)
+
# Extract the variables and sanity check them
- root_paths = set(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
- root_paths.update(set(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
+ root_paths = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
+ root_paths.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
root_paths = sorted(root_paths)
root_paths = list(filter(lambda x: os.path.isdir(x), root_paths))
- module_paths = set(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
+ module_paths = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
rooted_paths = []
for j in [Path(x) for x in root_paths]:
for i in [Path(x) for x in module_paths]:
@@ -1462,8 +1452,15 @@ class CMakeDependency(ExternalDependency):
cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x]
cfg = cfgs[0]
- if 'RELEASE' in cfgs:
- cfg = 'RELEASE'
+ is_debug = self.env.coredata.get_builtin_option('debug');
+ if is_debug:
+ if 'DEBUG' in cfgs:
+ cfg = 'DEBUG'
+ elif 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
+ else:
+ if 'RELEASE' in cfgs:
+ cfg = 'RELEASE'
if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties:
libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x]
@@ -1526,8 +1523,7 @@ class CMakeDependency(ExternalDependency):
build_dir = self._get_build_dir()
# Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt
- # Per the warning in pkg_resources, this is *not* a path and os.path and Pathlib are *not* safe to use here.
- cmake_txt = pkg_resources.resource_string('mesonbuild', 'dependencies/data/' + cmake_file).decode()
+ cmake_txt = mesondata['dependencies/data/' + cmake_file].data
# In general, some Fortran CMake find_package() also require C language enabled,
# even if nothing from C is directly used. An easy Fortran example that fails
@@ -1802,6 +1798,10 @@ class ExternalProgram:
self.name = name
if command is not None:
self.command = listify(command)
+ if mesonlib.is_windows():
+ cmd = self.command[0]
+ args = self.command[1:]
+ self.command = self._search_windows_special_cases(name, cmd) + args
else:
all_search_dirs = [search_dir]
if extra_search_dirs:
@@ -1855,14 +1855,22 @@ class ExternalProgram:
# Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc.
if 'USERPROFILE' not in os.environ:
return path
- # Ignore executables in the WindowsApps directory which are
- # zero-sized wrappers that magically open the Windows Store to
- # install the application.
+ # The WindowsApps directory is a bit of a problem. It contains
+ # some zero-sized .exe files which have "reparse points", that
+ # might either launch an installed application, or might open
+ # a page in the Windows Store to download the application.
+ #
+ # To handle the case where the python interpreter we're
+ # running on came from the Windows Store, if we see the
+ # WindowsApps path in the search path, replace it with
+ # dirname(sys.executable).
appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps'
paths = []
for each in path.split(os.pathsep):
if Path(each) != appstore_dir:
paths.append(each)
+ elif 'WindowsApps' in sys.executable:
+ paths.append(os.path.dirname(sys.executable))
return os.pathsep.join(paths)
@staticmethod
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
index 3341f3e..3dd0fd6 100644
--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -20,9 +20,10 @@ from pathlib import Path
from .. import mlog
from .. import mesonlib
+from ..envconfig import get_env_var
from ..environment import Environment
-from .base import (DependencyException, ExternalDependency)
+from .base import DependencyException, ExternalDependency, PkgConfigDependency
from .misc import threads_factory
# On windows 3 directory layouts are supported:
@@ -163,8 +164,8 @@ class BoostLibraryFile():
if not tags:
return
- # Without any tags mt is assumed, however, an absents of mt in the name
- # with tags present indicates that the lib was build without mt support
+ # Without any tags mt is assumed, however, an absence of mt in the name
+ # with tags present indicates that the lib was built without mt support
self.mt = False
for i in tags:
if i == 'mt':
@@ -189,13 +190,13 @@ class BoostLibraryFile():
def __lt__(self, other: T.Any) -> bool:
if isinstance(other, BoostLibraryFile):
return (
- self.mod_name, self.version_lib, self.arch, self.static,
+ self.mod_name, self.static, self.version_lib, self.arch,
not self.mt, not self.runtime_static,
not self.debug, self.runtime_debug, self.python_debug,
self.stlport, self.deprecated_iostreams,
self.name,
) < (
- other.mod_name, other.version_lib, other.arch, other.static,
+ other.mod_name, other.static, other.version_lib, other.arch,
not other.mt, not other.runtime_static,
not other.debug, other.runtime_debug, other.python_debug,
other.stlport, other.deprecated_iostreams,
@@ -344,6 +345,7 @@ class BoostDependency(ExternalDependency):
self.multithreading = kwargs.get('threading', 'multi') == 'multi'
self.boost_root = None
+ self.explicit_static = 'static' in kwargs
# Extract and validate modules
self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
@@ -366,36 +368,27 @@ class BoostDependency(ExternalDependency):
self.arch = environment.machines[self.for_machine].cpu_family
self.arch = boost_arch_map.get(self.arch, None)
- # Prefere BOOST_INCLUDEDIR and BOOST_LIBRARYDIR if preset
- boost_manual_env = [x in os.environ for x in ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR']]
- if all(boost_manual_env):
- inc_dir = Path(os.environ['BOOST_INCLUDEDIR'])
- lib_dir = Path(os.environ['BOOST_LIBRARYDIR'])
- mlog.debug('Trying to find boost with:')
- mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir))
- mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir))
-
- boost_inc_dir = None
- for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
- if j.is_file():
- boost_inc_dir = self._include_dir_from_version_header(j)
- break
- if not boost_inc_dir:
- self.is_found = False
- return
+ # First, look for paths specified in a machine file
+ props = self.env.properties[self.for_machine]
+ boost_property_env = [props.get('boost_includedir'), props.get('boost_librarydir'), props.get('boost_root')]
+ if any(boost_property_env):
+ self.detect_boost_machine_file(props)
+ return
- self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+ # Next, look for paths in the environment
+ boost_manual_env_list = ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR', 'BOOST_ROOT', 'BOOSTROOT']
+ boost_manual_env = [get_env_var(self.for_machine, self.env.is_cross_build, x) for x in boost_manual_env_list]
+ if any(boost_manual_env):
+ self.detect_boost_env()
return
- elif any(boost_manual_env):
- mlog.warning('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.')
- # A) Detect potential boost root directories (uses also BOOST_ROOT env var)
- roots = self.detect_roots()
- roots = list(mesonlib.OrderedSet(roots))
+ # Finally, look for paths from .pc files and from searching the filesystem
+ self.detect_roots()
- # B) Foreach candidate
+ def check_and_set_roots(self, roots) -> None:
+ roots = list(mesonlib.OrderedSet(roots))
for j in roots:
- # 1. Look for the boost headers (boost/version.pp)
+ # 1. Look for the boost headers (boost/version.hpp)
mlog.debug('Checking potential boost root {}'.format(j.as_posix()))
inc_dirs = self.detect_inc_dirs(j)
inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions
@@ -410,11 +403,88 @@ class BoostDependency(ExternalDependency):
self.boost_root = j
break
+ def detect_boost_machine_file(self, props) -> None:
+ incdir = props.get('boost_includedir')
+ libdir = props.get('boost_librarydir')
+
+ if incdir and libdir:
+ inc_dir = Path(props['boost_includedir'])
+ lib_dir = Path(props['boost_librarydir'])
+
+ if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+ raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute')
+
+ mlog.debug('Trying to find boost with:')
+ mlog.debug(' - boost_includedir = {}'.format(inc_dir))
+ mlog.debug(' - boost_librarydir = {}'.format(lib_dir))
+
+ return self.detect_split_root(inc_dir, lib_dir)
+
+ elif incdir or libdir:
+ raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)')
+
+ rootdir = props.get('boost_root')
+ # It shouldn't be possible to get here without something in boost_root
+ assert(rootdir)
+
+ raw_paths = mesonlib.stringlistify(rootdir)
+ paths = [Path(x) for x in raw_paths]
+ if paths and any([not x.is_absolute() for x in paths]):
+ raise DependencyException('boost_root path given in machine file must be absolute')
+
+ self.check_and_set_roots(paths)
+
+ def detect_boost_env(self):
+ boost_includedir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_INCLUDEDIR')
+ boost_librarydir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_LIBRARYDIR')
+
+ boost_manual_env = [boost_includedir, boost_librarydir]
+ if all(boost_manual_env):
+ inc_dir = Path(boost_includedir)
+ lib_dir = Path(boost_librarydir)
+
+ if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+ raise DependencyException('Paths given in BOOST_INCLUDEDIR and BOOST_LIBRARYDIR must be absolute')
+
+ mlog.debug('Trying to find boost with:')
+ mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir))
+ mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir))
+
+ return self.detect_split_root(inc_dir, lib_dir)
+
+ elif any(boost_manual_env):
+ raise DependencyException('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.')
+
+ boost_root = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_ROOT')
+ boostroot = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOSTROOT')
+
+ # It shouldn't be possible to get here without something in BOOST_ROOT or BOOSTROOT
+ assert(boost_root or boostroot)
+
+ for path, name in [(boost_root, 'BOOST_ROOT'), (boostroot, 'BOOSTROOT')]:
+ if path:
+ raw_paths = path.split(os.pathsep)
+ paths = [Path(x) for x in raw_paths]
+ if paths and any([not x.is_absolute() for x in paths]):
+ raise DependencyException('Paths in {} must be absolute'.format(name))
+ break
+
+ self.check_and_set_roots(paths)
+
def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
+ mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
+ mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
+
# 2. Find all boost libraries
libs = [] # type: T.List[BoostLibraryFile]
for i in lib_dirs:
- libs += self.detect_libraries(i)
+ libs = self.detect_libraries(i)
+ if libs:
+ mlog.debug(' - found boost library dir: {}'.format(i))
+ # mlog.debug(' - raw library list:')
+ # for j in libs:
+ # mlog.debug(' - {}'.format(j))
+ break
libs = sorted(set(libs))
modules = ['boost_' + x for x in self.modules]
@@ -422,9 +492,6 @@ class BoostDependency(ExternalDependency):
mlog.debug(' - found boost {} include dir: {}'.format(inc.version, inc.path))
f_libs = self.filter_libraries(libs, inc.version_lib)
- # mlog.debug(' - raw library list:')
- # for j in libs:
- # mlog.debug(' - {}'.format(j))
mlog.debug(' - filtered library list:')
for j in f_libs:
mlog.debug(' - {}'.format(j))
@@ -499,6 +566,19 @@ class BoostDependency(ExternalDependency):
return [self._include_dir_from_version_header(x) for x in candidates]
def detect_lib_dirs(self, root: Path) -> T.List[Path]:
+ # First check the system include paths. Only consider those within the
+ # given root path
+ system_dirs_t = self.clib_compiler.get_library_dirs(self.env)
+ system_dirs = [Path(x) for x in system_dirs_t]
+ system_dirs = [x.resolve() for x in system_dirs if x.exists()]
+ system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)]
+ system_dirs = list(mesonlib.OrderedSet(system_dirs))
+
+ if system_dirs:
+ return system_dirs
+
+ # No system include paths were found --> fall back to manually looking
+ # for library dirs in root
dirs = [] # type: T.List[Path]
subdirs = [] # type: T.List[Path]
for i in root.iterdir():
@@ -510,7 +590,25 @@ class BoostDependency(ExternalDependency):
for j in i.iterdir():
if j.is_dir() and j.name.endswith('-linux-gnu'):
subdirs += [j]
- return dirs + subdirs
+
+ # Filter out paths that don't match the target arch to avoid finding
+ # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
+ if not self.arch:
+ return dirs + subdirs
+
+ arch_list_32 = ['32', 'i386']
+ arch_list_64 = ['64']
+
+ raw_list = dirs + subdirs
+ no_arch = [x for x in raw_list if not any([y in x.name for y in arch_list_32 + arch_list_64])]
+
+ matching_arch = [] # type: T.List[Path]
+ if '32' in self.arch:
+ matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_32])]
+ elif '64' in self.arch:
+ matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_64])]
+
+ return sorted(matching_arch) + sorted(no_arch)
def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
# MSVC is very picky with the library tags
@@ -522,7 +620,13 @@ class BoostDependency(ExternalDependency):
except (KeyError, IndexError, AttributeError):
pass
- libs = [x for x in libs if x.static == self.static]
+ # mlog.debug(' - static: {}'.format(self.static))
+ # mlog.debug(' - not explicit static: {}'.format(not self.explicit_static))
+ # mlog.debug(' - mt: {}'.format(self.multithreading))
+ # mlog.debug(' - version: {}'.format(lib_vers))
+ # mlog.debug(' - arch: {}'.format(self.arch))
+ # mlog.debug(' - vscrt: {}'.format(vscrt))
+ libs = [x for x in libs if x.static == self.static or not self.explicit_static]
libs = [x for x in libs if x.mt == self.multithreading]
libs = [x for x in libs if x.version_matches(lib_vers)]
libs = [x for x in libs if x.arch_matches(self.arch)]
@@ -554,18 +658,37 @@ class BoostDependency(ExternalDependency):
libs += [BoostLibraryFile(i)]
return [x for x in libs if x.is_boost()] # Filter out no boost libraries
- def detect_roots(self) -> T.List[Path]:
+ def detect_split_root(self, inc_dir, lib_dir) -> None:
+ boost_inc_dir = None
+ for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+ if j.is_file():
+ boost_inc_dir = self._include_dir_from_version_header(j)
+ break
+ if not boost_inc_dir:
+ self.is_found = False
+ return
+
+ self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+
+ def detect_roots(self) -> None:
roots = [] # type: T.List[Path]
- # Add roots from the environment
- for i in ['BOOST_ROOT', 'BOOSTROOT']:
- if i in os.environ:
- raw_paths = os.environ[i].split(os.pathsep)
- paths = [Path(x) for x in raw_paths]
- if paths and any([not x.is_absolute() for x in paths]):
- raise DependencyException('Paths in {} must be absolute'.format(i))
- roots += paths
- return roots # Do not add system paths if BOOST_ROOT is present
+ # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
+ # allows BoostDependency to find boost from Conan. See #5438
+ try:
+ boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
+ if boost_pc.found():
+ boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None})
+ if boost_root:
+ roots += [Path(boost_root)]
+ except DependencyException:
+ pass
+
+ # Add roots from system paths
+ inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
+ inc_paths = [x.parent for x in inc_paths if x.exists()]
+ inc_paths = [x.resolve() for x in inc_paths]
+ roots += inc_paths
# Add system paths
if self.env.machines[self.for_machine].is_windows():
@@ -588,8 +711,6 @@ class BoostDependency(ExternalDependency):
roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
else:
tmp = [] # type: T.List[Path]
- # Add unix paths
- tmp += [Path(x).parent for x in self.clib_compiler.get_default_include_dirs()]
# Homebrew
brew_boost = Path('/usr/local/Cellar/boost')
@@ -607,7 +728,7 @@ class BoostDependency(ExternalDependency):
tmp = [x.resolve() for x in tmp]
roots += tmp
- return roots
+ self.check_and_set_roots(roots)
def log_details(self) -> str:
res = ''
@@ -637,11 +758,8 @@ class BoostDependency(ExternalDependency):
return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
def _extra_compile_args(self) -> T.List[str]:
- args = [] # type: T.List[str]
- args += ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
- if not self.static:
- args += ['-DBOOST_ALL_DYN_LINK']
- return args
+ # BOOST_ALL_DYN_LINK should not be required with the known defines below
+ return ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
@@ -665,9 +783,9 @@ boost_arch_map = {
#### ---- BEGIN GENERATED ---- ####
# #
# Generated with tools/boost_names.py:
-# - boost version: 1.72.0
-# - modules found: 158
-# - libraries found: 42
+# - boost version: 1.73.0
+# - modules found: 159
+# - libraries found: 43
#
class BoostLibrary():
@@ -690,16 +808,16 @@ class BoostModule():
boost_libraries = {
'boost_atomic': BoostLibrary(
name='boost_atomic',
- shared=[],
- static=[],
+ shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
+ static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_chrono': BoostLibrary(
name='boost_chrono',
- shared=['-DBOOST_ALL_DYN_LINK=1'],
- static=['-DBOOST_All_STATIC_LINK=1'],
- single=[],
+ shared=['-DBOOST_CHRONO_DYN_LINK=1'],
+ static=['-DBOOST_CHRONO_STATIC_LINK=1'],
+ single=['-DBOOST_CHRONO_THREAD_DISABLED'],
multi=[],
),
'boost_container': BoostLibrary(
@@ -711,28 +829,28 @@ boost_libraries = {
),
'boost_context': BoostLibrary(
name='boost_context',
- shared=[],
+ shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_contract': BoostLibrary(
name='boost_contract',
- shared=[],
- static=[],
- single=[],
+ shared=['-DBOOST_CONTRACT_DYN_LINK'],
+ static=['-DBOOST_CONTRACT_STATIC_LINK'],
+ single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
multi=[],
),
'boost_coroutine': BoostLibrary(
name='boost_coroutine',
- shared=[],
+ shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_date_time': BoostLibrary(
name='boost_date_time',
- shared=[],
+ shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
static=[],
single=[],
multi=[],
@@ -746,14 +864,14 @@ boost_libraries = {
),
'boost_fiber': BoostLibrary(
name='boost_fiber',
- shared=[],
+ shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_fiber_numa': BoostLibrary(
name='boost_fiber_numa',
- shared=[],
+ shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
@@ -767,84 +885,91 @@ boost_libraries = {
),
'boost_graph': BoostLibrary(
name='boost_graph',
- shared=['-DBOOST_GRAPH_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_iostreams': BoostLibrary(
name='boost_iostreams',
- shared=['-DBOOST_IOSTREAMS_DYN_LINK=1', '-DBOOST_IOSTREAMS_DYN_LINK=1'],
+ shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_locale': BoostLibrary(
name='boost_locale',
- shared=['-DBOOST_LOCALE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_log': BoostLibrary(
name='boost_log',
- shared=['-DBOOST_LOG_DLL', '-DBOOST_LOG_DYN_LINK=1'],
+ shared=['-DBOOST_LOG_DYN_LINK=1'],
static=[],
- single=['BOOST_LOG_NO_THREADS'],
+ single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_log_setup': BoostLibrary(
name='boost_log_setup',
- shared=['-DBOOST_LOG_DYN_LINK=1', '-DBOOST_LOG_SETUP_DLL', '-DBOOST_LOG_SETUP_DYN_LINK=1'],
+ shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
static=[],
- single=['BOOST_LOG_NO_THREADS'],
+ single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_math_c99': BoostLibrary(
name='boost_math_c99',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99f': BoostLibrary(
name='boost_math_c99f',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99l': BoostLibrary(
name='boost_math_c99l',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1': BoostLibrary(
name='boost_math_tr1',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1f': BoostLibrary(
name='boost_math_tr1f',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1l': BoostLibrary(
name='boost_math_tr1l',
- shared=['-DBOOST_MATH_TR1_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_mpi': BoostLibrary(
name='boost_mpi',
- shared=['-DBOOST_MPI_DYN_LINK=1'],
+ shared=[],
+ static=[],
+ single=[],
+ multi=[],
+ ),
+ 'boost_nowide': BoostLibrary(
+ name='boost_nowide',
+ shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
static=[],
single=[],
multi=[],
@@ -865,63 +990,63 @@ boost_libraries = {
),
'boost_random': BoostLibrary(
name='boost_random',
- shared=[],
+ shared=['-DBOOST_RANDOM_DYN_LINK'],
static=[],
single=[],
multi=[],
),
'boost_regex': BoostLibrary(
name='boost_regex',
- shared=['-DBOOST_REGEX_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_serialization': BoostLibrary(
name='boost_serialization',
- shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_addr2line': BoostLibrary(
name='boost_stacktrace_addr2line',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_backtrace': BoostLibrary(
name='boost_stacktrace_backtrace',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_basic': BoostLibrary(
name='boost_stacktrace_basic',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_noop': BoostLibrary(
name='boost_stacktrace_noop',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg': BoostLibrary(
name='boost_stacktrace_windbg',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg_cached': BoostLibrary(
name='boost_stacktrace_windbg_cached',
- shared=['-DBOOST_STACKTRACE_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
@@ -942,8 +1067,8 @@ boost_libraries = {
),
'boost_thread': BoostLibrary(
name='boost_thread',
- shared=['-DBOOST_THREAD_USE_DLL=1'],
- static=['-DBOOST_THREAD_USE_LIB=1'],
+ shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
+ static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
single=[],
multi=[],
),
@@ -956,7 +1081,7 @@ boost_libraries = {
),
'boost_type_erasure': BoostLibrary(
name='boost_type_erasure',
- shared=[],
+ shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
static=[],
single=[],
multi=[],
@@ -977,7 +1102,7 @@ boost_libraries = {
),
'boost_wserialization': BoostLibrary(
name='boost_wserialization',
- shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'],
+ shared=[],
static=[],
single=[],
multi=[],
diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py
index 063fa6d..d197f8c 100644
--- a/mesonbuild/dependencies/cuda.py
+++ b/mesonbuild/dependencies/cuda.py
@@ -158,11 +158,15 @@ class CudaDependency(ExternalDependency):
mlog.debug('Falling back to extracting version from path')
path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex
- m = path_version_regex.match(os.path.basename(path))
- if m:
- return m[1]
+ try:
+ m = path_version_regex.match(os.path.basename(path))
+ if m:
+ return m.group(1)
+ else:
+ mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path))
+ except Exception as e:
+ mlog.warning('Could not detect CUDA Toolkit version for {}: {}'.format(path, str(e)))
- mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path))
return '0.0'
def _read_toolkit_version_txt(self, path):
@@ -173,7 +177,7 @@ class CudaDependency(ExternalDependency):
version_str = version_file.readline() # e.g. 'CUDA Version 10.1.168'
m = self.toolkit_version_regex.match(version_str)
if m:
- return self._strip_patch_version(m[1])
+ return self._strip_patch_version(m.group(1))
except Exception as e:
mlog.debug('Could not read CUDA Toolkit\'s version file {}: {}'.format(version_file_path, str(e)))
@@ -193,7 +197,7 @@ class CudaDependency(ExternalDependency):
raise DependencyException(msg.format(arch, 'Windows'))
return os.path.join('lib', libdirs[arch])
elif machine.is_linux():
- libdirs = {'x86_64': 'lib64', 'ppc64': 'lib'}
+ libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64'}
if arch not in libdirs:
raise DependencyException(msg.format(arch, 'Linux'))
return libdirs[arch]
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index de05a79..f19566b 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -97,7 +97,8 @@ class OpenMPDependency(ExternalDependency):
for name in header_names:
if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]:
self.is_found = True
- self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+ self.compile_args = self.clib_compiler.openmp_flags()
+ self.link_args = self.clib_compiler.openmp_link_flags()
break
if not self.is_found:
mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
@@ -271,8 +272,10 @@ class PcapDependencyConfigTool(ConfigToolDependency):
tools = ['pcap-config']
tool_name = 'pcap-config'
- @staticmethod
- def finish_init(self) -> None:
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
self.link_args = self.get_config_value(['--libs'], 'link_args')
self.version = self.get_pcap_lib_version()
@@ -284,6 +287,7 @@ class PcapDependencyConfigTool(ConfigToolDependency):
def get_pcap_lib_version(self):
# Since we seem to need to run a program to discover the pcap version,
# we can't do that when cross-compiling
+ # FIXME: this should be handled if we have an exe_wrapper
if not self.env.machines.matches_build_machine(self.for_machine):
return None
@@ -299,10 +303,12 @@ class CupsDependencyConfigTool(ConfigToolDependency):
tools = ['cups-config']
tool_name = 'cups-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--ldflags', '--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')
@staticmethod
def get_methods():
@@ -317,10 +323,12 @@ class LibWmfDependencyConfigTool(ConfigToolDependency):
tools = ['libwmf-config']
tool_name = 'libwmf-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
@staticmethod
def get_methods():
@@ -332,11 +340,13 @@ class LibGCryptDependencyConfigTool(ConfigToolDependency):
tools = ['libgcrypt-config']
tool_name = 'libgcrypt-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
- ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
@staticmethod
def get_methods():
@@ -348,11 +358,13 @@ class GpgmeDependencyConfigTool(ConfigToolDependency):
tools = ['gpgme-config']
tool_name = 'gpg-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
- ctdep.version = ctdep.get_config_value(['--version'], 'version')[0]
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
+ self.version = self.get_config_value(['--version'], 'version')[0]
@staticmethod
def get_methods():
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 6e54e8e..fc0824c 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -31,9 +31,11 @@ from .base import DependencyException, DependencyMethods
from .base import ExternalDependency, NonExistingExternalProgram
from .base import ExtraFrameworkDependency, PkgConfigDependency
from .base import ConfigToolDependency, DependencyFactory
+from .base import find_external_program
if T.TYPE_CHECKING:
from ..environment import Environment
+ from .base import ExternalProgram
class GLDependencySystem(ExternalDependency):
@@ -227,11 +229,14 @@ class QtBaseDependency(ExternalDependency):
bins = ['moc', 'uic', 'rcc', 'lrelease']
found = {b: NonExistingExternalProgram(name='{}-{}'.format(b, self.name))
for b in bins}
+ wanted = '== {}'.format(self.version)
def gen_bins():
for b in bins:
if self.bindir:
yield os.path.join(self.bindir, b), b, False
+ # prefer the <tool>-qt<version> of the tool to the plain one, as we
+ # don't know what the unsuffixed one points to without calling it.
yield '{}-{}'.format(b, self.name), b, False
yield b, b, self.required if b != 'lrelease' else False
@@ -239,12 +244,6 @@ class QtBaseDependency(ExternalDependency):
if found[name].found():
continue
- # prefer the <tool>-qt<version> of the tool to the plain one, as we
- # don't know what the unsuffixed one points to without calling it.
- p = interp_obj.find_program_impl([b], silent=True, required=required).held_object
- if not p.found():
- continue
-
if name == 'lrelease':
arg = ['-version']
elif mesonlib.version_compare(self.version, '>= 5'):
@@ -253,12 +252,18 @@ class QtBaseDependency(ExternalDependency):
arg = ['-v']
# Ensure that the version of qt and each tool are the same
- _, out, err = mesonlib.Popen_safe(p.get_command() + arg)
- if b.startswith('lrelease') or not self.version.startswith('4'):
- care = out
- else:
- care = err
- if mesonlib.version_compare(self.version, '== {}'.format(care.split(' ')[-1])):
+ def get_version(p):
+ _, out, err = mesonlib.Popen_safe(p.get_command() + arg)
+ if b.startswith('lrelease') or not self.version.startswith('4'):
+ care = out
+ else:
+ care = err
+ return care.split(' ')[-1].replace(')', '')
+
+ p = interp_obj.find_program_impl([b], required=required,
+ version_func=get_version,
+ wanted=wanted).held_object
+ if p.found():
found[name] = p
return tuple([found[b] for b in bins])
@@ -324,10 +329,9 @@ class QtBaseDependency(ExternalDependency):
if prefix:
self.bindir = os.path.join(prefix, 'bin')
- def search_qmake(self):
+ def search_qmake(self) -> T.Generator['ExternalProgram', None, None]:
for qmake in ('qmake-' + self.name, 'qmake'):
- for potential_qmake in self.search_tool(qmake, 'QMake', [qmake]):
- yield potential_qmake
+ yield from find_external_program(self.env, self.for_machine, qmake, 'QMake', [qmake])
def _qmake_detect(self, mods, kwargs):
for qmake in self.search_qmake():
@@ -406,6 +410,9 @@ class QtBaseDependency(ExternalDependency):
if libfile:
libfile = libfile[0]
else:
+ mlog.log("Could not find:", module,
+ self.qtpkgname + module + modules_lib_suffix,
+ 'in', libdir)
self.is_found = False
break
self.link_args.append(libfile)
@@ -426,6 +433,20 @@ class QtBaseDependency(ExternalDependency):
if self.env.machines[self.for_machine].is_darwin():
if is_debug:
suffix += '_debug'
+ if mesonlib.version_compare(self.version, '>= 5.14.0'):
+ if self.env.machines[self.for_machine].is_android():
+ cpu_family = self.env.machines[self.for_machine].cpu_family
+ if cpu_family == 'x86':
+ suffix += '_x86'
+ elif cpu_family == 'x86_64':
+ suffix += '_x86_64'
+ elif cpu_family == 'arm':
+ suffix += '_armeabi-v7a'
+ elif cpu_family == 'aarch64':
+ suffix += '_arm64-v8a'
+ else:
+ mlog.warning('Android target arch {!r} for Qt5 is unknown, '
+ 'module detection may not work'.format(cpu_family))
return suffix
def _link_with_qtmain(self, is_debug, libdir):
@@ -528,10 +549,12 @@ class SDL2DependencyConfigTool(ConfigToolDependency):
tools = ['sdl2-config']
tool_name = 'sdl2-config'
- @staticmethod
- def finish_init(ctdep):
- ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args')
- ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args')
+ def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+ super().__init__(name, environment, kwargs)
+ if not self.is_found:
+ return
+ self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+ self.link_args = self.get_config_value(['--libs'], 'link_args')
@staticmethod
def get_methods():
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
index 327d3fa..7d87fb7 100644
--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import configparser, os, subprocess
+import os, subprocess
import typing as T
from . import mesonlib
@@ -40,7 +40,9 @@ known_cpu_families = (
'alpha',
'arc',
'arm',
+ 'avr',
'c2000',
+ 'dspic',
'e2k',
'ia64',
'm68k',
@@ -48,6 +50,7 @@ known_cpu_families = (
'mips',
'mips64',
'parisc',
+ 'pic24',
'ppc',
'ppc64',
'riscv32',
@@ -56,14 +59,13 @@ known_cpu_families = (
'rx',
's390',
's390x',
+ 'sh4',
'sparc',
'sparc64',
- 'pic24',
- 'dspic',
'wasm32',
'wasm64',
'x86',
- 'x86_64'
+ 'x86_64',
)
# It would feel more natural to call this "64_BIT_CPU_FAMILES", but
@@ -81,33 +83,6 @@ CPU_FAMILES_64_BIT = [
'x86_64',
]
-class MesonConfigFile:
- @classmethod
- def from_config_parser(cls, parser: configparser.ConfigParser) -> T.Dict[str, T.Dict[str, T.Dict[str, str]]]:
- out = {}
- # This is a bit hackish at the moment.
- for s in parser.sections():
- section = {}
- for entry in parser[s]:
- value = parser[s][entry]
- # Windows paths...
- value = value.replace('\\', '\\\\')
- if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
- raise EnvironmentException('Malformed variable name {} in cross file..'.format(entry))
- try:
- res = eval(value, {'__builtins__': None}, {'true': True, 'false': False})
- except Exception:
- raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry))
-
- for i in (res if isinstance(res, list) else [res]):
- if not isinstance(i, (str, int, bool)):
- raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry))
-
- section[entry] = res
-
- out[s] = section
- return out
-
def get_env_var_pair(for_machine: MachineChoice,
is_cross: bool,
var_name: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
@@ -121,7 +96,7 @@ def get_env_var_pair(for_machine: MachineChoice,
# ones.
([var_name + '_FOR_BUILD'] if is_cross else [var_name]),
# Always just the unprefixed host verions
- ([] if is_cross else [var_name]),
+ [var_name]
)[for_machine]
for var in candidates:
value = os.environ.get(var)
@@ -298,6 +273,10 @@ class MachineInfo:
"""
return self.system == 'gnu'
+ def is_irix(self) -> bool:
+ """Machine is IRIX?"""
+ return self.system.startswith('irix')
+
# Various prefixes and suffixes for import libraries, shared libraries,
# static libraries, and executables.
# Versioning is added to these names in the backends as-needed.
@@ -428,43 +407,3 @@ class BinaryTable:
if command is not None and (len(command) == 0 or len(command[0].strip()) == 0):
command = None
return command
-
-class Directories:
-
- """Data class that holds information about directories for native and cross
- builds.
- """
-
- def __init__(self, bindir: T.Optional[str] = None, datadir: T.Optional[str] = None,
- includedir: T.Optional[str] = None, infodir: T.Optional[str] = None,
- libdir: T.Optional[str] = None, libexecdir: T.Optional[str] = None,
- localedir: T.Optional[str] = None, localstatedir: T.Optional[str] = None,
- mandir: T.Optional[str] = None, prefix: T.Optional[str] = None,
- sbindir: T.Optional[str] = None, sharedstatedir: T.Optional[str] = None,
- sysconfdir: T.Optional[str] = None):
- self.bindir = bindir
- self.datadir = datadir
- self.includedir = includedir
- self.infodir = infodir
- self.libdir = libdir
- self.libexecdir = libexecdir
- self.localedir = localedir
- self.localstatedir = localstatedir
- self.mandir = mandir
- self.prefix = prefix
- self.sbindir = sbindir
- self.sharedstatedir = sharedstatedir
- self.sysconfdir = sysconfdir
-
- def __contains__(self, key: str) -> bool:
- return hasattr(self, key)
-
- def __getitem__(self, key: str) -> T.Optional[str]:
- # Mypy can't figure out what to do with getattr here, so we'll case for it
- return T.cast(T.Optional[str], getattr(self, key))
-
- def __setitem__(self, key: str, value: T.Optional[str]) -> None:
- setattr(self, key, value)
-
- def __iter__(self) -> T.Iterator[T.Tuple[str, str]]:
- return iter(self.__dict__.items())
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 0e3ae8c..7cb7286 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -16,6 +16,7 @@ import os, platform, re, sys, shutil, subprocess
import tempfile
import shlex
import typing as T
+import collections
from . import coredata
from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker, Xc16Linker, C2000Linker, IntelVisualStudioLinker
@@ -27,12 +28,14 @@ from .mesonlib import (
from . import mlog
from .envconfig import (
- BinaryTable, Directories, MachineInfo, MesonConfigFile,
- Properties, known_cpu_families,
+ BinaryTable, MachineInfo,
+ Properties, known_cpu_families, get_env_var_pair,
)
from . import compilers
from .compilers import (
Compiler,
+ all_languages,
+ base_options,
is_assembly,
is_header,
is_library,
@@ -52,6 +55,7 @@ from .linkers import (
GnuBFDDynamicLinker,
GnuGoldDynamicLinker,
LLVMDynamicLinker,
+ QualcommLLVMDynamicLinker,
MSVCDynamicLinker,
OptlinkDynamicLinker,
PGIDynamicLinker,
@@ -134,9 +138,18 @@ def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version)
return None, None
+def detect_llvm_cov():
+ tools = get_llvm_tool_names('llvm-cov')
+ for tool in tools:
+ if mesonlib.exe_exists([tool, '--version']):
+ return tool
+ return None
+
def find_coverage_tools():
gcovr_exe, gcovr_new_rootdir = detect_gcovr()
+ llvm_cov_exe = detect_llvm_cov()
+
lcov_exe = 'lcov'
genhtml_exe = 'genhtml'
@@ -145,7 +158,7 @@ def find_coverage_tools():
if not mesonlib.exe_exists([genhtml_exe, '--version']):
genhtml_exe = None
- return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe
+ return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe
def detect_ninja(version: str = '1.7', log: bool = False) -> str:
r = detect_ninja_command_and_version(version, log)
@@ -332,6 +345,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
trial = 'x86'
elif trial == 'bepc':
trial = 'x86'
+ elif trial == 'arm64':
+ trial = 'aarch64'
elif trial.startswith('arm') or trial.startswith('earm'):
trial = 'arm'
elif trial.startswith(('powerpc64', 'ppc64')):
@@ -344,6 +359,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str:
trial = 'sparc64'
elif trial in {'mipsel', 'mips64el'}:
trial = trial.rstrip('el')
+ elif trial in {'ip30', 'ip35'}:
+ trial = 'mips64'
# On Linux (and maybe others) there can be any mixture of 32/64 bit code in
# the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
@@ -438,9 +455,10 @@ def machine_info_can_run(machine_info: MachineInfo):
true_build_cpu_family = detect_cpu_family({})
return \
(machine_info.cpu_family == true_build_cpu_family) or \
- ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86'))
+ ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
+ ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))
-def search_version(text):
+def search_version(text: str) -> str:
# Usually of the type 4.1.4 but compiler output may contain
# stuff like this:
# (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
@@ -474,6 +492,13 @@ def search_version(text):
match = version_regex.search(text)
if match:
return match.group(0)
+
+ # try a simpler regex that has like "blah 2020.01.100 foo" or "blah 2020.01 foo"
+ version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})")
+ match = version_regex.search(text)
+ if match:
+ return match.group(0)
+
return 'unknown version'
class Environment:
@@ -527,10 +552,11 @@ class Environment:
# Misc other properties about each machine.
properties = PerMachineDefaultable()
- # Store paths for native and cross build files. There is no target
- # machine information here because nothing is installed for the target
- # architecture, just the build and host architectures
- paths = PerMachineDefaultable()
+ # We only need one of these as project options are not per machine
+ user_options = collections.defaultdict(dict) # type: T.DefaultDict[str, T.Dict[str, object]]
+
+ # meson builtin options, as passed through cross or native files
+ meson_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]]
## Setup build machine defaults
@@ -542,34 +568,169 @@ class Environment:
binaries.build = BinaryTable()
properties.build = Properties()
+ # meson base options
+ _base_options = {} # type: T.Dict[str, object]
+
+ # Per language compiler arguments
+ compiler_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]]
+ compiler_options.build = collections.defaultdict(dict)
+
## Read in native file(s) to override build machine configuration
+ def load_options(tag: str, store: T.Dict[str, T.Any]) -> None:
+ for section in config.keys():
+ if section.endswith(tag):
+ if ':' in section:
+ project = section.split(':')[0]
+ else:
+ project = ''
+ store[project].update(config.get(section, {}))
+
+ def split_base_options(mopts: T.DefaultDict[str, T.Dict[str, object]]) -> None:
+ for k, v in list(mopts.get('', {}).items()):
+ if k in base_options:
+ _base_options[k] = v
+ del mopts[k]
+
+ lang_prefixes = tuple('{}_'.format(l) for l in all_languages)
+ def split_compiler_options(mopts: T.DefaultDict[str, T.Dict[str, object]], machine: MachineChoice) -> None:
+ for k, v in list(mopts.get('', {}).items()):
+ if k.startswith(lang_prefixes):
+ lang, key = k.split('_', 1)
+ if compiler_options[machine] is None:
+ compiler_options[machine] = collections.defaultdict(dict)
+ if lang not in compiler_options[machine]:
+ compiler_options[machine][lang] = collections.defaultdict(dict)
+ compiler_options[machine][lang][key] = v
+ del mopts[''][k]
+
+ def move_compiler_options(properties: Properties, compopts: T.Dict[str, T.DefaultDict[str, object]]) -> None:
+ for k, v in properties.properties.copy().items():
+ for lang in all_languages:
+ if k == '{}_args'.format(lang):
+ if 'args' not in compopts[lang]:
+ compopts[lang]['args'] = v
+ else:
+ mlog.warning('Ignoring {}_args in [properties] section for those in the [built-in options]'.format(lang))
+ elif k == '{}_link_args'.format(lang):
+ if 'link_args' not in compopts[lang]:
+ compopts[lang]['link_args'] = v
+ else:
+ mlog.warning('Ignoring {}_link_args in [properties] section in favor of the [built-in options] section.')
+ else:
+ continue
+ mlog.deprecation('{} in the [properties] section of the machine file is deprecated, use the [built-in options] section.'.format(k))
+ del properties.properties[k]
+ break
+
if self.coredata.config_files is not None:
- config = MesonConfigFile.from_config_parser(
- coredata.load_configs(self.coredata.config_files))
+ config = coredata.parse_machine_files(self.coredata.config_files)
binaries.build = BinaryTable(config.get('binaries', {}))
- paths.build = Directories(**config.get('paths', {}))
properties.build = Properties(config.get('properties', {}))
+ # Don't run this if there are any cross files, we don't want to use
+ # the native values if we're doing a cross build
+ if not self.coredata.cross_files:
+ load_options('project options', user_options)
+ meson_options.build = collections.defaultdict(dict)
+ if config.get('paths') is not None:
+ mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+ load_options('paths', meson_options.build)
+ load_options('built-in options', meson_options.build)
+ if not self.coredata.cross_files:
+ split_base_options(meson_options.build)
+ split_compiler_options(meson_options.build, MachineChoice.BUILD)
+ move_compiler_options(properties.build, compiler_options.build)
+
## Read in cross file(s) to override host machine configuration
if self.coredata.cross_files:
- config = MesonConfigFile.from_config_parser(
- coredata.load_configs(self.coredata.cross_files))
+ config = coredata.parse_machine_files(self.coredata.cross_files)
properties.host = Properties(config.get('properties', {}))
binaries.host = BinaryTable(config.get('binaries', {}))
if 'host_machine' in config:
machines.host = MachineInfo.from_literal(config['host_machine'])
if 'target_machine' in config:
machines.target = MachineInfo.from_literal(config['target_machine'])
- paths.host = Directories(**config.get('paths', {}))
+ load_options('project options', user_options)
+ meson_options.host = collections.defaultdict(dict)
+ compiler_options.host = collections.defaultdict(dict)
+ if config.get('paths') is not None:
+ mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+ load_options('paths', meson_options.host)
+ load_options('built-in options', meson_options.host)
+ split_base_options(meson_options.host)
+ split_compiler_options(meson_options.host, MachineChoice.HOST)
+ move_compiler_options(properties.host, compiler_options.host)
## "freeze" now initialized configuration, and "save" to the class.
self.machines = machines.default_missing()
self.binaries = binaries.default_missing()
self.properties = properties.default_missing()
- self.paths = paths.default_missing()
+ self.user_options = user_options
+ self.meson_options = meson_options.default_missing()
+ self.base_options = _base_options
+ self.compiler_options = compiler_options.default_missing()
+
+ # Some options default to environment variables if they are
+ # unset, set those now.
+
+ for for_machine in MachineChoice:
+ p_env_pair = get_env_var_pair(for_machine, self.coredata.is_cross_build(), 'PKG_CONFIG_PATH')
+ if p_env_pair is not None:
+ p_env_var, p_env = p_env_pair
+
+ # PKG_CONFIG_PATH may contain duplicates, which must be
+ # removed, else a duplicates-in-array-option warning arises.
+ p_list = list(mesonlib.OrderedSet(p_env.split(':')))
+
+ key = 'pkg_config_path'
+
+ if self.first_invocation:
+ # Environment variables override config
+ self.meson_options[for_machine][''][key] = p_list
+ elif self.meson_options[for_machine][''].get(key, []) != p_list:
+ mlog.warning(
+ p_env_var,
+ 'environment variable does not match configured',
+ 'between configurations, meson ignores this.',
+ 'Use -Dpkg_config_path to change pkg-config search',
+ 'path instead.'
+ )
+
+ # Read in command line and populate options
+ # TODO: validate all of this
+ all_builtins = set(coredata.builtin_options) | set(coredata.builtin_options_per_machine) | set(coredata.builtin_dir_noprefix_options)
+ for k, v in options.cmd_line_options.items():
+ try:
+ subproject, k = k.split(':')
+ except ValueError:
+ subproject = ''
+ if k in base_options:
+ self.base_options[k] = v
+ elif k.startswith(lang_prefixes):
+ lang, key = k.split('_', 1)
+ self.compiler_options.host[lang][key] = v
+ elif k in all_builtins or k.startswith('backend_'):
+ self.meson_options.host[subproject][k] = v
+ elif k.startswith('build.'):
+ k = k.lstrip('build.')
+ if k in coredata.builtin_options_per_machine:
+ if self.meson_options.build is None:
+ self.meson_options.build = collections.defaultdict(dict)
+ self.meson_options.build[subproject][k] = v
+ else:
+ assert not k.startswith('build.')
+ self.user_options[subproject][k] = v
+
+ # Warn if the user is using two different ways of setting build-type
+ # options that override each other
+ if meson_options.build and 'buildtype' in meson_options.build[''] and \
+ ('optimization' in meson_options.build[''] or 'debug' in meson_options.build['']):
+ mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
+ 'Using both is redundant since they override each other. '
+ 'See: https://mesonbuild.com/Builtin-options.html#build-type-options')
exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
if exe_wrapper is not None:
@@ -578,8 +739,6 @@ class Environment:
else:
self.exe_wrapper = None
- self.cmd_line_options = options.cmd_line_options.copy()
-
# List of potential compilers.
if mesonlib.is_windows():
# Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere.
@@ -625,6 +784,7 @@ class Environment:
self.clang_static_linker = ['llvm-ar']
self.default_cmake = ['cmake']
self.default_pkgconfig = ['pkg-config']
+ self.wrap_resolver = None
def create_new_coredata(self, options):
# WARNING: Don't use any values from coredata in __init__. It gets
@@ -635,8 +795,8 @@ class Environment:
self.coredata.meson_command = mesonlib.meson_command
self.first_invocation = True
- def is_cross_build(self) -> bool:
- return self.coredata.is_cross_build()
+ def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+ return self.coredata.is_cross_build(when_building_for)
def dump_coredata(self):
return coredata.save(self.coredata, self.get_build_dir())
@@ -726,6 +886,28 @@ class Environment:
minor = defines.get('__LCC_MINOR__', '0')
return dot.join((generation, major, minor))
+ @staticmethod
+ def get_clang_compiler_defines(compiler):
+ """
+ Get the list of Clang pre-processor defines
+ """
+ args = compiler + ['-E', '-dM', '-']
+ p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE)
+ if p.returncode != 0:
+ raise EnvironmentException('Unable to get clang pre-processor defines:\n' + output + error)
+ defines = {}
+ for line in output.split('\n'):
+ if not line:
+ continue
+ d, *rest = line.split(' ', 2)
+ if d != '#define':
+ continue
+ if len(rest) == 1:
+ defines[rest] = True
+ if len(rest) == 2:
+ defines[rest[0]] = rest[1]
+ return defines
+
def _get_compilers(self, lang, for_machine):
'''
The list of compilers is detected in the exact same way for
@@ -847,10 +1029,13 @@ class Environment:
check_args += override
_, o, e = Popen_safe(compiler + check_args)
- v = search_version(o)
+ v = search_version(o + e)
if o.startswith('LLD'):
linker = LLVMDynamicLinker(
compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker
+ elif 'Snapdragon' in e and 'LLVM' in e:
+ linker = QualcommLLVMDynamicLinker(
+ compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker
elif e.startswith('lld-link: '):
# The LLD MinGW frontend didn't respond to --version before version 9.0.0,
# and produced an error message about failing to link (when no object
@@ -889,9 +1074,15 @@ class Environment:
cls = GnuBFDDynamicLinker
linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
elif 'Solaris' in e or 'Solaris' in o:
+ for line in (o+e).split('\n'):
+ if 'ld: Software Generation Utilities' in line:
+ v = line.split(':')[2].lstrip()
+ break
+ else:
+ v = 'unknown version'
linker = SolarisDynamicLinker(
compiler, for_machine, comp_class.LINKER_PREFIX, override,
- version=search_version(e))
+ version=v)
else:
raise EnvironmentException('Unable to determine dynamic linker')
return linker
@@ -899,7 +1090,7 @@ class Environment:
def _detect_c_or_cpp_compiler(self, lang: Language, for_machine: MachineChoice) -> Compiler:
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(lang, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
for compiler in compilers:
@@ -985,12 +1176,15 @@ class Environment:
if 'Emscripten' in out:
cls = EmscriptenCCompiler if lang == Language.C else EmscriptenCPPCompiler
self.coredata.add_lang_args(cls.language, cls, for_machine, self)
- # emcc cannot be queried to get the version out of it (it
- # ignores -Wl,--version and doesn't have an alternative).
- # Further, wasm-id *is* lld and will return `LLD X.Y.Z` if you
- # call `wasm-ld --version`, but a special version of lld that
- # takes different options.
- p, o, _ = Popen_safe(['wasm-ld', '--version'])
+
+ # emcc requires a file input in order to pass arguments to the
+ # linker. It'll exit with an error code, but still print the
+ # linker version. Old emcc versions ignore -Wl,--version completely,
+ # however. We'll report "unknown version" in that case.
+ with tempfile.NamedTemporaryFile(suffix='.c') as f:
+ cmd = compiler + [cls.LINKER_PREFIX + "--version", f.name]
+ _, o, _ = Popen_safe(cmd)
+
linker = WASMDynamicLinker(
compiler, for_machine, cls.LINKER_PREFIX,
[], version=search_version(o))
@@ -1037,9 +1231,11 @@ class Environment:
return cls(
compiler, version, for_machine, is_cross, info, exe_wrap,
target, linker=linker)
- if 'clang' in out:
+ if 'clang' in out or 'Clang' in out:
linker = None
+ defines = self.get_clang_compiler_defines(compiler)
+
# Even if the for_machine is darwin, we could be using vanilla
# clang.
if 'Apple' in out:
@@ -1060,7 +1256,7 @@ class Environment:
return cls(
ccache + compiler, version, for_machine, is_cross, info,
- exe_wrap, full_version=full_version, linker=linker)
+ exe_wrap, defines, full_version=full_version, linker=linker)
if 'Intel(R) C++ Intel(R)' in err:
version = search_version(err)
@@ -1149,7 +1345,7 @@ class Environment:
def detect_cuda_compiler(self, for_machine):
popen_exceptions = {}
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
compilers, ccache, exe_wrap = self._get_compilers(Language.CUDA, for_machine)
info = self.machines[for_machine]
for compiler in compilers:
@@ -1189,7 +1385,7 @@ class Environment:
def detect_fortran_compiler(self, for_machine: MachineChoice):
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(Language.FORTRAN, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
for compiler in compilers:
if isinstance(compiler, str):
@@ -1308,7 +1504,7 @@ class Environment:
def _detect_objc_or_objcpp_compiler(self, for_machine: MachineInfo, objc: bool) -> 'Compiler':
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(Language.OBJC if objc else Language.OBJCPP, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
for compiler in compilers:
@@ -1399,7 +1595,7 @@ class Environment:
def detect_vala_compiler(self, for_machine):
exelist = self.lookup_binary_entry(for_machine, Language.VALA)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
if exelist is None:
# TODO support fallback
@@ -1419,7 +1615,7 @@ class Environment:
def detect_rust_compiler(self, for_machine):
popen_exceptions = {}
compilers, ccache, exe_wrap = self._get_compilers(Language.RUST, for_machine)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
cc = self.detect_c_compiler(for_machine)
@@ -1510,7 +1706,7 @@ class Environment:
arch = 'x86_mscoff'
popen_exceptions = {}
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
results, ccache, exe_wrap = self._get_compilers(Language.D, for_machine)
for exelist in results:
# Search for a D compiler.
@@ -1601,7 +1797,7 @@ class Environment:
def detect_swift_compiler(self, for_machine):
exelist = self.lookup_binary_entry(for_machine, Language.SWIFT)
- is_cross = not self.machines.matches_build_machine(for_machine)
+ is_cross = self.is_cross_build(for_machine)
info = self.machines[for_machine]
if exelist is None:
# TODO support fallback
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index bd783c7..4d541bf 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -32,10 +32,11 @@ from .interpreterbase import InterpreterBase
from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening
from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound
-from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs
+from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs
from .interpreterbase import ObjectHolder
from .modules import ModuleReturnValue
from .cmake import CMakeInterpreter
+from .backend.backends import TestProtocol
from pathlib import Path, PurePath
import os
@@ -43,6 +44,7 @@ import shutil
import uuid
import re
import shlex
+import stat
import subprocess
import collections
import functools
@@ -512,11 +514,14 @@ class DependencyHolder(InterpreterObject, ObjectHolder):
return DependencyHolder(new_dep, self.subproject)
class ExternalProgramHolder(InterpreterObject, ObjectHolder):
- def __init__(self, ep):
+ def __init__(self, ep, subproject, backend=None):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, ep)
+ self.subproject = subproject
+ self.backend = backend
self.methods.update({'found': self.found_method,
- 'path': self.path_method})
+ 'path': self.path_method,
+ 'full_path': self.full_path_method})
self.cached_version = None
@noPosargs
@@ -526,8 +531,22 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder):
@noPosargs
@permittedKwargs({})
+ @FeatureDeprecated('ExternalProgram.path', '0.55.0',
+ 'use ExternalProgram.full_path() instead')
def path_method(self, args, kwargs):
- return self.held_object.get_path()
+ return self._full_path()
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('ExternalProgram.full_path', '0.55.0')
+ def full_path_method(self, args, kwargs):
+ return self._full_path()
+
+ def _full_path(self):
+ exe = self.held_object
+ if isinstance(exe, build.Executable):
+ return self.backend.get_target_filename_abs(exe)
+ return exe.get_path()
def found(self):
return isinstance(self.held_object, build.Executable) or self.held_object.found()
@@ -536,9 +555,14 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder):
return self.held_object.get_command()
def get_name(self):
- return self.held_object.get_name()
+ exe = self.held_object
+ if isinstance(exe, build.Executable):
+ return exe.name
+ return exe.get_name()
def get_version(self, interpreter):
+ if isinstance(self.held_object, build.Executable):
+ return self.held_object.project_version
if not self.cached_version:
raw_cmd = self.get_command() + ['--version']
cmd = [self, '--version']
@@ -961,7 +985,7 @@ class Test(InterpreterObject):
self.should_fail = should_fail
self.timeout = timeout
self.workdir = workdir
- self.protocol = protocol
+ self.protocol = TestProtocol.from_str(protocol)
self.priority = priority
def get_exe(self):
@@ -1783,6 +1807,11 @@ class ModuleHolder(InterpreterObject, ObjectHolder):
target_machine=self.interpreter.builtin['target_machine'].held_object,
current_node=self.current_node
)
+ # Many modules do for example self.interpreter.find_program_impl(),
+ # so we have to ensure they use the current interpreter and not the one
+ # that first imported that module, otherwise it will use outdated
+ # overrides.
+ self.held_object.interpreter = self.interpreter
if self.held_object.is_snippet(method_name):
value = fn(self.interpreter, state, args, kwargs)
return self.interpreter.holderify(value)
@@ -1818,10 +1847,18 @@ class Summary:
if bool_yn and isinstance(i, bool):
formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
else:
- formatted_values.append(i)
+ formatted_values.append(str(i))
self.sections[section][k] = (formatted_values, list_sep)
self.max_key_len = max(self.max_key_len, len(k))
+ def text_len(self, v):
+ if isinstance(v, str):
+ return len(v)
+ elif isinstance(v, mlog.AnsiDecorator):
+ return len(v.text)
+ else:
+ raise RuntimeError('Expecting only strings or AnsiDecorator')
+
def dump(self):
mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
for section, values in self.sections.items():
@@ -1833,12 +1870,28 @@ class Summary:
indent = self.max_key_len - len(k) + 3
end = ' ' if v else ''
mlog.log(' ' * indent, k + ':', end=end)
- if list_sep is None:
- indent = self.max_key_len + 6
- list_sep = '\n' + ' ' * indent
- mlog.log(*v, sep=list_sep)
+ indent = self.max_key_len + 6
+ self.dump_value(v, list_sep, indent)
mlog.log('') # newline
+ def dump_value(self, arr, list_sep, indent):
+ lines_sep = '\n' + ' ' * indent
+ if list_sep is None:
+ mlog.log(*arr, sep=lines_sep)
+ return
+ max_len = shutil.get_terminal_size().columns
+ line = []
+ line_len = indent
+ lines_sep = list_sep.rstrip() + lines_sep
+ for v in arr:
+ v_len = self.text_len(v) + len(list_sep)
+ if line and line_len + v_len > max_len:
+ mlog.log(*line, sep=list_sep, end=lines_sep)
+ line_len = indent
+ line = []
+ line.append(v)
+ line_len += v_len
+ mlog.log(*line, sep=list_sep)
class MesonMain(InterpreterObject):
def __init__(self, build, interpreter):
@@ -1849,6 +1902,7 @@ class MesonMain(InterpreterObject):
self.methods.update({'get_compiler': self.get_compiler_method,
'is_cross_build': self.is_cross_build_method,
'has_exe_wrapper': self.has_exe_wrapper_method,
+ 'can_run_host_binaries': self.can_run_host_binaries_method,
'is_unity': self.is_unity_method,
'is_subproject': self.is_subproject_method,
'current_source_dir': self.current_source_dir_method,
@@ -1870,48 +1924,101 @@ class MesonMain(InterpreterObject):
'backend': self.backend_method,
})
- def _find_source_script(self, name, args):
+ def _find_source_script(self, prog: T.Union[str, ExecutableHolder], args):
+ if isinstance(prog, ExecutableHolder):
+ prog_path = self.interpreter.backend.get_target_filename(prog.held_object)
+ return build.RunScript([prog_path], args)
+ elif isinstance(prog, ExternalProgramHolder):
+ return build.RunScript(prog.get_command(), args)
+
# Prefer scripts in the current source directory
search_dir = os.path.join(self.interpreter.environment.source_dir,
self.interpreter.subdir)
- key = (name, search_dir)
+ key = (prog, search_dir)
if key in self._found_source_scripts:
found = self._found_source_scripts[key]
else:
- found = dependencies.ExternalProgram(name, search_dir=search_dir)
+ found = dependencies.ExternalProgram(prog, search_dir=search_dir)
if found.found():
self._found_source_scripts[key] = found
else:
m = 'Script or command {!r} not found or not executable'
- raise InterpreterException(m.format(name))
+ raise InterpreterException(m.format(prog))
return build.RunScript(found.get_command(), args)
- @permittedKwargs({})
- def add_install_script_method(self, args, kwargs):
+ def _process_script_args(
+ self, name: str, args: T.List[T.Union[
+ str, mesonlib.File, CustomTargetHolder,
+ CustomTargetIndexHolder, ConfigureFileHolder,
+ ExternalProgramHolder, ExecutableHolder,
+ ]], allow_built: bool = False) -> T.List[str]:
+ script_args = [] # T.List[str]
+ new = False
+ for a in args:
+ a = unholder(a)
+ if isinstance(a, str):
+ script_args.append(a)
+ elif isinstance(a, mesonlib.File):
+ new = True
+ script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
+ elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
+ if not allow_built:
+ raise InterpreterException('Arguments to {} cannot be built'.format(name))
+ new = True
+ script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
+
+ # This feels really hacky, but I'm not sure how else to fix
+ # this without completely rewriting install script handling.
+ # This is complicated by the fact that the install target
+ # depends on all.
+ if isinstance(a, build.CustomTargetIndex):
+ a.target.build_by_default = True
+ else:
+ a.build_by_default = True
+ elif isinstance(a, build.ConfigureFile):
+ new = True
+ script_args.append(os.path.join(a.subdir, a.targetname))
+ elif isinstance(a, dependencies.ExternalProgram):
+ script_args.extend(a.command)
+ new = True
+ else:
+ raise InterpreterException(
+ 'Arguments to {} must be strings, Files, CustomTargets, '
+ 'Indexes of CustomTargets, or ConfigureFiles'.format(name))
+ if new:
+ FeatureNew.single_use(
+ 'Calling "{}" with File, CustomTaget, Index of CustomTarget, '
+ 'ConfigureFile, Executable, or ExternalProgram'.format(name),
+ '0.55.0', self.interpreter.subproject)
+ return script_args
+
+ @permittedKwargs(set())
+ def add_install_script_method(self, args: 'T.Tuple[T.Union[str, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder, ConfigureFileHolder], ...]', kwargs):
if len(args) < 1:
raise InterpreterException('add_install_script takes one or more arguments')
- check_stringlist(args, 'add_install_script args must be strings')
- script = self._find_source_script(args[0], args[1:])
+ script_args = self._process_script_args('add_install_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
self.build.install_scripts.append(script)
- @permittedKwargs({})
+ @permittedKwargs(set())
def add_postconf_script_method(self, args, kwargs):
if len(args) < 1:
raise InterpreterException('add_postconf_script takes one or more arguments')
- check_stringlist(args, 'add_postconf_script arguments must be strings')
- script = self._find_source_script(args[0], args[1:])
+ script_args = self._process_script_args('add_postconf_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
self.build.postconf_scripts.append(script)
- @permittedKwargs({})
+ @permittedKwargs(set())
def add_dist_script_method(self, args, kwargs):
if len(args) < 1:
raise InterpreterException('add_dist_script takes one or more arguments')
if len(args) > 1:
- FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject)
- check_stringlist(args, 'add_dist_script argument must be a string')
+ FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
+ '0.49.0', self.interpreter.subproject)
if self.interpreter.subproject != '':
raise InterpreterException('add_dist_script may not be used in a subproject.')
- script = self._find_source_script(args[0], args[1:])
+ script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True)
+ script = self._find_source_script(args[0], script_args)
self.build.dist_scripts.append(script)
@noPosargs
@@ -1949,9 +2056,19 @@ class MesonMain(InterpreterObject):
@noPosargs
@permittedKwargs({})
- def has_exe_wrapper_method(self, args, kwargs):
- if self.is_cross_build_method(None, None) and \
- self.build.environment.need_exe_wrapper():
+ @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+ def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+ return self.can_run_host_binaries_impl(args, kwargs)
+
+ @noPosargs
+ @permittedKwargs({})
+ @FeatureNew('meson.can_run_host_binaries', '0.55.0')
+ def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool:
+ return self.can_run_host_binaries_impl(args, kwargs)
+
+ def can_run_host_binaries_impl(self, args, kwargs):
+ if (self.is_cross_build_method(None, None) and
+ self.build.environment.need_exe_wrapper()):
if self.build.environment.exe_wrapper is None:
return False
# We return True when exe_wrap is defined, when it's not needed, and
@@ -2355,7 +2472,7 @@ class Interpreter(InterpreterBase):
if isinstance(item, build.CustomTarget):
return CustomTargetHolder(item, self)
- elif isinstance(item, (int, str, bool, Disabler)) or item is None:
+ elif isinstance(item, (int, str, bool, Disabler, InterpreterObject)) or item is None:
return item
elif isinstance(item, build.Executable):
return ExecutableHolder(item, self)
@@ -2370,7 +2487,7 @@ class Interpreter(InterpreterBase):
elif isinstance(item, dependencies.Dependency):
return DependencyHolder(item, self.subproject)
elif isinstance(item, dependencies.ExternalProgram):
- return ExternalProgramHolder(item)
+ return ExternalProgramHolder(item, self.subproject)
elif hasattr(item, 'held_object'):
return item
else:
@@ -2393,7 +2510,7 @@ class Interpreter(InterpreterBase):
elif isinstance(v, build.Data):
self.build.data.append(v)
elif isinstance(v, dependencies.ExternalProgram):
- return ExternalProgramHolder(v)
+ return ExternalProgramHolder(v, self.subproject)
elif isinstance(v, dependencies.InternalDependency):
# FIXME: This is special cased and not ideal:
# The first source is our new VapiTarget, the rest are deps
@@ -2427,7 +2544,19 @@ class Interpreter(InterpreterBase):
elif os.path.isfile(f) and not f.startswith('/dev'):
srcdir = Path(self.environment.get_source_dir())
builddir = Path(self.environment.get_build_dir())
- f = Path(f).resolve()
+ try:
+ f = Path(f).resolve()
+ except OSError:
+ f = Path(f)
+ s = f.stat()
+ if (hasattr(s, 'st_file_attributes') and
+ s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+ # This is a Windows Store link which we can't
+ # resolve, so just do our best otherwise.
+ f = f.parent.resolve() / f.name
+ else:
+ raise
if builddir in f.parents:
return
if srcdir in f.parents:
@@ -2458,6 +2587,15 @@ class Interpreter(InterpreterBase):
except InvalidArguments:
pass
+ def import_module(self, modname):
+ if modname in self.modules:
+ return
+ try:
+ module = importlib.import_module('mesonbuild.modules.' + modname)
+ except ImportError:
+ raise InvalidArguments('Module "%s" does not exist' % (modname, ))
+ self.modules[modname] = module.initialize(self)
+
@stringArgs
@noKwargs
def func_import(self, node, args, kwargs):
@@ -2466,14 +2604,15 @@ class Interpreter(InterpreterBase):
modname = args[0]
if modname.startswith('unstable-'):
plainname = modname.split('-', 1)[1]
- mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
- modname = 'unstable_' + plainname
- if modname not in self.modules:
try:
- module = importlib.import_module('mesonbuild.modules.' + modname)
- except ImportError:
- raise InvalidArguments('Module "%s" does not exist' % (modname, ))
- self.modules[modname] = module.initialize(self)
+ # check if stable module exists
+ self.import_module(plainname)
+ mlog.warning('Module %s is now stable, please use the %s module instead.' % (modname, plainname))
+ modname = plainname
+ except InvalidArguments:
+ mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
+ modname = 'unstable_' + plainname
+ self.import_module(modname)
return ModuleHolder(modname, self.modules[modname], self)
@stringArgs
@@ -2524,7 +2663,7 @@ external dependencies (including libraries) must go to "dependencies".''')
@noKwargs
def func_assert(self, node, args, kwargs):
if len(args) == 1:
- FeatureNew('assert function without message argument', '0.53.0').use(self.subproject)
+ FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject)
value = args[0]
message = None
elif len(args) == 2:
@@ -2656,6 +2795,7 @@ external dependencies (including libraries) must go to "dependencies".''')
default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
default_options = coredata.create_options_dict(default_options)
+
if dirname == '':
raise InterpreterException('Subproject dir name must not be empty.')
if dirname[0] == '.':
@@ -2678,10 +2818,9 @@ external dependencies (including libraries) must go to "dependencies".''')
self.subproject_dir, dirname))
return subproject
- subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
- r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'))
+ r = self.environment.wrap_resolver
try:
- resolved = r.resolve(dirname, method)
+ resolved = r.resolve(dirname, method, self.subproject)
except wrap.WrapException as e:
subprojdir = os.path.join(self.subproject_dir, r.directory)
if isinstance(e, wrap.WrapNotFoundException):
@@ -2697,7 +2836,7 @@ external dependencies (including libraries) must go to "dependencies".''')
raise e
subdir = os.path.join(self.subproject_dir, resolved)
- subdir_abs = os.path.join(subproject_dir_abs, resolved)
+ subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
self.global_args_frozen = True
@@ -2766,13 +2905,21 @@ external dependencies (including libraries) must go to "dependencies".''')
with mlog.nested():
new_build = self.build.copy()
prefix = self.coredata.builtins['prefix'].value
+
+ from .modules.cmake import CMakeSubprojectOptions
+ options = kwargs.get('options', CMakeSubprojectOptions())
+ if not isinstance(options, CMakeSubprojectOptions):
+ raise InterpreterException('"options" kwarg must be CMakeSubprojectOptions'
+ ' object (created by cmake.subproject_options())')
+
cmake_options = mesonlib.stringlistify(kwargs.get('cmake_options', []))
+ cmake_options += options.cmake_options
cm_int = CMakeInterpreter(new_build, subdir, subdir_abs, prefix, new_build.environment, self.backend)
cm_int.initialise(cmake_options)
cm_int.analyse()
# Generate a meson ast and execute it with the normal do_subproject_meson
- ast = cm_int.pretend_to_be_meson()
+ ast = cm_int.pretend_to_be_meson(options.target_options)
mlog.log()
with mlog.nested():
@@ -2803,6 +2950,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if self.is_subproject():
optname = self.subproject + ':' + optname
+
for opts in [
self.coredata.base_options, compilers.base_options, self.coredata.builtins,
dict(self.coredata.get_prefixed_options_per_machine(self.coredata.builtins_per_machine)),
@@ -2859,7 +3007,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if len(args) > 1:
raise InterpreterException('configuration_data takes only one optional positional arguments')
elif len(args) == 1:
- FeatureNew('configuration_data dictionary', '0.49.0').use(self.subproject)
+ FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject)
initial_values = args[0]
if not isinstance(initial_values, dict):
raise InterpreterException('configuration_data first argument must be a dictionary')
@@ -2887,8 +3035,9 @@ external dependencies (including libraries) must go to "dependencies".''')
if self.environment.first_invocation:
self.coredata.init_backend_options(backend)
- options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
- self.coredata.set_options(options)
+ if '' in self.environment.meson_options.host:
+ options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')}
+ self.coredata.set_options(options)
@stringArgs
@permittedKwargs(permitted_kwargs['project'])
@@ -2899,11 +3048,14 @@ external dependencies (including libraries) must go to "dependencies".''')
if ':' in proj_name:
raise InvalidArguments("Project name {!r} must not contain ':'".format(proj_name))
+ # This needs to be evaluated as early as possible, as meson uses this
+ # for things like deprecation testing.
if 'meson_version' in kwargs:
cv = coredata.version
pv = kwargs['meson_version']
if not mesonlib.version_compare(cv, pv):
raise InterpreterException('Meson version is %s but project requires %s' % (cv, pv))
+ mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
if os.path.exists(self.option_file):
oi = optinterpreter.OptionInterpreter(self.subproject)
@@ -2918,7 +3070,7 @@ external dependencies (including libraries) must go to "dependencies".''')
self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
self.project_default_options = coredata.create_options_dict(self.project_default_options)
if self.environment.first_invocation:
- default_options = self.project_default_options
+ default_options = self.project_default_options.copy()
default_options.update(self.default_project_options)
self.coredata.init_builtins(self.subproject)
else:
@@ -2949,10 +3101,10 @@ external dependencies (including libraries) must go to "dependencies".''')
self.subproject_dir = spdirname
self.build.subproject_dir = self.subproject_dir
-
- mesonlib.project_meson_versions[self.subproject] = ''
- if 'meson_version' in kwargs:
- mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version']
+ if not self.is_subproject():
+ wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+ subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
+ self.environment.wrap_resolver = wrap.Resolver(subproject_dir_abs, wrap_mode)
self.build.projects[self.subproject] = proj_name
mlog.log('Project name:', mlog.bold(proj_name))
@@ -2976,8 +3128,11 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.add_languages(args, required, self.machine_from_native_kwarg(kwargs))
else:
# absent 'native' means 'both' for backwards compatibility
- mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
- location=self.current_node)
+ tv = FeatureNew.get_target_version(self.subproject)
+ if FeatureNew.check_version(tv, '0.54.0'):
+ mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
+ location=self.current_node)
+
success = self.add_languages(args, False, MachineChoice.BUILD)
success &= self.add_languages(args, required, MachineChoice.HOST)
return success
@@ -3000,7 +3155,7 @@ external dependencies (including libraries) must go to "dependencies".''')
@noKwargs
def func_message(self, node, args, kwargs):
if len(args) > 1:
- FeatureNew('message with more than one argument', '0.54.0').use(self.subproject)
+ FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject)
args_str = [self.get_message_string_arg(i) for i in args]
self.message_impl(args_str)
@@ -3062,7 +3217,7 @@ external dependencies (including libraries) must go to "dependencies".''')
@noKwargs
def func_warning(self, node, args, kwargs):
if len(args) > 1:
- FeatureNew('warning with more than one argument', '0.54.0').use(self.subproject)
+ FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject)
args_str = [self.get_message_string_arg(i) for i in args]
mlog.warning(*args_str, location=node)
@@ -3084,16 +3239,22 @@ external dependencies (including libraries) must go to "dependencies".''')
return success
def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
- if for_machine != MachineChoice.HOST:
- return False
- if not self.environment.is_cross_build():
- return False
should = self.environment.properties.host.get('skip_sanity_check', False)
if not isinstance(should, bool):
raise InterpreterException('Option skip_sanity_check must be a boolean.')
+ if for_machine != MachineChoice.HOST and not should:
+ return False
+ if not self.environment.is_cross_build() and not should:
+ return False
return should
def add_languages_for(self, args, required, for_machine: MachineChoice):
+ langs = set(self.coredata.compilers[for_machine].keys())
+ langs.update(args)
+ if 'vala' in langs:
+ if 'c' not in langs:
+ raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
+
success = True
for lang_str in sorted(args, key=compilers.sort_clink):
lang_str = lang_str.lower()
@@ -3134,14 +3295,9 @@ external dependencies (including libraries) must go to "dependencies".''')
mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
self.build.ensure_static_linker(comp)
- langs = self.coredata.compilers[for_machine].keys()
- if Language.VALA in langs:
- if Language.C not in langs:
- raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
-
return success
- def program_from_file_for(self, for_machine, prognames, silent):
+ def program_from_file_for(self, for_machine, prognames):
for p in unholder(prognames):
if isinstance(p, mesonlib.File):
continue # Always points to a local (i.e. self generated) file.
@@ -3149,7 +3305,7 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InterpreterException('Executable name must be a string')
prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
if prog.found():
- return ExternalProgramHolder(prog)
+ return ExternalProgramHolder(prog, self.subproject)
return None
def program_from_system(self, args, search_dirs, silent=False):
@@ -3176,20 +3332,18 @@ external dependencies (including libraries) must go to "dependencies".''')
extprog = dependencies.ExternalProgram(exename, search_dir=search_dir,
extra_search_dirs=extra_search_dirs,
silent=silent)
- progobj = ExternalProgramHolder(extprog)
+ progobj = ExternalProgramHolder(extprog, self.subproject)
if progobj.found():
return progobj
- def program_from_overrides(self, command_names, silent=False):
+ def program_from_overrides(self, command_names, extra_info):
for name in command_names:
if not isinstance(name, str):
continue
if name in self.build.find_overrides:
exe = self.build.find_overrides[name]
- if not silent:
- mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
- '(overridden: %s)' % exe.description())
- return ExternalProgramHolder(exe)
+ extra_info.append(mlog.blue('(overridden)'))
+ return ExternalProgramHolder(exe, self.subproject, self.backend)
return None
def store_name_lookups(self, command_names):
@@ -3206,40 +3360,79 @@ external dependencies (including libraries) must go to "dependencies".''')
% name)
self.build.find_overrides[name] = exe
+ def notfound_program(self, args):
+ return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject)
+
# TODO update modules to always pass `for_machine`. It is bad-form to assume
# the host machine.
def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST,
- required=True, silent=True, wanted='', search_dirs=None):
- if not isinstance(args, list):
- args = [args]
+ required=True, silent=True, wanted='', search_dirs=None,
+ version_func=None):
+ args = mesonlib.listify(args)
- progobj = self.program_from_overrides(args, silent=silent)
+ extra_info = []
+ progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info)
if progobj is None:
- progobj = self.program_from_file_for(for_machine, args, silent=silent)
- if progobj is None:
- progobj = self.program_from_system(args, search_dirs, silent=silent)
- if progobj is None and args[0].endswith('python3'):
- prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
- progobj = ExternalProgramHolder(prog)
- if required and (progobj is None or not progobj.found()):
- raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args))
- if progobj is None:
- return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)))
- # Only store successful lookups
- self.store_name_lookups(args)
+ progobj = self.notfound_program(args)
+
+ if not progobj.found():
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+ if required:
+ m = 'Program {!r} not found'
+ raise InterpreterException(m.format(progobj.get_name()))
+ return progobj
+
if wanted:
- version = progobj.get_version(self)
+ if version_func:
+ version = version_func(progobj)
+ else:
+ version = progobj.get_version(self)
is_found, not_found, found = mesonlib.version_compare_many(version, wanted)
if not is_found:
mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'),
- 'found {!r} but need:'.format(version),
- ', '.join(["'{}'".format(e) for e in not_found]))
+ 'found', mlog.normal_cyan(version), 'but need:',
+ mlog.bold(', '.join(["'{}'".format(e) for e in not_found])))
if required:
m = 'Invalid version of program, need {!r} {!r} found {!r}.'
- raise InvalidArguments(m.format(progobj.get_name(), not_found, version))
- return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)))
+ raise InterpreterException(m.format(progobj.get_name(), not_found, version))
+ return self.notfound_program(args)
+ extra_info.insert(0, mlog.normal_cyan(version))
+
+ # Only store successful lookups
+ self.store_name_lookups(args)
+ mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.green('YES'), *extra_info)
return progobj
+ def program_lookup(self, args, for_machine, required, search_dirs, extra_info):
+ progobj = self.program_from_overrides(args, extra_info)
+ if progobj:
+ return progobj
+
+ fallback = None
+ wrap_mode = self.coredata.get_builtin_option('wrap_mode')
+ if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
+ fallback = self.environment.wrap_resolver.find_program_provider(args)
+ if fallback and wrap_mode == WrapMode.forcefallback:
+ return self.find_program_fallback(fallback, args, required, extra_info)
+
+ progobj = self.program_from_file_for(for_machine, args)
+ if progobj is None:
+ progobj = self.program_from_system(args, search_dirs, silent=True)
+ if progobj is None and args[0].endswith('python3'):
+ prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True)
+ progobj = ExternalProgramHolder(prog, self.subproject) if prog.found() else None
+ if progobj is None and fallback and required:
+ progobj = self.find_program_fallback(fallback, args, required, extra_info)
+
+ return progobj
+
+ def find_program_fallback(self, fallback, args, required, extra_info):
+ mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
+ mlog.bold(' '.join(args)))
+ sp_kwargs = { 'required': required }
+ self.do_subproject(fallback, 'meson', sp_kwargs)
+ return self.program_from_overrides(args, extra_info)
+
@FeatureNewKwargs('find_program', '0.53.0', ['dirs'])
@FeatureNewKwargs('find_program', '0.52.0', ['version'])
@FeatureNewKwargs('find_program', '0.49.0', ['disabler'])
@@ -3252,7 +3445,7 @@ external dependencies (including libraries) must go to "dependencies".''')
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled')
- return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)))
+ return self.notfound_program(args)
search_dirs = extract_search_dirs(kwargs)
wanted = mesonlib.stringlistify(kwargs.get('version', []))
@@ -3267,7 +3460,7 @@ external dependencies (including libraries) must go to "dependencies".''')
'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
)
- def _find_cached_dep(self, name, kwargs):
+ def _find_cached_dep(self, name, display_name, kwargs):
# Check if we want this as a build-time / build machine or runt-time /
# host machine dep.
for_machine = self.machine_from_native_kwarg(kwargs)
@@ -3282,7 +3475,7 @@ external dependencies (including libraries) must go to "dependencies".''')
# have explicitly called meson.override_dependency() with a not-found
# dep.
if not cached_dep.found():
- mlog.log('Dependency', mlog.bold(name),
+ mlog.log('Dependency', mlog.bold(display_name),
'found:', mlog.red('NO'), *info)
return identifier, cached_dep
found_vers = cached_dep.get_version()
@@ -3304,7 +3497,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if cached_dep:
if found_vers:
info = [mlog.normal_cyan(found_vers), *info]
- mlog.log('Dependency', mlog.bold(name),
+ mlog.log('Dependency', mlog.bold(display_name),
'found:', mlog.green('YES'), *info)
return identifier, cached_dep
@@ -3327,7 +3520,7 @@ external dependencies (including libraries) must go to "dependencies".''')
return
dep = subi.get_variable_method([varname], {})
if dep.held_object != cached_dep:
- m = 'Inconsistency: Subproject has overriden the dependency with another variable than {!r}'
+ m = 'Inconsistency: Subproject has overridden the dependency with another variable than {!r}'
raise DependencyException(m.format(varname))
def get_subproject_dep(self, name, display_name, dirname, varname, kwargs):
@@ -3337,17 +3530,21 @@ external dependencies (including libraries) must go to "dependencies".''')
dep = self.notfound_dependency()
try:
subproject = self.subprojects[dirname]
- _, cached_dep = self._find_cached_dep(name, kwargs)
+ _, cached_dep = self._find_cached_dep(name, display_name, kwargs)
if varname is None:
- # Assuming the subproject overriden the dependency we want
+ # Assuming the subproject overrode the dependency we want
if cached_dep:
if required and not cached_dep.found():
m = 'Dependency {!r} is not satisfied'
raise DependencyException(m.format(display_name))
return DependencyHolder(cached_dep, self.subproject)
else:
- m = 'Subproject {} did not override dependency {}'
- raise DependencyException(m.format(subproj_path, display_name))
+ if required:
+ m = 'Subproject {} did not override dependency {}'
+ raise DependencyException(m.format(subproj_path, display_name))
+ mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
+ mlog.bold(subproj_path), 'found:', mlog.red('NO'))
+ return self.notfound_dependency()
if subproject.found():
self.verify_fallback_consistency(dirname, varname, cached_dep)
dep = self.subprojects[dirname].get_variable_method([varname], {})
@@ -3388,15 +3585,15 @@ external dependencies (including libraries) must go to "dependencies".''')
def _handle_featurenew_dependencies(self, name):
'Do a feature check on dependencies used by this subproject'
if name == 'mpi':
- FeatureNew('MPI Dependency', '0.42.0').use(self.subproject)
+ FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject)
elif name == 'pcap':
- FeatureNew('Pcap Dependency', '0.42.0').use(self.subproject)
+ FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject)
elif name == 'vulkan':
- FeatureNew('Vulkan Dependency', '0.42.0').use(self.subproject)
+ FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject)
elif name == 'libwmf':
- FeatureNew('LibWMF Dependency', '0.44.0').use(self.subproject)
+ FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject)
elif name == 'openmp':
- FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject)
+ FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject)
@FeatureNewKwargs('dependency', '0.54.0', ['components'])
@FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
@@ -3410,6 +3607,9 @@ external dependencies (including libraries) must go to "dependencies".''')
self.validate_arguments(args, 1, [str])
name = args[0]
display_name = name if name else '(anonymous)'
+ mods = extract_as_list(kwargs, 'modules')
+ if mods:
+ display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods))
not_found_message = kwargs.get('not_found_message', '')
if not isinstance(not_found_message, str):
raise InvalidArguments('The not_found_message must be a string.')
@@ -3439,6 +3639,18 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.notfound_dependency()
has_fallback = 'fallback' in kwargs
+ if not has_fallback and name:
+ # Add an implicit fallback if we have a wrap file or a directory with the same name,
+ # but only if this dependency is required. It is common to first check for a pkg-config,
+ # then fallback to use find_library() and only afterward check again the dependency
+ # with a fallback. If the fallback has already been configured then we have to use it
+ # even if the dependency is not required.
+ provider = self.environment.wrap_resolver.find_dep_provider(name)
+ dirname = mesonlib.listify(provider)[0]
+ if provider and (required or dirname in self.subprojects):
+ kwargs['fallback'] = provider
+ has_fallback = True
+
if 'default_options' in kwargs and not has_fallback:
mlog.warning('The "default_options" keyworg argument does nothing without a "fallback" keyword argument.',
location=self.current_node)
@@ -3451,7 +3663,7 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify'
'version\n requirements use the \'version\' keyword argument instead.')
- identifier, cached_dep = self._find_cached_dep(name, kwargs)
+ identifier, cached_dep = self._find_cached_dep(name, display_name, kwargs)
if cached_dep:
if has_fallback:
dirname, varname = self.get_subproject_infos(kwargs)
@@ -3469,7 +3681,10 @@ external dependencies (including libraries) must go to "dependencies".''')
return self.get_subproject_dep(name, display_name, dirname, varname, kwargs)
wrap_mode = self.coredata.get_builtin_option('wrap_mode')
- forcefallback = wrap_mode == WrapMode.forcefallback and has_fallback
+ force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+ forcefallback = has_fallback and (wrap_mode == WrapMode.forcefallback or \
+ name in force_fallback_for or \
+ dirname in force_fallback_for)
if name != '' and not forcefallback:
self._handle_featurenew_dependencies(name)
kwargs['required'] = required and not has_fallback
@@ -3515,15 +3730,23 @@ external dependencies (including libraries) must go to "dependencies".''')
def get_subproject_infos(self, kwargs):
fbinfo = mesonlib.stringlistify(kwargs['fallback'])
if len(fbinfo) == 1:
- FeatureNew('Fallback without variable name', '0.53.0').use(self.subproject)
+ FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject)
return fbinfo[0], None
elif len(fbinfo) != 2:
raise InterpreterException('Fallback info must have one or two items.')
return fbinfo
def dependency_fallback(self, name, display_name, kwargs):
+ dirname, varname = self.get_subproject_infos(kwargs)
required = kwargs.get('required', True)
- if self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
+
+ # Explicitly listed fallback preferences for specific subprojects
+ # take precedence over wrap-mode
+ force_fallback_for = self.coredata.get_builtin_option('force_fallback_for')
+ if name in force_fallback_for or dirname in force_fallback_for:
+ mlog.log('Looking for a fallback subproject for the dependency',
+ mlog.bold(display_name), 'because:\nUse of fallback was forced for that specific subproject')
+ elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback:
mlog.log('Not looking for a fallback subproject for the dependency',
mlog.bold(display_name), 'because:\nUse of fallback '
'dependencies is disabled.')
@@ -3537,7 +3760,6 @@ external dependencies (including libraries) must go to "dependencies".''')
else:
mlog.log('Looking for a fallback subproject for the dependency',
mlog.bold(display_name))
- dirname, varname = self.get_subproject_infos(kwargs)
sp_kwargs = {
'default_options': kwargs.get('default_options', []),
'required': required,
@@ -3603,11 +3825,13 @@ external dependencies (including libraries) must go to "dependencies".''')
raise InterpreterException('Unknown target_type.')
@permittedKwargs(permitted_kwargs['vcs_tag'])
+ @FeatureDeprecatedKwargs('custom_target', '0.47.0', ['build_always'],
+ 'combine build_by_default and build_always_stale instead.')
def func_vcs_tag(self, node, args, kwargs):
if 'input' not in kwargs or 'output' not in kwargs:
raise InterpreterException('Keyword arguments input and output must exist')
if 'fallback' not in kwargs:
- FeatureNew('Optional fallback in vcs_tag', '0.41.0').use(self.subproject)
+ FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject)
fallback = kwargs.pop('fallback', self.project_version)
if not isinstance(fallback, str):
raise InterpreterException('Keyword argument fallback must be a string.')
@@ -3660,7 +3884,7 @@ external dependencies (including libraries) must go to "dependencies".''')
if len(args) != 1:
raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name')
if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
- FeatureNew('substitutions in custom_target depfile', '0.47.0').use(self.subproject)
+ FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject)
return self._func_custom_target_impl(node, args, kwargs)
def _func_custom_target_impl(self, node, args, kwargs):
@@ -3748,6 +3972,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
@FeatureNewKwargs('test', '0.52.0', ['priority'])
@permittedKwargs(permitted_kwargs['test'])
def func_test(self, node, args, kwargs):
+ if kwargs.get('protocol') == 'gtest':
+ FeatureNew.single_use('"gtest" protocol for tests', '0.55.0', self.subproject)
self.add_test(node, args, kwargs, True)
def unpack_env_kwarg(self, kwargs) -> build.EnvironmentVariables:
@@ -3755,7 +3981,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if isinstance(envlist, EnvironmentVariablesHolder):
env = envlist.held_object
elif isinstance(envlist, dict):
- FeatureNew('environment dictionary', '0.52.0').use(self.subproject)
+ FeatureNew.single_use('environment dictionary', '0.52.0', self.subproject)
env = EnvironmentVariablesHolder(envlist)
env = env.held_object
else:
@@ -3767,7 +3993,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
def add_test(self, node, args, kwargs, is_base_test):
if len(args) != 2:
- raise InterpreterException('Incorrect number of arguments')
+ raise InterpreterException('test expects 2 arguments, {} given'.format(len(args)))
if not isinstance(args[0], str):
raise InterpreterException('First argument of test must be a string.')
exe = args[1]
@@ -3799,8 +4025,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if not isinstance(timeout, int):
raise InterpreterException('Timeout must be an integer.')
protocol = kwargs.get('protocol', 'exitcode')
- if protocol not in ('exitcode', 'tap'):
- raise InterpreterException('Protocol must be "exitcode" or "tap".')
+ if protocol not in {'exitcode', 'tap', 'gtest'}:
+ raise InterpreterException('Protocol must be "exitcode", "tap", or "gtest".')
suite = []
prj = self.subproject if self.is_subproject() else self.build.project_name
for s in mesonlib.stringlistify(kwargs.get('suite', '')):
@@ -3874,7 +4100,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
absname = os.path.join(self.environment.get_source_dir(), buildfilename)
if not os.path.isfile(absname):
self.subdir = prev_subdir
- raise InterpreterException('Non-existent build file {!r}'.format(buildfilename))
+ raise InterpreterException("Non-existent build file '{!s}'".format(buildfilename))
with open(absname, encoding='utf8') as f:
code = f.read()
assert(isinstance(code, str))
@@ -3922,7 +4148,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
elif isinstance(s, str):
source_strings.append(s)
else:
- raise InvalidArguments('Argument {!r} must be string or file.'.format(s))
+ raise InvalidArguments('Argument must be string or file.')
sources += self.source_strings_to_files(source_strings)
install_dir = kwargs.get('install_dir', None)
if not isinstance(install_dir, (str, type(None))):
@@ -4071,7 +4297,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if 'configuration' in kwargs:
conf = kwargs['configuration']
if isinstance(conf, dict):
- FeatureNew('configure_file.configuration dictionary', '0.49.0').use(self.subproject)
+ FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject)
conf = ConfigurationDataHolder(self.subproject, conf)
elif not isinstance(conf, ConfigurationDataHolder):
raise InterpreterException('Argument "configuration" is not of type configuration_data')
@@ -4101,7 +4327,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
conf.mark_used()
elif 'command' in kwargs:
if len(inputs) > 1:
- FeatureNew('multiple inputs in configure_file()', '0.52.0').use(self.subproject)
+ FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject)
# We use absolute paths for input and output here because the cwd
# that the command is run from is 'unspecified', so it could change.
# Currently it's builddir/subdir for in_builddir else srcdir/subdir.
@@ -4196,8 +4422,9 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
for a in incdir_strings:
if a.startswith(src_root):
- raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use
-relative paths instead.
+ raise InvalidArguments('Tried to form an absolute path to a source dir. '
+ 'You should not do that but use relative paths instead.'
+ '''
To get include path to any directory relative to the current dir do
@@ -4348,7 +4575,7 @@ different subdirectory.
if len(args) > 1:
raise InterpreterException('environment takes only one optional positional arguments')
elif len(args) == 1:
- FeatureNew('environment positional arguments', '0.52.0').use(self.subproject)
+ FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject)
initial_values = args[0]
if not isinstance(initial_values, dict) and not isinstance(initial_values, list):
raise InterpreterException('environment first argument must be a dictionary or a list')
@@ -4557,6 +4784,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s
kwargs['include_directories'] = self.extract_incdirs(kwargs)
target = targetclass(name, self.subdir, self.subproject, for_machine, sources, objs, self.environment, kwargs)
+ target.project_version = self.project_version
if not self.environment.machines.matches_build_machine(for_machine):
self.add_cross_stdlib_info(target)
@@ -4637,6 +4865,8 @@ This will become a hard error in the future.''', location=self.current_node)
if len(args) < 1 or len(args) > 2:
raise InvalidCode('Get_variable takes one or two arguments.')
varname = args[0]
+ if isinstance(varname, Disabler):
+ return varname
if not isinstance(varname, str):
raise InterpreterException('First argument must be a string.')
try:
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index 1a7aa38..822167c 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -18,6 +18,7 @@
from . import mparser, mesonlib, mlog
from . import environment, dependencies
+import abc
import os, copy, re
import collections.abc
from functools import wraps
@@ -212,17 +213,17 @@ class permittedKwargs:
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
-class FeatureCheckBase:
+class FeatureCheckBase(metaclass=abc.ABCMeta):
"Base class for feature version checks"
- # Class variable, shared across all instances
- #
- # Format: {subproject: {feature_version: set(feature_names)}}
+ # In python 3.6 we can just forward declare this, but in 3.5 we can't
+ # This will be overwritten by the subclasses by necessity
feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
- def __init__(self, feature_name: str, version: str):
+ def __init__(self, feature_name: str, version: str, extra_message: T.Optional[str] = None):
self.feature_name = feature_name # type: str
self.feature_version = version # type: str
+ self.extra_message = extra_message or '' # type: str
@staticmethod
def get_target_version(subproject: str) -> str:
@@ -231,13 +232,18 @@ class FeatureCheckBase:
return ''
return mesonlib.project_meson_versions[subproject]
+ @staticmethod
+ @abc.abstractmethod
+ def check_version(target_version: str, feature_Version: str) -> bool:
+ pass
+
def use(self, subproject: str) -> None:
tv = self.get_target_version(subproject)
# No target version
if tv == '':
return
# Target version is new enough
- if mesonlib.version_compare_condition_with_min(tv, self.feature_version):
+ if self.check_version(tv, self.feature_version):
return
# Feature is too new for target version, register it
if subproject not in self.feature_registry:
@@ -280,41 +286,86 @@ class FeatureCheckBase:
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
+ @classmethod
+ def single_use(cls, feature_name: str, version: str, subproject: str,
+ extra_message: T.Optional[str] = None) -> None:
+ """Oneline version that instantiates and calls use()."""
+ cls(feature_name, version, extra_message).use(subproject)
+
+
class FeatureNew(FeatureCheckBase):
"""Checks for new features"""
+ # Class variable, shared across all instances
+ #
+ # Format: {subproject: {feature_version: set(feature_names)}}
+ feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+ @staticmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
@staticmethod
def get_warning_str_prefix(tv: str) -> str:
return 'Project specifies a minimum meson_version \'{}\' but uses features which were added in newer versions:'.format(tv)
def log_usage_warning(self, tv: str) -> None:
- mlog.warning('Project targeting \'{}\' but tried to use feature introduced '
- 'in \'{}\': {}'.format(tv, self.feature_version, self.feature_name))
+ args = [
+ 'Project targeting', "'{}'".format(tv),
+ 'but tried to use feature introduced in',
+ "'{}':".format(self.feature_version),
+ '{}.'.format(self.feature_name),
+ ]
+ if self.extra_message:
+ args.append(self.extra_message)
+ mlog.warning(*args)
class FeatureDeprecated(FeatureCheckBase):
"""Checks for deprecated features"""
+ # Class variable, shared across all instances
+ #
+ # Format: {subproject: {feature_version: set(feature_names)}}
+ feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]]
+
+ @staticmethod
+ def check_version(target_version: str, feature_version: str) -> bool:
+ # For deprecation checks we need to return the inverse of FeatureNew checks
+ return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
@staticmethod
def get_warning_str_prefix(tv: str) -> str:
return 'Deprecated features used:'
def log_usage_warning(self, tv: str) -> None:
- mlog.deprecation('Project targeting \'{}\' but tried to use feature '
- 'deprecated since \'{}\': {}'
- ''.format(tv, self.feature_version, self.feature_name))
-
-
-class FeatureCheckKwargsBase:
- def __init__(self, feature_name: str, feature_version: str, kwargs: T.List[str]):
+ args = [
+ 'Project targeting', "'{}'".format(tv),
+ 'but tried to use feature deprecated since',
+ "'{}':".format(self.feature_version),
+ '{}.'.format(self.feature_name),
+ ]
+ if self.extra_message:
+ args.append(self.extra_message)
+ mlog.warning(*args)
+
+
+class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
+
+ @property
+ @abc.abstractmethod
+ def feature_check_class(self) -> T.Type[FeatureCheckBase]:
+ pass
+
+ def __init__(self, feature_name: str, feature_version: str,
+ kwargs: T.List[str], extra_message: T.Optional[str] = None):
self.feature_name = feature_name
self.feature_version = feature_version
self.kwargs = kwargs
+ self.extra_message = extra_message
def __call__(self, f):
@wraps(f)
def wrapped(*wrapped_args, **wrapped_kwargs):
- # Which FeatureCheck class to invoke
- FeatureCheckClass = self.feature_check_class
kwargs, subproject = _get_callee_args(wrapped_args, want_subproject=True)[3:5]
if subproject is None:
raise AssertionError('{!r}'.format(wrapped_args))
@@ -322,7 +373,8 @@ class FeatureCheckKwargsBase:
if arg not in kwargs:
continue
name = arg + ' arg in ' + self.feature_name
- FeatureCheckClass(name, self.feature_version).use(subproject)
+ self.feature_check_class.single_use(
+ name, self.feature_version, subproject, self.extra_message)
return f(*wrapped_args, **wrapped_kwargs)
return wrapped
@@ -532,7 +584,7 @@ class InterpreterBase:
self.argument_depth += 1
for key, value in kwargs.items():
if not isinstance(key, mparser.StringNode):
- FeatureNew('Dictionary entry using non literal key', '0.53.0').use(self.subproject)
+ FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject)
assert isinstance(key, mparser.BaseNode) # All keys must be nodes due to resolve_key_nodes=False
str_key = self.evaluate_statement(key)
if not isinstance(str_key, str):
@@ -819,7 +871,7 @@ The result of this is undefined and will become a hard error in a future Meson r
def function_call(self, node: mparser.FunctionNode) -> T.Optional[TYPE_var]:
func_name = node.func_name
(posargs, kwargs) = self.reduce_arguments(node.args)
- if is_disabled(posargs, kwargs) and func_name != 'set_variable' and func_name != 'is_disabler':
+ if is_disabled(posargs, kwargs) and func_name not in {'get_variable', 'set_variable', 'is_disabler'}:
return Disabler()
if func_name in self.funcs:
func = self.funcs[func_name]
@@ -974,6 +1026,20 @@ The result of this is undefined and will become a hard error in a future Meson r
if not isinstance(cmpr, str):
raise InterpreterException('Version_compare() argument must be a string.')
return mesonlib.version_compare(obj, cmpr)
+ elif method_name == 'substring':
+ if len(posargs) > 2:
+ raise InterpreterException('substring() takes maximum two arguments.')
+ start = 0
+ end = len(obj)
+ if len (posargs) > 0:
+ if not isinstance(posargs[0], int):
+ raise InterpreterException('substring() argument must be an int')
+ start = posargs[0]
+ if len (posargs) > 1:
+ if not isinstance(posargs[1], int):
+ raise InterpreterException('substring() argument must be an int')
+ end = posargs[1]
+ return obj[start:end]
raise InterpreterException('Unknown method "%s" for a string.' % method_name)
def format_string(self, templ: str, args: T.List[TYPE_nvar]) -> str:
diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py
index 44c720f..3ce7111 100644
--- a/mesonbuild/linkers.py
+++ b/mesonbuild/linkers.py
@@ -17,6 +17,7 @@ import os
import typing as T
from . import mesonlib
+from .arglist import CompilerArgs
from .envconfig import get_env_var
if T.TYPE_CHECKING:
@@ -29,6 +30,9 @@ class StaticLinker:
def __init__(self, exelist: T.List[str]):
self.exelist = exelist
+ def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+ return CompilerArgs(self, args)
+
def can_linker_accept_rsp(self) -> bool:
"""
Determines whether the linker can accept arguments using the @rsp syntax.
@@ -56,8 +60,8 @@ class StaticLinker:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
def thread_link_flags(self, env: 'Environment') -> T.List[str]:
return []
@@ -149,6 +153,10 @@ class ArLinker(StaticLinker):
self.std_args = ['csrD']
else:
self.std_args = ['csr']
+ self.can_rsp = '@<' in stdo
+
+ def can_linker_accept_rsp(self) -> bool:
+ return self.can_rsp
def get_std_link_args(self) -> T.List[str]:
return self.std_args
@@ -436,16 +444,17 @@ class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta):
"""Arguments to make all warnings errors."""
return []
+ def headerpad_args(self) -> T.List[str]:
+ # Only used by the Apple linker
+ return []
+
def bitcode_args(self) -> T.List[str]:
raise mesonlib.MesonException('This linker does not support bitcode bundles')
- def get_debug_crt_args(self) -> T.List[str]:
- return []
-
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
@@ -551,12 +560,12 @@ class GnuLikeDynamicLinkerMixin:
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
m = env.machines[self.for_machine]
if m.is_windows() or m.is_cygwin():
- return []
+ return ([], set())
if not rpath_paths and not install_rpath and not build_rpath:
- return []
+ return ([], set())
args = []
origin_placeholder = '$ORIGIN'
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
@@ -564,9 +573,14 @@ class GnuLikeDynamicLinkerMixin:
# is *very* allergic to duplicate -delete_rpath arguments
# when calling depfixer on installation.
all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+ rpath_dirs_to_remove = set()
+ for p in all_paths:
+ rpath_dirs_to_remove.add(p.encode('utf8'))
# Build_rpath is used as-is (it is usually absolute).
if build_rpath != '':
all_paths.add(build_rpath)
+ for p in build_rpath.split(':'):
+ rpath_dirs_to_remove.add(p.encode('utf8'))
# TODO: should this actually be "for (dragonfly|open)bsd"?
if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
@@ -590,7 +604,7 @@ class GnuLikeDynamicLinkerMixin:
# TODO: should this actually be "for solaris/sunos"?
if mesonlib.is_sunos():
- return args
+ return (args, rpath_dirs_to_remove)
# Rpaths to use while linking must be absolute. These are not
# written to the binary. Needed only with GNU ld:
@@ -610,7 +624,7 @@ class GnuLikeDynamicLinkerMixin:
for p in rpath_paths:
args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
- return args
+ return (args, rpath_dirs_to_remove)
class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
@@ -650,8 +664,8 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def no_undefined_args(self) -> T.List[str]:
return self._apply_prefix('-undefined,error')
- def get_always_args(self) -> T.List[str]:
- return self._apply_prefix('-headerpad_max_install_names') + super().get_always_args()
+ def headerpad_args(self) -> T.List[str]:
+ return self._apply_prefix('-headerpad_max_install_names')
def bitcode_args(self) -> T.List[str]:
return self._apply_prefix('-bitcode_bundle')
@@ -676,12 +690,10 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not rpath_paths and not install_rpath and not build_rpath:
- return []
- # Ensure that there is enough space for install_name_tool in-place
- # editing of large RPATHs
- args = self._apply_prefix('-headerpad_max_install_names')
+ return ([], set())
+ args = []
# @loader_path is the equivalent of $ORIGIN on macOS
# https://stackoverflow.com/q/26280738
origin_placeholder = '@loader_path'
@@ -692,13 +704,16 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
for rp in all_paths:
args.extend(self._apply_prefix('-rpath,' + rp))
- return args
+ return (args, set())
class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
"""Representation of GNU ld.bfd and ld.gold."""
+ def get_accepts_rsp(self) -> bool:
+ return True;
+
class GnuGoldDynamicLinker(GnuDynamicLinker):
@@ -761,6 +776,11 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna
def get_asneeded_args(self) -> T.List[str]:
return []
+ def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+ rpath_paths: str, build_rpath: str,
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
+
class CcrxDynamicLinker(DynamicLinker):
@@ -834,8 +854,8 @@ class Xc16DynamicLinker(DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
- return []
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ return ([], set())
class C2000DynamicLinker(DynamicLinker):
@@ -907,6 +927,12 @@ class ArmClangDynamicLinker(ArmDynamicLinker):
def import_library_args(self, implibname: str) -> T.List[str]:
return ['--symdefs=' + implibname]
+class QualcommLLVMDynamicLinker(LLVMDynamicLinker):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ # ARM Linker from Snapdragon LLVM ARM Compiler
+ self.id = 'ld.qcld'
class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
@@ -933,10 +959,10 @@ class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not env.machines[self.for_machine].is_windows():
- return ['-R' + os.path.join(build_dir, p) for p in rpath_paths]
- return []
+ return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
+ return ([], set())
class PGIStaticLinker(StaticLinker):
@@ -975,16 +1001,6 @@ class VisualStudioLikeLinkerMixin:
def invoked_by_compiler(self) -> bool:
return not self.direct
- def get_debug_crt_args(self) -> T.List[str]:
- """Arguments needed to select a debug crt for the linker.
-
- Sometimes we need to manually select the CRT (C runtime) to use with
- MSVC. One example is when trying to link with static libraries since
- MSVC won't auto-select a CRT for us in that case and will error out
- asking us to select one.
- """
- return self._apply_prefix('/MDd')
-
def get_output_args(self, outputname: str) -> T.List[str]:
return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname])
@@ -1075,6 +1091,19 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
return args
return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+ def get_pie_args(self) -> T.List[str]:
+ # Available in Solaris 11.2 and later
+ pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp'))
+ for line in (stdo + stde).split('\n'):
+ if '-z type' in line:
+ if 'pie' in line:
+ return ['-z', 'type=pie']
+ break
+ return []
+
+ def get_asneeded_args(self) -> T.List[str]:
+ return self._apply_prefix(['-z', 'ignore'])
+
def no_undefined_args(self) -> T.List[str]:
return ['-z', 'defs']
@@ -1086,9 +1115,9 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: str, build_rpath: str,
- install_rpath: str) -> T.List[str]:
+ install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not rpath_paths and not install_rpath and not build_rpath:
- return []
+ return ([], set())
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
if build_rpath != '':
@@ -1103,7 +1132,7 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
paths = padding
else:
paths = paths + ':' + padding
- return self._apply_prefix('-rpath,{}'.format(paths))
+ return (self._apply_prefix('-rpath,{}'.format(paths)), set())
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str],
diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py
index 7829ffc..9fe3a65 100644
--- a/mesonbuild/mcompile.py
+++ b/mesonbuild/mcompile.py
@@ -14,23 +14,241 @@
"""Entrypoint script for backend agnostic compile."""
-import os
-import pathlib
-import shutil
+import json
+import re
import sys
import typing as T
+from collections import defaultdict
+from pathlib import Path
from . import mlog
from . import mesonlib
+from . import coredata
from .mesonlib import MesonException
+from mesonbuild.environment import detect_ninja
+from mesonbuild.coredata import UserArrayOption
if T.TYPE_CHECKING:
import argparse
+def array_arg(value: str) -> T.List[str]:
+ return UserArrayOption(None, value, allow_dups=True, user_input=True).value
+
+def validate_builddir(builddir: Path) -> None:
+ if not (builddir / 'meson-private' / 'coredata.dat' ).is_file():
+ raise MesonException('Current directory is not a meson build directory: `{}`.\n'
+ 'Please specify a valid build dir or change the working directory to it.\n'
+ 'It is also possible that the build directory was generated with an old\n'
+ 'meson version. Please regenerate it in this case.'.format(builddir))
+
+def get_backend_from_coredata(builddir: Path) -> str:
+ """
+ Gets `backend` option value from coredata
+ """
+ return coredata.load(str(builddir)).get_builtin_option('backend')
+
+def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
+ """
+ Converts a List of name-to-dict to a dict of name-to-dicts (since names are not unique)
+ """
+ path_to_intro = builddir / 'meson-info' / 'intro-targets.json'
+ if not path_to_intro.exists():
+ raise MesonException('`{}` is missing! Directory is not configured yet?'.format(path_to_intro.name))
+ with path_to_intro.open() as f:
+ schema = json.load(f)
+
+ parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+ for target in schema:
+ parsed_data[target['name']] += [target]
+ return parsed_data
+
+class ParsedTargetName:
+ full_name = ''
+ name = ''
+ type = ''
+ path = ''
+
+ def __init__(self, target: str):
+ self.full_name = target
+ split = target.rsplit(':', 1)
+ if len(split) > 1:
+ self.type = split[1]
+ if not self._is_valid_type(self.type):
+ raise MesonException('Can\'t invoke target `{}`: unknown target type: `{}`'.format(target, self.type))
+
+ split = split[0].rsplit('/', 1)
+ if len(split) > 1:
+ self.path = split[0]
+ self.name = split[1]
+ else:
+ self.name = split[0]
+
+ @staticmethod
+ def _is_valid_type(type: str) -> bool:
+ # Amend docs in Commands.md when editing this list
+ allowed_types = {
+ 'executable',
+ 'static_library',
+ 'shared_library',
+ 'shared_module',
+ 'custom',
+ 'run',
+ 'jar',
+ }
+ return type in allowed_types
+
+def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> dict:
+ if target.name not in introspect_data:
+ raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+
+ intro_targets = introspect_data[target.name]
+ found_targets = []
+
+ resolved_bdir = builddir.resolve()
+
+ if not target.type and not target.path:
+ found_targets = intro_targets
+ else:
+ for intro_target in intro_targets:
+ if (intro_target['subproject'] or
+ (target.type and target.type != intro_target['type'].replace(' ', '_')) or
+ (target.path
+ and intro_target['filename'] != 'no_name'
+ and Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)):
+ continue
+ found_targets += [intro_target]
+
+ if not found_targets:
+ raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name))
+ elif len(found_targets) > 1:
+ raise MesonException('Can\'t invoke target `{}`: ambiguous name. Add target type and/or path: `PATH/NAME:TYPE`'.format(target.full_name))
+
+ return found_targets[0]
+
+def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+ if intro_target['type'] == 'run':
+ return [target.name]
+ else:
+ return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']]
+
+def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.List[str]:
+ runner = detect_ninja()
+ if runner is None:
+ raise MesonException('Cannot find ninja.')
+ mlog.log('Found runner:', runner)
+
+ cmd = [runner, '-C', builddir.as_posix()]
+
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ for t in options.targets:
+ cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data))
+ if options.clean:
+ cmd.append('clean')
+
+ # If the value is set to < 1 then don't set anything, which lets
+ # ninja/samu decide what to do.
+ if options.jobs > 0:
+ cmd.extend(['-j', str(options.jobs)])
+ if options.load_average > 0:
+ cmd.extend(['-l', str(options.load_average)])
+
+ if options.verbose:
+ cmd.append('--verbose')
+
+ cmd += options.ninja_args
+
+ return cmd
+
+def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str:
+ intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+ assert intro_target['type'] != 'run', 'Should not reach here: `run` targets must be handled above'
+
+ # Normalize project name
+ # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
+ target_name = re.sub('[\%\$\@\;\.\(\)\']', '_', intro_target['id'])
+ rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
+ if rel_path != '.':
+ target_name = str(rel_path / target_name)
+ return target_name
+
+def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.List[str]:
+ slns = list(builddir.glob('*.sln'))
+ assert len(slns) == 1, 'More than one solution in a project?'
+ sln = slns[0]
+
+ cmd = ['msbuild']
+
+ if options.targets:
+ intro_data = parse_introspect_data(builddir)
+ has_run_target = any(map(
+ lambda t:
+ get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run',
+ options.targets
+ ))
+
+ if has_run_target:
+ # `run` target can't be used the same way as other targets on `vs` backend.
+ # They are defined as disabled projects, which can't be invoked as `.sln`
+ # target and have to be invoked directly as project instead.
+ # Issue: https://github.com/microsoft/msbuild/issues/4772
+
+ if len(options.targets) > 1:
+ raise MesonException('Only one target may be specified when `run` target type is used on this backend.')
+ intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data)
+ proj_dir = Path(intro_target['filename'][0]).parent
+ proj = proj_dir/'{}.vcxproj'.format(intro_target['id'])
+ cmd += [str(proj.resolve())]
+ else:
+ cmd += [str(sln.resolve())]
+ cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets])
+ else:
+ cmd += [str(sln.resolve())]
+
+ if options.clean:
+ cmd.extend(['-target:Clean'])
+
+ # In msbuild `-maxCpuCount` with no number means "detect cpus", the default is `-maxCpuCount:1`
+ if options.jobs > 0:
+ cmd.append('-maxCpuCount:{}'.format(options.jobs))
+ else:
+ cmd.append('-maxCpuCount')
+
+ if options.load_average:
+ mlog.warning('Msbuild does not have a load-average switch, ignoring.')
+
+ if not options.verbose:
+ cmd.append('-verbosity:minimal')
+
+ cmd += options.vs_args
+
+ return cmd
def add_arguments(parser: 'argparse.ArgumentParser') -> None:
"""Add compile specific arguments."""
parser.add_argument(
+ 'targets',
+ metavar='TARGET',
+ nargs='*',
+ default=None,
+ help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].')
+ parser.add_argument(
+ '--clean',
+ action='store_true',
+ help='Clean the build directory.'
+ )
+ parser.add_argument(
+ '-C',
+ action='store',
+ dest='builddir',
+ type=Path,
+ default='.',
+ help='The directory containing build files to be built.'
+ )
+ parser.add_argument(
'-j', '--jobs',
action='store',
default=0,
@@ -42,80 +260,44 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
action='store',
default=0,
type=int,
- help='The system load average to try to maintain (if supported)'
+ help='The system load average to try to maintain (if supported).'
)
parser.add_argument(
- '--clean',
+ '--verbose',
action='store_true',
- help='Clean the build directory.'
+ help='Show more verbose output.'
)
parser.add_argument(
- '-C',
- action='store',
- dest='builddir',
- type=pathlib.Path,
- default='.',
- help='The directory containing build files to be built.'
+ '--ninja-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `ninja` (applied only on `ninja` backend).'
+ )
+ parser.add_argument(
+ '--vs-args',
+ type=array_arg,
+ default=[],
+ help='Arguments to pass to `msbuild` (applied only on `vs` backend).'
)
-
def run(options: 'argparse.Namespace') -> int:
- bdir = options.builddir # type: pathlib.Path
- if not bdir.exists():
- raise MesonException('Path to builddir {} does not exist!'.format(str(bdir.resolve())))
- if not bdir.is_dir():
- raise MesonException('builddir path should be a directory.')
+ bdir = options.builddir # type: Path
+ validate_builddir(bdir.resolve())
cmd = [] # type: T.List[str]
- runner = None # type T.Optional[str]
- slns = list(bdir.glob('*.sln'))
-
- if (bdir / 'build.ninja').exists():
- runner = os.environ.get('NINJA')
- if not runner:
- if shutil.which('ninja'):
- runner = 'ninja'
- elif shutil.which('samu'):
- runner = 'samu'
-
- if runner is None:
- raise MesonException('Cannot find either ninja or samu.')
-
- cmd = [runner, '-C', bdir.as_posix()]
-
- # If the value is set to < 1 then don't set anything, which let's
- # ninja/samu decide what to do.
- if options.jobs > 0:
- cmd.extend(['-j', str(options.jobs)])
- if options.load_average > 0:
- cmd.extend(['-l', str(options.load_average)])
- if options.clean:
- cmd.append('clean')
-
- # TODO: with python 3.8 this could be `elif slns := bdir.glob('*.sln'):`
- elif slns:
- assert len(slns) == 1, 'More than one solution in a project?'
-
- sln = slns[0]
- cmd = ['msbuild', str(sln.resolve())]
-
- # In msbuild `-m` with no number means "detect cpus", the default is `-m1`
- if options.jobs > 0:
- cmd.append('-m{}'.format(options.jobs))
- else:
- cmd.append('-m')
- if options.load_average:
- mlog.warning('Msbuild does not have a load-average switch, ignoring.')
- if options.clean:
- cmd.extend(['/t:Clean'])
+ if options.targets and options.clean:
+ raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously')
- # TODO: xcode?
+ backend = get_backend_from_coredata(bdir)
+ if backend == 'ninja':
+ cmd = get_parsed_args_ninja(options, bdir)
+ elif backend.startswith('vs'):
+ cmd = get_parsed_args_vs(options, bdir)
else:
+ # TODO: xcode?
raise MesonException(
- 'Could not find any runner or backend for directory {}'.format(bdir.resolve().as_posix()))
-
- mlog.log('Found runner:', runner)
+ 'Backend `{}` is not yet supported by `compile`. Use generated project files directly instead.'.format(backend))
p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer)
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 6c450da..b38df85 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -97,9 +97,9 @@ class Conf:
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
- def split_options_per_subproject(self, options_iter):
+ def split_options_per_subproject(self, options):
result = {}
- for k, o in options_iter:
+ for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
@@ -184,19 +184,7 @@ class Conf:
if not self.default_values_only:
print(' Build dir ', self.build_dir)
- dir_option_names = ['bindir',
- 'datadir',
- 'includedir',
- 'infodir',
- 'libdir',
- 'libexecdir',
- 'localedir',
- 'localstatedir',
- 'mandir',
- 'prefix',
- 'sbindir',
- 'sharedstatedir',
- 'sysconfdir']
+ dir_option_names = list(coredata.BUILTIN_DIR_OPTIONS)
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
@@ -211,15 +199,12 @@ class Conf:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
- core_options = self.split_options_per_subproject(core_options.items())
- host_compiler_options = self.split_options_per_subproject(
- self.coredata.flatten_lang_iterator(
- self.coredata.compiler_options.host.items()))
+ core_options = self.split_options_per_subproject(core_options)
build_compiler_options = self.split_options_per_subproject(
- (insert_build_prefix(k), o)
- for k, o in self.coredata.flatten_lang_iterator(
- self.coredata.compiler_options.build.items()))
- project_options = self.split_options_per_subproject(self.coredata.user_options.items())
+ dict((insert_build_prefix(k), o)
+ for k, o in self.coredata.flatten_lang_iterator(
+ self.coredata.compiler_options.build.items())))
+ project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py
index b324f76..9d94ace 100644
--- a/mesonbuild/mdist.py
+++ b/mesonbuild/mdist.py
@@ -213,7 +213,7 @@ def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir):
unpacked_src_dir = unpacked_files[0]
with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions:
meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions)
- if o['name'] not in ['backend', 'install_umask']]
+ if o['name'] not in ['backend', 'install_umask', 'buildtype']]
meson_command += extra_meson_args
ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_bin)
@@ -259,7 +259,7 @@ def run(options):
if options.include_subprojects:
subproject_dir = os.path.join(src_root, b.subproject_dir)
for sub in b.subprojects:
- _, directory = wrap.get_directory(subproject_dir, sub)
+ directory = wrap.get_directory(subproject_dir, sub)
subprojects.append(os.path.join(b.subproject_dir, directory))
extra_meson_args.append('-Dwrap_mode=nodownload')
diff --git a/mesonbuild/mesondata.py b/mesonbuild/mesondata.py
new file mode 100644
index 0000000..1f223c2
--- /dev/null
+++ b/mesonbuild/mesondata.py
@@ -0,0 +1,374 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+####
+#### WARNING: This is an automatically generated file! Do not edit!
+#### Generated by tools/gen_data.py
+####
+
+
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+ from .environment import Environment
+
+######################
+# BEGIN Data section #
+######################
+
+file_0_data_CMakeListsLLVM_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} )
+
+set(PACKAGE_FOUND FALSE)
+
+while(TRUE)
+ find_package(LLVM REQUIRED CONFIG QUIET)
+
+ # ARCHS has to be set via the CMD interface
+ if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(LLVM_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ foreach(mod IN LISTS LLVM_MESON_MODULES)
+ # Reset variables
+ set(out_mods)
+ set(real_mods)
+
+ # Generate a lower and upper case version
+ string(TOLOWER "${mod}" mod_L)
+ string(TOUPPER "${mod}" mod_U)
+
+ # Get the mapped components
+ llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+ list(SORT out_mods)
+ list(REMOVE_DUPLICATES out_mods)
+
+ # Make sure that the modules exist
+ foreach(i IN LISTS out_mods)
+ if(TARGET ${i})
+ list(APPEND real_mods ${i})
+ endif()
+ endforeach()
+
+ # Set the output variables
+ set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+ foreach(i IN LISTS real_mods)
+ set(MESON_TARGET_TO_LLVM_${i} ${mod})
+ endforeach()
+ endforeach()
+
+ # Check the following variables:
+ # LLVM_PACKAGE_VERSION
+ # LLVM_VERSION
+ # LLVM_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED LLVM_PACKAGE_VERSION)
+ set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+ elseif(DEFINED LLVM_VERSION)
+ set(PACKAGE_VERSION "${LLVM_VERSION}")
+ elseif(DEFINED LLVM_VERSION_STRING)
+ set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # LLVM_LIBRARIES
+ # LLVM_LIBS
+ set(libs)
+ if(DEFINED LLVM_LIBRARIES)
+ set(libs LLVM_LIBRARIES)
+ elseif(DEFINED LLVM_LIBS)
+ set(libs LLVM_LIBS)
+ endif()
+
+ # Check the following variables:
+ # LLVM_INCLUDE_DIRS
+ # LLVM_INCLUDES
+ # LLVM_INCLUDE_DIR
+ set(includes)
+ if(DEFINED LLVM_INCLUDE_DIRS)
+ set(includes LLVM_INCLUDE_DIRS)
+ elseif(DEFINED LLVM_INCLUDES)
+ set(includes LLVM_INCLUDES)
+ elseif(DEFINED LLVM_INCLUDE_DIR)
+ set(includes LLVM_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # LLVM_DEFINITIONS
+ set(definitions)
+ if(DEFINED LLVM_DEFINITIONS)
+ set(definitions LLVM_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
+'''
+
+file_1_data_CMakePathInfo_txt = '''\
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+ file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+ foreach(dir ${implicit_dirs})
+ if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+ list(APPEND LIB_ARCH_LIST "${dir}")
+ endif()
+ endforeach()
+endif()
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
+'''
+
+file_2_data_CMakeLists_txt = '''\
+# fail noisily if attempt to use this file without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+while(TRUE)
+ find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+
+ # ARCHS has to be set via the CMD interface
+ if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+ break()
+ endif()
+
+ list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+ list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND)
+ set(PACKAGE_FOUND TRUE)
+
+ # Check the following variables:
+ # FOO_VERSION
+ # Foo_VERSION
+ # FOO_VERSION_STRING
+ # Foo_VERSION_STRING
+ if(NOT DEFINED PACKAGE_VERSION)
+ if(DEFINED ${_packageName}_VERSION)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+ elseif(DEFINED ${_packageName}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+ elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+ set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+ endif()
+ endif()
+
+ # Check the following variables:
+ # FOO_LIBRARIES
+ # Foo_LIBRARIES
+ # FOO_LIBS
+ # Foo_LIBS
+ set(libs)
+ if(DEFINED ${_packageName}_LIBRARIES)
+ set(libs ${_packageName}_LIBRARIES)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+ set(libs ${PACKAGE_NAME}_LIBRARIES)
+ elseif(DEFINED ${_packageName}_LIBS)
+ set(libs ${_packageName}_LIBS)
+ elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+ set(libs ${PACKAGE_NAME}_LIBS)
+ endif()
+
+ # Check the following variables:
+ # FOO_INCLUDE_DIRS
+ # Foo_INCLUDE_DIRS
+ # FOO_INCLUDES
+ # Foo_INCLUDES
+ # FOO_INCLUDE_DIR
+ # Foo_INCLUDE_DIR
+ set(includes)
+ if(DEFINED ${_packageName}_INCLUDE_DIRS)
+ set(includes ${_packageName}_INCLUDE_DIRS)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+ elseif(DEFINED ${_packageName}_INCLUDES)
+ set(includes ${_packageName}_INCLUDES)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+ set(includes ${PACKAGE_NAME}_INCLUDES)
+ elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+ set(includes ${_packageName}_INCLUDE_DIR)
+ elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+ set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+ endif()
+
+ # Check the following variables:
+ # FOO_DEFINITIONS
+ # Foo_DEFINITIONS
+ set(definitions)
+ if(DEFINED ${_packageName}_DEFINITIONS)
+ set(definitions ${_packageName}_DEFINITIONS)
+ elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+ set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+ endif()
+
+ set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+ set(PACKAGE_DEFINITIONS "${${definitions}}")
+ set(PACKAGE_LIBRARIES "${${libs}}")
+endif()
+'''
+
+file_3_data_preload_cmake = '''\
+if(MESON_PS_LOADED)
+ return()
+endif()
+
+set(MESON_PS_LOADED ON)
+
+# Dummy macros that have a special meaning in the meson code
+macro(meson_ps_execute_delayed_calls)
+endmacro()
+
+macro(meson_ps_reload_vars)
+endmacro()
+
+# Helper macro to inspect the current CMake state
+macro(meson_ps_inspect_vars)
+ set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}")
+ set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}")
+ meson_ps_execute_delayed_calls()
+endmacro()
+
+
+# Override some system functions with custom code and forward the args
+# to the original function
+macro(add_custom_command)
+ meson_ps_inspect_vars()
+ _add_custom_command(${ARGV})
+endmacro()
+
+macro(add_custom_target)
+ meson_ps_inspect_vars()
+ _add_custom_target(${ARGV})
+endmacro()
+
+macro(set_property)
+ meson_ps_inspect_vars()
+ _set_property(${ARGV})
+endmacro()
+
+function(set_source_files_properties)
+ set(FILES)
+ set(I 0)
+ set(PROPERTIES OFF)
+
+ while(I LESS ARGC)
+ if(NOT PROPERTIES)
+ if("${ARGV${I}}" STREQUAL "PROPERTIES")
+ set(PROPERTIES ON)
+ else()
+ list(APPEND FILES "${ARGV${I}}")
+ endif()
+
+ math(EXPR I "${I} + 1")
+ else()
+ set(ID_IDX ${I})
+ math(EXPR PROP_IDX "${ID_IDX} + 1")
+
+ set(ID "${ARGV${ID_IDX}}")
+ set(PROP "${ARGV${PROP_IDX}}")
+
+ set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}")
+ math(EXPR I "${I} + 2")
+ endif()
+ endwhile()
+endfunction()
+
+set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property)
+meson_ps_reload_vars()
+'''
+
+
+####################
+# END Data section #
+####################
+
+class DataFile:
+ def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+ self.path = path
+ self.sha256sum = sha256sum
+ self.data = data
+
+ def write_once(self, path: Path) -> None:
+ if not path.exists():
+ path.write_text(self.data)
+
+ def write_to_private(self, env: 'Environment') -> Path:
+ out_file = Path(env.scratch_dir) / 'data' / self.path.name
+ out_file.parent.mkdir(exist_ok=True)
+ self.write_once(out_file)
+ return out_file
+
+
+mesondata = {
+ 'dependencies/data/CMakeListsLLVM.txt': DataFile(
+ Path('dependencies/data/CMakeListsLLVM.txt'),
+ '412cec3315597041a978d018cdaca282dcd47693793540da88ae2f80d0cbd7cd',
+ file_0_data_CMakeListsLLVM_txt,
+ ),
+ 'dependencies/data/CMakePathInfo.txt': DataFile(
+ Path('dependencies/data/CMakePathInfo.txt'),
+ '90da8b443982d9c87139b7dc84228eb58cab4315764949637208f25e2bda7db2',
+ file_1_data_CMakePathInfo_txt,
+ ),
+ 'dependencies/data/CMakeLists.txt': DataFile(
+ Path('dependencies/data/CMakeLists.txt'),
+ '71a2d58381f912bbfb1c8709884d34d721f682edf2fca001e1f582f0bffd0da7',
+ file_2_data_CMakeLists_txt,
+ ),
+ 'cmake/data/preload.cmake': DataFile(
+ Path('cmake/data/preload.cmake'),
+ '064d047b18a5c919ad016b838bed50c5d40aebe9e53da0e70eff9d52a2c1ca1f',
+ file_3_data_preload_cmake,
+ ),
+}
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 73a9139..e0f48f5 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -41,8 +41,10 @@ _U = T.TypeVar('_U')
have_fcntl = False
have_msvcrt = False
+# TODO: this is such a hack, this really should be either in coredata or in the
+# interpreter
# {subproject: project_meson_version}
-project_meson_versions = {} # type: T.Dict[str, str]
+project_meson_versions = collections.defaultdict(str) # type: T.DefaultDict[str, str]
try:
import fcntl
@@ -395,6 +397,9 @@ class PerMachine(T.Generic[_T]):
unfreeze.host = None
return unfreeze
+ def __repr__(self) -> str:
+ return 'PerMachine({!r}, {!r})'.format(self.build, self.host)
+
class PerThreeMachine(PerMachine[_T]):
"""Like `PerMachine` but includes `target` too.
@@ -427,6 +432,9 @@ class PerThreeMachine(PerMachine[_T]):
def matches_build_machine(self, machine: MachineChoice) -> bool:
return self.build == self[machine]
+ def __repr__(self) -> str:
+ return 'PerThreeMachine({!r}, {!r}, {!r})'.format(self.build, self.host, self.target)
+
class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
"""Extends `PerMachine` with the ability to default from `None`s.
@@ -445,6 +453,9 @@ class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
freeze.host = freeze.build
return freeze
+ def __repr__(self) -> str:
+ return 'PerMachineDefaultable({!r}, {!r})'.format(self.build, self.host)
+
class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]):
"""Extends `PerThreeMachine` with the ability to default from `None`s.
@@ -466,6 +477,9 @@ class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Option
freeze.target = freeze.host
return freeze
+ def __repr__(self) -> str:
+ return 'PerThreeMachineDefaultable({!r}, {!r}, {!r})'.format(self.build, self.host, self.target)
+
class Language(Enum):
@@ -584,6 +598,8 @@ def is_netbsd() -> bool:
def is_freebsd() -> bool:
return platform.system().lower() == 'freebsd'
+def is_irix() -> bool:
+ return platform.system().startswith('irix')
def is_hurd() -> bool:
return platform.system().lower() == 'gnu'
@@ -619,20 +635,24 @@ def darwin_get_object_archs(objpath: str) -> T.List[str]:
return stdo.split()
-def detect_vcs(source_dir: str) -> T.Optional[T.Dict[str, str]]:
+def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
vcs_systems = [
dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
dict(name = 'mercurial', cmd = 'hg', repo_dir = '.hg', get_rev = 'hg id -i', rev_regex = '(.*)', dep = '.hg/dirstate'),
dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info', rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'),
dict(name = 'bazaar', cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno', rev_regex = '(.*)', dep = '.bzr'),
]
- # FIXME: this is much cleaner with pathlib.Path
- segs = source_dir.replace('\\', '/').split('/')
- for i in range(len(segs), -1, -1):
- curdir = '/'.join(segs[:i])
+ if isinstance(source_dir, str):
+ source_dir = Path(source_dir)
+
+ parent_paths_and_self = collections.deque(source_dir.parents)
+ # Prepend the source directory to the front so we can check it;
+ # source_dir.parents doesn't include source_dir
+ parent_paths_and_self.appendleft(source_dir)
+ for curdir in parent_paths_and_self:
for vcs in vcs_systems:
- if os.path.isdir(os.path.join(curdir, vcs['repo_dir'])) and shutil.which(vcs['cmd']):
- vcs['wc_dir'] = curdir
+ if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
+ vcs['wc_dir'] = str(curdir)
return vcs
return None
@@ -804,7 +824,7 @@ def default_libdir() -> str:
return 'lib/' + archpath
except Exception:
pass
- if is_freebsd():
+ if is_freebsd() or is_irix():
return 'lib'
if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
return 'lib64'
@@ -1610,6 +1630,16 @@ def relpath(path: str, start: str) -> str:
except (TypeError, ValueError):
return path
+def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
+ # Check whether a path is within the root directory root
+ try:
+ if resolve:
+ path.resolve().relative_to(root.resolve())
+ else:
+ path.relative_to(root)
+ except ValueError:
+ return False
+ return True
class LibType(Enum):
diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py
index c636053..191c735 100644
--- a/mesonbuild/minit.py
+++ b/mesonbuild/minit.py
@@ -56,8 +56,8 @@ class DEFAULT_TYPES(Enum):
INFO_MESSAGE = '''Sample project created. To build it run the
following commands:
-meson builddir
-ninja -C builddir
+meson setup builddir
+meson compile -C builddir
'''
@@ -151,7 +151,7 @@ def add_arguments(parser):
parser.add_argument("-n", "--name", help="project name. default: name of current directory")
parser.add_argument("-e", "--executable", help="executable name. default: project name")
parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
- parser.add_argument("-l", "--language", choices=LANG_SUPPORTED, help="project language. default: autodetected based on source files")
+ parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files")
parser.add_argument("-b", "--build", action='store_true', help="build after generation")
parser.add_argument("--builddir", default='build', help="directory for build")
parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.")
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index 9c64429..0be01fe 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -512,7 +512,7 @@ class Installer:
if file_copied:
self.did_install_something = True
try:
- depfixer.fix_rpath(outname, install_rpath, final_path,
+ depfixer.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
install_name_mappings, verbose=False)
except SystemExit as e:
if isinstance(e.code, int) and e.code == 0:
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index d5516d4..0049bbd 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -19,10 +19,11 @@ tests and so on. All output is in JSON for simple parsing.
Currently only works for the Ninja backend. Others use generated
project files and don't need this info."""
+import collections
import json
from . import build, coredata as cdata
from . import mesonlib
-from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
from . import mlog
from .backend import backends
from .mparser import BaseNode, FunctionNode, ArrayNode, ArgumentNode, StringNode
@@ -52,7 +53,7 @@ class IntroCommand:
def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
builddata: T.Optional[build.Build] = None,
backend: T.Optional[backends.Backend] = None,
- sourcedir: T.Optional[str] = None) -> T.Dict[str, IntroCommand]:
+ sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
if backend and builddata:
benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
testdata = backend.create_test_serialisation(builddata.get_tests())
@@ -61,17 +62,19 @@ def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
else:
benchmarkdata = testdata = installdata = None
- return {
- 'benchmarks': IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata)),
- 'buildoptions': IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source),
- 'buildsystem_files': IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter)),
- 'dependencies': IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source),
- 'scan_dependencies': IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source),
- 'installed': IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata)),
- 'projectinfo': IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source),
- 'targets': IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source),
- 'tests': IntroCommand('List all unit tests', func=lambda: list_tests(testdata)),
- }
+ # Enforce key order for argparse
+ return collections.OrderedDict([
+ ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+ ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+ ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+ ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+ ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)),
+ ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+ ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+ ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+ ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+ ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+ ])
def add_arguments(parser):
intro_types = get_meson_introspection_types()
@@ -79,7 +82,7 @@ def add_arguments(parser):
flag = '--' + key.replace('_', '-')
parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
- parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja',
+ parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
help='The backend to use for the --buildoptions introspection.')
parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
help='Print all available information.')
@@ -89,6 +92,11 @@ def add_arguments(parser):
help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
+ printer = AstJSONPrinter()
+ intr.ast.accept(printer)
+ return printer.result
+
def list_installed(installdata):
res = {}
if installdata is not None:
@@ -192,19 +200,7 @@ def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Di
def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
- dir_option_names = ['bindir',
- 'datadir',
- 'includedir',
- 'infodir',
- 'libdir',
- 'libexecdir',
- 'localedir',
- 'localstatedir',
- 'mandir',
- 'prefix',
- 'sbindir',
- 'sharedstatedir',
- 'sysconfdir']
+ dir_option_names = list(cdata.BUILTIN_DIR_OPTIONS)
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in coredata.builtins if k not in dir_option_names + test_option_names]
@@ -328,7 +324,7 @@ def get_test_list(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str],
to['suite'] = t.suite
to['is_parallel'] = t.is_parallel
to['priority'] = t.priority
- to['protocol'] = t.protocol
+ to['protocol'] = str(t.protocol)
result.append(to)
return result
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index 8cbd248..1e5a105 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -40,15 +40,32 @@ def _windows_ansi() -> bool:
# original behavior
return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
-def setup_console() -> bool:
+def colorize_console() -> bool:
+ _colorize_console = getattr(sys.stdout, 'colorize_console', None) # type: bool
+ if _colorize_console is not None:
+ return _colorize_console
+
try:
if platform.system().lower() == 'windows':
- return os.isatty(sys.stdout.fileno()) and _windows_ansi()
- return os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
+ _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
+ else:
+ _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
except Exception:
- return False
+ _colorize_console = False
+
+ sys.stdout.colorize_console = _colorize_console # type: ignore[attr-defined]
+ return _colorize_console
+
+def setup_console():
+ # on Windows, a subprocess might call SetConsoleMode() on the console
+ # connected to stdout and turn off ANSI escape processing. Call this after
+ # running a subprocess to ensure we turn it on again.
+ if platform.system().lower() == 'windows':
+ try:
+ delattr(sys.stdout, 'colorize_console')
+ except AttributeError:
+ pass
-colorize_console = setup_console()
log_dir = None # type: T.Optional[str]
log_file = None # type: T.Optional[T.TextIO]
log_fname = 'meson-log.txt' # type: str
@@ -204,7 +221,7 @@ def log(*args: T.Union[str, AnsiDecorator], is_error: bool = False,
if log_file is not None:
print(*arr, file=log_file, **kwargs)
log_file.flush()
- if colorize_console:
+ if colorize_console():
arr = process_markup(args, True)
if not log_errors_only or is_error:
force_print(*arr, **kwargs)
@@ -233,7 +250,7 @@ def get_error_location_string(fname: str, lineno: str) -> str:
return '{}:{}:'.format(fname, lineno)
def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator],
- once: bool = False, **kwargs: T.Any) -> None:
+ once: bool = False, fatal: bool = True, **kwargs: T.Any) -> None:
from .mesonlib import MesonException, relpath
# The typing requirements here are non-obvious. Lists are invariant,
@@ -266,7 +283,7 @@ def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator],
global log_warnings_counter
log_warnings_counter += 1
- if log_fatal_warnings:
+ if log_fatal_warnings and fatal:
raise MesonException("Fatal warnings enabled, aborting")
def error(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None:
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index dc86a1b..47be039 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -57,6 +57,17 @@ def get_include_args(include_dirs, prefix='-I'):
return dirs_str
+def is_module_library(fname):
+ '''
+ Check if the file is a library-like file generated by a module-specific
+ target, such as GirTarget or TypelibTarget
+ '''
+ if hasattr(fname, 'fname'):
+ fname = fname.fname
+ suffix = fname.split('.')[-1]
+ return suffix in ('gir', 'typelib')
+
+
class ModuleReturnValue:
def __init__(self, return_value, new_objects):
self.return_value = return_value
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index 0283d11..e6587e4 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -14,12 +14,28 @@
import re
import os, os.path, pathlib
import shutil
+import typing as T
from . import ExtensionModule, ModuleReturnValue
from .. import build, dependencies, mesonlib, mlog
-from ..interpreterbase import permittedKwargs, FeatureNew, stringArgs, InterpreterObject, ObjectHolder, noPosargs
+from ..cmake import SingleTargetOptions, TargetOptions, cmake_defines_to_args
from ..interpreter import ConfigurationDataHolder, InterpreterException, SubprojectHolder
+from ..interpreterbase import (
+ InterpreterObject,
+ ObjectHolder,
+
+ FeatureNew,
+ FeatureNewKwargs,
+ FeatureDeprecatedKwargs,
+
+ stringArgs,
+ permittedKwargs,
+ noPosargs,
+ noKwargs,
+
+ InvalidArguments,
+)
COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
@@ -82,42 +98,107 @@ class CMakeSubprojectHolder(InterpreterObject, ObjectHolder):
assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']]))
return res
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def get_variable(self, args, kwargs):
return self.held_object.get_variable_method(args, kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def dependency(self, args, kwargs):
info = self._args_to_info(args)
return self.get_variable([info['dep']], kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def include_directories(self, args, kwargs):
info = self._args_to_info(args)
return self.get_variable([info['inc']], kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def target(self, args, kwargs):
info = self._args_to_info(args)
return self.get_variable([info['tgt']], kwargs)
- @permittedKwargs({})
+ @noKwargs
+ @stringArgs
def target_type(self, args, kwargs):
info = self._args_to_info(args)
return info['func']
@noPosargs
- @permittedKwargs({})
+ @noKwargs
def target_list(self, args, kwargs):
return self.held_object.cm_interpreter.target_list()
@noPosargs
- @permittedKwargs({})
+ @noKwargs
@FeatureNew('CMakeSubproject.found()', '0.53.2')
def found_method(self, args, kwargs):
return self.held_object is not None
+class CMakeSubprojectOptions(InterpreterObject):
+ def __init__(self) -> None:
+ super().__init__()
+ self.cmake_options = [] # type: T.List[str]
+ self.target_options = TargetOptions()
+
+ self.methods.update(
+ {
+ 'add_cmake_defines': self.add_cmake_defines,
+ 'set_override_option': self.set_override_option,
+ 'set_install': self.set_install,
+ 'append_compile_args': self.append_compile_args,
+ 'append_link_args': self.append_link_args,
+ 'clear': self.clear,
+ }
+ )
+
+ def _get_opts(self, kwargs: dict) -> SingleTargetOptions:
+ if 'target' in kwargs:
+ return self.target_options[kwargs['target']]
+ return self.target_options.global_options
+
+ @noKwargs
+ def add_cmake_defines(self, args, kwargs) -> None:
+ self.cmake_options += cmake_defines_to_args(args)
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def set_override_option(self, args, kwargs) -> None:
+ if len(args) != 2:
+ raise InvalidArguments('set_override_option takes exactly 2 positional arguments')
+ self._get_opts(kwargs).set_opt(args[0], args[1])
+
+ @permittedKwargs({'target'})
+ def set_install(self, args, kwargs) -> None:
+ if len(args) != 1 or not isinstance(args[0], bool):
+ raise InvalidArguments('set_install takes exactly 1 boolean argument')
+ self._get_opts(kwargs).set_install(args[0])
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def append_compile_args(self, args, kwargs) -> None:
+ if len(args) < 2:
+ raise InvalidArguments('append_compile_args takes at least 2 positional arguments')
+ self._get_opts(kwargs).append_args(args[0], args[1:])
+
+ @stringArgs
+ @permittedKwargs({'target'})
+ def append_link_args(self, args, kwargs) -> None:
+ if not args:
+ raise InvalidArguments('append_link_args takes at least 1 positional argument')
+ self._get_opts(kwargs).append_link_args(args)
+
+ @noPosargs
+ @noKwargs
+ def clear(self, args, kwargs) -> None:
+ self.cmake_options.clear()
+ self.target_options = TargetOptions()
+
+
class CmakeModule(ExtensionModule):
cmake_detected = False
cmake_root = None
@@ -252,8 +333,7 @@ class CmakeModule(ExtensionModule):
(ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, '{}Config.cmake'.format(name)))
ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
- if 'install_dir' not in kwargs:
- install_dir = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name)
+ install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name))
if not isinstance(install_dir, str):
raise mesonlib.MesonException('"install_dir" must be a string.')
@@ -287,16 +367,27 @@ class CmakeModule(ExtensionModule):
return res
@FeatureNew('subproject', '0.51.0')
- @permittedKwargs({'cmake_options', 'required'})
+ @FeatureNewKwargs('subproject', '0.55.0', ['options'])
+ @FeatureDeprecatedKwargs('subproject', '0.55.0', ['cmake_options'])
+ @permittedKwargs({'cmake_options', 'required', 'options'})
@stringArgs
def subproject(self, interpreter, state, args, kwargs):
if len(args) != 1:
raise InterpreterException('Subproject takes exactly one argument')
+ if 'cmake_options' in kwargs and 'options' in kwargs:
+ raise InterpreterException('"options" cannot be used together with "cmake_options"')
dirname = args[0]
subp = interpreter.do_subproject(dirname, 'cmake', kwargs)
if not subp.held_object:
return subp
return CMakeSubprojectHolder(subp, dirname)
+ @FeatureNew('subproject_options', '0.55.0')
+ @noKwargs
+ @noPosargs
+ def subproject_options(self, state, args, kwargs) -> ModuleReturnValue:
+ opts = CMakeSubprojectOptions()
+ return ModuleReturnValue(opts, [])
+
def initialize(*args, **kwargs):
return CmakeModule(*args, **kwargs)
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 21360a2..de674db 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -32,8 +32,8 @@ from ..mesonlib import (
MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list,
join_args, unholder,
)
-from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
-from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs
+from ..dependencies import Dependency, PkgConfigDependency, InternalDependency, ExternalProgram
+from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs, FeatureDeprecatedKwargs
# gresource compilation is broken due to the way
# the resource compiler and Ninja clash about it
@@ -44,20 +44,6 @@ gresource_dep_needed_version = '>= 2.51.1'
native_glib_version = None
-@functools.lru_cache(maxsize=None)
-def gir_has_option(intr_obj, option):
- try:
- g_ir_scanner = intr_obj.find_program_impl('g-ir-scanner')
- # Handle overridden g-ir-scanner
- if isinstance(getattr(g_ir_scanner, "held_object", g_ir_scanner), interpreter.OverrideProgram):
- assert option in ['--extra-library', '--sources-top-dirs']
- return True
-
- opts = Popen_safe(g_ir_scanner.get_command() + ['--help'], stderr=subprocess.STDOUT)[1]
- return option in opts
- except (MesonException, FileNotFoundError, subprocess.CalledProcessError):
- return False
-
class GnomeModule(ExtensionModule):
gir_dep = None
@@ -303,7 +289,7 @@ class GnomeModule(ExtensionModule):
link_command.append('-L' + d)
if include_rpath:
link_command.append('-Wl,-rpath,' + d)
- if gir_has_option(self.interpreter, '--extra-library') and use_gir_args:
+ if use_gir_args and self._gir_has_option('--extra-library'):
link_command.append('--extra-library=' + lib.name)
else:
link_command.append('-l' + lib.name)
@@ -321,6 +307,10 @@ class GnomeModule(ExtensionModule):
deps = mesonlib.unholder(mesonlib.listify(deps))
for dep in deps:
+ if isinstance(dep, Dependency):
+ girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+ if girdir:
+ gi_includes.update([girdir])
if isinstance(dep, InternalDependency):
cflags.update(dep.get_compile_args())
cflags.update(get_include_args(dep.include_directories))
@@ -371,11 +361,6 @@ class GnomeModule(ExtensionModule):
external_ldflags_nodedup += [lib, next(ldflags)]
else:
external_ldflags.update([lib])
-
- if isinstance(dep, PkgConfigDependency):
- girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
- if girdir:
- gi_includes.update([girdir])
elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
cflags.update(get_include_args(dep.get_include_dirs()))
depends.append(dep)
@@ -383,7 +368,7 @@ class GnomeModule(ExtensionModule):
mlog.log('dependency {!r} not handled to build gir files'.format(dep))
continue
- if gir_has_option(self.interpreter, '--extra-library') and use_gir_args:
+ if use_gir_args and self._gir_has_option('--extra-library'):
def fix_ldflags(ldflags):
fixed_ldflags = OrderedSet()
for ldflag in ldflags:
@@ -417,15 +402,37 @@ class GnomeModule(ExtensionModule):
return girtarget
def _get_gir_dep(self, state):
- try:
- gir_dep = self.gir_dep or PkgConfigDependency('gobject-introspection-1.0',
- state.environment,
- {'native': True})
- pkgargs = gir_dep.get_compile_args()
- except Exception:
- raise MesonException('gobject-introspection dependency was not found, gir cannot be generated.')
-
- return gir_dep, pkgargs
+ if not self.gir_dep:
+ kwargs = {'native': True, 'required': True}
+ holder = self.interpreter.func_dependency(state.current_node, ['gobject-introspection-1.0'], kwargs)
+ self.gir_dep = holder.held_object
+ giscanner = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-scanner')
+ if giscanner is not None:
+ self.giscanner = ExternalProgram.from_entry('g-ir-scanner', giscanner)
+ elif self.gir_dep.type_name == 'pkgconfig':
+ self.giscanner = ExternalProgram('g_ir_scanner', self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {}))
+ else:
+ self.giscanner = self.interpreter.find_program_impl('g-ir-scanner')
+ gicompiler = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-compiler')
+ if gicompiler is not None:
+ self.gicompiler = ExternalProgram.from_entry('g-ir-compiler', gicompiler)
+ elif self.gir_dep.type_name == 'pkgconfig':
+ self.gicompiler = ExternalProgram('g_ir_compiler', self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {}))
+ else:
+ self.gicompiler = self.interpreter.find_program_impl('g-ir-compiler')
+ return self.gir_dep, self.giscanner, self.gicompiler
+
+ @functools.lru_cache(maxsize=None)
+ def _gir_has_option(self, option):
+ exe = self.giscanner
+ if hasattr(exe, 'held_object'):
+ exe = exe.held_object
+ if isinstance(exe, interpreter.OverrideProgram):
+ # Handle overridden g-ir-scanner
+ assert option in ['--extra-library', '--sources-top-dirs']
+ return True
+ p, o, e = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT)
+ return p.returncode == 0 and option in o
def _scan_header(self, kwargs):
ret = []
@@ -688,11 +695,10 @@ class GnomeModule(ExtensionModule):
source.get_subdir())
if subdir not in typelib_includes:
typelib_includes.append(subdir)
- elif isinstance(dep, PkgConfigDependency):
- girdir = dep.get_pkgconfig_variable("girdir", {'default': ''})
+ if isinstance(dep, Dependency):
+ girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
if girdir and girdir not in typelib_includes:
typelib_includes.append(girdir)
-
return typelib_includes
def _get_external_args_for_langs(self, state, langs):
@@ -715,11 +721,12 @@ class GnomeModule(ExtensionModule):
if f.startswith(('-L', '-l', '--extra-library')):
yield f
- @FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
+ @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings'])
+ @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default'])
@permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix',
'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories',
'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args',
- 'packages', 'header', 'build_by_default'})
+ 'packages', 'header', 'build_by_default', 'fatal_warnings'})
def generate_gir(self, state, args, kwargs):
if not args:
raise MesonException('generate_gir takes at least one argument')
@@ -731,42 +738,25 @@ class GnomeModule(ExtensionModule):
if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]):
raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
- self.gir_dep, pkgargs = self._get_gir_dep(state)
- # find_program is needed in the case g-i is built as subproject.
- # In that case it uses override_find_program so the gobject utilities
- # can be used from the build dir instead of from the system.
- # However, GObject-introspection provides the appropriate paths to
- # these utilities via pkg-config, so it would be best to use the
- # results from pkg-config when possible.
- gi_util_dirs_check = [state.environment.get_build_dir(), state.environment.get_source_dir()]
- giscanner = self.interpreter.find_program_impl('g-ir-scanner')
- if giscanner.found():
- giscanner_path = giscanner.get_command()[0]
- if not any(x in giscanner_path for x in gi_util_dirs_check):
- giscanner = self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {})
- else:
- giscanner = self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {})
+ gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
- gicompiler = self.interpreter.find_program_impl('g-ir-compiler')
- if gicompiler.found():
- gicompiler_path = gicompiler.get_command()[0]
- if not any(x in gicompiler_path for x in gi_util_dirs_check):
- gicompiler = self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {})
- else:
- gicompiler = self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {})
-
- ns = kwargs.pop('namespace')
- nsversion = kwargs.pop('nsversion')
+ ns = kwargs.get('namespace')
+ if not ns:
+ raise MesonException('Missing "namespace" keyword argument')
+ nsversion = kwargs.get('nsversion')
+ if not nsversion:
+ raise MesonException('Missing "nsversion" keyword argument')
libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True)
girfile = '%s-%s.gir' % (ns, nsversion)
srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
- depends = [] + girtargets
+ depends = gir_dep.sources + girtargets
gir_inc_dirs = []
langs_compilers = self._get_girtargets_langs_compilers(girtargets)
cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
deps = self._get_gir_targets_deps(girtargets)
deps += mesonlib.unholder(extract_as_list(kwargs, 'dependencies', pop=True))
+ deps += [gir_dep]
typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends)
# ldflags will be misinterpreted by gir scanner (showing
# spurious dependencies) but building GStreamer fails if they
@@ -781,7 +771,6 @@ class GnomeModule(ExtensionModule):
inc_dirs = self._scan_inc_dirs(kwargs)
scan_command = [giscanner]
- scan_command += pkgargs
scan_command += ['--no-libtool']
scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
scan_command += ['--warn-all']
@@ -806,10 +795,18 @@ class GnomeModule(ExtensionModule):
scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers])
scan_command += list(external_ldflags)
- if gir_has_option(self.interpreter, '--sources-top-dirs'):
+ if self._gir_has_option('--sources-top-dirs'):
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)]
scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)]
+ if '--warn-error' in scan_command:
+ mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55')
+ fatal_warnings = kwargs.get('fatal_warnings', False)
+ if not isinstance(fatal_warnings, bool):
+ raise MesonException('fatal_warnings keyword argument must be a boolean')
+ if fatal_warnings:
+ scan_command.append('--warn-error')
+
scan_target = self._make_gir_target(state, girfile, scan_command, depends, kwargs)
typelib_output = '%s-%s.typelib' % (ns, nsversion)
@@ -846,6 +843,8 @@ class GnomeModule(ExtensionModule):
return ModuleReturnValue(target_g, [target_g])
@permittedKwargs({'sources', 'media', 'symlink_media', 'languages'})
+ @FeatureDeprecatedKwargs('gnome.yelp', '0.43.0', ['languages'],
+ 'Use a LINGUAS file in the source directory instead')
def yelp(self, state, args, kwargs):
if len(args) < 1:
raise MesonException('Yelp requires a project id')
@@ -860,11 +859,6 @@ class GnomeModule(ExtensionModule):
source_str = '@@'.join(sources)
langs = mesonlib.stringlistify(kwargs.pop('languages', []))
- if langs:
- mlog.deprecation('''The "languages" argument of gnome.yelp() is deprecated.
-Use a LINGUAS file in the sources directory instead.
-This will become a hard error in the future.''')
-
media = mesonlib.stringlistify(kwargs.pop('media', []))
symlinks = kwargs.pop('symlink_media', True)
diff --git a/mesonbuild/modules/unstable_kconfig.py b/mesonbuild/modules/keyval.py
index 6685710..3da2992 100644
--- a/mesonbuild/modules/unstable_kconfig.py
+++ b/mesonbuild/modules/keyval.py
@@ -21,9 +21,9 @@ from ..interpreter import InvalidCode
import os
-class KconfigModule(ExtensionModule):
+class KeyvalModule(ExtensionModule):
- @FeatureNew('Kconfig Module', '0.51.0')
+ @FeatureNew('Keyval Module', '0.55.0')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.snippets.add('load')
@@ -56,9 +56,7 @@ class KconfigModule(ExtensionModule):
s = sources[0]
is_built = False
if isinstance(s, mesonlib.File):
- if s.is_built:
- FeatureNew('kconfig.load() of built files', '0.52.0').use(state.subproject)
- is_built = True
+ is_built = is_built or s.is_built
s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir)
else:
s = os.path.join(interpreter.environment.source_dir, s)
@@ -70,4 +68,4 @@ class KconfigModule(ExtensionModule):
def initialize(*args, **kwargs):
- return KconfigModule(*args, **kwargs)
+ return KeyvalModule(*args, **kwargs)
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index ac51e36..f81ee2f 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -36,6 +36,7 @@ class DependenciesHelper:
self.priv_reqs = []
self.cflags = []
self.version_reqs = {}
+ self.link_whole_targets = []
def add_pub_libs(self, libs):
libs, reqs, cflags = self._process_libs(libs, True)
@@ -76,7 +77,7 @@ class DependenciesHelper:
processed_reqs = []
for obj in mesonlib.unholder(mesonlib.listify(reqs)):
if not isinstance(obj, str):
- FeatureNew('pkgconfig.generate requirement from non-string object', '0.46.0').use(self.state.subproject)
+ FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject)
if hasattr(obj, 'generated_pc'):
self._check_generated_pc_deprecation(obj)
processed_reqs.append(obj.generated_pc)
@@ -130,10 +131,7 @@ class DependenciesHelper:
if obj.found():
processed_libs += obj.get_link_args()
processed_cflags += obj.get_compile_args()
- if public:
- self.add_pub_libs(obj.libraries)
- else:
- self.add_priv_libs(obj.libraries)
+ self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public)
elif isinstance(obj, dependencies.Dependency):
if obj.found():
processed_libs += obj.get_link_args()
@@ -148,12 +146,13 @@ class DependenciesHelper:
processed_libs.append(obj)
elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
processed_libs.append(obj)
- if isinstance(obj, build.StaticLibrary) and public:
- self.add_pub_libs(obj.get_dependencies(for_pkgconfig=True))
- self.add_pub_libs(obj.get_external_deps())
- else:
- self.add_priv_libs(obj.get_dependencies(for_pkgconfig=True))
- self.add_priv_libs(obj.get_external_deps())
+ # If there is a static library in `Libs:` all its deps must be
+ # public too, otherwise the generated pc file will never be
+ # usable without --static.
+ self._add_lib_dependencies(obj.link_targets,
+ obj.link_whole_targets,
+ obj.external_deps,
+ isinstance(obj, build.StaticLibrary) and public)
elif isinstance(obj, str):
processed_libs.append(obj)
else:
@@ -161,6 +160,31 @@ class DependenciesHelper:
return processed_libs, processed_reqs, processed_cflags
+ def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public):
+ add_libs = self.add_pub_libs if public else self.add_priv_libs
+ # Recursively add all linked libraries
+ for t in link_targets:
+ # Internal libraries (uninstalled static library) will be promoted
+ # to link_whole, treat them as such here.
+ if t.is_internal():
+ self._add_link_whole(t, public)
+ else:
+ add_libs([t])
+ for t in link_whole_targets:
+ self._add_link_whole(t, public)
+ # And finally its external dependencies
+ add_libs(external_deps)
+
+ def _add_link_whole(self, t, public):
+ # Don't include static libraries that we link_whole. But we still need to
+ # include their dependencies: a static library we link_whole
+ # could itself link to a shared library or an installed static library.
+ # Keep track of link_whole_targets so we can remove them from our
+ # lists in case a library is link_with and link_whole at the same time.
+ # See remove_dups() below.
+ self.link_whole_targets.append(t)
+ self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)
+
def add_version_reqs(self, name, version_reqs):
if version_reqs:
if name not in self.version_reqs:
@@ -196,6 +220,32 @@ class DependenciesHelper:
return ', '.join(result)
def remove_dups(self):
+ # Set of ids that have already been handled and should not be added any more
+ exclude = set()
+
+ # We can't just check if 'x' is excluded because we could have copies of
+ # the same SharedLibrary object for example.
+ def _ids(x):
+ if hasattr(x, 'generated_pc'):
+ yield x.generated_pc
+ if isinstance(x, build.Target):
+ yield x.get_id()
+ yield x
+
+ # Exclude 'x' in all its forms and return if it was already excluded
+ def _add_exclude(x):
+ was_excluded = False
+ for i in _ids(x):
+ if i in exclude:
+ was_excluded = True
+ else:
+ exclude.add(i)
+ return was_excluded
+
+ # link_whole targets are already part of other targets, exclude them all.
+ for t in self.link_whole_targets:
+ _add_exclude(t)
+
def _fn(xs, libs=False):
# Remove duplicates whilst preserving original order
result = []
@@ -206,19 +256,21 @@ class DependenciesHelper:
cannot_dedup = libs and isinstance(x, str) and \
not x.startswith(('-l', '-L')) and \
x not in known_flags
- if x not in result or cannot_dedup:
- result.append(x)
+ if not cannot_dedup and _add_exclude(x):
+ continue
+ result.append(x)
return result
- self.pub_libs = _fn(self.pub_libs, True)
+
+ # Handle lists in priority order: public items can be excluded from
+ # private and Requires can excluded from Libs.
self.pub_reqs = _fn(self.pub_reqs)
- self.priv_libs = _fn(self.priv_libs, True)
+ self.pub_libs = _fn(self.pub_libs, True)
self.priv_reqs = _fn(self.priv_reqs)
+ self.priv_libs = _fn(self.priv_libs, True)
+ # Reset exclude list just in case some values can be both cflags and libs.
+ exclude = set()
self.cflags = _fn(self.cflags)
- # Remove from private libs/reqs if they are in public already
- self.priv_libs = [i for i in self.priv_libs if i not in self.pub_libs]
- self.priv_reqs = [i for i in self.priv_reqs if i not in self.pub_reqs]
-
class PkgConfigModule(ExtensionModule):
def _get_lname(self, l, msg, pcfile):
@@ -267,7 +319,6 @@ class PkgConfigModule(ExtensionModule):
def generate_pkgconfig_file(self, state, deps, subdirs, name, description,
url, version, pcfile, conflicts, variables,
uninstalled=False, dataonly=False):
- deps.remove_dups()
coredata = state.environment.get_coredata()
if uninstalled:
outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled')
@@ -372,18 +423,18 @@ class PkgConfigModule(ExtensionModule):
if len(deps.priv_libs) > 0:
ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
- def generate_compiler_flags():
- cflags_buf = []
- for f in deps.cflags:
- cflags_buf.append(self._escape(f))
- return cflags_buf
-
- cflags = generate_compiler_flags()
- ofile.write('Cflags:')
+ cflags = []
if uninstalled:
- ofile.write(' '.join(generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)))
- elif not dataonly and cflags:
- ofile.write('{}\n'.format(' '.join(cflags)))
+ cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs)
+ else:
+ for d in subdirs:
+ if d == '.':
+ cflags.append('-I${includedir}')
+ else:
+ cflags.append(self._escape(PurePath('-I${includedir}') / d))
+ cflags += [self._escape(f) for f in deps.cflags]
+ if cflags and not dataonly:
+ ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
@FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables'])
@FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags'])
@@ -394,8 +445,6 @@ class PkgConfigModule(ExtensionModule):
'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions',
'dataonly', 'conflicts'})
def generate(self, state, args, kwargs):
- if 'variables' in kwargs:
- FeatureNew('custom pkgconfig variables', '0.41.0').use(state.subproject)
default_version = state.project_version['version']
default_install_dir = None
default_description = None
@@ -403,9 +452,9 @@ class PkgConfigModule(ExtensionModule):
mainlib = None
default_subdirs = ['.']
if not args and 'version' not in kwargs:
- FeatureNew('pkgconfig.generate implicit version keyword', '0.46.0').use(state.subproject)
+ FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
elif len(args) == 1:
- FeatureNew('pkgconfig.generate optional positional argument', '0.46.0').use(state.subproject)
+ FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
mainlib = getattr(args[0], 'held_object', args[0])
if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)):
raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object')
@@ -450,11 +499,6 @@ class PkgConfigModule(ExtensionModule):
libraries = [mainlib] + libraries
deps = DependenciesHelper(state, filebase)
- for d in subdirs:
- if d == '.':
- deps.add_cflags(['-I${includedir}'])
- else:
- deps.add_cflags(self._escape(PurePath('-I${includedir}') / d))
deps.add_pub_libs(libraries)
deps.add_priv_libs(kwargs.get('libraries_private', []))
deps.add_pub_reqs(kwargs.get('requires', []))
@@ -467,6 +511,8 @@ class PkgConfigModule(ExtensionModule):
if compiler:
deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
+ deps.remove_dups()
+
def parse_variable_list(stringlist):
reserved = ['prefix', 'libdir', 'includedir']
variables = []
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index a5c58a2..ceabd76 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -285,7 +285,7 @@ print (json.dumps ({
class PythonInstallation(ExternalProgramHolder):
def __init__(self, interpreter, python, info):
- ExternalProgramHolder.__init__(self, python)
+ ExternalProgramHolder.__init__(self, python, interpreter.subproject)
self.interpreter = interpreter
self.subproject = self.interpreter.subproject
prefix = self.interpreter.environment.coredata.get_builtin_option('prefix')
@@ -361,7 +361,7 @@ class PythonInstallation(ExternalProgramHolder):
@permittedKwargs(['pure', 'subdir'])
def install_sources_method(self, args, kwargs):
- pure = kwargs.pop('pure', False)
+ pure = kwargs.pop('pure', True)
if not isinstance(pure, bool):
raise InvalidArguments('"pure" argument must be a boolean.')
@@ -514,7 +514,7 @@ class PythonModule(ExtensionModule):
if disabled:
mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')')
- return ExternalProgramHolder(NonExistingExternalProgram())
+ return ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
if not name_or_path:
python = ExternalProgram('python3', mesonlib.python_command, silent=True)
@@ -561,11 +561,11 @@ class PythonModule(ExtensionModule):
if not python.found():
if required:
raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
- res = ExternalProgramHolder(NonExistingExternalProgram())
+ res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
elif missing_modules:
if required:
raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
- res = ExternalProgramHolder(NonExistingExternalProgram())
+ res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
else:
# Sanity check, we expect to have something that at least quacks in tune
try:
@@ -583,7 +583,7 @@ class PythonModule(ExtensionModule):
if isinstance(info, dict) and 'version' in info and self._check_version(name_or_path, info['version']):
res = PythonInstallation(interpreter, python, info)
else:
- res = ExternalProgramHolder(NonExistingExternalProgram())
+ res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject)
if required:
raise mesonlib.MesonException('{} is not a valid python or it is missing setuptools'.format(python))
diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py
index c7da530..c810df6 100644
--- a/mesonbuild/modules/qt.py
+++ b/mesonbuild/modules/qt.py
@@ -15,8 +15,8 @@
import os
from .. import mlog
from .. import build
-from ..mesonlib import MesonException, Popen_safe, extract_as_list, File, unholder
-from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency
+from ..mesonlib import MesonException, extract_as_list, File, unholder, version_compare
+from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency, NonExistingExternalProgram
import xml.etree.ElementTree as ET
from . import ModuleReturnValue, get_include_args, ExtensionModule
from ..interpreterbase import noPosargs, permittedKwargs, FeatureNew, FeatureNewKwargs
@@ -30,49 +30,34 @@ _QT_DEPS_LUT = {
class QtBaseModule(ExtensionModule):
tools_detected = False
+ rcc_supports_depfiles = False
def __init__(self, interpreter, qt_version=5):
ExtensionModule.__init__(self, interpreter)
self.snippets.add('has_tools')
self.qt_version = qt_version
- def _detect_tools(self, env, method):
+ def _detect_tools(self, env, method, required=True):
if self.tools_detected:
return
+ self.tools_detected = True
mlog.log('Detecting Qt{version} tools'.format(version=self.qt_version))
- # FIXME: We currently require QtX to exist while importing the module.
- # We should make it gracefully degrade and not create any targets if
- # the import is marked as 'optional' (not implemented yet)
- kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method}
+ kwargs = {'required': required, 'modules': 'Core', 'method': method}
qt = _QT_DEPS_LUT[self.qt_version](env, kwargs)
- # Get all tools and then make sure that they are the right version
- self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter)
- # Moc, uic and rcc write their version strings to stderr.
- # Moc and rcc return a non-zero result when doing so.
- # What kind of an idiot thought that was a good idea?
- for compiler, compiler_name in ((self.moc, "Moc"), (self.uic, "Uic"), (self.rcc, "Rcc"), (self.lrelease, "lrelease")):
- if compiler.found():
- # Workaround since there is no easy way to know which tool/version support which flag
- for flag in ['-v', '-version']:
- p, stdout, stderr = Popen_safe(compiler.get_command() + [flag])[0:3]
- if p.returncode == 0:
- break
- stdout = stdout.strip()
- stderr = stderr.strip()
- if 'Qt {}'.format(self.qt_version) in stderr:
- compiler_ver = stderr
- elif 'version {}.'.format(self.qt_version) in stderr:
- compiler_ver = stderr
- elif ' {}.'.format(self.qt_version) in stdout:
- compiler_ver = stdout
- else:
- raise MesonException('{name} preprocessor is not for Qt {version}. Output:\n{stdo}\n{stderr}'.format(
- name=compiler_name, version=self.qt_version, stdo=stdout, stderr=stderr))
- mlog.log(' {}:'.format(compiler_name.lower()), mlog.green('YES'), '({path}, {version})'.format(
- path=compiler.get_path(), version=compiler_ver.split()[-1]))
+ if qt.found():
+ # Get all tools and then make sure that they are the right version
+ self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter)
+ if version_compare(qt.version, '>=5.14.0'):
+ self.rcc_supports_depfiles = True
else:
- mlog.log(' {}:'.format(compiler_name.lower()), mlog.red('NO'))
- self.tools_detected = True
+ mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:',
+ mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False)
+ else:
+ suffix = '-qt{}'.format(self.qt_version)
+ self.moc = NonExistingExternalProgram(name='moc' + suffix)
+ self.uic = NonExistingExternalProgram(name='uic' + suffix)
+ self.rcc = NonExistingExternalProgram(name='rcc' + suffix)
+ self.lrelease = NonExistingExternalProgram(name='lrelease' + suffix)
def parse_qrc(self, state, rcc_file):
if type(rcc_file) is str:
@@ -128,7 +113,7 @@ class QtBaseModule(ExtensionModule):
if disabled:
mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
return False
- self._detect_tools(state.environment, method)
+ self._detect_tools(state.environment, method, required=False)
for tool in (self.moc, self.uic, self.rcc, self.lrelease):
if not tool.found():
if required:
@@ -177,6 +162,9 @@ class QtBaseModule(ExtensionModule):
'output': name + '.cpp',
'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'],
'depend_files': qrc_deps}
+ if self.rcc_supports_depfiles:
+ rcc_kwargs['depfile'] = name + '.d'
+ rcc_kwargs['command'] += ['--depfile', '@DEPFILE@']
res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs)
sources.append(res_target)
if ui_files:
diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py
index 112e3e4..e85a150 100644
--- a/mesonbuild/modules/qt4.py
+++ b/mesonbuild/modules/qt4.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from .. import mlog
from .qt import QtBaseModule
@@ -23,6 +22,4 @@ class Qt4Module(QtBaseModule):
def initialize(*args, **kwargs):
- mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:',
- mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
return Qt4Module(*args, **kwargs)
diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py
index 96a7964..873c2db 100644
--- a/mesonbuild/modules/qt5.py
+++ b/mesonbuild/modules/qt5.py
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from .. import mlog
from .qt import QtBaseModule
@@ -23,6 +22,4 @@ class Qt5Module(QtBaseModule):
def initialize(*args, **kwargs):
- mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:',
- mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'))
return Qt5Module(*args, **kwargs)
diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py
index f939782..c154ab2 100644
--- a/mesonbuild/modules/windows.py
+++ b/mesonbuild/modules/windows.py
@@ -107,7 +107,7 @@ class WindowsModule(ExtensionModule):
'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
for arg in extra_args:
if ' ' in arg:
- mlog.warning(m.format(arg))
+ mlog.warning(m.format(arg), fatal=False)
res_targets = []
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index 2cffc47..b9e381e 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -426,8 +426,8 @@ class IfNode(BaseNode):
class IfClauseNode(BaseNode):
def __init__(self, linenode: BaseNode):
super().__init__(linenode.lineno, linenode.colno, linenode.filename)
- self.ifs = [] # type: T.List[IfNode]
- self.elseblock = EmptyNode(linenode.lineno, linenode.colno, linenode.filename) # type: T.Union[EmptyNode, CodeBlockNode]
+ self.ifs = [] # type: T.List[IfNode]
+ self.elseblock = None # type: T.Union[EmptyNode, CodeBlockNode]
class UMinusNode(BaseNode):
def __init__(self, current_location: Token, value: BaseNode):
@@ -747,9 +747,7 @@ class Parser:
block = self.codeblock()
clause.ifs.append(IfNode(clause, condition, block))
self.elseifblock(clause)
- elseblock = self.elseblock()
- if elseblock:
- clause.elseblock = elseblock
+ clause.elseblock = self.elseblock()
return clause
def elseifblock(self, clause) -> None:
@@ -759,11 +757,11 @@ class Parser:
b = self.codeblock()
clause.ifs.append(IfNode(s, s, b))
- def elseblock(self) -> T.Optional[CodeBlockNode]:
+ def elseblock(self) -> T.Union[CodeBlockNode, EmptyNode]:
if self.accept('else'):
self.expect('eol')
return self.codeblock()
- return None
+ return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
def line(self) -> BaseNode:
block_start = self.current
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index 77d8377..2521511 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -86,7 +86,7 @@ class MesonApp:
# will cause a crash
for l in os.listdir(self.build_dir):
l = os.path.join(self.build_dir, l)
- if os.path.isdir(l):
+ if os.path.isdir(l) and not os.path.islink(l):
mesonlib.windows_proof_rmtree(l)
else:
mesonlib.windows_proof_rm(l)
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 23643c5..0d81692 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -33,14 +33,17 @@ import signal
import subprocess
import sys
import tempfile
+import textwrap
import time
import typing as T
+import xml.etree.ElementTree as et
from . import build
from . import environment
from . import mlog
from .dependencies import ExternalProgram
-from .mesonlib import MesonException, get_wine_shortpath, split_args
+from .mesonlib import MesonException, get_wine_shortpath, split_args, join_args
+from .backend.backends import TestProtocol
if T.TYPE_CHECKING:
from .backend.backends import TestSerialisation
@@ -92,6 +95,9 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args,
help='wrapper to run tests with (e.g. Valgrind)')
parser.add_argument('-C', default='.', dest='wd',
+ # https://github.com/python/typeshed/issues/3107
+ # https://github.com/python/mypy/issues/7177
+ type=os.path.abspath, # type: ignore
help='directory to cd into before running')
parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
help='Only run tests belonging to the given suite.')
@@ -305,7 +311,7 @@ class TAPParser:
yield self.Version(version=version)
continue
- if len(line) == 0:
+ if not line:
continue
yield self.Error('unexpected input at line {}'.format((lineno,)))
@@ -320,13 +326,144 @@ class TAPParser:
yield self.Error('Too many tests run (expected {}, got {})'.format(plan.count, num_tests))
+
+class JunitBuilder:
+
+ """Builder for Junit test results.
+
+ Junit is impossible to stream out, it requires attributes counting the
+ total number of tests, failures, skips, and errors in the root element
+ and in each test suite. As such, we use a builder class to track each
+ test case, and calculate all metadata before writing it out.
+
+ For tests with multiple results (like from a TAP test), we record the
+ test as a suite with the project_name.test_name. This allows us to track
+ each result separately. For tests with only one result (such as exit-code
+ tests) we record each one into a suite with the name project_name. The use
+ of the project_name allows us to sort subproject tests separately from
+ the root project.
+ """
+
+ def __init__(self, filename: str) -> None:
+ self.filename = filename
+ self.root = et.Element(
+ 'testsuites', tests='0', errors='0', failures='0')
+ self.suites = {} # type: T.Dict[str, et.Element]
+
+ def log(self, name: str, test: 'TestRun') -> None:
+ """Log a single test case."""
+ if test.junit is not None:
+ for suite in test.junit.findall('.//testsuite'):
+ # Assume that we don't need to merge anything here...
+ suite.attrib['name'] = '{}.{}.{}'.format(test.project, name, suite.attrib['name'])
+
+ # GTest can inject invalid attributes
+ for case in suite.findall('.//testcase[@result]'):
+ del case.attrib['result']
+ for case in suite.findall('.//testcase[@timestamp]'):
+ del case.attrib['timestamp']
+ self.root.append(suite)
+ return
+
+ # In this case we have a test binary with multiple results.
+ # We want to record this so that each result is recorded
+ # separately
+ if test.results:
+ suitename = '{}.{}'.format(test.project, name)
+ assert suitename not in self.suites, 'duplicate suite'
+
+ suite = self.suites[suitename] = et.Element(
+ 'testsuite',
+ name=suitename,
+ tests=str(len(test.results)),
+ errors=str(sum(1 for r in test.results if r is TestResult.ERROR)),
+ failures=str(sum(1 for r in test.results if r in
+ {TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})),
+ skipped=str(sum(1 for r in test.results if r is TestResult.SKIP)),
+ )
+
+ for i, result in enumerate(test.results):
+ # Both name and classname are required. Set them both to the
+ # number of the test in a TAP test, as TAP doesn't give names.
+ testcase = et.SubElement(suite, 'testcase', name=str(i), classname=str(i))
+ if result is TestResult.SKIP:
+ et.SubElement(testcase, 'skipped')
+ elif result is TestResult.ERROR:
+ et.SubElement(testcase, 'error')
+ elif result is TestResult.FAIL:
+ et.SubElement(testcase, 'failure')
+ elif result is TestResult.UNEXPECTEDPASS:
+ fail = et.SubElement(testcase, 'failure')
+                        fail.text = 'Test unexpectedly passed.'
+ elif result is TestResult.TIMEOUT:
+ fail = et.SubElement(testcase, 'failure')
+ fail.text = 'Test did not finish before configured timeout.'
+ if test.stdo:
+ out = et.SubElement(suite, 'system-out')
+ out.text = test.stdo.rstrip()
+ if test.stde:
+ err = et.SubElement(suite, 'system-err')
+ err.text = test.stde.rstrip()
+ else:
+ if test.project not in self.suites:
+ suite = self.suites[test.project] = et.Element(
+ 'testsuite', name=test.project, tests='1', errors='0',
+ failures='0', skipped='0')
+ else:
+ suite = self.suites[test.project]
+ suite.attrib['tests'] = str(int(suite.attrib['tests']) + 1)
+
+ testcase = et.SubElement(suite, 'testcase', name=name, classname=name)
+ if test.res is TestResult.SKIP:
+ et.SubElement(testcase, 'skipped')
+ suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
+ elif test.res is TestResult.ERROR:
+ et.SubElement(testcase, 'error')
+ suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
+ elif test.res is TestResult.FAIL:
+ et.SubElement(testcase, 'failure')
+ suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+ if test.stdo:
+ out = et.SubElement(testcase, 'system-out')
+ out.text = test.stdo.rstrip()
+ if test.stde:
+ err = et.SubElement(testcase, 'system-err')
+ err.text = test.stde.rstrip()
+
+ def write(self) -> None:
+ """Calculate total test counts and write out the xml result."""
+ for suite in self.suites.values():
+ self.root.append(suite)
+            # Skipped is really not allowed in the "testsuites" element
+ for attr in ['tests', 'errors', 'failures']:
+ self.root.attrib[attr] = str(int(self.root.attrib[attr]) + int(suite.attrib[attr]))
+
+ tree = et.ElementTree(self.root)
+ with open(self.filename, 'wb') as f:
+ tree.write(f, encoding='utf-8', xml_declaration=True)
+
+
class TestRun:
@classmethod
+ def make_gtest(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
+ returncode: int, starttime: float, duration: float,
+ stdo: T.Optional[str], stde: T.Optional[str],
+ cmd: T.Optional[T.List[str]]) -> 'TestRun':
+ filename = '{}.xml'.format(test.name)
+ if test.workdir:
+ filename = os.path.join(test.workdir, filename)
+ tree = et.parse(filename)
+
+ return cls.make_exitcode(
+ test, test_env, returncode, starttime, duration, stdo, stde, cmd,
+ junit=tree)
+
+ @classmethod
def make_exitcode(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
returncode: int, starttime: float, duration: float,
stdo: T.Optional[str], stde: T.Optional[str],
- cmd: T.Optional[T.List[str]]) -> 'TestRun':
+ cmd: T.Optional[T.List[str]], **kwargs) -> 'TestRun':
if returncode == GNU_SKIP_RETURNCODE:
res = TestResult.SKIP
elif returncode == GNU_ERROR_RETURNCODE:
@@ -335,30 +472,29 @@ class TestRun:
res = TestResult.EXPECTEDFAIL if bool(returncode) else TestResult.UNEXPECTEDPASS
else:
res = TestResult.FAIL if bool(returncode) else TestResult.OK
- return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd)
+ return cls(test, test_env, res, [], returncode, starttime, duration, stdo, stde, cmd, **kwargs)
@classmethod
def make_tap(cls, test: 'TestSerialisation', test_env: T.Dict[str, str],
returncode: int, starttime: float, duration: float,
stdo: str, stde: str,
cmd: T.Optional[T.List[str]]) -> 'TestRun':
- res = None
- num_tests = 0
+ res = None # type: T.Optional[TestResult]
+ results = [] # type: T.List[TestResult]
failed = False
- num_skipped = 0
for i in TAPParser(io.StringIO(stdo)).parse():
if isinstance(i, TAPParser.Bailout):
- res = TestResult.ERROR
+ results.append(TestResult.ERROR)
+ failed = True
elif isinstance(i, TAPParser.Test):
- if i.result == TestResult.SKIP:
- num_skipped += 1
- elif i.result in (TestResult.FAIL, TestResult.UNEXPECTEDPASS):
+ results.append(i.result)
+ if i.result not in {TestResult.OK, TestResult.EXPECTEDFAIL}:
failed = True
- num_tests += 1
elif isinstance(i, TAPParser.Error):
- res = TestResult.ERROR
+ results.append(TestResult.ERROR)
stde += '\nTAP parsing error: ' + i.message
+ failed = True
if returncode != 0:
res = TestResult.ERROR
@@ -366,7 +502,7 @@ class TestRun:
if res is None:
# Now determine the overall result of the test based on the outcome of the subcases
- if num_skipped == num_tests:
+ if all(t is TestResult.SKIP for t in results):
# This includes the case where num_tests is zero
res = TestResult.SKIP
elif test.should_fail:
@@ -374,14 +510,16 @@ class TestRun:
else:
res = TestResult.FAIL if failed else TestResult.OK
- return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd)
+ return cls(test, test_env, res, results, returncode, starttime, duration, stdo, stde, cmd)
def __init__(self, test: 'TestSerialisation', test_env: T.Dict[str, str],
- res: TestResult, returncode: int, starttime: float, duration: float,
+ res: TestResult, results: T.List[TestResult], returncode:
+ int, starttime: float, duration: float,
stdo: T.Optional[str], stde: T.Optional[str],
- cmd: T.Optional[T.List[str]]):
+ cmd: T.Optional[T.List[str]], *, junit: T.Optional[et.ElementTree] = None):
assert isinstance(res, TestResult)
self.res = res
+ self.results = results # May be an empty list
self.returncode = returncode
self.starttime = starttime
self.duration = duration
@@ -390,6 +528,8 @@ class TestRun:
self.cmd = cmd
self.env = test_env
self.should_fail = test.should_fail
+ self.project = test.project_name
+ self.junit = junit
def get_log(self) -> str:
res = '--- command ---\n'
@@ -436,9 +576,7 @@ def write_json_log(jsonlogfile: T.TextIO, test_name: str, result: TestRun) -> No
jsonlogfile.write(json.dumps(jresult) + '\n')
def run_with_mono(fname: str) -> bool:
- if fname.endswith('.exe') and not (is_windows() or is_cygwin()):
- return True
- return False
+ return fname.endswith('.exe') and not (is_windows() or is_cygwin())
def load_benchmarks(build_dir: str) -> T.List['TestSerialisation']:
datafile = Path(build_dir) / 'meson-private' / 'meson_benchmark_setup.dat'
@@ -471,26 +609,26 @@ class SingleTestRunner:
return ['java', '-jar'] + self.test.fname
elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]):
return ['mono'] + self.test.fname
- else:
- if self.test.is_cross_built and self.test.needs_exe_wrapper:
- if self.test.exe_runner is None:
- # Can not run test on cross compiled executable
- # because there is no execute wrapper.
- return None
- else:
- if not self.test.exe_runner.found():
- msg = 'The exe_wrapper defined in the cross file {!r} was not ' \
- 'found. Please check the command and/or add it to PATH.'
- raise TestException(msg.format(self.test.exe_runner.name))
- return self.test.exe_runner.get_command() + self.test.fname
- else:
- return self.test.fname
+ elif self.test.cmd_is_built and self.test.needs_exe_wrapper:
+ if self.test.exe_runner is None:
+ # Can not run test on cross compiled executable
+ # because there is no execute wrapper.
+ return None
+ elif self.test.cmd_is_built:
+ # If the command is not built (ie, its a python script),
+ # then we don't check for the exe-wrapper
+ if not self.test.exe_runner.found():
+ msg = ('The exe_wrapper defined in the cross file {!r} was not '
+ 'found. Please check the command and/or add it to PATH.')
+ raise TestException(msg.format(self.test.exe_runner.name))
+ return self.test.exe_runner.get_command() + self.test.fname
+ return self.test.fname
def run(self) -> TestRun:
cmd = self._get_cmd()
if cmd is None:
skip_stdout = 'Not run because can not execute cross compiled binaries.'
- return TestRun(self.test, self.test_env, TestResult.SKIP, GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None)
+ return TestRun(self.test, self.test_env, TestResult.SKIP, [], GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None)
else:
wrap = TestHarness.get_wrapper(self.options)
if self.options.gdb:
@@ -500,7 +638,7 @@ class SingleTestRunner:
def _run_cmd(self, cmd: T.List[str]) -> TestRun:
starttime = time.time()
- if len(self.test.extra_paths) > 0:
+ if self.test.extra_paths:
self.env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + self.env['PATH']
winecmd = []
for c in cmd:
@@ -525,7 +663,7 @@ class SingleTestRunner:
if not self.options.verbose:
stdout = tempfile.TemporaryFile("wb+")
stderr = tempfile.TemporaryFile("wb+") if self.options.split else stdout
- if self.test.protocol == 'tap' and stderr is stdout:
+ if self.test.protocol is TestProtocol.TAP and stderr is stdout:
stdout = tempfile.TemporaryFile("wb+")
# Let gdb handle ^C instead of us
@@ -545,7 +683,14 @@ class SingleTestRunner:
# errors avoid not being able to use the terminal.
os.setsid() # type: ignore
- p = subprocess.Popen(cmd,
+ extra_cmd = [] # type: T.List[str]
+ if self.test.protocol is TestProtocol.GTEST:
+ gtestname = '{}.xml'.format(self.test.name)
+ if self.test.workdir:
+                gtestname = os.path.join(self.test.workdir, gtestname)
+ extra_cmd.append('--gtest_output=xml:{}'.format(gtestname))
+
+ p = subprocess.Popen(cmd + extra_cmd,
stdout=stdout,
stderr=stderr,
env=self.env,
@@ -633,10 +778,12 @@ class SingleTestRunner:
stdo = ""
stde = additional_error
if timed_out:
- return TestRun(self.test, self.test_env, TestResult.TIMEOUT, p.returncode, starttime, duration, stdo, stde, cmd)
+ return TestRun(self.test, self.test_env, TestResult.TIMEOUT, [], p.returncode, starttime, duration, stdo, stde, cmd)
else:
- if self.test.protocol == 'exitcode':
+ if self.test.protocol is TestProtocol.EXITCODE:
return TestRun.make_exitcode(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd)
+ elif self.test.protocol is TestProtocol.GTEST:
+ return TestRun.make_gtest(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd)
else:
if self.options.verbose:
print(stdo, end='')
@@ -647,6 +794,7 @@ class TestHarness:
def __init__(self, options: argparse.Namespace):
self.options = options
self.collected_logs = [] # type: T.List[str]
+ self.collected_failures = [] # type: T.List[str]
self.fail_count = 0
self.expectedfail_count = 0
self.unexpectedpass_count = 0
@@ -655,9 +803,11 @@ class TestHarness:
self.timeout_count = 0
self.is_run = False
self.tests = None
+ self.results = [] # type: T.List[TestRun]
self.logfilename = None # type: T.Optional[str]
self.logfile = None # type: T.Optional[T.TextIO]
self.jsonlogfile = None # type: T.Optional[T.TextIO]
+ self.junit = None # type: T.Optional[JunitBuilder]
if self.options.benchmark:
self.tests = load_benchmarks(options.wd)
else:
@@ -678,12 +828,11 @@ class TestHarness:
self.close_logfiles()
def close_logfiles(self) -> None:
- if self.logfile:
- self.logfile.close()
- self.logfile = None
- if self.jsonlogfile:
- self.jsonlogfile.close()
- self.jsonlogfile = None
+ for f in ['logfile', 'jsonlogfile']:
+ lfile = getattr(self, f)
+ if lfile:
+ lfile.close()
+ setattr(self, f, None)
def merge_suite_options(self, options: argparse.Namespace, test: 'TestSerialisation') -> T.Dict[str, str]:
if ':' in options.setup:
@@ -719,6 +868,9 @@ class TestHarness:
env = os.environ.copy()
test_env = test.env.get_env(env)
env.update(test_env)
+ if (test.is_cross_built and test.needs_exe_wrapper and
+ test.exe_runner and test.exe_runner.found()):
+ env['MESON_EXE_WRAPPER'] = join_args(test.exe_runner.get_command())
return SingleTestRunner(test, test_env, env, options)
def process_test_result(self, result: TestRun) -> None:
@@ -757,6 +909,7 @@ class TestHarness:
if not self.options.quiet or result.res not in ok_statuses:
if result.res not in ok_statuses and mlog.colorize_console:
if result.res in bad_statuses:
+ self.collected_failures.append(result_str)
decorator = mlog.red
elif result.res is TestResult.SKIP:
decorator = mlog.yellow
@@ -773,23 +926,31 @@ class TestHarness:
self.logfile.write(result_str)
if self.jsonlogfile:
write_json_log(self.jsonlogfile, name, result)
+ if self.junit:
+ self.junit.log(name, result)
def print_summary(self) -> None:
- msg = '''
-Ok: {:<4}
-Expected Fail: {:<4}
-Fail: {:<4}
-Unexpected Pass: {:<4}
-Skipped: {:<4}
-Timeout: {:<4}
-'''.format(self.success_count, self.expectedfail_count, self.fail_count,
+ # Prepend a list of failures
+ msg = '' if len(self.collected_failures) < 1 else "\nSummary of Failures:\n\n"
+ msg += '\n'.join(self.collected_failures)
+ msg += textwrap.dedent('''
+
+ Ok: {:<4}
+ Expected Fail: {:<4}
+ Fail: {:<4}
+ Unexpected Pass: {:<4}
+ Skipped: {:<4}
+ Timeout: {:<4}
+ ''').format(self.success_count, self.expectedfail_count, self.fail_count,
self.unexpectedpass_count, self.skip_count, self.timeout_count)
print(msg)
if self.logfile:
self.logfile.write(msg)
+ if self.junit:
+ self.junit.write()
def print_collected_logs(self) -> None:
- if len(self.collected_logs) > 0:
+ if self.collected_logs:
if len(self.collected_logs) > 10:
print('\nThe output from 10 first failed tests:\n')
else:
@@ -871,7 +1032,7 @@ Timeout: {:<4}
print('No tests defined.')
return []
- if len(self.options.include_suites) or len(self.options.exclude_suites):
+ if self.options.include_suites or self.options.exclude_suites:
tests = []
for tst in self.tests:
if self.test_suitable(tst):
@@ -903,6 +1064,9 @@ Timeout: {:<4}
if namebase:
logfile_base += '-' + namebase.replace(' ', '_')
+
+ self.junit = JunitBuilder(logfile_base + '.junit.xml')
+
self.logfilename = logfile_base + '.txt'
self.jsonlogfilename = logfile_base + '.json'
@@ -930,7 +1094,7 @@ Timeout: {:<4}
if len(self.suites) > 1 and test.suite:
rv = TestHarness.split_suite_string(test.suite[0])[0]
s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
- if len(s):
+ if s:
rv += ":"
return rv + s + " / " + test.name
else:
@@ -970,8 +1134,8 @@ Timeout: {:<4}
break
self.drain_futures(futures)
- self.print_summary()
self.print_collected_logs()
+ self.print_summary()
if self.logfilename:
print('Full log written to {}'.format(self.logfilename))
@@ -1046,7 +1210,6 @@ def run(options: argparse.Namespace) -> int:
if not exe.found():
print('Could not find requested program: {!r}'.format(check_bin))
return 1
- options.wd = os.path.abspath(options.wd)
if not options.list and not options.no_rebuild:
if not rebuild_all(options.wd):
diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py
index c3cf1d8..56f8984 100644
--- a/mesonbuild/optinterpreter.py
+++ b/mesonbuild/optinterpreter.py
@@ -16,10 +16,11 @@ import re
import functools
import typing as T
-from . import mparser
+from . import compilers
from . import coredata
from . import mesonlib
-from . import compilers
+from . import mparser
+from .interpreterbase import FeatureNew
forbidden_option_names = set(coredata.builtin_options.keys())
forbidden_prefixes = [lang.get_lower_case_name() + '_' for lang in compilers.all_languages] + ['b_', 'backend_']
@@ -170,12 +171,21 @@ class OptionInterpreter:
res = self.reduce_single(arg.value)
if not isinstance(res, (int, float)):
raise OptionException('Token after "-" is not a number')
+ FeatureNew.single_use('negative numbers in meson_options.txt', '0.54.1', self.subproject)
return -res
elif isinstance(arg, mparser.NotNode):
res = self.reduce_single(arg.value)
if not isinstance(res, bool):
raise OptionException('Token after "not" is not a a boolean')
+ FeatureNew.single_use('negation ("not") in meson_options.txt', '0.54.1', self.subproject)
return not res
+ elif isinstance(arg, mparser.ArithmeticNode):
+ l = self.reduce_single(arg.left)
+ r = self.reduce_single(arg.right)
+ if not (arg.operation == 'add' and isinstance(l, str) and isinstance(r, str)):
+ raise OptionException('Only string concatenation with the "+" operator is allowed')
+ FeatureNew.single_use('string concatenation in meson_options.txt', '0.55.0', self.subproject)
+ return l + r
else:
raise OptionException('Arguments may only be string, int, bool, or array of those.')
@@ -200,11 +210,8 @@ class OptionInterpreter:
raise OptionException('Only calls to option() are allowed in option files.')
(posargs, kwargs) = self.reduce_arguments(node.args)
- # FIXME: Cannot use FeatureNew while parsing options because we parse
- # it before reading options in project(). See func_project() in
- # interpreter.py
- #if 'yield' in kwargs:
- # FeatureNew('option yield', '0.45.0').use(self.subproject)
+ if 'yield' in kwargs:
+ FeatureNew.single_use('option yield', '0.45.0', self.subproject)
if 'type' not in kwargs:
raise OptionException('Option call missing mandatory "type" keyword argument')
diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py
new file mode 100755
index 0000000..5c0b31f
--- /dev/null
+++ b/mesonbuild/scripts/cmake_run_ctgt.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+import argparse
+import subprocess
+import shutil
+import os
+import sys
+from pathlib import Path
+
+def run(argsv):
+ commands = [[]]
+ SEPARATOR = ';;;'
+
+ # Generate CMD parameters
+ parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+ parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
+ parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+ parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+    parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" separated list of commands'.format(SEPARATOR))
+
+ # Parse
+ args = parser.parse_args(argsv)
+
+ dummy_target = None
+ if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+ dummy_target = args.outputs[0]
+ elif len(args.outputs) != len(args.original_outputs):
+ print('Length of output list and original output list differ')
+ sys.exit(1)
+
+ for i in args.commands:
+ if i == SEPARATOR:
+ commands += [[]]
+ continue
+
+        i = i.replace('"', '') # Remove leftover quotes
+ commands[-1] += [i]
+
+ # Execute
+ for i in commands:
+ # Skip empty lists
+ if not i:
+ continue
+
+ cmd = []
+ stdout = None
+ stderr = None
+ capture_file = ''
+
+ for j in i:
+ if j in ['>', '>>']:
+ stdout = subprocess.PIPE
+ continue
+ elif j in ['&>', '&>>']:
+ stdout = subprocess.PIPE
+ stderr = subprocess.STDOUT
+ continue
+
+ if stdout is not None or stderr is not None:
+ capture_file += j
+ else:
+ cmd += [j]
+
+ try:
+ os.makedirs(args.directory, exist_ok=True)
+
+ res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True)
+ if capture_file:
+ out_file = Path(args.directory) / capture_file
+ out_file.write_bytes(res.stdout)
+ except subprocess.CalledProcessError:
+ sys.exit(1)
+
+ if dummy_target:
+ with open(dummy_target, 'a'):
+ os.utime(dummy_target, None)
+ sys.exit(0)
+
+ # Copy outputs
+ zipped_outputs = zip(args.outputs, args.original_outputs)
+ for expected, generated in zipped_outputs:
+ do_copy = False
+ if not os.path.exists(expected):
+ if not os.path.exists(generated):
+ print('Unable to find generated file. This can cause the build to fail:')
+ print(generated)
+ do_copy = False
+ else:
+ do_copy = True
+ elif os.path.exists(generated):
+ if os.path.getmtime(generated) > os.path.getmtime(expected):
+ do_copy = True
+
+ if do_copy:
+ if os.path.exists(expected):
+ os.remove(expected)
+ shutil.copyfile(generated, expected)
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py
index 4bd41fe..7231972 100644
--- a/mesonbuild/scripts/coverage.py
+++ b/mesonbuild/scripts/coverage.py
@@ -12,15 +12,15 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from mesonbuild import environment
+from mesonbuild import environment, mesonlib
-import argparse, sys, os, subprocess, pathlib
+import argparse, sys, os, subprocess, pathlib, stat
-def coverage(outputs, source_root, subproject_root, build_root, log_dir):
+def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llvm_cov):
outfiles = []
exitcode = 0
- (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools()
+ (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
# gcovr >= 4.2 requires a different syntax for out of source builds
if gcovr_new_rootdir:
@@ -28,13 +28,18 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
else:
gcovr_base_cmd = [gcovr_exe, '-r', build_root]
+ if use_llvm_cov:
+ gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+ else:
+ gcov_exe_args = []
+
if not outputs or 'xml' in outputs:
if gcovr_exe:
subprocess.check_call(gcovr_base_cmd +
['-x',
'-e', subproject_root,
- '-o', os.path.join(log_dir, 'coverage.xml'),
- ])
+ '-o', os.path.join(log_dir, 'coverage.xml')
+ ] + gcov_exe_args)
outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
elif outputs:
print('gcovr >= 3.3 needed to generate Xml coverage report')
@@ -44,8 +49,8 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
if gcovr_exe:
subprocess.check_call(gcovr_base_cmd +
['-e', subproject_root,
- '-o', os.path.join(log_dir, 'coverage.txt'),
- ])
+ '-o', os.path.join(log_dir, 'coverage.txt')
+ ] + gcov_exe_args)
outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
elif outputs:
print('gcovr >= 3.3 needed to generate text coverage report')
@@ -58,19 +63,34 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir):
initial_tracefile = covinfo + '.initial'
run_tracefile = covinfo + '.run'
raw_tracefile = covinfo + '.raw'
+ if use_llvm_cov:
+ # Create a shim to allow using llvm-cov as a gcov tool.
+ if mesonlib.is_windows():
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+ with open(llvm_cov_shim_path, 'w') as llvm_cov_bat:
+ llvm_cov_bat.write('@"{}" gcov %*'.format(llvm_cov_exe))
+ else:
+ llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+ with open(llvm_cov_shim_path, 'w') as llvm_cov_sh:
+ llvm_cov_sh.write('#!/usr/bin/env sh\nexec "{}" gcov $@'.format(llvm_cov_exe))
+ os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+ gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+ else:
+ gcov_tool_args = []
subprocess.check_call([lcov_exe,
'--directory', build_root,
'--capture',
'--initial',
'--output-file',
- initial_tracefile])
+ initial_tracefile] +
+ gcov_tool_args)
subprocess.check_call([lcov_exe,
'--directory', build_root,
'--capture',
'--output-file', run_tracefile,
'--no-checksum',
- '--rc', 'lcov_branch_coverage=1',
- ])
+ '--rc', 'lcov_branch_coverage=1'] +
+ gcov_tool_args)
# Join initial and test results.
subprocess.check_call([lcov_exe,
'-a', initial_tracefile,
@@ -137,6 +157,8 @@ def run(args):
const='xml', help='generate Xml report')
parser.add_argument('--html', dest='outputs', action='append_const',
const='html', help='generate Html report')
+ parser.add_argument('--use_llvm_cov', action='store_true',
+ help='use llvm-cov')
parser.add_argument('source_root')
parser.add_argument('subproject_root')
parser.add_argument('build_root')
@@ -144,7 +166,7 @@ def run(args):
options = parser.parse_args(args)
return coverage(options.outputs, options.source_root,
options.subproject_root, options.build_root,
- options.log_dir)
+ options.log_dir, options.use_llvm_cov)
if __name__ == '__main__':
sys.exit(run(sys.argv[1:]))
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
index 5ba3a97..a3a3eff 100644
--- a/mesonbuild/scripts/depfixer.py
+++ b/mesonbuild/scripts/depfixer.py
@@ -290,13 +290,13 @@ class Elf(DataSizes):
self.bf.seek(offset)
self.bf.write(newname)
- def fix_rpath(self, new_rpath):
+ def fix_rpath(self, rpath_dirs_to_remove, new_rpath):
# The path to search for can be either rpath or runpath.
# Fix both of them to be sure.
- self.fix_rpathtype_entry(new_rpath, DT_RPATH)
- self.fix_rpathtype_entry(new_rpath, DT_RUNPATH)
+ self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH)
+ self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
- def fix_rpathtype_entry(self, new_rpath, entrynum):
+ def fix_rpathtype_entry(self, rpath_dirs_to_remove, new_rpath, entrynum):
if isinstance(new_rpath, str):
new_rpath = new_rpath.encode('utf8')
rp_off = self.get_entry_offset(entrynum)
@@ -305,7 +305,23 @@ class Elf(DataSizes):
print('File does not have rpath. It should be a fully static executable.')
return
self.bf.seek(rp_off)
+
old_rpath = self.read_str()
+ new_rpaths = []
+ if new_rpath:
+ new_rpaths.append(new_rpath)
+ if old_rpath:
+ # Filter out build-only rpath entries
+ # added by get_link_dep_subdirs() or
+ # specified by user with build_rpath.
+ for dir in old_rpath.split(b':'):
+ if not (dir in rpath_dirs_to_remove or
+ dir == (b'X' * len(dir))):
+ new_rpaths.append(dir)
+
+ # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc.
+ new_rpath = b':'.join(new_rpaths)
+
if len(old_rpath) < len(new_rpath):
sys.exit("New rpath must not be longer than the old one.")
# The linker does read-only string deduplication. If there is a
@@ -343,13 +359,13 @@ class Elf(DataSizes):
entry.write(self.bf)
return None
-def fix_elf(fname, new_rpath, verbose=True):
+def fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose=True):
with Elf(fname, verbose) as e:
if new_rpath is None:
e.print_rpath()
e.print_runpath()
else:
- e.fix_rpath(new_rpath)
+ e.fix_rpath(rpath_dirs_to_remove, new_rpath)
def get_darwin_rpaths_to_remove(fname):
out = subprocess.check_output(['otool', '-l', fname],
@@ -430,7 +446,7 @@ def fix_jar(fname):
f.truncate()
subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF'])
-def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True):
+def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_mappings, verbose=True):
global INSTALL_NAME_TOOL
# Static libraries, import libraries, debug information, headers, etc
# never have rpaths
@@ -441,7 +457,7 @@ def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True)
if fname.endswith('.jar'):
fix_jar(fname)
return
- fix_elf(fname, new_rpath, verbose)
+ fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
return
except SystemExit as e:
if isinstance(e.code, int) and e.code == 0:
diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py
index 6b174a6..812604a 100644
--- a/mesonbuild/scripts/gtkdochelper.py
+++ b/mesonbuild/scripts/gtkdochelper.py
@@ -16,7 +16,7 @@ import sys, os
import subprocess
import shutil
import argparse
-from ..mesonlib import MesonException, Popen_safe, is_windows, split_args
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
from . import destdir_join
parser = argparse.ArgumentParser()
@@ -55,16 +55,18 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None):
library_paths = []
env = dict(os.environ)
- if is_windows():
+ if is_windows() or is_cygwin():
if 'PATH' in env:
library_paths.extend(env['PATH'].split(os.pathsep))
env['PATH'] = os.pathsep.join(library_paths)
- cmd.insert(0, sys.executable)
else:
if 'LD_LIBRARY_PATH' in env:
library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
+ if is_windows():
+ cmd.insert(0, sys.executable)
+
# Put stderr into stdout since we want to print it out anyway.
# This preserves the order of messages.
p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py
index d393f93..f4084be 100644
--- a/mesonbuild/scripts/symbolextractor.py
+++ b/mesonbuild/scripts/symbolextractor.py
@@ -113,11 +113,23 @@ def gnu_syms(libfilename: str, outfilename: str):
continue
line_split = line.split()
entry = line_split[0:2]
- if len(line_split) >= 4:
+ # Store the size of symbols pointing to data objects so we relink
+ # when those change, which is needed because of copy relocations
+ # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
+ if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4:
entry += [line_split[3]]
result += [' '.join(entry)]
write_if_changed('\n'.join(result) + '\n', outfilename)
+def solaris_syms(libfilename: str, outfilename: str):
+ # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+ origpath = os.environ['PATH']
+ try:
+ os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+ gnu_syms(libfilename, outfilename)
+ finally:
+ os.environ['PATH'] = origpath
+
def osx_syms(libfilename: str, outfilename: str):
# Get the name of the library
output = call_tool('otool', ['-l', libfilename])
@@ -139,6 +151,23 @@ def osx_syms(libfilename: str, outfilename: str):
result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
write_if_changed('\n'.join(result) + '\n', outfilename)
+def openbsd_syms(libfilename: str, outfilename: str):
+ # Get the name of the library
+ output = call_tool('readelf', ['-d', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ result = [x for x in output.split('\n') if 'SONAME' in x]
+ assert(len(result) <= 1)
+ # Get a list of all symbols exported
+ output = call_tool('nm', ['-D', '-P', '-g', libfilename])
+ if not output:
+ dummy_syms(outfilename)
+ return
+ # U = undefined (cope with the lack of --defined-only option)
+ result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
+ write_if_changed('\n'.join(result) + '\n', outfilename)
+
def cygwin_syms(impfilename: str, outfilename: str):
# Get the name of the library
output = call_tool('dlltool', ['-I', impfilename])
@@ -234,6 +263,8 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
gnu_syms(libfilename, outfilename)
elif mesonlib.is_osx():
osx_syms(libfilename, outfilename)
+ elif mesonlib.is_openbsd():
+ openbsd_syms(libfilename, outfilename)
elif mesonlib.is_windows():
if os.path.isfile(impfilename):
windows_syms(impfilename, outfilename)
@@ -248,6 +279,8 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host
# No import library. Not sure how the DLL is being used, so just
# rebuild everything that links to it every time.
dummy_syms(outfilename)
+ elif mesonlib.is_sunos():
+ solaris_syms(libfilename, outfilename)
else:
if not os.path.exists(TOOL_WARNING_FILE):
mlog.warning('Symbol extracting has not been implemented for this '
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 1715cd3..aba220e 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -27,7 +27,9 @@ import sys
import configparser
import typing as T
+from pathlib import Path
from . import WrapMode
+from .. import coredata
from ..mesonlib import git, GIT, ProgressBar, MesonException
if T.TYPE_CHECKING:
@@ -59,7 +61,10 @@ def quiet_git(cmd: T.List[str], workingdir: str) -> T.Tuple[bool, str]:
def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
if not GIT:
return False
- return git(cmd, workingdir, check=check).returncode == 0
+ try:
+ return git(cmd, workingdir, check=check).returncode == 0
+ except subprocess.CalledProcessError:
+ raise WrapException('Git command failed')
def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
""" raises WrapException if not whitelisted subdomain """
@@ -102,13 +107,31 @@ class WrapNotFoundException(WrapException):
class PackageDefinition:
def __init__(self, fname: str):
self.filename = fname
+ self.type = None
+ self.values = {} # type: T.Dict[str, str]
+ self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
+ self.provided_programs = [] # type: T.List[str]
self.basename = os.path.basename(fname)
- self.name = self.basename[:-5]
+ self.name = self.basename
+ if self.name.endswith('.wrap'):
+ self.name = self.name[:-5]
+ self.provided_deps[self.name] = None
+ if fname.endswith('.wrap'):
+ self.parse_wrap(fname)
+ self.directory = self.values.get('directory', self.name)
+ if os.path.dirname(self.directory):
+ raise WrapException('Directory key must be a name and not a path')
+
+ def parse_wrap(self, fname: str):
try:
self.config = configparser.ConfigParser(interpolation=None)
self.config.read(fname)
except configparser.Error:
raise WrapException('Failed to parse {}'.format(self.basename))
+ self.parse_wrap_section()
+ self.parse_provide_section()
+
+ def parse_wrap_section(self):
if len(self.config.sections()) < 1:
raise WrapException('Missing sections in {}'.format(self.basename))
self.wrap_section = self.config.sections()[0]
@@ -118,6 +141,27 @@ class PackageDefinition:
self.type = self.wrap_section[5:]
self.values = dict(self.config[self.wrap_section])
+ def parse_provide_section(self):
+ if self.config.has_section('provide'):
+ for k, v in self.config['provide'].items():
+ if k == 'dependency_names':
+ # A comma separated list of dependency names that does not
+ # need a variable name
+ names = {n.strip(): None for n in v.split(',')}
+ self.provided_deps.update(names)
+ continue
+ if k == 'program_names':
+ # A comma separated list of program names
+ names = [n.strip() for n in v.split(',')]
+ self.provided_programs += names
+ continue
+ if not v:
+ m = ('Empty dependency variable name for {!r} in {}. '
+ 'If the subproject uses meson.override_dependency() '
+ 'it can be added in the "dependency_names" special key.')
+ raise WrapException(m.format(k, self.basename))
+ self.provided_deps[k] = v
+
def get(self, key: str) -> str:
try:
return self.values[key]
@@ -125,36 +169,87 @@ class PackageDefinition:
m = 'Missing key {!r} in {}'
raise WrapException(m.format(key, self.basename))
- def has_patch(self) -> bool:
- return 'patch_url' in self.values
-
-def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition:
+def get_directory(subdir_root: str, packagename: str) -> str:
fname = os.path.join(subdir_root, packagename + '.wrap')
if os.path.isfile(fname):
- return PackageDefinition(fname)
- return None
-
-def get_directory(subdir_root: str, packagename: str):
- directory = packagename
- # We always have to load the wrap file, if it exists, because it could
- # override the default directory name.
- wrap = load_wrap(subdir_root, packagename)
- if wrap and 'directory' in wrap.values:
- directory = wrap.get('directory')
- if os.path.dirname(directory):
- raise WrapException('Directory key must be a name and not a path')
- return wrap, directory
+ wrap = PackageDefinition(fname)
+ return wrap.directory
+ return packagename
class Resolver:
def __init__(self, subdir_root: str, wrap_mode=WrapMode.default):
self.wrap_mode = wrap_mode
self.subdir_root = subdir_root
self.cachedir = os.path.join(self.subdir_root, 'packagecache')
-
- def resolve(self, packagename: str, method: str) -> str:
+ self.filesdir = os.path.join(self.subdir_root, 'packagefiles')
+ self.wraps = {} # type: T.Dict[str, PackageDefinition]
+ self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
+ self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+ self.load_wraps()
+
+ def load_wraps(self):
+ if not os.path.isdir(self.subdir_root):
+ return
+ root, dirs, files = next(os.walk(self.subdir_root))
+ for i in files:
+ if not i.endswith('.wrap'):
+ continue
+ fname = os.path.join(self.subdir_root, i)
+ wrap = PackageDefinition(fname)
+ self.wraps[wrap.name] = wrap
+ if wrap.directory in dirs:
+ dirs.remove(wrap.directory)
+ # Add dummy package definition for directories not associated with a wrap file.
+ for i in dirs:
+ if i in ['packagecache', 'packagefiles']:
+ continue
+ fname = os.path.join(self.subdir_root, i)
+ wrap = PackageDefinition(fname)
+ self.wraps[wrap.name] = wrap
+
+ for wrap in self.wraps.values():
+ for k in wrap.provided_deps.keys():
+ if k in self.provided_deps:
+ prev_wrap = self.provided_deps[k]
+ m = 'Multiple wrap files provide {!r} dependency: {} and {}'
+ raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+ self.provided_deps[k] = wrap
+ for k in wrap.provided_programs:
+ if k in self.provided_programs:
+ prev_wrap = self.provided_programs[k]
+ m = 'Multiple wrap files provide {!r} program: {} and {}'
+ raise WrapException(m.format(k, wrap.basename, prev_wrap.basename))
+ self.provided_programs[k] = wrap
+
+ def find_dep_provider(self, packagename: str):
+ # Return value is in the same format as fallback kwarg:
+ # ['subproject_name', 'variable_name'], or 'subproject_name'.
+ wrap = self.provided_deps.get(packagename)
+ if wrap:
+ dep_var = wrap.provided_deps.get(packagename)
+ if dep_var:
+ return [wrap.name, dep_var]
+ return wrap.name
+ return None
+
+ def find_program_provider(self, names: T.List[str]):
+ for name in names:
+ wrap = self.provided_programs.get(name)
+ if wrap:
+ return wrap.name
+ return None
+
+ def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str:
+ self.current_subproject = current_subproject
self.packagename = packagename
- self.wrap, self.directory = get_directory(self.subdir_root, self.packagename)
+ self.directory = packagename
+ self.wrap = self.wraps.get(packagename)
+ if not self.wrap:
+ m = 'Subproject directory not found and {}.wrap file not found'
+ raise WrapNotFoundException(m.format(self.packagename))
+ self.directory = self.wrap.directory
self.dirname = os.path.join(self.subdir_root, self.directory)
+
meson_file = os.path.join(self.dirname, 'meson.build')
cmake_file = os.path.join(self.dirname, 'CMakeLists.txt')
@@ -174,11 +269,6 @@ class Resolver:
if not os.path.isdir(self.dirname):
raise WrapException('Path already exists but is not a directory')
else:
- # A wrap file is required to download
- if not self.wrap:
- m = 'Subproject directory not found and {}.wrap file not found'
- raise WrapNotFoundException(m.format(self.packagename))
-
if self.wrap.type == 'file':
self.get_file()
else:
@@ -191,6 +281,7 @@ class Resolver:
self.get_svn()
else:
raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type))
+ self.apply_patch()
# A meson.build or CMakeLists.txt file is required in the directory
if method == 'meson' and not os.path.exists(meson_file):
@@ -250,8 +341,6 @@ class Resolver:
os.mkdir(self.dirname)
extract_dir = self.dirname
shutil.unpack_archive(path, extract_dir)
- if self.wrap.has_patch():
- self.apply_patch()
def get_git(self) -> None:
if not GIT:
@@ -330,7 +419,8 @@ class Resolver:
raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring))
else:
try:
- resp = urllib.request.urlopen(urlstring, timeout=REQ_TIMEOUT)
+ req = urllib.request.Request(urlstring, headers={'User-Agent': 'mesonbuild/{}'.format(coredata.version)})
+ resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
except urllib.error.URLError as e:
mlog.log(str(e))
raise WrapException('could not get {} is the internet available?'.format(urlstring))
@@ -363,7 +453,9 @@ class Resolver:
hashvalue = h.hexdigest()
return hashvalue, tmpfile.name
- def check_hash(self, what: str, path: str) -> None:
+ def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
+ if what + '_hash' not in self.wrap.values and not hash_required:
+ return
expected = self.wrap.get(what + '_hash')
h = hashlib.sha256()
with open(path, 'rb') as f:
@@ -393,26 +485,49 @@ class Resolver:
def get_file_internal(self, what: str) -> str:
filename = self.wrap.get(what + '_filename')
- cache_path = os.path.join(self.cachedir, filename)
+ if what + '_url' in self.wrap.values:
+ cache_path = os.path.join(self.cachedir, filename)
+
+ if os.path.exists(cache_path):
+ self.check_hash(what, cache_path)
+ mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+ return cache_path
- if os.path.exists(cache_path):
- self.check_hash(what, cache_path)
- mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+ if not os.path.isdir(self.cachedir):
+ os.mkdir(self.cachedir)
+ self.download(what, cache_path)
return cache_path
+ else:
+ from ..interpreterbase import FeatureNew
+ FeatureNew('Local wrap patch files without {}_url'.format(what), '0.55.0').use(self.current_subproject)
+ path = Path(self.filesdir) / filename
+
+ if not path.exists():
+ raise WrapException('File "{}" does not exist'.format(path))
+ self.check_hash(what, path.as_posix(), hash_required=False)
- if not os.path.isdir(self.cachedir):
- os.mkdir(self.cachedir)
- self.download(what, cache_path)
- return cache_path
+ return path.as_posix()
def apply_patch(self) -> None:
- path = self.get_file_internal('patch')
- try:
- shutil.unpack_archive(path, self.subdir_root)
- except Exception:
- with tempfile.TemporaryDirectory() as workdir:
- shutil.unpack_archive(path, workdir)
- self.copy_tree(workdir, self.subdir_root)
+ if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values:
+ m = 'Wrap file {!r} must not have both "patch_filename" and "patch_directory"'
+ raise WrapException(m.format(self.wrap.basename))
+ if 'patch_filename' in self.wrap.values:
+ path = self.get_file_internal('patch')
+ try:
+ shutil.unpack_archive(path, self.subdir_root)
+ except Exception:
+ with tempfile.TemporaryDirectory() as workdir:
+ shutil.unpack_archive(path, workdir)
+ self.copy_tree(workdir, self.subdir_root)
+ elif 'patch_directory' in self.wrap.values:
+ from ..interpreterbase import FeatureNew
+ FeatureNew('patch_directory', '0.55.0').use(self.current_subproject)
+ patch_dir = self.wrap.values['patch_directory']
+ src_dir = os.path.join(self.filesdir, patch_dir)
+ if not os.path.isdir(src_dir):
+ raise WrapException('patch directory does not exists: {}'.format(patch_dir))
+ self.copy_tree(src_dir, self.dirname)
def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
"""
diff --git a/msi/createmsi.py b/msi/createmsi.py
index f80d1dc..eca4493 100644
--- a/msi/createmsi.py
+++ b/msi/createmsi.py
@@ -13,10 +13,12 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
'''
- This script is for generating MSI packages
- for Windows users.
+This script is for generating MSI packages
+for Windows users.
'''
+
import subprocess
import shutil
import uuid
@@ -78,12 +80,8 @@ class PackageGenerator:
self.bytesize = 32 if '32' in platform.architecture()[0] else 64
self.final_output = 'meson-{}-{}.msi'.format(self.version, self.bytesize)
self.staging_dirs = ['dist', 'dist2']
- if self.bytesize == 64:
- self.progfile_dir = 'ProgramFiles64Folder'
- redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC142_CRT_x64.msm'
- else:
- self.progfile_dir = 'ProgramFilesFolder'
- redist_glob = 'C:\\Program Files*\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC142_CRT_x86.msm'
+ self.progfile_dir = 'ProgramFiles64Folder'
+ redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\v*\\MergeModules\\Microsoft_VC142_CRT_x64.msm'
trials = glob(redist_glob)
if len(trials) != 1:
sys.exit('Could not find unique MSM setup:' + '\n'.join(trials))
diff --git a/run_cross_test.py b/run_cross_test.py
index abbfdac..1e67876 100755
--- a/run_cross_test.py
+++ b/run_cross_test.py
@@ -21,8 +21,9 @@ This is now just a wrapper around run_project_tests.py with specific arguments
import argparse
import subprocess
-import sys
from mesonbuild import mesonlib
+from mesonbuild.coredata import version as meson_version
+
def runtests(cross_file, failfast):
tests = ['--only', 'common']
@@ -37,4 +38,5 @@ def main():
return runtests(options.cross_file, options.failfast)
if __name__ == '__main__':
- sys.exit(main())
+ print('Meson build system', meson_version, 'Cross Tests')
+ raise SystemExit(main())
diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py
index 9dfb62e..7bc6185 100755
--- a/run_meson_command_tests.py
+++ b/run_meson_command_tests.py
@@ -14,7 +14,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import sys
import os
import tempfile
import unittest
@@ -23,6 +22,8 @@ import zipapp
from pathlib import Path
from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows
+from mesonbuild.coredata import version as meson_version
+
def get_pypath():
import sysconfig
@@ -128,6 +129,10 @@ class CommandTests(unittest.TestCase):
os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '')
os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH']
self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)])
+ # Fix importlib-metadata by appending all dirs in pylibdir
+ PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir()]
+ PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS]
+ os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS)
# Check that all the files were installed correctly
self.assertTrue(bindir.is_dir())
self.assertTrue(pylibdir.is_dir())
@@ -195,4 +200,5 @@ class CommandTests(unittest.TestCase):
if __name__ == '__main__':
- sys.exit(unittest.main(buffer=True))
+ print('Meson build system', meson_version, 'Command Tests')
+ raise SystemExit(unittest.main(buffer=True))
diff --git a/run_project_tests.py b/run_project_tests.py
index 875a522..56b7e2a 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -41,8 +41,9 @@ from mesonbuild import compilers
from mesonbuild import mesonlib
from mesonbuild import mlog
from mesonbuild import mtest
+from mesonbuild.build import ConfigurationData
from mesonbuild.mesonlib import MachineChoice, Popen_safe
-from mesonbuild.coredata import backendlist
+from mesonbuild.coredata import backendlist, version as meson_version
from run_tests import get_fake_options, run_configure, get_meson_script
from run_tests import get_backend_commands, get_backend_args_for_dir, Backend
@@ -50,7 +51,7 @@ from run_tests import ensure_backend_detects_changes
from run_tests import guess_backend
ALL_TESTS = ['cmake', 'common', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test',
- 'kconfig', 'platform-osx', 'platform-windows', 'platform-linux',
+ 'keyval', 'platform-osx', 'platform-windows', 'platform-linux',
'java', 'C#', 'vala', 'rust', 'd', 'objective c', 'objective c++',
'fortran', 'swift', 'cuda', 'python3', 'python', 'fpga', 'frameworks', 'nasm', 'wasm'
]
@@ -190,7 +191,8 @@ class TestDef:
self.skip = skip
self.env = os.environ.copy()
self.installed_files = [] # type: T.List[InstalledFile]
- self.do_not_set_opts = [] # type: T.List[str]
+ self.do_not_set_opts = [] # type: T.List[str]
+ self.stdout = [] # type: T.List[T.Dict[str, str]]
def __repr__(self) -> str:
return '<{}: {:<48} [{}: {}] -- {}>'.format(type(self).__name__, str(self.path), self.name, self.args, self.skip)
@@ -233,6 +235,7 @@ no_meson_log_msg = 'No meson-log.txt found.'
system_compiler = None
compiler_id_map = {} # type: T.Dict[str, str]
+tool_vers_map = {} # type: T.Dict[str, str]
class StopException(Exception):
def __init__(self):
@@ -340,30 +343,29 @@ def log_text_file(logfile, testdir, stdo, stde):
def bold(text):
- return mlog.bold(text).get_text(mlog.colorize_console)
+ return mlog.bold(text).get_text(mlog.colorize_console())
def green(text):
- return mlog.green(text).get_text(mlog.colorize_console)
+ return mlog.green(text).get_text(mlog.colorize_console())
def red(text):
- return mlog.red(text).get_text(mlog.colorize_console)
+ return mlog.red(text).get_text(mlog.colorize_console())
def yellow(text):
- return mlog.yellow(text).get_text(mlog.colorize_console)
+ return mlog.yellow(text).get_text(mlog.colorize_console())
def _run_ci_include(args: T.List[str]) -> str:
if not args:
return 'At least one parameter required'
try:
- file_path = Path(args[0])
- data = file_path.open(errors='ignore', encoding='utf-8').read()
+ data = Path(args[0]).read_text(errors='ignore', encoding='utf-8')
return 'Included file {}:\n{}\n'.format(args[0], data)
except Exception:
- return 'Failed to open {} ({})'.format(args[0])
+ return 'Failed to open {}'.format(args[0])
ci_commands = {
'ci_include': _run_ci_include
@@ -380,6 +382,66 @@ def run_ci_commands(raw_log: str) -> T.List[str]:
res += ['CI COMMAND {}:\n{}\n'.format(cmd[0], ci_commands[cmd[0]](cmd[1:]))]
return res
+def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) -> str:
+ if expected:
+ i = iter(expected)
+
+ def next_expected(i):
+ # Get the next expected line
+ item = next(i)
+ how = item.get('match', 'literal')
+ expected = item.get('line')
+
+ # Simple heuristic to automatically convert path separators for
+ # Windows:
+ #
+ # Any '/' appearing before 'WARNING' or 'ERROR' (i.e. a path in a
+ # filename part of a location) is replaced with '\' (in a re: '\\'
+ # which matches a literal '\')
+ #
+ # (There should probably be a way to turn this off for more complex
+ # cases which don't fit this)
+ if mesonlib.is_windows():
+ if how != "re":
+ sub = r'\\'
+ else:
+ sub = r'\\\\'
+ expected = re.sub(r'/(?=.*(WARNING|ERROR))', sub, expected)
+
+ return how, expected
+
+ try:
+ how, expected = next_expected(i)
+ for actual in output.splitlines():
+ if how == "re":
+ match = bool(re.match(expected, actual))
+ else:
+ match = (expected == actual)
+ if match:
+ how, expected = next_expected(i)
+
+ # reached the end of output without finding expected
+ return 'expected "{}" not found in {}'.format(expected, desc)
+ except StopIteration:
+ # matched all expected lines
+ pass
+
+ return ''
+
+def validate_output(test: TestDef, stdo: str, stde: str) -> str:
+ return _compare_output(test.stdout, stdo, 'stdout')
+
+# There are some class variables and such that cache
+# information. Clear all of these. The better solution
+# would be to change the code so that no state is persisted
+# but that would be a lot of work given that Meson was originally
+# coded to run as a batch process.
+def clear_internal_caches():
+ import mesonbuild.interpreterbase
+ from mesonbuild.dependencies import CMakeDependency
+ from mesonbuild.mesonlib import PerMachine
+ mesonbuild.interpreterbase.FeatureNew.feature_registry = {}
+ CMakeDependency.class_cmakeinfo = PerMachine(None, None)
def run_test_inprocess(testdir):
old_stdout = sys.stdout
@@ -414,6 +476,28 @@ def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str:
os.mkdir(abs_pathname)
return abs_pathname
+def format_parameter_file(file_basename: str, test: TestDef, test_build_dir: str) -> Path:
+ confdata = ConfigurationData()
+ confdata.values = {'MESON_TEST_ROOT': (str(test.path.absolute()), 'base directory of current test')}
+
+ template = test.path / (file_basename + '.in')
+ destination = Path(test_build_dir) / file_basename
+ mesonlib.do_conf_file(str(template), str(destination), confdata, 'meson')
+
+ return destination
+
+def detect_parameter_files(test: TestDef, test_build_dir: str) -> (Path, Path):
+ nativefile = test.path / 'nativefile.ini'
+ crossfile = test.path / 'crossfile.ini'
+
+ if os.path.exists(str(test.path / 'nativefile.ini.in')):
+ nativefile = format_parameter_file('nativefile.ini', test, test_build_dir)
+
+ if os.path.exists(str(test.path / 'crossfile.ini.in')):
+ crossfile = format_parameter_file('crossfile.ini', test, test_build_dir)
+
+ return nativefile, crossfile
+
def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail, use_tmp: bool):
if test.skip:
return None
@@ -436,8 +520,9 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args,
if 'libdir' not in test.do_not_set_opts:
gen_args += ['--libdir', 'lib']
gen_args += [test.path.as_posix(), test_build_dir] + flags + extra_args
- nativefile = test.path / 'nativefile.ini'
- crossfile = test.path / 'crossfile.ini'
+
+ nativefile, crossfile = detect_parameter_files(test, test_build_dir)
+
if nativefile.exists():
gen_args.extend(['--native-file', nativefile.as_posix()])
if crossfile.exists():
@@ -451,6 +536,11 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args,
cicmds = run_ci_commands(mesonlog)
testresult = TestResult(cicmds)
testresult.add_step(BuildStep.configure, stdo, stde, mesonlog, time.time() - gen_start)
+ output_msg = validate_output(test, stdo, stde)
+ testresult.mlog += output_msg
+ if output_msg:
+ testresult.fail('Unexpected output while configuring.')
+ return testresult
if should_fail == 'meson':
if returncode == 1:
return testresult
@@ -496,6 +586,7 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args,
force_regenerate()
# Test in-process
+ clear_internal_caches()
test_start = time.time()
(returncode, tstdo, tstde, test_log) = run_test_inprocess(test_build_dir)
testresult.add_step(BuildStep.test, tstdo, tstde, test_log, time.time() - test_start)
@@ -538,18 +629,16 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args,
return testresult
-def gather_tests(testdir: Path) -> T.List[TestDef]:
+def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]:
tests = [t.name for t in testdir.iterdir() if t.is_dir()]
tests = [t for t in tests if not t.startswith('.')] # Filter non-tests files (dot files, etc)
test_defs = [TestDef(testdir / t, None, []) for t in tests]
all_tests = [] # type: T.List[TestDef]
for t in test_defs:
+ test_def = {}
test_def_file = t.path / 'test.json'
- if not test_def_file.is_file():
- all_tests += [t]
- continue
-
- test_def = json.loads(test_def_file.read_text())
+ if test_def_file.is_file():
+ test_def = json.loads(test_def_file.read_text())
# Handle additional environment variables
env = {} # type: T.Dict[str, str]
@@ -565,14 +654,29 @@ def gather_tests(testdir: Path) -> T.List[TestDef]:
if 'installed' in test_def:
installed = [InstalledFile(x) for x in test_def['installed']]
+ # Handle expected output
+ stdout = test_def.get('stdout', [])
+ if stdout_mandatory and not stdout:
+ raise RuntimeError("{} must contain a non-empty stdout key".format(test_def_file))
+
# Handle the do_not_set_opts list
do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str]
+ # Skip tests if the tool requirements are not met
+ if 'tools' in test_def:
+ assert isinstance(test_def['tools'], dict)
+ for tool, vers_req in test_def['tools'].items():
+ if tool not in tool_vers_map:
+ t.skip = True
+ elif not mesonlib.version_compare(tool_vers_map[tool], vers_req):
+ t.skip = True
+
# Skip the matrix code and just update the existing test
if 'matrix' not in test_def:
t.env.update(env)
t.installed_files = installed
t.do_not_set_opts = do_not_set_opts
+ t.stdout = stdout
all_tests += [t]
continue
@@ -588,11 +692,6 @@ def gather_tests(testdir: Path) -> T.List[TestDef]:
assert "val" in i
skip = False
- # Add an empty matrix entry
- if i['val'] is None:
- tmp_opts += [(None, False)]
- continue
-
# Skip the matrix entry if environment variable is present
if 'skip_on_env' in i:
for skip_env_var in i['skip_on_env']:
@@ -606,6 +705,11 @@ def gather_tests(testdir: Path) -> T.List[TestDef]:
skip = True
break
+ # Add an empty matrix entry
+ if i['val'] is None:
+ tmp_opts += [(None, skip)]
+ continue
+
tmp_opts += [('{}={}'.format(key, i['val']), skip)]
if opt_list:
@@ -639,10 +743,11 @@ def gather_tests(testdir: Path) -> T.List[TestDef]:
name = ' '.join([x[0] for x in i if x[0] is not None])
opts = ['-D' + x[0] for x in i if x[0] is not None]
skip = any([x[1] for x in i])
- test = TestDef(t.path, name, opts, skip)
+ test = TestDef(t.path, name, opts, skip or t.skip)
test.env.update(env)
test.installed_files = installed
test.do_not_set_opts = do_not_set_opts
+ test.stdout = stdout
all_tests += [test]
return sorted(all_tests)
@@ -827,45 +932,50 @@ def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str,
shutil.which('pgfortran') or
shutil.which('ifort'))
- # Name, subdirectory, skip condition.
+ class TestCategory:
+ def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandatory: bool = False):
+ self.category = category # category name
+ self.subdir = subdir # subdirectory
+ self.skip = skip # skip condition
+            self.stdout_mandatory = stdout_mandatory  # expected stdout is mandatory for tests in this category
+
all_tests = [
- ('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)),
- ('common', 'common', False),
- ('warning-meson', 'warning', False),
- ('failing-meson', 'failing', False),
- ('failing-build', 'failing build', False),
- ('failing-test', 'failing test', False),
- ('kconfig', 'kconfig', False),
-
- ('platform-osx', 'osx', not mesonlib.is_osx()),
- ('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
- ('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
-
- ('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
- ('C#', 'csharp', skip_csharp(backend)),
- ('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
- ('rust', 'rust', should_skip_rust(backend)),
- ('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
- ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)),
- ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)),
- ('fortran', 'fortran', skip_fortran or backend != Backend.ninja),
- ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
+ TestCategory('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)),
+ TestCategory('common', 'common'),
+ TestCategory('warning-meson', 'warning', stdout_mandatory=True),
+ TestCategory('failing-meson', 'failing', stdout_mandatory=True),
+ TestCategory('failing-build', 'failing build'),
+ TestCategory('failing-test', 'failing test'),
+ TestCategory('keyval', 'keyval'),
+ TestCategory('platform-osx', 'osx', not mesonlib.is_osx()),
+ TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()),
+ TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()),
+ TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()),
+ TestCategory('C#', 'csharp', skip_csharp(backend)),
+ TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))),
+ TestCategory('rust', 'rust', should_skip_rust(backend)),
+ TestCategory('d', 'd', backend is not Backend.ninja or not have_d_compiler()),
+ TestCategory('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)),
+ TestCategory('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)),
+ TestCategory('fortran', 'fortran', skip_fortran or backend != Backend.ninja),
+ TestCategory('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')),
# CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja
- ('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')),
- ('python3', 'python3', backend is not Backend.ninja),
- ('python', 'python', backend is not Backend.ninja),
- ('fpga', 'fpga', shutil.which('yosys') is None),
- ('frameworks', 'frameworks', False),
- ('nasm', 'nasm', False),
- ('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja),
+ TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')),
+ TestCategory('python3', 'python3', backend is not Backend.ninja),
+ TestCategory('python', 'python'),
+ TestCategory('fpga', 'fpga', shutil.which('yosys') is None),
+ TestCategory('frameworks', 'frameworks'),
+ TestCategory('nasm', 'nasm'),
+ TestCategory('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja),
]
- names = [t[0] for t in all_tests]
- assert names == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests names'
+ categories = [t.category for t in all_tests]
+ assert categories == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests categories'
+
if only:
- ind = [names.index(o) for o in only]
- all_tests = [all_tests[i] for i in ind]
- gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests]
+ all_tests = [t for t in all_tests if t.category in only]
+
+ gathered_tests = [(t.category, gather_tests(Path('test cases', t.subdir), t.stdout_mandatory), t.skip) for t in all_tests]
return gathered_tests
def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]],
@@ -1029,16 +1139,15 @@ def check_format():
'.build',
'.md',
}
+ skip_dirs = {
+ '.dub', # external deps are here
+ '.pytest_cache',
+ 'meson-logs', 'meson-private',
+ '.eggs', '_cache', # e.g. .mypy_cache
+ 'venv', # virtualenvs have DOS line endings
+ }
for (root, _, filenames) in os.walk('.'):
- if '.dub' in root: # external deps are here
- continue
- if '.pytest_cache' in root:
- continue
- if 'meson-logs' in root or 'meson-private' in root:
- continue
- if '__CMake_build' in root:
- continue
- if '.eggs' in root or '_cache' in root: # e.g. .mypy_cache
+ if any([x in root for x in skip_dirs]):
continue
for fname in filenames:
file = Path(fname)
@@ -1103,11 +1212,23 @@ def detect_system_compiler(options):
def print_tool_versions():
tools = [
{
+ 'tool': 'ninja',
+ 'args': ['--version'],
+ 'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
+ 'match_group': 1,
+ },
+ {
'tool': 'cmake',
'args': ['--version'],
'regex': re.compile(r'^cmake version ([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
'match_group': 1,
},
+ {
+ 'tool': 'hotdoc',
+ 'args': ['--version'],
+ 'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'),
+ 'match_group': 1,
+ },
]
def get_version(t: dict) -> str:
@@ -1123,6 +1244,7 @@ def print_tool_versions():
i = i.strip('\n\r\t ')
m = t['regex'].match(i)
if m is not None:
+ tool_vers_map[t['tool']] = m.group(t['match_group'])
return '{} ({})'.format(exe, m.group(t['match_group']))
return '{} (unknown)'.format(exe)
@@ -1148,6 +1270,8 @@ if __name__ == '__main__':
if options.cross_file:
options.extra_args += ['--cross-file', options.cross_file]
+ print('Meson build system', meson_version, 'Project Tests')
+ print('Using python', sys.version.split('\n')[0])
setup_commands(options.backend)
detect_system_compiler(options)
print_tool_versions()
diff --git a/run_tests.py b/run_tests.py
index 005d9a0..2648e06 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -33,7 +33,7 @@ from mesonbuild import mesonmain
from mesonbuild import mtest
from mesonbuild import mlog
from mesonbuild.environment import Environment, detect_ninja
-from mesonbuild.coredata import backendlist
+from mesonbuild.coredata import backendlist, version as meson_version
NINJA_1_9_OR_NEWER = False
NINJA_CMD = None
@@ -303,7 +303,7 @@ def run_configure(commandlist, env=None):
return run_configure_inprocess(commandlist, env=env)
def print_system_info():
- print(mlog.bold('System information.').get_text(mlog.colorize_console))
+ print(mlog.bold('System information.').get_text(mlog.colorize_console()))
print('Architecture:', platform.architecture())
print('Machine:', platform.machine())
print('Platform:', platform.system())
@@ -377,7 +377,7 @@ def main():
print(flush=True)
returncode = 0
else:
- print(mlog.bold('Running unittests.').get_text(mlog.colorize_console))
+ print(mlog.bold('Running unittests.').get_text(mlog.colorize_console()))
print(flush=True)
cmd = mesonlib.python_command + ['run_unittests.py', '-v']
if options.failfast:
@@ -390,7 +390,7 @@ def main():
else:
cross_test_args = mesonlib.python_command + ['run_cross_test.py']
for cf in options.cross:
- print(mlog.bold('Running {} cross tests.'.format(cf)).get_text(mlog.colorize_console))
+ print(mlog.bold('Running {} cross tests.'.format(cf)).get_text(mlog.colorize_console()))
print(flush=True)
cmd = cross_test_args + ['cross/' + cf]
if options.failfast:
@@ -401,4 +401,5 @@ def main():
return returncode
if __name__ == '__main__':
- sys.exit(main())
+ print('Meson build system', meson_version, 'Project and Unit Tests')
+ raise SystemExit(main())
diff --git a/run_unittests.py b/run_unittests.py
index e427ee9..4b4fe34 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import typing as T
import stat
import subprocess
import re
@@ -40,6 +41,7 @@ from contextlib import contextmanager
from glob import glob
from pathlib import (PurePath, Path)
from distutils.dir_util import copy_tree
+import typing as T
import mesonbuild.mlog
import mesonbuild.depfile
@@ -56,7 +58,7 @@ from mesonbuild.mesonlib import (
BuildDirLock, Language, LibType, MachineChoice, PerMachine, Version, is_windows,
is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, is_sunos,
windows_proof_rmtree, python_command, version_compare, split_args,
- quote_arg, relpath
+ quote_arg, relpath, is_linux
)
from mesonbuild.environment import detect_ninja
from mesonbuild.mesonlib import MesonException, EnvironmentException
@@ -311,8 +313,14 @@ class InternalTests(unittest.TestCase):
self.assertEqual(searchfunc('1.2.3'), '1.2.3')
self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3')
self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3')
- self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version')
- self.assertEqual(searchfunc('2016.10.128'), 'unknown version')
+ self.assertEqual(searchfunc('foobar 2016.10.128'), '2016.10.128')
+ self.assertEqual(searchfunc('2016.10.128'), '2016.10.128')
+ self.assertEqual(searchfunc('2016.10'), '2016.10')
+ self.assertEqual(searchfunc('2016.10 1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('oops v1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('2016.oops 1.2.3'), '1.2.3')
+ self.assertEqual(searchfunc('2016.x'), 'unknown version')
+
def test_mode_symbolic_to_bits(self):
modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits
@@ -349,17 +357,34 @@ class InternalTests(unittest.TestCase):
stat.S_IRWXU | stat.S_ISUID |
stat.S_IRGRP | stat.S_IXGRP)
+ def test_compiler_args_class_none_flush(self):
+ cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())
+ a = cc.compiler_args(['-I.'])
+ #first we are checking if the tree construction deduplicates the correct -I argument
+ a += ['-I..']
+ a += ['-I./tests/']
+ a += ['-I./tests2/']
+        # Think of this as the assertion; we cannot actually apply it here, otherwise CompilerArgs would already flush the changes:
+ # assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..', '-I.'])
+ a += ['-I.']
+ a += ['-I.', '-I./tests/']
+ self.assertEqual(a, ['-I.', '-I./tests/', '-I./tests2/', '-I..'])
+
+        # Then we check that, when the CompilerArgs already has a built container list, deduplication picks the correct entry
+ a += ['-I.', '-I./tests2/']
+ self.assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..'])
+
+
def test_compiler_args_class(self):
- cargsfunc = mesonbuild.compilers.CompilerArgs
cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())
# Test that empty initialization works
- a = cargsfunc(cc)
+ a = cc.compiler_args()
self.assertEqual(a, [])
# Test that list initialization works
- a = cargsfunc(cc, ['-I.', '-I..'])
+ a = cc.compiler_args(['-I.', '-I..'])
self.assertEqual(a, ['-I.', '-I..'])
# Test that there is no de-dup on initialization
- self.assertEqual(cargsfunc(cc, ['-I.', '-I.']), ['-I.', '-I.'])
+ self.assertEqual(cc.compiler_args(['-I.', '-I.']), ['-I.', '-I.'])
## Test that appending works
a.append('-I..')
@@ -405,7 +430,7 @@ class InternalTests(unittest.TestCase):
self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall'])
## Test that adding libraries works
- l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
+ l = cc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l, ['-Lfoodir', '-lfoo'])
# Adding a library and a libpath appends both correctly
l += ['-Lbardir', '-lbar']
@@ -415,7 +440,7 @@ class InternalTests(unittest.TestCase):
self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar'])
## Test that 'direct' append and extend works
- l = cargsfunc(cc, ['-Lfoodir', '-lfoo'])
+ l = cc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l, ['-Lfoodir', '-lfoo'])
# Direct-adding a library and a libpath appends both correctly
l.extend_direct(['-Lbardir', '-lbar'])
@@ -431,14 +456,13 @@ class InternalTests(unittest.TestCase):
self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a'])
def test_compiler_args_class_gnuld(self):
- cargsfunc = mesonbuild.compilers.CompilerArgs
## Test --start/end-group
linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
## Ensure that the fake compiler is never called by overriding the relevant function
gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
## Test that 'direct' append and extend works
- l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
+ l = gcc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
# Direct-adding a library and a libpath appends both correctly
l.extend_direct(['-Lbardir', '-lbar'])
@@ -460,14 +484,13 @@ class InternalTests(unittest.TestCase):
self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group'])
def test_compiler_args_remove_system(self):
- cargsfunc = mesonbuild.compilers.CompilerArgs
## Test --start/end-group
linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', [])
gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker)
## Ensure that the fake compiler is never called by overriding the relevant function
gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include']
## Test that 'direct' append and extend works
- l = cargsfunc(gcc, ['-Lfoodir', '-lfoo'])
+ l = gcc.compiler_args(['-Lfoodir', '-lfoo'])
self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group'])
## Test that to_native removes all system includes
l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include']
@@ -681,7 +704,6 @@ class InternalTests(unittest.TestCase):
self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False))
# Test flattening and unholdering
holder1 = ObjectHolder(1)
- holder3 = ObjectHolder(3)
self.assertEqual([holder1], listify(holder1))
self.assertEqual([holder1], listify([holder1]))
self.assertEqual([holder1, 2], listify([holder1, 2]))
@@ -717,25 +739,22 @@ class InternalTests(unittest.TestCase):
self.assertEqual([1, 2, 3], extract(kwargs, 'sources'))
def test_pkgconfig_module(self):
-
- class Mock:
- pass
-
- dummystate = Mock()
+ dummystate = mock.Mock()
dummystate.subproject = 'dummy'
- mock = Mock()
- mock.pcdep = Mock()
- mock.pcdep.name = "some_name"
- mock.version_reqs = []
+ _mock = mock.Mock(spec=mesonbuild.dependencies.ExternalDependency)
+ _mock.pcdep = mock.Mock()
+ _mock.pcdep.name = "some_name"
+ _mock.version_reqs = []
+ _mock = mock.Mock(held_object=_mock)
# pkgconfig dependency as lib
deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib")
- deps.add_pub_libs([mock])
+ deps.add_pub_libs([_mock])
self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
# pkgconfig dependency as requires
deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib")
- deps.add_pub_reqs([mock])
+ deps.add_pub_reqs([_mock])
self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name")
def _test_all_naming(self, cc, env, patterns, platform):
@@ -1253,7 +1272,6 @@ class InternalTests(unittest.TestCase):
self.assertFalse(errors)
-
@unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release')
class DataTests(unittest.TestCase):
@@ -1466,16 +1484,49 @@ class DataTests(unittest.TestCase):
astint = AstInterpreter('.', '', '')
self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys()))
+ def test_mesondata_is_up_to_date(self):
+ from mesonbuild.mesondata import mesondata
+ err_msg = textwrap.dedent('''
+
+ ###########################################################
+ ### mesonbuild.mesondata is not up-to-date ###
+ ### Please regenerate it by running tools/gen_data.py ###
+ ###########################################################
+
+ ''')
+
+ root_dir = Path(__file__).resolve().parent
+ mesonbuild_dir = root_dir / 'mesonbuild'
+
+ data_dirs = mesonbuild_dir.glob('**/data')
+ data_files = [] # type: T.List[T.Tuple(str, str)]
+
+ for i in data_dirs:
+ for p in i.iterdir():
+ data_files += [(p.relative_to(mesonbuild_dir).as_posix(), hashlib.sha256(p.read_bytes()).hexdigest())]
+
+ from pprint import pprint
+ current_files = set(mesondata.keys())
+ scanned_files = set([x[0] for x in data_files])
+
+ self.assertSetEqual(current_files, scanned_files, err_msg + 'Data files were added or removed\n')
+ errors = []
+ for i in data_files:
+ if mesondata[i[0]].sha256sum != i[1]:
+ errors += [i[0]]
+
+ self.assertListEqual(errors, [], err_msg + 'Files were changed')
class BasePlatformTests(unittest.TestCase):
+ prefix = '/usr'
+ libdir = 'lib'
+
def setUp(self):
super().setUp()
self.maxDiff = None
src_root = os.path.dirname(__file__)
src_root = os.path.join(os.getcwd(), src_root)
self.src_root = src_root
- self.prefix = '/usr'
- self.libdir = 'lib'
# Get the backend
# FIXME: Extract this from argv?
self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja'))
@@ -1588,8 +1639,9 @@ class BasePlatformTests(unittest.TestCase):
extra_args = [extra_args]
args = [srcdir, self.builddir]
if default_args:
- args += ['--prefix', self.prefix,
- '--libdir', self.libdir]
+ args += ['--prefix', self.prefix]
+ if self.libdir:
+ args += ['--libdir', self.libdir]
if self.meson_native_file:
args += ['--native-file', self.meson_native_file]
if self.meson_cross_file:
@@ -1901,48 +1953,48 @@ class AllPlatformTests(BasePlatformTests):
(result, missing_variables, confdata_useless) = mesonbuild.mesonlib.do_conf_str(in_data, confdata, variable_format = vformat)
return '\n'.join(result)
- def check_formats (confdata, result):
- self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'),result)
- self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'),result)
- self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'),result)
+ def check_formats(confdata, result):
+ self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'), result)
+ self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'), result)
+ self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'), result)
confdata = ConfigurationData()
# Key error as they do not exists
check_formats(confdata, '/* #undef VAR */\n')
# Check boolean
- confdata.values = {'VAR': (False,'description')}
+ confdata.values = {'VAR': (False, 'description')}
check_formats(confdata, '#undef VAR\n')
- confdata.values = {'VAR': (True,'description')}
+ confdata.values = {'VAR': (True, 'description')}
check_formats(confdata, '#define VAR\n')
# Check string
- confdata.values = {'VAR': ('value','description')}
+ confdata.values = {'VAR': ('value', 'description')}
check_formats(confdata, '#define VAR value\n')
# Check integer
- confdata.values = {'VAR': (10,'description')}
+ confdata.values = {'VAR': (10, 'description')}
check_formats(confdata, '#define VAR 10\n')
# Check multiple string with cmake formats
- confdata.values = {'VAR': ('value','description')}
- self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy value\n')
- self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy value')
- self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value\n')
- self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value')
+ confdata.values = {'VAR': ('value', 'description')}
+ self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value\n')
+ self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value')
+ self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value\n')
+ self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value')
# Handles meson format exceptions
# Unknown format
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'unknown_format')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'unknown_format')
# More than 2 params in mesondefine
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'meson')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'meson')
# Mismatched line with format
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#cmakedefine VAR'], confdata, 'meson')
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake')
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake@')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#cmakedefine VAR'], confdata, 'meson')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake')
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake@')
# Dict value in confdata
- confdata.values = {'VAR': (['value'],'description')}
- self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'meson')
+ confdata.values = {'VAR': (['value'], 'description')}
+ self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson')
def test_absolute_prefix_libdir(self):
'''
@@ -2262,6 +2314,12 @@ class AllPlatformTests(BasePlatformTests):
self.build()
self.run_tests()
+ def test_force_fallback_for(self):
+ testdir = os.path.join(self.unit_test_dir, '31 forcefallback')
+ self.init(testdir, extra_args=['--force-fallback-for=zlib,foo'])
+ self.build()
+ self.run_tests()
+
def test_env_ops_dont_stack(self):
'''
Test that env ops prepend/append do not stack, and that this usage issues a warning
@@ -2433,6 +2491,9 @@ class AllPlatformTests(BasePlatformTests):
self.assertPathExists(exe2)
def test_internal_include_order(self):
+ if mesonbuild.environment.detect_msys2_arch() and ('MESON_RSP_THRESHOLD' in os.environ):
+ raise unittest.SkipTest('Test does not yet support gcc rsp files on msys2')
+
testdir = os.path.join(self.common_test_dir, '134 include order')
self.init(testdir)
execmd = fxecmd = None
@@ -2448,9 +2509,12 @@ class AllPlatformTests(BasePlatformTests):
# Check include order for 'someexe'
incs = [a for a in split_args(execmd) if a.startswith("-I")]
self.assertEqual(len(incs), 9)
- # target private dir
- someexe_id = Target.construct_id_from_path("sub4", "someexe", "@exe")
- self.assertPathEqual(incs[0], "-I" + os.path.join("sub4", someexe_id))
+ # Need to run the build so the private dir is created.
+ self.build()
+ pdirs = glob(os.path.join(self.builddir, 'sub4/someexe*.p'))
+ self.assertEqual(len(pdirs), 1)
+ privdir = pdirs[0][len(self.builddir)+1:]
+ self.assertPathEqual(incs[0], "-I" + privdir)
# target build subdir
self.assertPathEqual(incs[1], "-Isub4")
# target source subdir
@@ -2471,7 +2535,10 @@ class AllPlatformTests(BasePlatformTests):
incs = [a for a in split_args(fxecmd) if a.startswith('-I')]
self.assertEqual(len(incs), 9)
# target private dir
- self.assertPathEqual(incs[0], '-Isomefxe@exe')
+ pdirs = glob(os.path.join(self.builddir, 'somefxe*.p'))
+ self.assertEqual(len(pdirs), 1)
+ privdir = pdirs[0][len(self.builddir)+1:]
+ self.assertPathEqual(incs[0], '-I' + privdir)
# target build dir
self.assertPathEqual(incs[1], '-I.')
# target source dir
@@ -2546,6 +2613,8 @@ class AllPlatformTests(BasePlatformTests):
self.assertIsInstance(linker, ar)
if is_osx():
self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker)
+ elif is_sunos():
+ self.assertIsInstance(cc.linker, (mesonbuild.linkers.SolarisDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin))
else:
self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)
if isinstance(cc, clangcl):
@@ -2796,9 +2865,25 @@ class AllPlatformTests(BasePlatformTests):
# fails sometimes.
pass
- def test_dist_hg(self):
+ def has_working_hg(self):
if not shutil.which('hg'):
- raise unittest.SkipTest('Mercurial not found')
+ return False
+ try:
+ # This check should not be necessary, but
+ # CI under macOS passes the above test even
+ # though Mercurial is not installed.
+ if subprocess.call(['hg', '--version'],
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL) != 0:
+ return False
+ return True
+ except FileNotFoundError:
+ return False
+
+
+ def test_dist_hg(self):
+ if not self.has_working_hg():
+ raise unittest.SkipTest('Mercurial not found or broken.')
if self.backend is not Backend.ninja:
raise unittest.SkipTest('Dist is only supported with Ninja')
@@ -3139,8 +3224,9 @@ int main(int argc, char **argv) {
self.assertEqual(foo_dep.get_link_args(), link_args)
# Ensure include args are properly quoted
incdir = PurePath(prefix) / PurePath('include')
- cargs = ['-I' + incdir.as_posix()]
- self.assertEqual(foo_dep.get_compile_args(), cargs)
+ cargs = ['-I' + incdir.as_posix(), '-DLIBFOO']
+ # pkg-config and pkgconf does not respect the same order
+ self.assertEqual(sorted(foo_dep.get_compile_args()), sorted(cargs))
def test_array_option_change(self):
def get_opt():
@@ -3419,67 +3505,6 @@ int main(int argc, char **argv) {
f.write('public class Foo { public static void main() {} }')
self._run(self.meson_command + ['init', '-b'], workdir=tmpdir)
- # The test uses mocking and thus requires that
- # the current process is the one to run the Meson steps.
- # If we are using an external test executable (most commonly
- # in Debian autopkgtests) then the mocking won't work.
- @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.')
- def test_cross_file_system_paths(self):
- if is_windows():
- raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)')
- if is_sunos():
- cc = 'gcc'
- else:
- cc = 'cc'
-
- testdir = os.path.join(self.common_test_dir, '1 trivial')
- cross_content = textwrap.dedent("""\
- [binaries]
- c = '/usr/bin/{}'
- ar = '/usr/bin/ar'
- strip = '/usr/bin/ar'
-
- [properties]
-
- [host_machine]
- system = 'linux'
- cpu_family = 'x86'
- cpu = 'i686'
- endian = 'little'
- """.format(cc))
-
- with tempfile.TemporaryDirectory() as d:
- dir_ = os.path.join(d, 'meson', 'cross')
- os.makedirs(dir_)
- with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
- f.write(cross_content)
- name = os.path.basename(f.name)
-
- with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
- self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
- self.wipe()
-
- with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
- os.environ.pop('XDG_DATA_HOME', None)
- self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
- self.wipe()
-
- with tempfile.TemporaryDirectory() as d:
- dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
- os.makedirs(dir_)
- with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
- f.write(cross_content)
- name = os.path.basename(f.name)
-
- # If XDG_DATA_HOME is set in the environment running the
- # tests this test will fail, os mock the environment, pop
- # it, then test
- with mock.patch.dict(os.environ):
- os.environ.pop('XDG_DATA_HOME', None)
- with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
- self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
- self.wipe()
-
def test_compiler_run_command(self):
'''
The test checks that the compiler object can be passed to
@@ -3937,7 +3962,7 @@ recommended as it is not supported on some platforms''')
with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile:
crossfile.write(textwrap.dedent(
'''[binaries]
- pkgconfig = r'{0}'
+ pkgconfig = '{0}'
[properties]
@@ -3967,7 +3992,7 @@ recommended as it is not supported on some platforms''')
pkgconfig = 'pkg-config'
[properties]
- pkg_config_libdir = [r'{0}']
+ pkg_config_libdir = ['{0}']
[host_machine]
system = 'linux'
@@ -4103,6 +4128,11 @@ recommended as it is not supported on some platforms''')
'version': '1.0'
},
{
+ 'descriptive_name': 'sub_implicit',
+ 'name': 'sub_implicit',
+ 'version': '1.0',
+ },
+ {
'descriptive_name': 'sub-novar',
'name': 'sub_novar',
'version': '1.0',
@@ -4456,6 +4486,83 @@ recommended as it is not supported on some platforms''')
self.maxDiff = None
self.assertListEqual(res_nb, res_wb)
+ def test_introspect_ast_source(self):
+ testdir = os.path.join(self.unit_test_dir, '57 introspection')
+ testfile = os.path.join(testdir, 'meson.build')
+ res_nb = self.introspect_directory(testfile, ['--ast'] + self.meson_args)
+
+ node_counter = {}
+
+ def accept_node(json_node):
+ self.assertIsInstance(json_node, dict)
+ for i in ['lineno', 'colno', 'end_lineno', 'end_colno']:
+ self.assertIn(i, json_node)
+ self.assertIsInstance(json_node[i], int)
+ self.assertIn('node', json_node)
+ n = json_node['node']
+ self.assertIsInstance(n, str)
+ self.assertIn(n, nodes)
+ if n not in node_counter:
+ node_counter[n] = 0
+ node_counter[n] = node_counter[n] + 1
+ for nodeDesc in nodes[n]:
+ key = nodeDesc[0]
+ func = nodeDesc[1]
+ self.assertIn(key, json_node)
+ if func is None:
+ tp = nodeDesc[2]
+ self.assertIsInstance(json_node[key], tp)
+ continue
+ func(json_node[key])
+
+ def accept_node_list(node_list):
+ self.assertIsInstance(node_list, list)
+ for i in node_list:
+ accept_node(i)
+
+ def accept_kwargs(kwargs):
+ self.assertIsInstance(kwargs, list)
+ for i in kwargs:
+ self.assertIn('key', i)
+ self.assertIn('val', i)
+ accept_node(i['key'])
+ accept_node(i['val'])
+
+ nodes = {
+ 'BooleanNode': [('value', None, bool)],
+ 'IdNode': [('value', None, str)],
+ 'NumberNode': [('value', None, int)],
+ 'StringNode': [('value', None, str)],
+ 'ContinueNode': [],
+ 'BreakNode': [],
+ 'ArgumentNode': [('positional', accept_node_list), ('kwargs', accept_kwargs)],
+ 'ArrayNode': [('args', accept_node)],
+ 'DictNode': [('args', accept_node)],
+ 'EmptyNode': [],
+ 'OrNode': [('left', accept_node), ('right', accept_node)],
+ 'AndNode': [('left', accept_node), ('right', accept_node)],
+ 'ComparisonNode': [('left', accept_node), ('right', accept_node), ('ctype', None, str)],
+ 'ArithmeticNode': [('left', accept_node), ('right', accept_node), ('op', None, str)],
+ 'NotNode': [('right', accept_node)],
+ 'CodeBlockNode': [('lines', accept_node_list)],
+ 'IndexNode': [('object', accept_node), ('index', accept_node)],
+ 'MethodNode': [('object', accept_node), ('args', accept_node), ('name', None, str)],
+ 'FunctionNode': [('args', accept_node), ('name', None, str)],
+ 'AssignmentNode': [('value', accept_node), ('var_name', None, str)],
+ 'PlusAssignmentNode': [('value', accept_node), ('var_name', None, str)],
+ 'ForeachClauseNode': [('items', accept_node), ('block', accept_node), ('varnames', None, list)],
+ 'IfClauseNode': [('ifs', accept_node_list), ('else', accept_node)],
+ 'IfNode': [('condition', accept_node), ('block', accept_node)],
+ 'UMinusNode': [('right', accept_node)],
+ 'TernaryNode': [('condition', accept_node), ('true', accept_node), ('false', accept_node)],
+ }
+
+ accept_node(res_nb)
+
+ for n, c in [('ContinueNode', 2), ('BreakNode', 1), ('NotNode', 3)]:
+ self.assertIn(n, node_counter)
+ self.assertEqual(node_counter[n], c)
+
def test_introspect_dependencies_from_source(self):
testdir = os.path.join(self.unit_test_dir, '57 introspection')
testfile = os.path.join(testdir, 'meson.build')
@@ -4535,7 +4642,7 @@ recommended as it is not supported on some platforms''')
self._run(self.mconf_command + [self.builddir])
def test_summary(self):
- testdir = os.path.join(self.unit_test_dir, '72 summary')
+ testdir = os.path.join(self.unit_test_dir, '73 summary')
out = self.init(testdir)
expected = textwrap.dedent(r'''
Some Subproject 2.0
@@ -4559,6 +4666,10 @@ recommended as it is not supported on some platforms''')
no: NO
coma list: a, b, c
+ Plugins
+ long coma list: alpha, alphacolor, apetag, audiofx, audioparsers, auparse,
+ autodetect, avi
+
Subprojects
sub: YES
sub2: NO Problem encountered: This subproject failed
@@ -4575,21 +4686,86 @@ recommended as it is not supported on some platforms''')
def test_meson_compile(self):
"""Test the meson compile command."""
- prog = 'trivialprog'
- if is_windows():
- prog = '{}.exe'.format(prog)
+
+ def get_exe_name(basename: str) -> str:
+ if is_windows():
+ return '{}.exe'.format(basename)
+ else:
+ return basename
+
+ def get_shared_lib_name(basename: str) -> str:
+ if mesonbuild.environment.detect_msys2_arch():
+ return 'lib{}.dll'.format(basename)
+ elif is_windows():
+ return '{}.dll'.format(basename)
+ elif is_cygwin():
+ return 'cyg{}.dll'.format(basename)
+ elif is_osx():
+ return 'lib{}.dylib'.format(basename)
+ else:
+ return 'lib{}.so'.format(basename)
+
+ def get_static_lib_name(basename: str) -> str:
+ return 'lib{}.a'.format(basename)
+
+ # Base case (no targets or additional arguments)
testdir = os.path.join(self.common_test_dir, '1 trivial')
self.init(testdir)
+
self._run([*self.meson_command, 'compile', '-C', self.builddir])
- # If compile worked then we should get a program
- self.assertPathExists(os.path.join(self.builddir, prog))
+ self.assertPathExists(os.path.join(self.builddir, get_exe_name('trivialprog')))
+
+ # `--clean`
self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean'])
- self.assertPathDoesNotExist(os.path.join(self.builddir, prog))
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
+
+ # Target specified in a project with unique names
+
+ testdir = os.path.join(self.common_test_dir, '6 linkshared')
+ self.init(testdir, extra_args=['--wipe'])
+ # Multiple targets and target type specified
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, 'mylib', 'mycpplib:shared_library'])
+ # Check that we have a shared lib, but not an executable, i.e. check that target actually worked
+ self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mylib')))
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('prog')))
+ self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mycpplib')))
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('cppprog')))
+
+ # Target specified in a project with non unique names
+
+ testdir = os.path.join(self.common_test_dir, '190 same target name')
+ self.init(testdir, extra_args=['--wipe'])
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, './foo'])
+ self.assertPathExists(os.path.join(self.builddir, get_static_lib_name('foo')))
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, 'sub/foo'])
+ self.assertPathExists(os.path.join(self.builddir, 'sub', get_static_lib_name('foo')))
+
+ # run_target
+
+ testdir = os.path.join(self.common_test_dir, '54 run target')
+ self.init(testdir, extra_args=['--wipe'])
+ out = self._run([*self.meson_command, 'compile', '-C', self.builddir, 'py3hi'])
+ self.assertIn('I am Python3.', out)
+
+ # `--$BACKEND-args`
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ if self.backend is Backend.ninja:
+ self.init(testdir, extra_args=['--wipe'])
+ # Dry run - should not create a program
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, '--ninja-args=-n'])
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
+ elif self.backend is Backend.vs:
+ self.init(testdir, extra_args=['--wipe'])
+ self._run([*self.meson_command, 'compile', '-C', self.builddir])
+ # Explicitly clean the target through msbuild interface
+ self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', get_exe_name('trivialprog')))])
+ self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog')))
def test_spurious_reconfigure_built_dep_file(self):
- testdir = os.path.join(self.unit_test_dir, '74 dep files')
+ testdir = os.path.join(self.unit_test_dir, '75 dep files')
# Regression test: Spurious reconfigure was happening when build
# directory is inside source directory.
@@ -4617,6 +4793,242 @@ recommended as it is not supported on some platforms''')
out = self.build()
self.assertNotIn('Project configured', out)
+ def _test_junit(self, case: str) -> None:
+ try:
+ import lxml.etree as et
+ except ImportError:
+ raise unittest.SkipTest('lxml required, but not found.')
+
+ schema = et.XMLSchema(et.parse(str(Path(__file__).parent / 'data' / 'schema.xsd')))
+
+ self.init(case)
+ self.run_tests()
+
+ junit = et.parse(str(Path(self.builddir) / 'meson-logs' / 'testlog.junit.xml'))
+ try:
+ schema.assertValid(junit)
+ except et.DocumentInvalid as e:
+ self.fail(e.error_log)
+
+ def test_junit_valid_tap(self):
+ self._test_junit(os.path.join(self.common_test_dir, '213 tap tests'))
+
+ def test_junit_valid_exitcode(self):
+ self._test_junit(os.path.join(self.common_test_dir, '44 test args'))
+
+ def test_junit_valid_gtest(self):
+ self._test_junit(os.path.join(self.framework_test_dir, '2 gtest'))
+
+ def test_link_language_linker(self):
+ # TODO: there should be some way to query how we're linking things
+ # without resorting to reading the ninja.build file
+ if self.backend is not Backend.ninja:
+ raise unittest.SkipTest('This test reads the ninja file')
+
+ testdir = os.path.join(self.common_test_dir, '232 link language')
+ self.init(testdir)
+
+ build_ninja = os.path.join(self.builddir, 'build.ninja')
+ with open(build_ninja, 'r', encoding='utf-8') as f:
+ contents = f.read()
+
+ self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER')
+ self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER')
+
+ def test_commands_documented(self):
+ '''
+ Test that all listed meson commands are documented in Commands.md.
+ '''
+
+ # The docs directory is not in release tarballs.
+ if not os.path.isdir('docs'):
+ raise unittest.SkipTest('Doc directory does not exist.')
+ doc_path = 'docs/markdown_dynamic/Commands.md'
+
+ md = None
+ with open(doc_path, encoding='utf-8') as f:
+ md = f.read()
+ self.assertIsNotNone(md)
+
+ ## Get command sections
+
+ section_pattern = re.compile(r'^### (.+)$', re.MULTILINE)
+ md_command_section_matches = [i for i in section_pattern.finditer(md)]
+ md_command_sections = dict()
+ for i, s in enumerate(md_command_section_matches):
+ section_end = len(md) if i == len(md_command_section_matches) - 1 else md_command_section_matches[i + 1].start()
+ md_command_sections[s.group(1)] = (s.start(), section_end)
+
+ ## Validate commands
+
+ md_commands = set(k for k,v in md_command_sections.items())
+
+ help_output = self._run(self.meson_command + ['--help'])
+ help_commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(','))
+
+ self.assertEqual(md_commands | {'help'}, help_commands, 'Doc file: `{}`'.format(doc_path))
+
+ ## Validate that each section has proper placeholders
+
+ def get_data_pattern(command):
+ return re.compile(
+ r'^```[\r\n]'
+ r'{{ cmd_help\[\'' + command + r'\'\]\[\'usage\'\] }}[\r\n]'
+ r'^```[\r\n]'
+ r'.*?'
+ r'^```[\r\n]'
+ r'{{ cmd_help\[\'' + command + r'\'\]\[\'arguments\'\] }}[\r\n]'
+ r'^```',
+ flags = re.MULTILINE|re.DOTALL)
+
+ for command in md_commands:
+ m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1])
+ self.assertIsNotNone(m, 'Command `{}` is missing placeholders for dynamic data. Doc file: `{}`'.format(command, doc_path))
+
+ def test_coverage(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage')
+
+ def test_coverage_complex(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '109 generatorcustom')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage')
+
+ def test_coverage_html(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage-html')
+
+ def test_coverage_text(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage-text')
+
+ def test_coverage_xml(self):
+ if mesonbuild.environment.detect_msys2_arch():
+ raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2')
+ gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
+ if not gcovr_exe:
+ raise unittest.SkipTest('gcovr not found, or too old')
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ if cc.get_id() == 'clang':
+ if not mesonbuild.environment.detect_llvm_cov():
+ raise unittest.SkipTest('llvm-cov not found')
+ if cc.get_id() == 'msvc':
+ raise unittest.SkipTest('Test only applies to non-MSVC compilers')
+ self.init(testdir, extra_args=['-Db_coverage=true'])
+ self.build()
+ self.run_tests()
+ self.run_target('coverage-xml')
+
+ def test_cross_file_constants(self):
+ with temp_filename() as crossfile1, temp_filename() as crossfile2:
+ with open(crossfile1, 'w') as f:
+ f.write(textwrap.dedent(
+ '''
+ [constants]
+ compiler = 'gcc'
+ '''))
+ with open(crossfile2, 'w') as f:
+ f.write(textwrap.dedent(
+ '''
+ [constants]
+ toolchain = '/toolchain/'
+ common_flags = ['--sysroot=' + toolchain / 'sysroot']
+
+ [properties]
+ c_args = common_flags + ['-DSOMETHING']
+ cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+ [binaries]
+ c = toolchain / compiler
+ '''))
+
+ values = mesonbuild.coredata.parse_machine_files([crossfile1, crossfile2])
+ self.assertEqual(values['binaries']['c'], '/toolchain/gcc')
+ self.assertEqual(values['properties']['c_args'],
+ ['--sysroot=/toolchain/sysroot', '-DSOMETHING'])
+ self.assertEqual(values['properties']['cpp_args'],
+ ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE'])
+
+ @unittest.skipIf(is_windows(), 'Directory cleanup fails for some reason')
+ def test_wrap_git(self):
+ with tempfile.TemporaryDirectory() as tmpdir:
+ srcdir = os.path.join(tmpdir, 'src')
+ shutil.copytree(os.path.join(self.unit_test_dir, '81 wrap-git'), srcdir)
+ upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream')
+ upstream_uri = Path(upstream).as_uri()
+ _git_init(upstream)
+ with open(os.path.join(srcdir, 'subprojects', 'wrap_git.wrap'), 'w') as f:
+ f.write(textwrap.dedent('''
+ [wrap-git]
+ url = {}
+ patch_directory = wrap_git_builddef
+ revision = master
+ '''.format(upstream_uri)))
+ self.init(srcdir)
+ self.build()
+ self.run_tests()
+
class FailureTests(BasePlatformTests):
'''
Tests that test failure conditions. Build files here should be dynamically
@@ -5134,7 +5546,7 @@ class WindowsTests(BasePlatformTests):
raise
raise unittest.SkipTest('pefile module not found')
testdir = os.path.join(self.common_test_dir, '6 linkshared')
- self.init(testdir)
+ self.init(testdir, extra_args=['--buildtype=release'])
self.build()
# Test that binaries have a non-zero checksum
env = get_fake_env()
@@ -5282,7 +5694,7 @@ class DarwinTests(BasePlatformTests):
def test_removing_unused_linker_args(self):
testdir = os.path.join(self.common_test_dir, '108 has arg')
- env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic'}
+ env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic -framework Foundation'}
self.init(testdir, override_envvars=env)
@@ -5424,6 +5836,19 @@ class LinuxlikeTests(BasePlatformTests):
out = self._run(cmd + ['--libs'], override_envvars=env).strip().split()
self.assertEqual(out, ['-llibmain2', '-llibinternal'])
+ # See common/47 pkgconfig-gen/meson.build for description of the case this test
+ with open(os.path.join(privatedir1, 'simple2.pc')) as f:
+ content = f.read()
+ self.assertIn('Libs: -L${libdir} -lsimple2 -lz -lsimple1', content)
+
+ with open(os.path.join(privatedir1, 'simple3.pc')) as f:
+ content = f.read()
+ self.assertEqual(1, content.count('-lsimple3'))
+
+ with open(os.path.join(privatedir1, 'simple5.pc')) as f:
+ content = f.read()
+ self.assertNotIn('-lstat2', content)
+
def test_pkgconfig_uninstalled(self):
testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen')
self.init(testdir)
@@ -5533,6 +5958,10 @@ class LinuxlikeTests(BasePlatformTests):
self.assertRegex('\n'.join(mesonlog),
r'Run-time dependency qt5 \(modules: Core\) found: YES .* \((qmake|qmake-qt5)\)\n')
+ def glob_sofiles_without_privdir(self, g):
+ files = glob(g)
+ return [f for f in files if not f.endswith('.p')]
+
def _test_soname_impl(self, libpath, install):
if is_cygwin() or is_osx():
raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames')
@@ -5548,28 +5977,28 @@ class LinuxlikeTests(BasePlatformTests):
self.assertPathExists(nover)
self.assertFalse(os.path.islink(nover))
self.assertEqual(get_soname(nover), 'libnover.so')
- self.assertEqual(len(glob(nover[:-3] + '*')), 1)
+ self.assertEqual(len(self.glob_sofiles_without_privdir(nover[:-3] + '*')), 1)
# File with version set
verset = os.path.join(libpath, 'libverset.so')
self.assertPathExists(verset + '.4.5.6')
self.assertEqual(os.readlink(verset), 'libverset.so.4')
self.assertEqual(get_soname(verset), 'libverset.so.4')
- self.assertEqual(len(glob(verset[:-3] + '*')), 3)
+ self.assertEqual(len(self.glob_sofiles_without_privdir(verset[:-3] + '*')), 3)
# File with soversion set
soverset = os.path.join(libpath, 'libsoverset.so')
self.assertPathExists(soverset + '.1.2.3')
self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3')
self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3')
- self.assertEqual(len(glob(soverset[:-3] + '*')), 2)
+ self.assertEqual(len(self.glob_sofiles_without_privdir(soverset[:-3] + '*')), 2)
# File with version and soversion set to same values
settosame = os.path.join(libpath, 'libsettosame.so')
self.assertPathExists(settosame + '.7.8.9')
self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9')
self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9')
- self.assertEqual(len(glob(settosame[:-3] + '*')), 2)
+ self.assertEqual(len(self.glob_sofiles_without_privdir(settosame[:-3] + '*')), 2)
# File with version and soversion set to different values
bothset = os.path.join(libpath, 'libbothset.so')
@@ -5577,7 +6006,7 @@ class LinuxlikeTests(BasePlatformTests):
self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3')
self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6')
self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3')
- self.assertEqual(len(glob(bothset[:-3] + '*')), 3)
+ self.assertEqual(len(self.glob_sofiles_without_privdir(bothset[:-3] + '*')), 3)
def test_soname(self):
self._test_soname_impl(self.builddir, False)
@@ -5697,10 +6126,12 @@ class LinuxlikeTests(BasePlatformTests):
def test_unity_subproj(self):
testdir = os.path.join(self.common_test_dir, '45 subproject')
self.init(testdir, extra_args='--unity=subprojects')
- simpletest_id = Target.construct_id_from_path('subprojects/sublib', 'simpletest', '@exe')
- self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', simpletest_id, 'simpletest-unity0.c'))
- sublib_id = Target.construct_id_from_path('subprojects/sublib', 'sublib', '@sha')
- self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', sublib_id, 'sublib-unity0.c'))
+ pdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/simpletest*.p'))
+ self.assertEqual(len(pdirs), 1)
+ self.assertPathExists(os.path.join(pdirs[0], 'simpletest-unity0.c'))
+ sdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/*sublib*.p'))
+ self.assertEqual(len(sdirs), 1)
+ self.assertPathExists(os.path.join(sdirs[0], 'sublib-unity0.c'))
self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c'))
self.build()
@@ -6036,6 +6467,54 @@ class LinuxlikeTests(BasePlatformTests):
install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx'))
self.assertEqual(install_rpath, 'baz')
+ def test_global_rpath(self):
+ if is_cygwin():
+ raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH')
+ if is_osx():
+ raise unittest.SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)')
+
+ testdir = os.path.join(self.unit_test_dir, '80 global-rpath')
+ oldinstalldir = self.installdir
+
+ # Build and install an external library without DESTDIR.
+ # The external library generates a .pc file without an rpath.
+ yonder_dir = os.path.join(testdir, 'yonder')
+ yonder_prefix = os.path.join(oldinstalldir, 'yonder')
+ yonder_libdir = os.path.join(yonder_prefix, self.libdir)
+ self.prefix = yonder_prefix
+ self.installdir = yonder_prefix
+ self.init(yonder_dir)
+ self.build()
+ self.install(use_destdir=False)
+
+ # Since rpath has multiple valid formats we need to
+ # test that they are all properly used.
+ rpath_formats = [
+ ('-Wl,-rpath=', False),
+ ('-Wl,-rpath,', False),
+ ('-Wl,--just-symbols=', True),
+ ('-Wl,--just-symbols,', True),
+ ('-Wl,-R', False),
+ ('-Wl,-R,', False)
+ ]
+ for rpath_format, exception in rpath_formats:
+ # Build an app that uses that installed library.
+ # Supply the rpath to the installed library via LDFLAGS
+ # (as systems like buildroot and guix are wont to do)
+ # and verify install preserves that rpath.
+ self.new_builddir()
+ env = {'LDFLAGS': rpath_format + yonder_libdir,
+ 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')}
+ if exception:
+ with self.assertRaises(subprocess.CalledProcessError):
+ self.init(testdir, override_envvars=env)
+ break
+ self.init(testdir, override_envvars=env)
+ self.build()
+ self.install(use_destdir=False)
+ got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified'))
+ self.assertEqual(got_rpath, yonder_libdir, rpath_format)
+
@skip_if_not_base_option('b_sanitize')
def test_pch_with_address_sanitizer(self):
if is_cygwin():
@@ -6050,21 +6529,6 @@ class LinuxlikeTests(BasePlatformTests):
for i in compdb:
self.assertIn("-fsanitize=address", i["command"])
- def test_coverage(self):
- gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr()
- if not gcovr_exe:
- raise unittest.SkipTest('gcovr not found')
- if not shutil.which('genhtml') and not gcovr_new_rootdir:
- raise unittest.SkipTest('genhtml not found and gcovr is too old')
- if 'clang' in os.environ.get('CC', ''):
- # We need to use llvm-cov instead of gcovr with clang
- raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!')
- testdir = os.path.join(self.common_test_dir, '1 trivial')
- self.init(testdir, extra_args=['-Db_coverage=true'])
- self.build()
- self.run_tests()
- self.run_target('coverage-html')
-
def test_cross_find_program(self):
testdir = os.path.join(self.unit_test_dir, '11 cross prog')
crossfile = tempfile.NamedTemporaryFile(mode='w')
@@ -6347,13 +6811,15 @@ class LinuxlikeTests(BasePlatformTests):
self.build(override_envvars=env)
# test uninstalled
self.run_tests(override_envvars=env)
- if not is_osx():
- # Rest of the workflow only works on macOS
+ if not (is_osx() or is_linux()):
return
# test running after installation
self.install(use_destdir=False)
prog = os.path.join(self.installdir, 'bin', 'prog')
self._run([prog])
+ if not is_osx():
+ # Rest of the workflow only works on macOS
+ return
out = self._run(['otool', '-L', prog])
self.assertNotIn('@rpath', out)
## New builddir for testing that DESTDIR is not added to install_name
@@ -6370,6 +6836,57 @@ class LinuxlikeTests(BasePlatformTests):
# Ensure that the otool output does not contain self.installdir
self.assertNotRegex(out, self.installdir + '.*dylib ')
+ @skipIfNoPkgconfig
+ def test_usage_pkgconfig_prefixes(self):
+ '''
+ Build and install two external libraries, to different prefixes,
+ then build and install a client program that finds them via pkgconfig,
+ and verify the installed client program runs.
+ '''
+ oldinstalldir = self.installdir
+
+ # Build and install both external libraries without DESTDIR
+ val1dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val1')
+ val1prefix = os.path.join(oldinstalldir, 'val1')
+ self.prefix = val1prefix
+ self.installdir = val1prefix
+ self.init(val1dir)
+ self.build()
+ self.install(use_destdir=False)
+ self.new_builddir()
+
+ env1 = {}
+ env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig')
+ val2dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val2')
+ val2prefix = os.path.join(oldinstalldir, 'val2')
+ self.prefix = val2prefix
+ self.installdir = val2prefix
+ self.init(val2dir, override_envvars=env1)
+ self.build()
+ self.install(use_destdir=False)
+ self.new_builddir()
+
+ # Build, install, and run the client program
+ env2 = {}
+ env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig')
+ testdir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'client')
+ testprefix = os.path.join(oldinstalldir, 'client')
+ self.prefix = testprefix
+ self.installdir = testprefix
+ self.init(testdir, override_envvars=env2)
+ self.build()
+ self.install(use_destdir=False)
+ prog = os.path.join(self.installdir, 'bin', 'client')
+ env3 = {}
+ if is_cygwin():
+ env3['PATH'] = os.path.join(val1prefix, 'bin') + \
+ os.pathsep + \
+ os.path.join(val2prefix, 'bin') + \
+ os.pathsep + os.environ['PATH']
+ out = self._run([prog], override_envvars=env3).strip()
+ # Expected output is val1 + val2 = 3
+ self.assertEqual(out, '3')
+
def install_subdir_invalid_symlinks(self, testdir, subdir_path):
'''
Test that installation of broken symlinks works fine.
@@ -6409,6 +6926,11 @@ class LinuxlikeTests(BasePlatformTests):
testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup')
if is_cygwin() or is_osx():
raise unittest.SkipTest('Not applicable on Cygwin or OSX.')
+ env = get_fake_env()
+ cc = env.detect_c_compiler(MachineChoice.HOST)
+ linker = cc.linker
+ if not linker.export_dynamic_args(env):
+ raise unittest.SkipTest('Not applicable for linkers without --export-dynamic')
self.init(testdir)
build_ninja = os.path.join(self.builddir, 'build.ninja')
max_count = 0
@@ -6560,7 +7082,7 @@ c = ['{0}']
return hashlib.sha256(f.read()).hexdigest()
def test_wrap_with_file_url(self):
- testdir = os.path.join(self.unit_test_dir, '73 wrap file url')
+ testdir = os.path.join(self.unit_test_dir, '74 wrap file url')
source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz')
patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz')
wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap')
@@ -6590,12 +7112,25 @@ c = ['{0}']
windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo'))
os.unlink(wrap_filename)
+ def test_no_rpath_for_static(self):
+ testdir = os.path.join(self.common_test_dir, '5 linkstatic')
+ self.init(testdir)
+ self.build()
+ build_rpath = get_rpath(os.path.join(self.builddir, 'prog'))
+ self.assertIsNone(build_rpath)
+
+
+class BaseLinuxCrossTests(BasePlatformTests):
+ # Don't pass --libdir when cross-compiling. We have tests that
+ # check whether meson auto-detects it correctly.
+ libdir = None
+
def should_run_cross_arm_tests():
return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm')
@unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM")
-class LinuxCrossArmTests(BasePlatformTests):
+class LinuxCrossArmTests(BaseLinuxCrossTests):
'''
Tests that cross-compilation to Linux/ARM works
'''
@@ -6642,6 +7177,17 @@ class LinuxCrossArmTests(BasePlatformTests):
return
self.assertTrue(False, 'Option libdir not in introspect data.')
+ def test_cross_libdir_subproject(self):
+ # Guard against a regression where calling "subproject"
+ # would reset the value of libdir to its default value.
+ testdir = os.path.join(self.unit_test_dir, '78 subdir libdir')
+ self.init(testdir, extra_args=['--libdir=fuf'])
+ for i in self.introspect('--buildoptions'):
+ if i['name'] == 'libdir':
+ self.assertEqual(i['value'], 'fuf')
+ return
+ self.assertTrue(False, 'Libdir specified on command line gets reset.')
+
def test_std_remains(self):
# C_std defined in project options must be in effect also when cross compiling.
testdir = os.path.join(self.unit_test_dir, '51 noncross options')
@@ -6665,7 +7211,7 @@ def should_run_cross_mingw_tests():
return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin())
@unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW")
-class LinuxCrossMingwTests(BasePlatformTests):
+class LinuxCrossMingwTests(BaseLinuxCrossTests):
'''
Tests that cross-compilation to Windows/MinGW works
'''
@@ -7126,7 +7672,12 @@ class NativeFileTests(BasePlatformTests):
for section, entries in values.items():
f.write('[{}]\n'.format(section))
for k, v in entries.items():
- f.write("{}='{}'\n".format(k, v))
+ if isinstance(v, (bool, int, float)):
+ f.write("{}={}\n".format(k, v))
+ elif isinstance(v, list):
+ f.write("{}=[{}]\n".format(k, ', '.join(["'{}'".format(w) for w in v])))
+ else:
+ f.write("{}='{}'\n".format(k, v))
return filename
def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs):
@@ -7450,6 +8001,219 @@ class NativeFileTests(BasePlatformTests):
self.init(testcase, extra_args=['--native-file', config])
self.build()
+ def test_user_options(self):
+ testcase = os.path.join(self.common_test_dir, '43 options')
+ for opt, value in [('testoption', 'some other val'), ('other_one', True),
+ ('combo_opt', 'one'), ('array_opt', ['two']),
+ ('integer_opt', 0)]:
+ config = self.helper_create_native_file({'project options': {opt: value}})
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self.init(testcase, extra_args=['--native-file', config])
+ self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option')
+
+ def test_user_options_command_line_overrides(self):
+ testcase = os.path.join(self.common_test_dir, '43 options')
+ config = self.helper_create_native_file({'project options': {'other_one': True}})
+ self.init(testcase, extra_args=['--native-file', config, '-Dother_one=false'])
+
+ def test_user_options_subproject(self):
+ testcase = os.path.join(self.unit_test_dir, '79 user options for subproject')
+
+ s = os.path.join(testcase, 'subprojects')
+ if not os.path.exists(s):
+ os.mkdir(s)
+ s = os.path.join(s, 'sub')
+ if not os.path.exists(s):
+ sub = os.path.join(self.common_test_dir, '43 options')
+ shutil.copytree(sub, s)
+
+ for opt, value in [('testoption', 'some other val'), ('other_one', True),
+ ('combo_opt', 'one'), ('array_opt', ['two']),
+ ('integer_opt', 0)]:
+ config = self.helper_create_native_file({'sub:project options': {opt: value}})
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self.init(testcase, extra_args=['--native-file', config])
+ self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option')
+
+ def test_option_bool(self):
+ # Bools are allowed to be unquoted
+ testcase = os.path.join(self.common_test_dir, '1 trivial')
+ config = self.helper_create_native_file({'built-in options': {'werror': True}})
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ # Test that the bool value from the native file is used
+ if 'werror' in each['name']:
+ self.assertEqual(each['value'], True)
+ break
+ else:
+ self.fail('Did not find werror in build options?')
+
+ def test_option_integer(self):
+ # Integers are allowed to be unquoted
+ testcase = os.path.join(self.common_test_dir, '1 trivial')
+ config = self.helper_create_native_file({'built-in options': {'unity_size': 100}})
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ # Test that the integer value from the native file is used
+ if 'unity_size' in each['name']:
+ self.assertEqual(each['value'], 100)
+ break
+ else:
+ self.fail('Did not find unity_size in build options?')
+
+ def test_builtin_options(self):
+ testcase = os.path.join(self.common_test_dir, '2 cpp')
+ config = self.helper_create_native_file({'built-in options': {'cpp_std': 'c++14'}})
+
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'cpp_std':
+ self.assertEqual(each['value'], 'c++14')
+ break
+ else:
+ self.fail('Did not find cpp_std in build options?')
+
+ def test_builtin_options_env_overrides_conf(self):
+ testcase = os.path.join(self.common_test_dir, '2 cpp')
+ config = self.helper_create_native_file({'built-in options': {'pkg_config_path': '/foo'}})
+
+ self.init(testcase, extra_args=['--native-file', config], override_envvars={'PKG_CONFIG_PATH': '/bar'})
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'pkg_config_path':
+ self.assertEqual(each['value'], ['/bar'])
+ break
+ else:
+ self.fail('Did not find pkg_config_path in build options?')
+
+ def test_builtin_options_subprojects(self):
+ testcase = os.path.join(self.common_test_dir, '102 subproject subdir')
+ config = self.helper_create_native_file({'built-in options': {'default_library': 'both', 'c_args': ['-Dfoo']}, 'sub:built-in options': {'default_library': 'static'}})
+
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ found = 0
+ for each in configuration:
+ # Test that non-per-subproject options are inherited from the parent
+ if 'c_args' in each['name']:
+ # This path will be hit twice, once for build and once for host,
+ self.assertEqual(each['value'], ['-Dfoo'])
+ found += 1
+ elif each['name'] == 'default_library':
+ self.assertEqual(each['value'], 'both')
+ found += 1
+ elif each['name'] == 'sub:default_library':
+ self.assertEqual(each['value'], 'static')
+ found += 1
+ self.assertEqual(found, 4, 'Did not find all four expected option values')
+
+ def test_builtin_options_subprojects_overrides_buildfiles(self):
+ # If the buildfile says subproject(... default_library: shared), ensure that's overwritten
+ testcase = os.path.join(self.common_test_dir, '230 persubproject options')
+ config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}})
+
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self.init(testcase, extra_args=['--native-file', config])
+ self.assertIn('Parent should override default_library', cm.exception.stdout)
+
+ def test_builtin_options_subprojects_inherits_parent_override(self):
+ # If the buildfile says subproject(... default_library: shared), ensure that's overwritten
+ testcase = os.path.join(self.common_test_dir, '230 persubproject options')
+ config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}})
+
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self.init(testcase, extra_args=['--native-file', config])
+ self.assertIn('Parent should override default_library', cm.exception.stdout)
+
+ def test_builtin_options_compiler_properties(self):
+ # the properties section can have lang_args, and those need to be
+ # overwritten by the built-in options
+ testcase = os.path.join(self.common_test_dir, '1 trivial')
+ config = self.helper_create_native_file({
+ 'built-in options': {'c_args': ['-DFOO']},
+ 'properties': {'c_args': ['-DBAR']},
+ })
+
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'c_args':
+ self.assertEqual(each['value'], ['-DFOO'])
+ break
+ else:
+ self.fail('Did not find c_args in build options?')
+
+ def test_builtin_options_compiler_properties_legacy(self):
+ # The legacy placement in properties is still valid if a 'built-in
+ # options' setting is present, but doesn't have the lang_args
+ testcase = os.path.join(self.common_test_dir, '1 trivial')
+ config = self.helper_create_native_file({
+ 'built-in options': {'default_library': 'static'},
+ 'properties': {'c_args': ['-DBAR']},
+ })
+
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'c_args':
+ self.assertEqual(each['value'], ['-DBAR'])
+ break
+ else:
+ self.fail('Did not find c_args in build options?')
+
+ def test_builtin_options_paths(self):
+ # the properties section can have lang_args, and those need to be
+ # overwritten by the built-in options
+ testcase = os.path.join(self.common_test_dir, '1 trivial')
+ config = self.helper_create_native_file({
+ 'built-in options': {'bindir': 'foo'},
+ 'paths': {'bindir': 'bar'},
+ })
+
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'bindir':
+ self.assertEqual(each['value'], 'foo')
+ break
+ else:
+ self.fail('Did not find bindir in build options?')
+
+ def test_builtin_options_paths_legacy(self):
+ testcase = os.path.join(self.common_test_dir, '1 trivial')
+ config = self.helper_create_native_file({
+ 'built-in options': {'default_library': 'static'},
+ 'paths': {'bindir': 'bar'},
+ })
+
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'bindir':
+ self.assertEqual(each['value'], 'bar')
+ break
+ else:
+ self.fail('Did not find bindir in build options?')
+
+ def test_builtin_options_paths_legacy(self):  # FIXME: exact duplicate of the method above; this def silently shadows the first
+ testcase = os.path.join(self.common_test_dir, '1 trivial')
+ config = self.helper_create_native_file({
+ 'built-in options': {'default_library': 'static'},
+ 'paths': {'bindir': 'bar'},
+ })
+
+ self.init(testcase, extra_args=['--native-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'bindir':
+ self.assertEqual(each['value'], 'bar')
+ break
+ else:
+ self.fail('Did not find bindir in build options?')
+
class CrossFileTests(BasePlatformTests):
@@ -7459,6 +8223,154 @@ class CrossFileTests(BasePlatformTests):
This is mainly aimed to testing overrides from cross files.
"""
+ def setUp(self):
+ super().setUp()
+ self.current_config = 0
+ self.current_wrapper = 0
+
+ def _cross_file_generator(self, *, needs_exe_wrapper: bool = False,
+ exe_wrapper: T.Optional[T.List[str]] = None) -> str:
+ if is_windows():
+ raise unittest.SkipTest('Cannot run this test on non-mingw/non-cygwin windows')
+ if is_sunos():
+ cc = 'gcc'
+ else:
+ cc = 'cc'
+
+ return textwrap.dedent("""\
+ [binaries]
+ c = '/usr/bin/{}'
+ ar = '/usr/bin/ar'
+ strip = '/usr/bin/ar'
+ {}
+
+ [properties]
+ needs_exe_wrapper = {}
+
+ [host_machine]
+ system = 'linux'
+ cpu_family = 'x86'
+ cpu = 'i686'
+ endian = 'little'
+ """.format(cc,
+ 'exe_wrapper = {}'.format(str(exe_wrapper)) if exe_wrapper is not None else '',
+ needs_exe_wrapper))
+
+ def _stub_exe_wrapper(self) -> str:
+ return textwrap.dedent('''\
+ #!/usr/bin/env python3
+ import subprocess
+ import sys
+
+ sys.exit(subprocess.run(sys.argv[1:]).returncode)
+ ''')
+
+ def test_needs_exe_wrapper_true(self):
+ testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+ with tempfile.TemporaryDirectory() as d:
+ p = Path(d) / 'crossfile'
+ with p.open('wt') as f:
+ f.write(self._cross_file_generator(needs_exe_wrapper=True))
+ self.init(testdir, extra_args=['--cross-file=' + str(p)])
+ out = self.run_target('test')
+ self.assertRegex(out, r'Skipped:\s*1\s*\n')
+
+ def test_needs_exe_wrapper_false(self):
+ testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+ with tempfile.TemporaryDirectory() as d:
+ p = Path(d) / 'crossfile'
+ with p.open('wt') as f:
+ f.write(self._cross_file_generator(needs_exe_wrapper=False))
+ self.init(testdir, extra_args=['--cross-file=' + str(p)])
+ out = self.run_target('test')
+ self.assertNotRegex(out, r'Skipped:\s*1\n')
+
+ def test_needs_exe_wrapper_true_wrapper(self):
+ testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+ with tempfile.TemporaryDirectory() as d:
+ s = Path(d) / 'wrapper.py'
+ with s.open('wt') as f:
+ f.write(self._stub_exe_wrapper())
+ s.chmod(0o774)
+ p = Path(d) / 'crossfile'
+ with p.open('wt') as f:
+ f.write(self._cross_file_generator(
+ needs_exe_wrapper=True,
+ exe_wrapper=[str(s)]))
+
+ self.init(testdir, extra_args=['--cross-file=' + str(p), '-Dexpect=true'])
+ out = self.run_target('test')
+ self.assertRegex(out, r'Ok:\s*3\s*\n')
+
+ def test_cross_exe_passed_no_wrapper(self):
+ testdir = os.path.join(self.unit_test_dir, '72 cross test passed')
+ with tempfile.TemporaryDirectory() as d:
+ p = Path(d) / 'crossfile'
+ with p.open('wt') as f:
+ f.write(self._cross_file_generator(needs_exe_wrapper=True))
+
+ self.init(testdir, extra_args=['--cross-file=' + str(p)])
+ self.build()
+ out = self.run_target('test')
+ self.assertRegex(out, r'Skipped:\s*1\s*\n')
+
+ # The test uses mocking and thus requires that the current process is the
+ # one to run the Meson steps. If we are using an external test executable
+ # (most commonly in Debian autopkgtests) then the mocking won't work.
+ @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.')
+ def test_cross_file_system_paths(self):
+ if is_windows():
+ raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)')
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ cross_content = self._cross_file_generator()
+ with tempfile.TemporaryDirectory() as d:
+ dir_ = os.path.join(d, 'meson', 'cross')
+ os.makedirs(dir_)
+ with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
+ f.write(cross_content)
+ name = os.path.basename(f.name)
+
+ with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}):
+ self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
+ self.wipe()
+
+ with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}):
+ os.environ.pop('XDG_DATA_HOME', None)
+ self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
+ self.wipe()
+
+ with tempfile.TemporaryDirectory() as d:
+ dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross')
+ os.makedirs(dir_)
+ with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f:
+ f.write(cross_content)
+ name = os.path.basename(f.name)
+
+ # If XDG_DATA_HOME is set in the environment running the
+ # tests this test will fail, so mock the environment, pop
+ # it, then test
+ with mock.patch.dict(os.environ):
+ os.environ.pop('XDG_DATA_HOME', None)
+ with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)):
+ self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True)
+ self.wipe()
+
+ def helper_create_cross_file(self, values):
+ """Create a config file as a temporary file.
+
+ values should be a nested dictionary structure of {section: {key:
+ value}}
+ """
+ filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config))
+ self.current_config += 1
+ with open(filename, 'wt') as f:
+ for section, entries in values.items():
+ f.write('[{}]\n'.format(section))
+ for k, v in entries.items():
+ f.write("{}='{}'\n".format(k, v))
+ return filename
+
def test_cross_file_dirs(self):
testcase = os.path.join(self.unit_test_dir, '60 native file override')
self.init(testcase, default_args=False,
@@ -7515,6 +8427,89 @@ class CrossFileTests(BasePlatformTests):
'-Ddef_sharedstatedir=sharedstatebar',
'-Ddef_sysconfdir=sysconfbar'])
+ def test_user_options(self):
+ # This is just a touch test for cross file, since the implementation
+ # shares code after loading from the files
+ testcase = os.path.join(self.common_test_dir, '43 options')
+ config = self.helper_create_cross_file({'project options': {'testoption': 'some other value'}})
+ with self.assertRaises(subprocess.CalledProcessError) as cm:
+ self.init(testcase, extra_args=['--cross-file', config])
+ self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option')
+
+ def test_builtin_options(self):
+ testcase = os.path.join(self.common_test_dir, '2 cpp')
+ config = self.helper_create_cross_file({'built-in options': {'cpp_std': 'c++14'}})
+
+ self.init(testcase, extra_args=['--cross-file', config])
+ configuration = self.introspect('--buildoptions')
+ for each in configuration:
+ if each['name'] == 'cpp_std':
+ self.assertEqual(each['value'], 'c++14')
+ break
+ else:
+ self.fail('No c++ standard set?')
+
+ def test_builtin_options_per_machine(self):
+ """Test options that are allowed to be set on a per-machine basis.
+
+ Such options could be passed twice, once for the build machine, and
+ once for the host machine. I've picked pkg-config path, but any would
+ do that can be set for both.
+ """
+ testcase = os.path.join(self.common_test_dir, '2 cpp')
+ cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross/path', 'cpp_std': 'c++17'}})
+ native = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native/path', 'cpp_std': 'c++14'}})
+
+ # Ensure that PKG_CONFIG_PATH is not set in the environment
+ with mock.patch.dict('os.environ'):
+ for k in ['PKG_CONFIG_PATH', 'PKG_CONFIG_PATH_FOR_BUILD']:
+ try:
+ del os.environ[k]
+ except KeyError:
+ pass
+ self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native])
+
+ configuration = self.introspect('--buildoptions')
+ found = 0
+ for each in configuration:
+ if each['name'] == 'pkg_config_path':
+ self.assertEqual(each['value'], ['/cross/path'])
+ found += 1
+ elif each['name'] == 'cpp_std':
+ self.assertEqual(each['value'], 'c++17')
+ found += 1
+ elif each['name'] == 'build.pkg_config_path':
+ self.assertEqual(each['value'], ['/native/path'])
+ found += 1
+ elif each['name'] == 'build.cpp_std':
+ self.assertEqual(each['value'], 'c++14')
+ found += 1
+
+ if found == 4:
+ break
+ self.assertEqual(found, 4, 'Did not find all sections.')
+
+ def test_builtin_options_env_overrides_conf(self):
+ testcase = os.path.join(self.common_test_dir, '2 cpp')
+ config = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/foo'}})
+ cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/foo'}})
+
+ self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross],
+ override_envvars={'PKG_CONFIG_PATH': '/bar', 'PKG_CONFIG_PATH_FOR_BUILD': '/dir'})
+ configuration = self.introspect('--buildoptions')
+ found = 0
+ for each in configuration:
+ if each['name'] == 'pkg_config_path':
+ self.assertEqual(each['value'], ['/bar'])
+ found += 1
+ elif each['name'] == 'build.pkg_config_path':
+ self.assertEqual(each['value'], ['/dir'])
+ found += 1
+ if found == 2:
+ break
+ self.assertEqual(found, 2, 'Did not find all sections.')
+
+
class TAPParserTests(unittest.TestCase):
def assert_test(self, events, **kwargs):
if 'explanation' not in kwargs:
@@ -7827,6 +8822,9 @@ def convert_args(argv):
test_list = []
for arg in argv:
if arg.startswith('-'):
+ if arg in ('-f', '--failfast'):
+ arg = '--exitfirst'
+ pytest_args.append(arg)
continue
# ClassName.test_name => 'ClassName and test_name'
if '.' in arg:
@@ -7858,4 +8856,5 @@ def main():
return unittest.main(defaultTest=cases, buffer=True)
if __name__ == '__main__':
+ print('Meson build system', mesonbuild.coredata.version, 'Unit Tests')
raise SystemExit(main())
diff --git a/setup.py b/setup.py
index 1f95be7..145f19c 100644
--- a/setup.py
+++ b/setup.py
@@ -37,10 +37,6 @@ packages = ['mesonbuild',
'mesonbuild.scripts',
'mesonbuild.templates',
'mesonbuild.wrap']
-package_data = {
- 'mesonbuild.dependencies': ['data/CMakeLists.txt', 'data/CMakeListsLLVM.txt', 'data/CMakePathInfo.txt'],
- 'mesonbuild.cmake': ['data/run_ctgt.py', 'data/preload.cmake'],
-}
data_files = []
if sys.platform != 'win32':
# Only useful on UNIX-like systems
@@ -51,6 +47,5 @@ if __name__ == '__main__':
setup(name='meson',
version=version,
packages=packages,
- package_data=package_data,
entry_points=entries,
data_files=data_files,)
diff --git a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
index 9798209..9c95636 100644
--- a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt
@@ -8,5 +8,7 @@ include_directories(${CMAKE_CURRENT_BINARY_DIR})
add_definitions("-DDO_NOTHING_JUST_A_FLAG=1")
add_library(cmModLib++ SHARED cmMod.cpp)
+target_compile_definitions(cmModLib++ PRIVATE MESON_MAGIC_FLAG=21)
+target_compile_definitions(cmModLib++ INTERFACE MESON_MAGIC_FLAG=42)
include(GenerateExportHeader)
generate_export_header(cmModLib++)
diff --git a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp
index d3141d5..f4cbea0 100644
--- a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp
+++ b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp
@@ -2,6 +2,10 @@
using namespace std;
+#if MESON_MAGIC_FLAG != 21
+#error "Invalid MESON_MAGIC_FLAG (private)"
+#endif
+
cmModClass::cmModClass(string foo) {
str = foo + " World";
}
diff --git a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp
index 0e6dc04..4445e1f 100644
--- a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp
+++ b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp
@@ -3,6 +3,10 @@
#include "cmmodlib++_export.h"
#include <string>
+#if MESON_MAGIC_FLAG != 42 && MESON_MAGIC_FLAG != 21
+#error "Invalid MESON_MAGIC_FLAG"
+#endif
+
class CMMODLIB___EXPORT cmModClass {
private:
std::string str;
diff --git a/test cases/cmake/10 header only/main.cpp b/test cases/cmake/10 header only/main.cpp
index 9507961..1417881 100644
--- a/test cases/cmake/10 header only/main.cpp
+++ b/test cases/cmake/10 header only/main.cpp
@@ -3,8 +3,14 @@
using namespace std;
+#define EXPECTED "Hello World compDef 42"
+
int main(void) {
cmModClass obj("Hello");
cout << obj.getStr() << endl;
+ if (obj.getStr() != EXPECTED) {
+ cerr << "Expected: '" << EXPECTED << "'" << endl;
+ return 1;
+ }
return 0;
}
diff --git a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
index f5d9a47..e01b6e2 100644
--- a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt
@@ -9,3 +9,4 @@ add_library(cmModLib INTERFACE)
set_target_properties(cmModLib PROPERTIES INTERFACE_COMPILE_OPTIONS "-DCMAKE_FLAG_MUST_BE_PRESENT")
target_include_directories(cmModLib INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}/include")
target_compile_definitions(cmModLib INTERFACE -DCMAKE_COMPILER_DEFINE_STR="compDef")
+target_compile_definitions(cmModLib INTERFACE MESON_MAGIC_FLAG=42)
diff --git a/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp b/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp
index 7ea72f7..fe01040 100644
--- a/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp
+++ b/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp
@@ -6,6 +6,9 @@
#error "The flag CMAKE_FLAG_MUST_BE_PRESENT was not set"
#endif
+#define xstr(s) str(s)
+#define str(s) #s
+
class cmModClass {
private:
std::string str;
@@ -13,6 +16,8 @@ class cmModClass {
cmModClass(std::string foo) {
str = foo + " World ";
str += CMAKE_COMPILER_DEFINE_STR;
+ str += ' ';
+ str += xstr(MESON_MAGIC_FLAG);
}
inline std::string getStr() const { return str; }
diff --git a/test cases/cmake/19 advanced options/main.cpp b/test cases/cmake/19 advanced options/main.cpp
new file mode 100644
index 0000000..6a071cc
--- /dev/null
+++ b/test cases/cmake/19 advanced options/main.cpp
@@ -0,0 +1,18 @@
+#include <iostream>
+#include <cmMod.hpp>
+#include <cmTest.hpp>
+
+using namespace std;
+
+int main(void) {
+ cmModClass obj("Hello");
+ cout << obj.getStr() << endl;
+
+ int v1 = obj.getInt();
+ int v2 = getTestInt();
+ if (v1 != ((1 + v2) * 2)) {
+ cerr << "Number test failed" << endl;
+ return 1;
+ }
+ return 0;
+}
diff --git a/test cases/cmake/19 advanced options/meson.build b/test cases/cmake/19 advanced options/meson.build
new file mode 100644
index 0000000..6332ca4
--- /dev/null
+++ b/test cases/cmake/19 advanced options/meson.build
@@ -0,0 +1,29 @@
+project('cmake_set_opt', ['c', 'cpp'])
+
+comp = meson.get_compiler('cpp')
+if comp.get_argument_syntax() == 'msvc'
+ error('MESON_SKIP_TEST: MSVC is not supported because it does not support C++11')
+endif
+
+cm = import('cmake')
+opts = cm.subproject_options()
+
+opts.add_cmake_defines({'SOME_CMAKE_VAR': 'something', 'SOME_OTHER_VAR': true})
+
+opts.set_override_option('cpp_std', 'c++11') # Global is C++11
+opts.set_override_option('cpp_std', 'c++14', target: 'cmModLib++') # Override it with C++14 for cmModLib++
+
+opts.append_compile_args('cpp', '-DMESON_GLOBAL_FLAG=1')
+opts.append_compile_args('cpp', ['-DMESON_SPECIAL_FLAG1=1', ['-DMESON_SPECIAL_FLAG2=1']], target: 'cmModLib++')
+opts.append_compile_args('cpp', '-DMESON_MAGIC_INT=42', target: 'cmModLib++')
+opts.append_compile_args('cpp', [[[['-DMESON_MAGIC_INT=20']]]], target: 'cmTestLib')
+
+opts.set_install(false)
+opts.set_install(true, target: 'testEXE')
+
+sp = cm.subproject('cmOpts', options: opts)
+dep1 = sp.dependency('cmModLib++')
+dep2 = sp.dependency('cmTestLib')
+
+exe1 = executable('main', ['main.cpp'], dependencies: [dep1, dep2])
+test('test1', exe1)
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt b/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt
new file mode 100644
index 0000000..584841e
--- /dev/null
+++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt
@@ -0,0 +1,18 @@
+cmake_minimum_required(VERSION 3.7)
+
+project(CmOpts)
+
+set(CMAKE_CXX_STANDARD 98)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
+if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something")
+ message(FATAL_ERROR "Setting the CMake var failed")
+endif()
+
+add_library(cmModLib++ STATIC cmMod.cpp)
+add_library(cmTestLib STATIC cmTest.cpp)
+add_executable(testEXE main.cpp)
+
+target_link_libraries(testEXE cmModLib++)
+
+install(TARGETS cmTestLib ARCHIVE DESTINATION lib RUNTIME DESTINATION bin)
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp
new file mode 100644
index 0000000..7651b60
--- /dev/null
+++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp
@@ -0,0 +1,31 @@
+#include "cmMod.hpp"
+
+using namespace std;
+
+#if __cplusplus < 201402L
+#error "At least C++14 is required"
+#endif
+
+#ifndef MESON_GLOBAL_FLAG
+#error "MESON_GLOBAL_FLAG was not set"
+#endif
+
+#ifndef MESON_SPECIAL_FLAG1
+#error "MESON_SPECIAL_FLAG1 was not set"
+#endif
+
+#ifndef MESON_SPECIAL_FLAG2
+#error "MESON_SPECIAL_FLAG2 was not set"
+#endif
+
+cmModClass::cmModClass(string foo) {
+ str = foo + " World";
+}
+
+string cmModClass::getStr() const {
+ return str;
+}
+
+int cmModClass::getInt() const {
+ return MESON_MAGIC_INT;
+}
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp
new file mode 100644
index 0000000..0748936
--- /dev/null
+++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp
@@ -0,0 +1,14 @@
+#pragma once
+
+#include <string>
+
+class cmModClass {
+private:
+ std::string str;
+
+public:
+ cmModClass(std::string foo);
+
+ std::string getStr() const;
+ int getInt() const;
+};
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp
new file mode 100644
index 0000000..a00cdcd
--- /dev/null
+++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp
@@ -0,0 +1,25 @@
+#include "cmTest.hpp"
+
+#if __cplusplus < 201103L
+#error "At least C++11 is required"
+#endif
+
+#if __cplusplus >= 201402L
+#error "At most C++11 is required"
+#endif
+
+#ifndef MESON_GLOBAL_FLAG
+#error "MESON_GLOBAL_FLAG was not set"
+#endif
+
+#ifdef MESON_SPECIAL_FLAG1
+#error "MESON_SPECIAL_FLAG1 *was* set"
+#endif
+
+#ifdef MESON_SPECIAL_FLAG2
+#error "MESON_SPECIAL_FLAG2 *was* set"
+#endif
+
+int getTestInt() {
+ return MESON_MAGIC_INT;
+}
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp
new file mode 100644
index 0000000..5a3bf7b
--- /dev/null
+++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp
@@ -0,0 +1,3 @@
+#pragma once
+
+int getTestInt();
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp
new file mode 100644
index 0000000..497d1ce
--- /dev/null
+++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp
@@ -0,0 +1,10 @@
+#include <iostream>
+#include "cmMod.hpp"
+
+using namespace std;
+
+int main(void) {
+ cmModClass obj("Hello (LIB TEST)");
+ cout << obj.getStr() << endl;
+ return 0;
+}
diff --git a/test cases/cmake/19 advanced options/test.json b/test cases/cmake/19 advanced options/test.json
new file mode 100644
index 0000000..e2d9c05
--- /dev/null
+++ b/test cases/cmake/19 advanced options/test.json
@@ -0,0 +1,8 @@
+{
+ "installed": [
+ {"type": "exe", "file": "usr/bin/cm_testEXE"}
+ ],
+ "tools": {
+ "cmake": ">=3.11"
+ }
+}
diff --git a/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt
index 50b1049..c9b2a20 100644
--- a/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt
+++ b/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt
@@ -20,7 +20,7 @@ set_target_properties(cmModLib PROPERTIES VERSION 1.0.1)
add_executable(testEXE main.cpp)
target_link_libraries(cmModLib ZLIB::ZLIB)
-target_link_libraries(cmModLibStatic ZLIB::ZLIB)
+target_link_libraries(cmModLibStatic ;ZLIB::ZLIB;)
target_link_libraries(testEXE cmModLib)
target_compile_definitions(cmModLibStatic PUBLIC CMMODLIB_STATIC_DEFINE)
diff --git a/test cases/cmake/2 advanced/test.json b/test cases/cmake/2 advanced/test.json
index 11aad94..e12f530 100644
--- a/test cases/cmake/2 advanced/test.json
+++ b/test cases/cmake/2 advanced/test.json
@@ -4,5 +4,8 @@
{"type": "implib", "platform": "cygwin", "file": "usr/lib/libcm_cmModLib"},
{"type": "implib", "platform": "!cygwin", "file": "usr/bin/libcm_cmModLib"},
{"type": "exe", "file": "usr/bin/cm_testEXE"}
- ]
+ ],
+ "tools": {
+ "cmake": ">=3.11"
+ }
}
diff --git a/test cases/cmake/20 cmake file/foolib.cmake.in b/test cases/cmake/20 cmake file/foolib.cmake.in
new file mode 100644
index 0000000..16e992b
--- /dev/null
+++ b/test cases/cmake/20 cmake file/foolib.cmake.in
@@ -0,0 +1 @@
+@foo@
diff --git a/test cases/cmake/20 cmake file/meson.build b/test cases/cmake/20 cmake file/meson.build
new file mode 100644
index 0000000..758bbee
--- /dev/null
+++ b/test cases/cmake/20 cmake file/meson.build
@@ -0,0 +1,14 @@
+project(
+ 'cmake config file',
+)
+
+cmake = import('cmake')
+
+cmake_conf = configuration_data()
+cmake_conf.set_quoted('foo', 'bar')
+cmake.configure_package_config_file(
+ name : 'foolib',
+ input : 'foolib.cmake.in',
+ install_dir : get_option('libdir') / 'cmake',
+ configuration : cmake_conf,
+)
diff --git a/test cases/cmake/20 cmake file/test.json b/test cases/cmake/20 cmake file/test.json
new file mode 100644
index 0000000..a8c4ba3
--- /dev/null
+++ b/test cases/cmake/20 cmake file/test.json
@@ -0,0 +1,5 @@
+{
+ "installed": [
+ {"file": "usr/lib/cmake/foolibConfig.cmake", "type": "file"}
+ ]
+}
diff --git a/test cases/cmake/3 advanced no dep/test.json b/test cases/cmake/3 advanced no dep/test.json
index 24c89c4..98a1719 100644
--- a/test cases/cmake/3 advanced no dep/test.json
+++ b/test cases/cmake/3 advanced no dep/test.json
@@ -8,5 +8,8 @@
{"type": "exe", "file": "usr/bin/cm_testEXE"},
{"type": "pdb", "file": "usr/bin/cm_testEXE2"},
{"type": "exe", "file": "usr/bin/cm_testEXE2"}
- ]
+ ],
+ "tools": {
+ "cmake": ">=3.11"
+ }
}
diff --git a/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt b/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt
index 62b5990..873b9b3 100644
--- a/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt
+++ b/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt
@@ -1,5 +1,10 @@
cmake_minimum_required(VERSION 3.7)
+project(testPro)
if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something")
message(FATAL_ERROR "Setting the CMake var failed")
endif()
+
+if(NOT "${CMAKE_PREFIX_PATH}" STREQUAL "val1;val2")
+ message(FATAL_ERROR "Setting the CMAKE_PREFIX_PATH failed '${CMAKE_PREFIX_PATH}'")
+endif()
diff --git a/test cases/cmake/7 cmake options/test.json b/test cases/cmake/7 cmake options/test.json
new file mode 100644
index 0000000..046e2ee
--- /dev/null
+++ b/test cases/cmake/7 cmake options/test.json
@@ -0,0 +1,9 @@
+{
+ "matrix": {
+ "options": {
+ "cmake_prefix_path": [
+ { "val": ["val1", "val2"] }
+ ]
+ }
+ }
+}
diff --git a/test cases/common/102 subproject subdir/meson.build b/test cases/common/102 subproject subdir/meson.build
index 8299a37..a891ca9 100644
--- a/test cases/common/102 subproject subdir/meson.build
+++ b/test cases/common/102 subproject subdir/meson.build
@@ -25,3 +25,32 @@ dependency('sub-novar', fallback : 'sub_novar')
# Verify a subproject can force a dependency to be not-found
d = dependency('sub-notfound', fallback : 'sub_novar', required : false)
assert(not d.found(), 'Dependency should be not-found')
+
+# Verify that implicit fallback works because subprojects/sub_implicit directory exists
+d = dependency('sub_implicit')
+assert(d.found(), 'Should implicitly fallback')
+
+# Verify that implicit fallback works because sub_implicit.wrap has
+# `dependency_names=sub_implicit_provide1` and the subproject overrides sub_implicit_provide1.
+d = dependency('sub_implicit_provide1')
+assert(d.found(), 'Should implicitly fallback')
+
+# Verify that implicit fallback works because sub_implicit.wrap has
+# `sub_implicit_provide2=sub_implicit_provide2_dep` and does not override
+# sub_implicit_provide2.
+d = dependency('sub_implicit_provide2')
+assert(d.found(), 'Should implicitly fallback')
+
+# sub_implicit.wrap provides glib-2.0 and we already configured that subproject,
+# so we must not return the system dependency here. Using glib-2.0 here because
+# some CI runners have it installed.
+d = dependency('glib-2.0', required : false)
+assert(d.found())
+assert(d.type_name() == 'internal')
+
+# sub_implicit.wrap provides gobject-2.0 and we already configured that subproject,
+# so we must not return the system dependency here. But since the subproject did
+# not override that dependency and its not required, not-found should be returned.
+# Using gobject-2.0 here because some CI runners have it installed.
+d = dependency('gobject-2.0', required : false)
+assert(not d.found())
diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap
new file mode 100644
index 0000000..a809c43
--- /dev/null
+++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap
@@ -0,0 +1,6 @@
+[wrap-file]
+
+[provide]
+glib-2.0 = glib_dep
+dependency_names = sub_implicit_provide1, gobject-2.0
+sub_implicit_provide2 = sub_implicit_provide2_dep
diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build
new file mode 100644
index 0000000..24609ae
--- /dev/null
+++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build
@@ -0,0 +1,11 @@
+project('sub_implicit', 'c', version : '1.0')
+
+dep = declare_dependency()
+meson.override_dependency('sub_implicit', dep)
+meson.override_dependency('sub_implicit_provide1', dep)
+
+# This one is not overriden but the wrap file tells the variable name to use.
+sub_implicit_provide2_dep = dep
+
+# This one is not overriden but the wrap file tells the variable name to use.
+glib_dep = dep
diff --git a/test cases/common/104 postconf with args/meson.build b/test cases/common/104 postconf with args/meson.build
index 8510c5b..a34502c 100644
--- a/test cases/common/104 postconf with args/meson.build
+++ b/test cases/common/104 postconf with args/meson.build
@@ -1,5 +1,10 @@
project('postconf script', 'c')
-meson.add_postconf_script('postconf.py', '5', '33')
+conf = configure_file(
+ configuration : configuration_data(),
+ output : 'out'
+)
+
+meson.add_postconf_script(find_program('postconf.py'), '5', '33', conf)
test('post', executable('prog', 'prog.c'))
diff --git a/test cases/common/109 generatorcustom/meson.build b/test cases/common/109 generatorcustom/meson.build
index 17d27e5..b3f50bb 100644
--- a/test cases/common/109 generatorcustom/meson.build
+++ b/test cases/common/109 generatorcustom/meson.build
@@ -14,5 +14,7 @@ allinone = custom_target('alltogether',
output : 'alltogether.h',
command : [catter, '@INPUT@', '@OUTPUT@'])
-executable('proggie', 'main.c', allinone)
+proggie = executable('proggie', 'main.c', allinone)
+
+test('proggie', proggie)
diff --git a/test cases/common/125 object only target/obj_generator.py b/test cases/common/125 object only target/obj_generator.py
index a33872a..afdbc09 100755
--- a/test cases/common/125 object only target/obj_generator.py
+++ b/test cases/common/125 object only target/obj_generator.py
@@ -13,6 +13,8 @@ if __name__ == '__main__':
ofile = sys.argv[3]
if compiler.endswith('cl'):
cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile]
+ elif sys.platform == 'sunos5':
+ cmd = [compiler, '-fpic', '-c', ifile, '-o', ofile]
else:
cmd = [compiler, '-c', ifile, '-o', ofile]
sys.exit(subprocess.call(cmd))
diff --git a/test cases/common/145 special characters/arg-char-test.c b/test cases/common/145 special characters/arg-char-test.c
new file mode 100644
index 0000000..04e02f8
--- /dev/null
+++ b/test cases/common/145 special characters/arg-char-test.c
@@ -0,0 +1,10 @@
+#include <assert.h>
+#include <stdio.h>
+
+int main(int argc, char **argv) {
+ char c = CHAR;
+ assert(argc == 2);
+ if (c != argv[1][0])
+ fprintf(stderr, "Expected %x, got %x\n", (unsigned int) c, (unsigned int) argv[1][0]);
+ assert(c == argv[1][0]);
+}
diff --git a/test cases/common/145 special characters/arg-string-test.c b/test cases/common/145 special characters/arg-string-test.c
new file mode 100644
index 0000000..199fd79
--- /dev/null
+++ b/test cases/common/145 special characters/arg-string-test.c
@@ -0,0 +1,12 @@
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+int main(int argc, char **argv) {
+ const char *s = CHAR;
+ assert(argc == 2);
+ assert(strlen(s) == 1);
+ if (s[0] != argv[1][0])
+ fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]);
+ assert(s[0] == argv[1][0]);
+}
diff --git a/test cases/common/145 special characters/arg-unquoted-test.c b/test cases/common/145 special characters/arg-unquoted-test.c
new file mode 100644
index 0000000..7f679ca
--- /dev/null
+++ b/test cases/common/145 special characters/arg-unquoted-test.c
@@ -0,0 +1,17 @@
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+#define Q(x) #x
+#define QUOTE(x) Q(x)
+
+int main(int argc, char **argv) {
+ const char *s = QUOTE(CHAR);
+ assert(argc == 2);
+ assert(strlen(s) == 1);
+ if (s[0] != argv[1][0])
+ fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]);
+ assert(s[0] == argv[1][0]);
+ // There is no way to convert a macro argument into a character constant.
+ // Otherwise we'd test that as well
+}
diff --git a/test cases/common/145 special characters/meson.build b/test cases/common/145 special characters/meson.build
index ecba650..579601e 100644
--- a/test cases/common/145 special characters/meson.build
+++ b/test cases/common/145 special characters/meson.build
@@ -35,3 +35,41 @@ gen2 = custom_target('gen2',
output : 'result2',
install : true,
install_dir : get_option('datadir'))
+
+# Test that we can pass these special characters in compiler arguments
+#
+# (this part of the test is crafted so we don't try to use these special
+# characters in filenames or target names)
+#
+# TODO: similar tests needed for languages other than C
+# TODO: add similar test for quote, doublequote, and hash, carefully
+# Re hash, see
+# https://docs.microsoft.com/en-us/cpp/build/reference/d-preprocessor-definitions
+
+special = [
+ ['amp', '&'],
+ ['at', '@'],
+ ['backslash', '\\'],
+ ['dollar', '$'],
+ ['gt', '>'],
+ ['lt', '<'],
+ ['slash', '/'],
+]
+
+cc = meson.get_compiler('c')
+
+foreach s : special
+ args = '-DCHAR="@0@"'.format(s[1])
+ e = executable('arg-string-' + s[0], 'arg-string-test.c', c_args: args)
+ test('arg-string-' + s[0], e, args: s[1])
+
+ args = '-DCHAR=@0@'.format(s[1])
+ e = executable('arg-unquoted-' + s[0], 'arg-unquoted-test.c', c_args: args)
+ test('arg-unquoted-' + s[0], e, args: s[1])
+endforeach
+
+foreach s : special
+ args = '-DCHAR=\'@0@\''.format(s[1])
+ e = executable('arg-char-' + s[0], 'arg-char-test.c', c_args: args)
+ test('arg-char-' + s[0], e, args: s[1])
+endforeach
diff --git a/test cases/common/157 wrap file should not failed/meson.build b/test cases/common/157 wrap file should not failed/meson.build
index f4ec2a8..48d1068 100644
--- a/test cases/common/157 wrap file should not failed/meson.build
+++ b/test cases/common/157 wrap file should not failed/meson.build
@@ -3,8 +3,14 @@ project('mainproj', 'c',
)
subproject('zlib')
-subproject('foo')
+foo = subproject('foo')
+bar = subproject('bar')
+
+libfoo = foo.get_variable('libfoo')
+libbar = bar.get_variable('libbar')
executable('grabprog', files('src/subprojects/prog.c'))
executable('grabprog2', files('src/subprojects/foo/prog2.c'))
subdir('src')
+
+subproject('patchdir')
diff --git a/test cases/common/157 wrap file should not failed/src/meson.build b/test cases/common/157 wrap file should not failed/src/meson.build
index 69f666d..0c82165 100644
--- a/test cases/common/157 wrap file should not failed/src/meson.build
+++ b/test cases/common/157 wrap file should not failed/src/meson.build
@@ -1,2 +1,6 @@
executable('grabprog3', files('subprojects/prog.c'))
executable('grabprog4', files('subprojects/foo/prog2.c'))
+
+texe = executable('testexe', files('test.c'), link_with: [libfoo, libbar])
+
+test('t1', texe)
diff --git a/test cases/common/157 wrap file should not failed/src/test.c b/test cases/common/157 wrap file should not failed/src/test.c
new file mode 100644
index 0000000..34cf991
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/src/test.c
@@ -0,0 +1,9 @@
+#include <stdio.h>
+
+int bar_dummy_func(void);
+int dummy_func(void);
+
+int main(void) {
+ printf("Hello world %d\n", bar_dummy_func() + dummy_func());
+ return 0;
+}
diff --git a/test cases/common/157 wrap file should not failed/subprojects/.gitignore b/test cases/common/157 wrap file should not failed/subprojects/.gitignore
new file mode 100644
index 0000000..5550e2e
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/.gitignore
@@ -0,0 +1,2 @@
+/foo-1.0
+/bar-1.0
diff --git a/test cases/common/157 wrap file should not failed/subprojects/bar.wrap b/test cases/common/157 wrap file should not failed/subprojects/bar.wrap
new file mode 100644
index 0000000..4e8f7e3
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/bar.wrap
@@ -0,0 +1,8 @@
+[wrap-file]
+directory = bar-1.0
+lead_directory_missing = true
+
+source_filename = bar-1.0.tar.xz
+source_hash = f0f61948530dc0d33e3028cd71a9f8ee869f6b3665960d8f41d715cf4aed6467
+
+patch_filename = bar-1.0-patch.tar.xz
diff --git a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c b/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c
deleted file mode 100644
index 267b43a..0000000
--- a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c
+++ /dev/null
@@ -1,3 +0,0 @@
-int dummy_func(void) {
- return 42;
-}
diff --git a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build b/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build
deleted file mode 100644
index 318e81d..0000000
--- a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build
+++ /dev/null
@@ -1,2 +0,0 @@
-project('shared lib', 'c')
-libfoo = shared_library('foo', 'foo.c')
diff --git a/test cases/common/157 wrap file should not failed/subprojects/foo.wrap b/test cases/common/157 wrap file should not failed/subprojects/foo.wrap
index 90d6d40..c67c5e5 100644
--- a/test cases/common/157 wrap file should not failed/subprojects/foo.wrap
+++ b/test cases/common/157 wrap file should not failed/subprojects/foo.wrap
@@ -3,9 +3,9 @@ directory = foo-1.0
source_url = http://something.invalid
source_filename = foo-1.0.tar.xz
-source_hash = ae5fc03185654f76b459db16ca25809703f8821aeb39a433902244bb479c4b79
+source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1
lead_directory_missing = true
patch_url = https://something.invalid/patch
patch_filename = foo-1.0-patch.tar.xz
-patch_hash = 8f2e286a4b190228d4e0c25ddc91195449cfb5e5c52006355838964b244037da
+patch_hash = d0ddc5e60fdb27d808552f5ac8d0bb603ea2cba306538b4427b985535b26c9c5
diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz
index 26d2927..e26b8e0 100644
--- a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz
+++ b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz
Binary files differ
diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz
index 2647ef9..37eb6cc 100644
--- a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz
+++ b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz
Binary files differ
diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz
new file mode 100644
index 0000000..f257a19
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz
Binary files differ
diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz
new file mode 100644
index 0000000..d90a9e8
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz
Binary files differ
diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build
new file mode 100644
index 0000000..dbaf91f
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build
@@ -0,0 +1,2 @@
+project('static lib patchdir', 'c')
+libfoo = static_library('foo', 'foo.c')
diff --git a/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap b/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap
new file mode 100644
index 0000000..1a2134c
--- /dev/null
+++ b/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap
@@ -0,0 +1,9 @@
+[wrap-file]
+directory = foo-1.0-patchdir
+
+source_url = http://something.invalid
+source_filename = foo-1.0.tar.xz
+source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1
+lead_directory_missing = true
+
+patch_directory = foo-1.0
diff --git a/test cases/common/163 disabler/meson.build b/test cases/common/163 disabler/meson.build
index 5554f14..d132e2b 100644
--- a/test cases/common/163 disabler/meson.build
+++ b/test cases/common/163 disabler/meson.build
@@ -9,6 +9,7 @@ d2 = dependency(d)
d3 = (d == d2)
d4 = d + 0
d5 = d2 or true
+set_variable('d6', disabler())
has_not_changed = false
if is_disabler(d)
@@ -23,12 +24,14 @@ assert(is_disabler(d2), 'Function laundered disabler was not identified correctl
assert(is_disabler(d3), 'Disabler comparison should yield disabler.')
assert(is_disabler(d4), 'Disabler addition should yield disabler.')
assert(is_disabler(d5), 'Disabler logic op should yield disabler.')
+assert(is_disabler(d6), 'set_variable with a disabler should set variable to disabler.')
assert(d, 'Disabler did not cause this to be skipped.')
assert(d2, 'Function laundered disabler did not cause this to be skipped.')
assert(d3, 'Disabler comparison should yield disabler and thus this would not be called.')
assert(d4, 'Disabler addition should yield disabler and thus this would not be called.')
assert(d5, 'Disabler logic op should yield disabler and thus this would not be called.')
+assert(d6, 'set_variable with a disabler did not cause this to be skipped.')
number = 0
@@ -80,6 +83,31 @@ else
endif
assert(has_not_changed, 'App has changed.')
+assert(not is_disabler(is_variable('d6')), 'is_variable should not return a disabler')
+assert(is_variable('d6'), 'is_variable for a disabler should return true')
+
+if_is_not_disabled = false
+if is_variable('d6')
+ if_is_not_disabled = true
+else
+ if_is_not_disabled = true
+endif
+assert(if_is_not_disabled, 'Disabler in is_variable should not skip blocks')
+
+get_d = get_variable('d6')
+assert(is_disabler(get_d), 'get_variable should yield a disabler')
+
+get_fallback_d = get_variable('nonexistant', disabler())
+assert(is_disabler(get_fallback_d), 'get_variable fallback should yield a disabler')
+
+var_true = true
+get_no_fallback_d = get_variable('var_true', disabler())
+assert(not is_disabler(get_no_fallback_d), 'get_variable should not fallback to disabler')
+assert(get_no_fallback_d, 'get_variable should yield true')
+
+assert(is_disabler(get_variable(disabler())), 'get_variable should yield a disabler')
+assert(is_disabler(get_variable(disabler(), var_true)), 'get_variable should yield a disabler')
+
if_is_disabled = true
if disabler()
if_is_disabled = false
diff --git a/test cases/common/187 find override/meson.build b/test cases/common/187 find override/meson.build
index 3b8af80..b277459 100644
--- a/test cases/common/187 find override/meson.build
+++ b/test cases/common/187 find override/meson.build
@@ -10,3 +10,6 @@ if not gencodegen.found()
endif
subdir('otherdir')
+
+tool = find_program('sometool')
+assert(tool.found())
diff --git a/test cases/common/187 find override/subprojects/sub.wrap b/test cases/common/187 find override/subprojects/sub.wrap
new file mode 100644
index 0000000..17aa332
--- /dev/null
+++ b/test cases/common/187 find override/subprojects/sub.wrap
@@ -0,0 +1,5 @@
+[wrap-file]
+directory = sub
+
+[provide]
+program_names = sometool
diff --git a/test cases/common/187 find override/subprojects/sub/meson.build b/test cases/common/187 find override/subprojects/sub/meson.build
new file mode 100644
index 0000000..640f270
--- /dev/null
+++ b/test cases/common/187 find override/subprojects/sub/meson.build
@@ -0,0 +1,4 @@
+project('tools')
+
+exe = find_program('gencodegen')
+meson.override_find_program('sometool', exe)
diff --git a/test cases/common/201 override with exe/meson.build b/test cases/common/201 override with exe/meson.build
index 81f6c02..62d2f32 100644
--- a/test cases/common/201 override with exe/meson.build
+++ b/test cases/common/201 override with exe/meson.build
@@ -1,6 +1,10 @@
project('myexe', 'c')
sub = subproject('sub')
-prog = find_program('foobar')
+
+prog = find_program('foobar', version : '>= 2.0', required : false)
+assert(not prog.found())
+
+prog = find_program('foobar', version : '>= 1.0')
custom1 = custom_target('custom1',
build_by_default : true,
input : [],
@@ -11,5 +15,7 @@ gen = generator(prog,
arguments : ['@OUTPUT@'])
custom2 = gen.process('main2.input')
+message(prog.full_path())
+
executable('e1', custom1)
executable('e2', custom2)
diff --git a/test cases/common/201 override with exe/subprojects/sub/meson.build b/test cases/common/201 override with exe/subprojects/sub/meson.build
index 1f186da..f0343b2 100644
--- a/test cases/common/201 override with exe/subprojects/sub/meson.build
+++ b/test cases/common/201 override with exe/subprojects/sub/meson.build
@@ -1,3 +1,3 @@
-project('sub', 'c')
+project('sub', 'c', version : '1.0')
foobar = executable('foobar', 'foobar.c', native : true)
meson.override_find_program('foobar', foobar)
diff --git a/test cases/common/222 source set realistic example/meson.build b/test cases/common/222 source set realistic example/meson.build
index 5b0e495..d986b99 100644
--- a/test cases/common/222 source set realistic example/meson.build
+++ b/test cases/common/222 source set realistic example/meson.build
@@ -1,4 +1,4 @@
-# a sort-of realistic example that combines the sourceset and kconfig
+# a sort-of realistic example that combines the sourceset and keyval
# modules, inspired by QEMU's build system
project('sourceset-example', 'cpp', default_options: ['cpp_std=c++11'])
@@ -9,7 +9,7 @@ if cppid == 'pgi'
endif
ss = import('sourceset')
-kconfig = import('unstable-kconfig')
+keyval = import('keyval')
zlib = declare_dependency(compile_args: '-DZLIB=1')
another = declare_dependency(compile_args: '-DANOTHER=1')
@@ -39,7 +39,7 @@ targets = [ 'arm', 'aarch64', 'x86' ]
target_dirs = { 'arm' : 'arm', 'aarch64' : 'arm', 'x86': 'x86' }
foreach x : targets
- config = kconfig.load('config' / x)
+ config = keyval.load('config' / x)
target_specific = specific.apply(config, strict: false)
target_common = common.apply(config, strict: false)
target_deps = target_specific.dependencies() + target_common.dependencies()
diff --git a/test cases/common/226 include_type dependency/main.cpp b/test cases/common/226 include_type dependency/main.cpp
new file mode 100644
index 0000000..bf8c4a4
--- /dev/null
+++ b/test cases/common/226 include_type dependency/main.cpp
@@ -0,0 +1,8 @@
+#include <iostream>
+#include <boost/graph/filtered_graph.hpp>
+
+using namespace std;
+
+int main(void) {
+ return 0;
+}
diff --git a/test cases/common/226 include_type dependency/meson.build b/test cases/common/226 include_type dependency/meson.build
index fafceaf..d17e920 100644
--- a/test cases/common/226 include_type dependency/meson.build
+++ b/test cases/common/226 include_type dependency/meson.build
@@ -4,10 +4,16 @@ project(
)
dep = dependency('zlib', method: 'pkg-config', required : false)
+boost_dep = dependency('boost', modules: ['graph'], include_type : 'system', required: false)
+
if not dep.found()
error('MESON_SKIP_TEST zlib was not found')
endif
+if not boost_dep.found()
+ error('MESON_SKIP_TEST boost was not found')
+endif
+
assert(dep.include_type() == 'preserve', 'include_type must default to "preserve"')
dep_sys = dep.as_system()
@@ -26,3 +32,7 @@ assert(sp_dep.include_type() == 'preserve', 'default is preserve')
sp_dep_sys = sp_dep.as_system('system')
assert(sp_dep_sys.include_type() == 'system', 'changing include_type works')
assert(sp_dep.include_type() == 'preserve', 'as_system must not mutate the original object')
+
+# Check that PCH works with `include_type : 'system'` See https://github.com/mesonbuild/meson/issues/7167
+main_exe = executable('main_exe', 'main.cpp', cpp_pch: 'pch/test.hpp', dependencies: boost_dep)
+test('main_test', main_exe)
diff --git a/test cases/common/226 include_type dependency/pch/test.hpp b/test cases/common/226 include_type dependency/pch/test.hpp
new file mode 100644
index 0000000..0d40fe1
--- /dev/null
+++ b/test cases/common/226 include_type dependency/pch/test.hpp
@@ -0,0 +1 @@
+#include <boost/graph/filtered_graph.hpp>
diff --git a/test cases/common/230 persubproject options/meson.build b/test cases/common/230 persubproject options/meson.build
index 20dff90..f76a70c 100644
--- a/test cases/common/230 persubproject options/meson.build
+++ b/test cases/common/230 persubproject options/meson.build
@@ -1,9 +1,11 @@
project('persubproject options', 'c',
default_options : ['default_library=both',
- 'werror=true'])
+ 'werror=true',
+ 'warning_level=3'])
assert(get_option('default_library') == 'both', 'Parent default_library should be "both"')
assert(get_option('werror'))
+assert(get_option('warning_level') == '3')
# Check it build both by calling a method only both_libraries target implement
lib = library('lib1', 'foo.c')
diff --git a/test cases/common/230 persubproject options/subprojects/sub1/foo.c b/test cases/common/230 persubproject options/subprojects/sub1/foo.c
index 63e4de6..82ad2c2 100644
--- a/test cases/common/230 persubproject options/subprojects/sub1/foo.c
+++ b/test cases/common/230 persubproject options/subprojects/sub1/foo.c
@@ -1,5 +1,8 @@
int foo(void);
int foo(void) {
+ /* This is built with -Werror, it would error if warning_level=3 was inherited
+ * from main project and not overridden by this subproject's default_options. */
+ int x;
return 0;
}
diff --git a/test cases/common/230 persubproject options/subprojects/sub1/meson.build b/test cases/common/230 persubproject options/subprojects/sub1/meson.build
index 7afc934..4e4bc1b 100644
--- a/test cases/common/230 persubproject options/subprojects/sub1/meson.build
+++ b/test cases/common/230 persubproject options/subprojects/sub1/meson.build
@@ -1,6 +1,8 @@
-project('sub1', 'c')
+project('sub1', 'c',
+ default_options : ['warning_level=0'])
assert(get_option('default_library') == 'both', 'Should inherit parent project default_library')
+assert(get_option('warning_level') == '0')
# Check it build both by calling a method only both_libraries target implement
lib = library('lib1', 'foo.c')
diff --git a/test cases/common/232 link language/c_linkage.cpp b/test cases/common/232 link language/c_linkage.cpp
new file mode 100644
index 0000000..dc006b9
--- /dev/null
+++ b/test cases/common/232 link language/c_linkage.cpp
@@ -0,0 +1,5 @@
+extern "C" {
+ int makeInt(void) {
+ return 0;
+ }
+}
diff --git a/test cases/common/232 link language/c_linkage.h b/test cases/common/232 link language/c_linkage.h
new file mode 100644
index 0000000..1609f47
--- /dev/null
+++ b/test cases/common/232 link language/c_linkage.h
@@ -0,0 +1,10 @@
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+int makeInt(void);
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/test cases/common/232 link language/lib.cpp b/test cases/common/232 link language/lib.cpp
new file mode 100644
index 0000000..ab43828
--- /dev/null
+++ b/test cases/common/232 link language/lib.cpp
@@ -0,0 +1,5 @@
+extern "C" {
+ int makeInt(void) {
+ return 1;
+ }
+}
diff --git a/test cases/common/232 link language/main.c b/test cases/common/232 link language/main.c
new file mode 100644
index 0000000..5a167e7
--- /dev/null
+++ b/test cases/common/232 link language/main.c
@@ -0,0 +1,5 @@
+#include "c_linkage.h"
+
+int main(void) {
+ return makeInt();
+}
diff --git a/test cases/common/232 link language/meson.build b/test cases/common/232 link language/meson.build
new file mode 100644
index 0000000..f9af6cd
--- /dev/null
+++ b/test cases/common/232 link language/meson.build
@@ -0,0 +1,18 @@
+project(
+ 'link_language',
+ ['c', 'cpp'],
+)
+
+exe = executable(
+ 'main',
+ ['main.c', 'c_linkage.cpp'],
+ link_language : 'c',
+)
+
+lib = library(
+ 'mylib',
+ ['lib.cpp'],
+ link_language : 'c',
+)
+
+test('main', exe)
diff --git a/test cases/common/233 link depends indexed custom target/foo.c b/test cases/common/233 link depends indexed custom target/foo.c
new file mode 100644
index 0000000..58c86a6
--- /dev/null
+++ b/test cases/common/233 link depends indexed custom target/foo.c
@@ -0,0 +1,15 @@
+#include <stdio.h>
+
+int main(void) {
+ const char *fn = DEPFILE;
+ FILE *f = fopen(fn, "r");
+ if (!f) {
+ printf("could not open %s", fn);
+ return 1;
+ }
+ else {
+ printf("successfully opened %s", fn);
+ }
+
+ return 0;
+}
diff --git a/test cases/common/233 link depends indexed custom target/make_file.py b/test cases/common/233 link depends indexed custom target/make_file.py
new file mode 100644
index 0000000..6a43b7d
--- /dev/null
+++ b/test cases/common/233 link depends indexed custom target/make_file.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python3
+import sys
+
+with open(sys.argv[1], 'w') as f:
+ print('# this file does nothing', file=f)
+
+with open(sys.argv[2], 'w') as f:
+ print('# this file does nothing', file=f)
diff --git a/test cases/common/233 link depends indexed custom target/meson.build b/test cases/common/233 link depends indexed custom target/meson.build
new file mode 100644
index 0000000..5c066e9
--- /dev/null
+++ b/test cases/common/233 link depends indexed custom target/meson.build
@@ -0,0 +1,19 @@
+project('link_depends_indexed_custom_target', 'c')
+
+if meson.backend().startswith('vs')
+ # FIXME: Broken on the VS backends
+ error('MESON_SKIP_TEST see https://github.com/mesonbuild/meson/issues/1799')
+endif
+
+cmd = find_program('make_file.py')
+
+dep_files = custom_target('gen_dep',
+ command: [cmd, '@OUTPUT@'],
+ output: ['dep_file1', 'dep_file2'])
+
+exe = executable('foo', 'foo.c',
+ link_depends: dep_files[1],
+ c_args: ['-DDEPFILE="' + dep_files[0].full_path()+ '"'])
+
+# check that dep_file1 exists, which means that link_depends target ran
+test('runtest', exe)
diff --git a/test cases/common/234 very long commmand line/codegen.py b/test cases/common/234 very long commmand line/codegen.py
new file mode 100755
index 0000000..4de78ce
--- /dev/null
+++ b/test cases/common/234 very long commmand line/codegen.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import sys
+
+with open(sys.argv[2], 'w') as f:
+ print('int func{n}(void) {{ return {n}; }}'.format(n=sys.argv[1]), file=f)
diff --git a/test cases/common/234 very long commmand line/main.c b/test cases/common/234 very long commmand line/main.c
new file mode 100644
index 0000000..dbb64a8
--- /dev/null
+++ b/test cases/common/234 very long commmand line/main.c
@@ -0,0 +1,5 @@
+int main(int argc, char **argv) {
+ (void) argc;
+ (void) argv;
+ return 0;
+}
diff --git a/test cases/common/234 very long commmand line/meson.build b/test cases/common/234 very long commmand line/meson.build
new file mode 100644
index 0000000..fe47b5e
--- /dev/null
+++ b/test cases/common/234 very long commmand line/meson.build
@@ -0,0 +1,44 @@
+project('very long command lines', 'c')
+
+# Get the current system's commandline length limit.
+if build_machine.system() == 'windows'
+ # Various limits on windows:
+ # cmd.exe: 8kb
+ # CreateProcess: 32kb
+ limit = 32767
+elif build_machine.system() == 'cygwin'
+ # cygwin-to-win32: see above
+ # cygwin-to-cygwin: no limit?
+ # Cygwin is slow, so only test it lightly here.
+ limit = 8192
+else
+ # ninja passes whole line as a single argument, for which
+ # the limit is 128k as of Linux 2.6.23. See MAX_ARG_STRLEN.
+ # BSD seems similar, see https://www.in-ulm.de/~mascheck/various/argmax
+ limit = 131072
+endif
+# Now exceed that limit, but not so far that the test takes too long.
+name = 'ALongFilenameMuchLongerThanIsNormallySeenAndReallyHardToReadThroughToTheEndAMooseOnceBitMySisterSheNowWorksAtLLamaFreshFarmsThisHasToBeSoLongThatWeExceed128KBWithoutCompilingTooManyFiles'
+namelen = 187
+nfiles = 50 + limit / namelen
+message('Expected link commandline length is approximately ' + '@0@'.format((nfiles * (namelen+28))))
+
+seq = run_command('seq.py', '1', '@0@'.format(nfiles)).stdout().strip().split('\n')
+
+sources = []
+codegen = find_program('codegen.py')
+
+foreach i : seq
+ sources += custom_target('codegen' + i,
+ command: [codegen, i, '@OUTPUT@'],
+ output: name + i + '.c')
+endforeach
+
+shared_library('sharedlib', sources)
+static_library('staticlib', sources)
+executable('app', 'main.c', sources)
+
+# Also test short commandlines to make sure that doesn't regress
+shared_library('sharedlib0', sources[0])
+static_library('staticlib0', sources[0])
+executable('app0', 'main.c', sources[0])
diff --git a/test cases/common/234 very long commmand line/seq.py b/test cases/common/234 very long commmand line/seq.py
new file mode 100755
index 0000000..637bf57
--- /dev/null
+++ b/test cases/common/234 very long commmand line/seq.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import sys
+
+for i in range(int(sys.argv[1]), int(sys.argv[2])):
+ print(i)
diff --git a/test cases/common/36 tryrun/meson.build b/test cases/common/36 tryrun/meson.build
index 261adf2..5580974 100644
--- a/test cases/common/36 tryrun/meson.build
+++ b/test cases/common/36 tryrun/meson.build
@@ -2,7 +2,7 @@ project('tryrun', 'c', 'cpp')
# Complex to exercise all code paths.
if meson.is_cross_build()
- if meson.has_exe_wrapper()
+ if meson.can_run_host_binaries()
compilers = [meson.get_compiler('c', native : false), meson.get_compiler('cpp', native : false)]
else
compilers = [meson.get_compiler('c', native : true), meson.get_compiler('cpp', native : true)]
diff --git a/test cases/common/38 string operations/meson.build b/test cases/common/38 string operations/meson.build
index 6596142..8a06a82 100644
--- a/test cases/common/38 string operations/meson.build
+++ b/test cases/common/38 string operations/meson.build
@@ -101,3 +101,18 @@ assert('\\\\n' == bs_bs_n, 'Four backslash broken before n')
assert('\\\\\n' == bs_bs_nl, 'Five backslash broken before n')
assert('\\\\' == bs_bs, 'Double-backslash broken')
assert('\\' == bs, 'Backslash broken')
+
+mysubstring='foobarbaz'
+assert(mysubstring.substring() == 'foobarbaz', 'substring is broken')
+assert(mysubstring.substring(0) == 'foobarbaz', 'substring is broken')
+assert(mysubstring.substring(1) == 'oobarbaz', 'substring is broken')
+assert(mysubstring.substring(-5) == 'arbaz', 'substring is broken')
+assert(mysubstring.substring(1, 4) == 'oob', 'substring is broken')
+assert(mysubstring.substring(1,-5) == 'oob', 'substring is broken')
+assert(mysubstring.substring(1, 0) == '', 'substring is broken')
+assert(mysubstring.substring(0, 100) == 'foobarbaz', 'substring is broken')
+assert(mysubstring.substring(-1, -5) == '', 'substring is broken')
+assert(mysubstring.substring(10, -25) == '', 'substring is broken')
+assert(mysubstring.substring(-4, 2) == '', 'substring is broken')
+assert(mysubstring.substring(10, 9) == '', 'substring is broken')
+assert(mysubstring.substring(8, 10) == 'z', 'substring is broken')
diff --git a/test cases/common/43 options/meson_options.txt b/test cases/common/43 options/meson_options.txt
index c5986ba..db649de 100644
--- a/test cases/common/43 options/meson_options.txt
+++ b/test cases/common/43 options/meson_options.txt
@@ -1,7 +1,7 @@
-option('testoption', type : 'string', value : 'optval', description : 'An option to do something')
+option('testoption', type : 'string', value : 'optval', description : 'An option ' + 'to do something')
option('other_one', type : 'boolean', value : not (not (not (not false))))
-option('combo_opt', type : 'combo', choices : ['one', 'two', 'combo'], value : 'combo')
+option('combo_opt', type : 'co' + 'mbo', choices : ['one', 'two', 'combo'], value : 'combo')
option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two'])
option('free_array_opt', type : 'array')
option('integer_opt', type : 'integer', min : 0, max : -(-5), value : 3)
-option('neg_int_opt', type : 'integer', min : -5, max : 5, value : -3)
+option('neg' + '_' + 'int' + '_' + 'opt', type : 'integer', min : -5, max : 5, value : -3)
diff --git a/test cases/common/47 pkgconfig-gen/dependencies/main.c b/test cases/common/47 pkgconfig-gen/dependencies/main.c
index 61708d3..397d40c 100644
--- a/test cases/common/47 pkgconfig-gen/dependencies/main.c
+++ b/test cases/common/47 pkgconfig-gen/dependencies/main.c
@@ -1,5 +1,9 @@
#include <simple.h>
+#ifndef LIBFOO
+#error LIBFOO should be defined in pkgconfig cflags
+#endif
+
int main(int argc, char *argv[])
{
return simple_function() == 42 ? 0 : 1;
diff --git a/test cases/common/47 pkgconfig-gen/meson.build b/test cases/common/47 pkgconfig-gen/meson.build
index c251b9f..8c16cd5 100644
--- a/test cases/common/47 pkgconfig-gen/meson.build
+++ b/test cases/common/47 pkgconfig-gen/meson.build
@@ -1,5 +1,12 @@
project('pkgconfig-gen', 'c')
+# Some CI runners do not have zlib, just skip them as we need some common
+# external dependency.
+cc = meson.get_compiler('c')
+if not cc.find_library('z', required: false).found()
+ error('MESON_SKIP_TEST: zlib missing')
+endif
+
# First check we have pkg-config >= 0.29
pkgconfig = find_program('pkg-config', required: false)
if not pkgconfig.found()
@@ -43,7 +50,8 @@ pkgg.generate(
name : 'libfoo',
version : libver,
description : 'A foo library.',
- variables : ['foo=bar', 'datadir=${prefix}/data']
+ variables : ['foo=bar', 'datadir=${prefix}/data'],
+ extra_cflags : ['-DLIBFOO'],
)
pkgg.generate(
@@ -58,3 +66,32 @@ pkgg.generate(
version : libver,
dataonly: true
)
+
+# Regression test for 2 cases:
+# - link_whole from InternalDependency used to be ignored, but we should still
+# recurse to add libraries they link to. In this case it must add `-lsimple1`
+# in generated pc file.
+# - dependencies from InternalDependency used to be ignored. In this case it must add
+# `-lz` in generated pc file.
+simple1 = shared_library('simple1', 'simple.c')
+stat1 = static_library('stat1', 'simple.c', link_with: simple1)
+dep = declare_dependency(link_whole: stat1, dependencies: cc.find_library('z'))
+simple2 = library('simple2', 'simple.c')
+pkgg.generate(simple2, libraries: dep)
+
+# Regression test: as_system() does a deepcopy() of the InternalDependency object
+# which caused `-lsimple3` to be duplicated because generator used to compare
+# Target instances instead of their id.
+simple3 = shared_library('simple3', 'simple.c')
+dep1 = declare_dependency(link_with: simple3)
+dep2 = dep1.as_system()
+pkgg.generate(libraries: [dep1, dep2],
+ name: 'simple3',
+ description: 'desc')
+
+# Regression test: stat2 is both link_with and link_whole, it should not appear
+# in generated pc file.
+stat2 = static_library('stat2', 'simple.c', install: true)
+simple4 = library('simple4', 'simple.c', link_with: stat2)
+simple5 = library('simple5', 'simple5.c', link_with: simple4, link_whole: stat2)
+pkgg.generate(simple5)
diff --git a/test cases/common/47 pkgconfig-gen/simple5.c b/test cases/common/47 pkgconfig-gen/simple5.c
new file mode 100644
index 0000000..9f924bd
--- /dev/null
+++ b/test cases/common/47 pkgconfig-gen/simple5.c
@@ -0,0 +1,6 @@
+int simple5(void);
+
+int simple5(void)
+{
+ return 0;
+}
diff --git a/test cases/common/47 pkgconfig-gen/test.json b/test cases/common/47 pkgconfig-gen/test.json
index 1c6a452..702e7fe 100644
--- a/test cases/common/47 pkgconfig-gen/test.json
+++ b/test cases/common/47 pkgconfig-gen/test.json
@@ -1,9 +1,13 @@
{
"installed": [
{"type": "file", "file": "usr/include/simple.h"},
+ {"type": "file", "file": "usr/lib/libstat2.a"},
{"type": "file", "file": "usr/lib/pkgconfig/simple.pc"},
{"type": "file", "file": "usr/lib/pkgconfig/libfoo.pc"},
{"type": "file", "file": "usr/lib/pkgconfig/libhello.pc"},
- {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"}
+ {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"},
+ {"type": "file", "file": "usr/lib/pkgconfig/simple2.pc"},
+ {"type": "file", "file": "usr/lib/pkgconfig/simple3.pc"},
+ {"type": "file", "file": "usr/lib/pkgconfig/simple5.pc"}
]
}
diff --git a/test cases/common/56 install script/customtarget.py b/test cases/common/56 install script/customtarget.py
new file mode 100755
index 0000000..e28373a
--- /dev/null
+++ b/test cases/common/56 install script/customtarget.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+
+import argparse
+import os
+
+
+def main() -> None:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('dirname')
+ args = parser.parse_args()
+
+ with open(os.path.join(args.dirname, '1.txt'), 'w') as f:
+ f.write('')
+ with open(os.path.join(args.dirname, '2.txt'), 'w') as f:
+ f.write('')
+
+
+if __name__ == "__main__":
+ main()
diff --git a/test cases/common/56 install script/meson.build b/test cases/common/56 install script/meson.build
index 6351518..e80e666 100644
--- a/test cases/common/56 install script/meson.build
+++ b/test cases/common/56 install script/meson.build
@@ -5,3 +5,29 @@ meson.add_install_script('myinstall.py', 'diiba/daaba', 'file.dat')
meson.add_install_script('myinstall.py', 'this/should', 'also-work.dat')
subdir('src')
+
+meson.add_install_script('myinstall.py', 'dir', afile, '--mode=copy')
+
+data = configuration_data()
+data.set10('foo', true)
+conf = configure_file(
+ configuration : data,
+ output : 'conf.txt'
+)
+
+meson.add_install_script('myinstall.py', 'dir', conf, '--mode=copy')
+
+t = custom_target(
+ 'ct',
+ command : [find_program('customtarget.py'), '@OUTDIR@'],
+ output : ['1.txt', '2.txt'],
+)
+
+meson.add_install_script('myinstall.py', 'customtarget', t, '--mode=copy')
+meson.add_install_script('myinstall.py', 'customtargetindex', t[0], '--mode=copy')
+
+meson.add_install_script(exe, 'generated.txt')
+wrap = find_program('wrap.py')
+# Yes, these are getting silly
+meson.add_install_script(wrap, exe, 'wrapped.txt')
+meson.add_install_script(wrap, wrap, exe, 'wrapped2.txt')
diff --git a/test cases/common/56 install script/myinstall.py b/test cases/common/56 install script/myinstall.py
index 812561e..a573342 100644
--- a/test cases/common/56 install script/myinstall.py
+++ b/test cases/common/56 install script/myinstall.py
@@ -1,12 +1,31 @@
#!/usr/bin/env python3
+import argparse
import os
-import sys
+import shutil
prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX']
-dirname = os.path.join(prefix, sys.argv[1])
-os.makedirs(dirname)
-with open(os.path.join(dirname, sys.argv[2]), 'w') as f:
- f.write('')
+def main() -> None:
+ parser = argparse.ArgumentParser()
+ parser.add_argument('dirname')
+ parser.add_argument('files', nargs='+')
+ parser.add_argument('--mode', action='store', default='create', choices=['create', 'copy'])
+ args = parser.parse_args()
+
+ dirname = os.path.join(prefix, args.dirname)
+ if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
+ if args.mode == 'create':
+ for name in args.files:
+ with open(os.path.join(dirname, name), 'w') as f:
+ f.write('')
+ else:
+ for name in args.files:
+ shutil.copy(name, dirname)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/test cases/unit/74 dep files/foo.c b/test cases/common/56 install script/src/a file.txt
index e69de29..e69de29 100644
--- a/test cases/unit/74 dep files/foo.c
+++ b/test cases/common/56 install script/src/a file.txt
diff --git a/test cases/common/56 install script/src/exe.c b/test cases/common/56 install script/src/exe.c
new file mode 100644
index 0000000..b573b91
--- /dev/null
+++ b/test cases/common/56 install script/src/exe.c
@@ -0,0 +1,24 @@
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+int main(int argc, char * argv[]) {
+ if (argc != 2) {
+ fprintf(stderr, "Takes exactly 2 arguments\n");
+ return 1;
+ }
+
+ char * dirname = getenv("MESON_INSTALL_DESTDIR_PREFIX");
+ char * fullname = malloc(strlen(dirname) + 1 + strlen(argv[1]) + 1);
+ strcpy(fullname, dirname);
+ strcat(fullname, "/");
+ strcat(fullname, argv[1]);
+
+ FILE * fp = fopen(fullname, "w");
+ fputs("Some text\n", fp);
+ fclose(fp);
+
+ free(fullname);
+
+ return 0;
+}
diff --git a/test cases/common/56 install script/src/meson.build b/test cases/common/56 install script/src/meson.build
index b23574a..1db424f 100644
--- a/test cases/common/56 install script/src/meson.build
+++ b/test cases/common/56 install script/src/meson.build
@@ -1 +1,5 @@
meson.add_install_script('myinstall.py', 'this/does', 'something-different.dat')
+
+afile = files('a file.txt')
+
+exe = executable('exe', 'exe.c', install : false, native : true)
diff --git a/test cases/common/56 install script/src/myinstall.py b/test cases/common/56 install script/src/myinstall.py
index 3b7ce37..3a9d89b 100644
--- a/test cases/common/56 install script/src/myinstall.py
+++ b/test cases/common/56 install script/src/myinstall.py
@@ -7,6 +7,8 @@ prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX']
dirname = os.path.join(prefix, sys.argv[1])
-os.makedirs(dirname)
+if not os.path.exists(dirname):
+ os.makedirs(dirname)
+
with open(os.path.join(dirname, sys.argv[2] + '.in'), 'w') as f:
f.write('')
diff --git a/test cases/common/56 install script/test.json b/test cases/common/56 install script/test.json
index d17625f..b2a5971 100644
--- a/test cases/common/56 install script/test.json
+++ b/test cases/common/56 install script/test.json
@@ -4,6 +4,14 @@
{"type": "pdb", "file": "usr/bin/prog"},
{"type": "file", "file": "usr/diiba/daaba/file.dat"},
{"type": "file", "file": "usr/this/should/also-work.dat"},
- {"type": "file", "file": "usr/this/does/something-different.dat.in"}
+ {"type": "file", "file": "usr/this/does/something-different.dat.in"},
+ {"type": "file", "file": "usr/dir/a file.txt"},
+ {"type": "file", "file": "usr/dir/conf.txt"},
+ {"type": "file", "file": "usr/customtarget/1.txt"},
+ {"type": "file", "file": "usr/customtarget/2.txt"},
+ {"type": "file", "file": "usr/customtargetindex/1.txt"},
+ {"type": "file", "file": "usr/generated.txt"},
+ {"type": "file", "file": "usr/wrapped.txt"},
+ {"type": "file", "file": "usr/wrapped2.txt"}
]
}
diff --git a/test cases/common/56 install script/wrap.py b/test cases/common/56 install script/wrap.py
new file mode 100755
index 0000000..87508e0
--- /dev/null
+++ b/test cases/common/56 install script/wrap.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+
+import subprocess
+import sys
+
+subprocess.run(sys.argv[1:])
diff --git a/test cases/common/93 selfbuilt custom/meson.build b/test cases/common/93 selfbuilt custom/meson.build
index 3cc3906..b536352 100644
--- a/test cases/common/93 selfbuilt custom/meson.build
+++ b/test cases/common/93 selfbuilt custom/meson.build
@@ -26,7 +26,7 @@ ctlib = custom_target('ctlib',
build_by_default : true,
)
-if meson.is_cross_build() and meson.has_exe_wrapper()
+if meson.is_cross_build() and meson.can_run_host_binaries()
checkarg_host = executable('checkarg_host', 'checkarg.cpp')
ctlib_host = custom_target(
diff --git a/test cases/failing/1 project not first/test.json b/test cases/failing/1 project not first/test.json
new file mode 100644
index 0000000..70f3c41
--- /dev/null
+++ b/test cases/failing/1 project not first/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "ERROR: First statement must be a call to project"
+ }
+ ]
+}
diff --git a/test cases/failing/10 out of bounds/test.json b/test cases/failing/10 out of bounds/test.json
new file mode 100644
index 0000000..e27d990
--- /dev/null
+++ b/test cases/failing/10 out of bounds/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/10 out of bounds/meson.build:4:0: ERROR: Index 0 out of bounds of array of size 0."
+ }
+ ]
+}
diff --git a/test cases/failing/100 fallback consistency/test.json b/test cases/failing/100 fallback consistency/test.json
new file mode 100644
index 0000000..a783d8c
--- /dev/null
+++ b/test cases/failing/100 fallback consistency/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/100 fallback consistency/meson.build:7:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'"
+ }
+ ]
+}
diff --git a/test cases/failing/101 no native compiler/test.json b/test cases/failing/101 no native compiler/test.json
new file mode 100644
index 0000000..c7b5d1c
--- /dev/null
+++ b/test cases/failing/101 no native compiler/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/101 no native compiler/meson.build:12:0: ERROR: No host machine compiler for \"main.c\""
+ }
+ ]
+}
diff --git a/test cases/failing/102 subdir parse error/test.json b/test cases/failing/102 subdir parse error/test.json
new file mode 100644
index 0000000..06fd4d3
--- /dev/null
+++ b/test cases/failing/102 subdir parse error/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/102 subdir parse error/subdir/meson.build:1:0: ERROR: Plusassignment target must be an id."
+ }
+ ]
+}
diff --git a/test cases/failing/103 invalid option file/test.json b/test cases/failing/103 invalid option file/test.json
new file mode 100644
index 0000000..20dbec3
--- /dev/null
+++ b/test cases/failing/103 invalid option file/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/103 invalid option file/meson_options.txt:1:0: ERROR: lexer"
+ }
+ ]
+}
diff --git a/test cases/failing/104 no lang/test.json b/test cases/failing/104 no lang/test.json
new file mode 100644
index 0000000..62999be
--- /dev/null
+++ b/test cases/failing/104 no lang/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/104 no lang/meson.build:2:0: ERROR: No host machine compiler for \"main.c\""
+ }
+ ]
+}
diff --git a/test cases/failing/105 no glib-compile-resources/test.json b/test cases/failing/105 no glib-compile-resources/test.json
new file mode 100644
index 0000000..67dc7e4
--- /dev/null
+++ b/test cases/failing/105 no glib-compile-resources/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/105 no glib-compile-resources/meson.build:8:0: ERROR: Could not execute glib-compile-resources."
+ }
+ ]
+}
diff --git a/test cases/failing/106 fallback consistency/meson.build b/test cases/failing/106 fallback consistency/meson.build
new file mode 100644
index 0000000..1b007f5
--- /dev/null
+++ b/test cases/failing/106 fallback consistency/meson.build
@@ -0,0 +1,3 @@
+project('fallback consistency')
+
+dependency('foo')
diff --git a/test cases/failing/106 fallback consistency/subprojects/foo.wrap b/test cases/failing/106 fallback consistency/subprojects/foo.wrap
new file mode 100644
index 0000000..28055d9
--- /dev/null
+++ b/test cases/failing/106 fallback consistency/subprojects/foo.wrap
@@ -0,0 +1,6 @@
+[wrap-file]
+source_url = http://host.invalid/foo.tar.gz
+source_filename = foo.tar.gz
+
+[provide]
+foo = bar_dep
diff --git a/test cases/failing/106 fallback consistency/subprojects/foo/meson.build b/test cases/failing/106 fallback consistency/subprojects/foo/meson.build
new file mode 100644
index 0000000..fb58a4a
--- /dev/null
+++ b/test cases/failing/106 fallback consistency/subprojects/foo/meson.build
@@ -0,0 +1,6 @@
+project('sub')
+
+foo_dep = declare_dependency()
+meson.override_dependency('foo', foo_dep)
+
+bar_dep = declare_dependency()
diff --git a/test cases/failing/106 fallback consistency/test.json b/test cases/failing/106 fallback consistency/test.json
new file mode 100644
index 0000000..af1a429
--- /dev/null
+++ b/test cases/failing/106 fallback consistency/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/106 fallback consistency/meson.build:3:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'bar_dep'"
+ }
+ ]
+}
diff --git a/test cases/failing/107 number in combo/meson.build b/test cases/failing/107 number in combo/meson.build
new file mode 100644
index 0000000..1a647df
--- /dev/null
+++ b/test cases/failing/107 number in combo/meson.build
@@ -0,0 +1 @@
+project('number in combo')
diff --git a/test cases/failing/107 number in combo/nativefile.ini b/test cases/failing/107 number in combo/nativefile.ini
new file mode 100644
index 0000000..55f10fc
--- /dev/null
+++ b/test cases/failing/107 number in combo/nativefile.ini
@@ -0,0 +1,2 @@
+[built-in options]
+optimization = 1
diff --git a/test cases/failing/107 number in combo/test.json b/test cases/failing/107 number in combo/test.json
new file mode 100644
index 0000000..f5aeb4e
--- /dev/null
+++ b/test cases/failing/107 number in combo/test.json
@@ -0,0 +1,5 @@
+{
+ "stdout": [
+ { "line": "test cases/failing/107 number in combo/meson.build:1:0: ERROR: Value \"1\" (of type \"number\") for combo option \"Optimization level\" is not one of the choices. Possible choices are (as string): \"0\", \"g\", \"1\", \"2\", \"3\", \"s\"." }
+ ]
+}
diff --git a/test cases/failing/108 bool in combo/meson.build b/test cases/failing/108 bool in combo/meson.build
new file mode 100644
index 0000000..c5efd67
--- /dev/null
+++ b/test cases/failing/108 bool in combo/meson.build
@@ -0,0 +1 @@
+project('bool in combo')
diff --git a/test cases/failing/108 bool in combo/meson_options.txt b/test cases/failing/108 bool in combo/meson_options.txt
new file mode 100644
index 0000000..0c8f5de
--- /dev/null
+++ b/test cases/failing/108 bool in combo/meson_options.txt
@@ -0,0 +1,5 @@
+option(
+ 'opt',
+ type : 'combo',
+ choices : ['true', 'false']
+)
diff --git a/test cases/failing/108 bool in combo/nativefile.ini b/test cases/failing/108 bool in combo/nativefile.ini
new file mode 100644
index 0000000..b423957
--- /dev/null
+++ b/test cases/failing/108 bool in combo/nativefile.ini
@@ -0,0 +1,2 @@
+[project options]
+opt = true
diff --git a/test cases/failing/108 bool in combo/test.json b/test cases/failing/108 bool in combo/test.json
new file mode 100644
index 0000000..729ad3d
--- /dev/null
+++ b/test cases/failing/108 bool in combo/test.json
@@ -0,0 +1,5 @@
+{
+ "stdout": [
+ { "line": "test cases/failing/108 bool in combo/meson.build:1:0: ERROR: Value \"True\" (of type \"boolean\") for combo option \"opt\" is not one of the choices. Possible choices are (as string): \"true\", \"false\"." }
+ ]
+}
diff --git a/test cases/failing/11 object arithmetic/test.json b/test cases/failing/11 object arithmetic/test.json
new file mode 100644
index 0000000..5339fac
--- /dev/null
+++ b/test cases/failing/11 object arithmetic/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "match": "re",
+ "line": "test cases/failing/11 object arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*"
+ }
+ ]
+}
diff --git a/test cases/failing/12 string arithmetic/test.json b/test cases/failing/12 string arithmetic/test.json
new file mode 100644
index 0000000..476f9bb
--- /dev/null
+++ b/test cases/failing/12 string arithmetic/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "match": "re",
+ "line": "test cases/failing/12 string arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*"
+ }
+ ]
+}
diff --git a/test cases/failing/13 array arithmetic/test.json b/test cases/failing/13 array arithmetic/test.json
new file mode 100644
index 0000000..55056ce
--- /dev/null
+++ b/test cases/failing/13 array arithmetic/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/13 array arithmetic/meson.build:3:0: ERROR: Multiplication works only with integers."
+ }
+ ]
+}
diff --git a/test cases/failing/14 invalid option name/test.json b/test cases/failing/14 invalid option name/test.json
new file mode 100644
index 0000000..71e685d
--- /dev/null
+++ b/test cases/failing/14 invalid option name/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/14 invalid option name/meson_options.txt:1:0: ERROR: Option names can only contain letters, numbers or dashes."
+ }
+ ]
+}
diff --git a/test cases/failing/15 kwarg before arg/test.json b/test cases/failing/15 kwarg before arg/test.json
new file mode 100644
index 0000000..c7f72c3
--- /dev/null
+++ b/test cases/failing/15 kwarg before arg/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/15 kwarg before arg/meson.build:3:0: ERROR: All keyword arguments must be after positional arguments."
+ }
+ ]
+}
diff --git a/test cases/failing/16 extract from subproject/test.json b/test cases/failing/16 extract from subproject/test.json
new file mode 100644
index 0000000..78d45a5
--- /dev/null
+++ b/test cases/failing/16 extract from subproject/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/16 extract from subproject/meson.build:6:0: ERROR: Tried to extract objects from a subproject target."
+ }
+ ]
+}
diff --git a/test cases/failing/17 same target/test.json b/test cases/failing/17 same target/test.json
new file mode 100644
index 0000000..0005ba4
--- /dev/null
+++ b/test cases/failing/17 same target/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/17 same target/meson.build:4:0: ERROR: Tried to create target \"foo\", but a target of that name already exists."
+ }
+ ]
+}
diff --git a/test cases/failing/18 wrong plusassign/test.json b/test cases/failing/18 wrong plusassign/test.json
new file mode 100644
index 0000000..c698f85
--- /dev/null
+++ b/test cases/failing/18 wrong plusassign/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/18 wrong plusassign/meson.build:3:0: ERROR: Plusassignment target must be an id."
+ }
+ ]
+}
diff --git a/test cases/failing/19 target clash/meson.build b/test cases/failing/19 target clash/meson.build
index ca09fb5..4fd0934 100644
--- a/test cases/failing/19 target clash/meson.build
+++ b/test cases/failing/19 target clash/meson.build
@@ -8,7 +8,7 @@ project('clash', 'c')
# output location is redirected.
if host_machine.system() == 'windows' or host_machine.system() == 'cygwin'
- error('This is expected.')
+ error('MESON_SKIP_TEST test only works on platforms where executables have no suffix.')
endif
executable('clash', 'clash.c')
diff --git a/test cases/failing/19 target clash/test.json b/test cases/failing/19 target clash/test.json
new file mode 100644
index 0000000..d22b894
--- /dev/null
+++ b/test cases/failing/19 target clash/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "ERROR: Multiple producers for Ninja target \"clash\". Please rename your targets."
+ }
+ ]
+}
diff --git a/test cases/failing/2 missing file/test.json b/test cases/failing/2 missing file/test.json
new file mode 100644
index 0000000..b95b8b0
--- /dev/null
+++ b/test cases/failing/2 missing file/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/2 missing file/meson.build:3:0: ERROR: File missing.c does not exist."
+ }
+ ]
+}
diff --git a/test cases/failing/20 version/test.json b/test cases/failing/20 version/test.json
new file mode 100644
index 0000000..f330624
--- /dev/null
+++ b/test cases/failing/20 version/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "match": "re",
+ "line": "test cases/failing/20 version/meson\\.build:1:0: ERROR: Meson version is .* but project requires >100\\.0\\.0"
+ }
+ ]
+}
diff --git a/test cases/failing/21 subver/test.json b/test cases/failing/21 subver/test.json
new file mode 100644
index 0000000..f8cfd3a
--- /dev/null
+++ b/test cases/failing/21 subver/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/21 subver/meson.build:3:0: ERROR: Subproject foo version is 1.0.0 but >1.0.0 required."
+ }
+ ]
+}
diff --git a/test cases/failing/22 assert/test.json b/test cases/failing/22 assert/test.json
new file mode 100644
index 0000000..edae999
--- /dev/null
+++ b/test cases/failing/22 assert/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/22 assert/meson.build:3:0: ERROR: Assert failed: I am fail."
+ }
+ ]
+}
diff --git a/test cases/failing/23 rel testdir/test.json b/test cases/failing/23 rel testdir/test.json
new file mode 100644
index 0000000..ba983ab
--- /dev/null
+++ b/test cases/failing/23 rel testdir/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/23 rel testdir/meson.build:4:0: ERROR: Workdir keyword argument must be an absolute path."
+ }
+ ]
+}
diff --git a/test cases/failing/24 int conversion/test.json b/test cases/failing/24 int conversion/test.json
new file mode 100644
index 0000000..e749928
--- /dev/null
+++ b/test cases/failing/24 int conversion/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/24 int conversion/meson.build:3:13: ERROR: String 'notanumber' cannot be converted to int"
+ }
+ ]
+}
diff --git a/test cases/failing/25 badlang/test.json b/test cases/failing/25 badlang/test.json
new file mode 100644
index 0000000..0b23fd7
--- /dev/null
+++ b/test cases/failing/25 badlang/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/25 badlang/meson.build:3:0: ERROR: Tried to use unknown language \"nonexisting\"."
+ }
+ ]
+}
diff --git a/test cases/failing/26 output subdir/test.json b/test cases/failing/26 output subdir/test.json
new file mode 100644
index 0000000..796468d
--- /dev/null
+++ b/test cases/failing/26 output subdir/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/26 output subdir/meson.build:3:0: ERROR: Output file name must not contain a subdirectory."
+ }
+ ]
+}
diff --git a/test cases/failing/27 noprog use/test.json b/test cases/failing/27 noprog use/test.json
new file mode 100644
index 0000000..b84562e
--- /dev/null
+++ b/test cases/failing/27 noprog use/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/27 noprog use/meson.build:5:0: ERROR: Tried to use not-found external program in \"command\""
+ }
+ ]
+}
diff --git a/test cases/failing/28 no crossprop/test.json b/test cases/failing/28 no crossprop/test.json
new file mode 100644
index 0000000..a186a68
--- /dev/null
+++ b/test cases/failing/28 no crossprop/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/28 no crossprop/meson.build:3:0: ERROR: Unknown cross property: nonexisting."
+ }
+ ]
+}
diff --git a/test cases/failing/29 nested ternary/test.json b/test cases/failing/29 nested ternary/test.json
new file mode 100644
index 0000000..ba05013
--- /dev/null
+++ b/test cases/failing/29 nested ternary/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/29 nested ternary/meson.build:3:12: ERROR: Nested ternary operators are not allowed."
+ }
+ ]
+}
diff --git a/test cases/failing/3 missing subdir/test.json b/test cases/failing/3 missing subdir/test.json
new file mode 100644
index 0000000..562de25
--- /dev/null
+++ b/test cases/failing/3 missing subdir/test.json
@@ -0,0 +1,9 @@
+{
+ "stdout": [
+ {
+ "comment": "'missing/meson.build' gets transformed with os.path.sep separators",
+ "match": "re",
+ "line": "test cases/failing/3 missing subdir/meson\\.build:3:0: ERROR: Non\\-existent build file 'missing[\\\\/]meson\\.build'"
+ }
+ ]
+}
diff --git a/test cases/failing/30 invalid man extension/test.json b/test cases/failing/30 invalid man extension/test.json
new file mode 100644
index 0000000..3f77a04
--- /dev/null
+++ b/test cases/failing/30 invalid man extension/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/30 invalid man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 8"
+ }
+ ]
+}
diff --git a/test cases/failing/31 no man extension/test.json b/test cases/failing/31 no man extension/test.json
new file mode 100644
index 0000000..6e1f542
--- /dev/null
+++ b/test cases/failing/31 no man extension/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/31 no man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 8"
+ }
+ ]
+}
diff --git a/test cases/failing/32 exe static shared/meson.build b/test cases/failing/32 exe static shared/meson.build
index b102764..2ae5125 100644
--- a/test cases/failing/32 exe static shared/meson.build
+++ b/test cases/failing/32 exe static shared/meson.build
@@ -2,7 +2,7 @@ project('statchain', 'c')
host_system = host_machine.system()
if host_system == 'windows' or host_system == 'darwin'
- error('Test only fails on Linux and BSD')
+ error('MESON_SKIP_TEST test only fails on Linux and BSD')
endif
statlib = static_library('stat', 'stat.c', pic : false)
diff --git a/test cases/failing/32 exe static shared/test.json b/test cases/failing/32 exe static shared/test.json
new file mode 100644
index 0000000..51d3804
--- /dev/null
+++ b/test cases/failing/32 exe static shared/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/32 exe static shared/meson.build:9:0: ERROR: Can't link non-PIC static library 'stat' into shared library 'shr2'. Use the 'pic' option to static_library to build with PIC."
+ }
+ ]
+}
diff --git a/test cases/failing/33 non-root subproject/test.json b/test cases/failing/33 non-root subproject/test.json
new file mode 100644
index 0000000..a14cece
--- /dev/null
+++ b/test cases/failing/33 non-root subproject/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/33 non-root subproject/some/meson.build:1:0: ERROR: Subproject directory not found and someproj.wrap file not found"
+ }
+ ]
+}
diff --git a/test cases/failing/34 dependency not-required then required/test.json b/test cases/failing/34 dependency not-required then required/test.json
new file mode 100644
index 0000000..3cf35f5
--- /dev/null
+++ b/test cases/failing/34 dependency not-required then required/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "match": "re",
+ "line": ".*/meson\\.build:4:0: ERROR: (Pkg-config binary for machine MachineChoice\\.HOST not found\\. Giving up\\.|Dependency \"foo\\-bar\\-xyz\\-12\\.3\" not found, tried .*)"
+ }
+ ]
+}
diff --git a/test cases/failing/35 project argument after target/test.json b/test cases/failing/35 project argument after target/test.json
new file mode 100644
index 0000000..f5efd9b
--- /dev/null
+++ b/test cases/failing/35 project argument after target/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/35 project argument after target/meson.build:7:0: ERROR: Tried to use 'add_project_arguments' after a build target has been declared."
+ }
+ ]
+}
diff --git a/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build b/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build
index 54d434c..874b581 100644
--- a/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build
+++ b/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build
@@ -1,3 +1,7 @@
project('impossible-dep-test', 'c', version : '1.0')
+if not dependency('zlib', required: false).found()
+ error('MESON_SKIP_TEST test requires zlib')
+endif
+
dependency('zlib', version : ['>=1.0', '<1.0'])
diff --git a/test cases/failing/36 pkgconfig dependency impossible conditions/test.json b/test cases/failing/36 pkgconfig dependency impossible conditions/test.json
new file mode 100644
index 0000000..2ce62ac
--- /dev/null
+++ b/test cases/failing/36 pkgconfig dependency impossible conditions/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/36 pkgconfig dependency impossible conditions/meson.build:7:0: ERROR: Dependency 'zlib' was already checked and was not found"
+ }
+ ]
+}
diff --git a/test cases/failing/37 has function external dependency/test.json b/test cases/failing/37 has function external dependency/test.json
new file mode 100644
index 0000000..81d6f91
--- /dev/null
+++ b/test cases/failing/37 has function external dependency/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/37 has function external dependency/meson.build:8:3: ERROR: Dependencies must be external dependencies"
+ }
+ ]
+}
diff --git a/test cases/failing/38 libdir must be inside prefix/test.json b/test cases/failing/38 libdir must be inside prefix/test.json
index 1cd893c..d9256d1 100644
--- a/test cases/failing/38 libdir must be inside prefix/test.json
+++ b/test cases/failing/38 libdir must be inside prefix/test.json
@@ -1,3 +1,10 @@
{
- "do_not_set_opts": ["libdir"]
+ "do_not_set_opts": [
+ "libdir"
+ ],
+ "stdout": [
+ {
+ "line": "test cases/failing/38 libdir must be inside prefix/meson.build:1:0: ERROR: The value of the 'libdir' option is '/opt/lib' which must be a subdir of the prefix '/usr'."
+ }
+ ]
}
diff --git a/test cases/failing/39 prefix absolute/test.json b/test cases/failing/39 prefix absolute/test.json
index 4e0f6cd..2770243 100644
--- a/test cases/failing/39 prefix absolute/test.json
+++ b/test cases/failing/39 prefix absolute/test.json
@@ -1,3 +1,11 @@
{
- "do_not_set_opts": ["prefix"]
+ "do_not_set_opts": [
+ "prefix"
+ ],
+ "stdout": [
+ {
+ "comment": "literal 'some/path/notabs' appears in output, irrespective of os.path.sep, as that's the prefix",
+ "line": "test cases/failing/39 prefix absolute/meson.build:1:0: ERROR: prefix value 'some/path/notabs' must be an absolute path"
+ }
+ ]
}
diff --git a/test cases/failing/4 missing meson.build/test.json b/test cases/failing/4 missing meson.build/test.json
new file mode 100644
index 0000000..3857090
--- /dev/null
+++ b/test cases/failing/4 missing meson.build/test.json
@@ -0,0 +1,9 @@
+{
+ "stdout": [
+ {
+ "match": "re",
+ "comment": "'subdir/meson.build' gets transformed with os.path.sep separators",
+ "line": "test cases/failing/4 missing meson\\.build/meson\\.build:3:0: ERROR: Non\\-existent build file 'subdir[\\\\/]meson\\.build'"
+ }
+ ]
+}
diff --git a/test cases/failing/40 kwarg assign/test.json b/test cases/failing/40 kwarg assign/test.json
new file mode 100644
index 0000000..671eb3f
--- /dev/null
+++ b/test cases/failing/40 kwarg assign/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/40 kwarg assign/meson.build:3:0: ERROR: Tried to assign values inside an argument list."
+ }
+ ]
+}
diff --git a/test cases/failing/41 custom target plainname many inputs/test.json b/test cases/failing/41 custom target plainname many inputs/test.json
new file mode 100644
index 0000000..8c15cda
--- /dev/null
+++ b/test cases/failing/41 custom target plainname many inputs/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/41 custom target plainname many inputs/meson.build:5:0: ERROR: Output cannot contain @PLAINNAME@ or @BASENAME@ when there is more than one input (we can't know which to use)"
+ }
+ ]
+}
diff --git a/test cases/failing/42 custom target outputs not matching install_dirs/meson.build b/test cases/failing/42 custom target outputs not matching install_dirs/meson.build
index 45bd7b3..765e237 100644
--- a/test cases/failing/42 custom target outputs not matching install_dirs/meson.build
+++ b/test cases/failing/42 custom target outputs not matching install_dirs/meson.build
@@ -3,7 +3,7 @@ project('outputs not matching install_dirs', 'c')
gen = find_program('generator.py')
if meson.backend() != 'ninja'
- error('Failing manually, test is only for the ninja backend')
+ error('MESON_SKIP_TEST test is only for the ninja backend')
endif
custom_target('too-few-install-dirs',
diff --git a/test cases/failing/42 custom target outputs not matching install_dirs/test.json b/test cases/failing/42 custom target outputs not matching install_dirs/test.json
index e59cb9f..f9e2ba7 100644
--- a/test cases/failing/42 custom target outputs not matching install_dirs/test.json
+++ b/test cases/failing/42 custom target outputs not matching install_dirs/test.json
@@ -1,10 +1,33 @@
{
"installed": [
- {"type": "file", "file": "usr/include/diff.h"},
- {"type": "file", "file": "usr/include/first.h"},
- {"type": "file", "file": "usr/bin/diff.sh"},
- {"type": "file", "file": "usr/bin/second.sh"},
- {"type": "file", "file": "opt/same.h"},
- {"type": "file", "file": "opt/same.sh"}
+ {
+ "type": "file",
+ "file": "usr/include/diff.h"
+ },
+ {
+ "type": "file",
+ "file": "usr/include/first.h"
+ },
+ {
+ "type": "file",
+ "file": "usr/bin/diff.sh"
+ },
+ {
+ "type": "file",
+ "file": "usr/bin/second.sh"
+ },
+ {
+ "type": "file",
+ "file": "opt/same.h"
+ },
+ {
+ "type": "file",
+ "file": "opt/same.sh"
+ }
+ ],
+ "stdout": [
+ {
+ "line": "ERROR: Target 'too-few-install-dirs' has 3 outputs: ['toofew.h', 'toofew.c', 'toofew.sh'], but only 2 \"install_dir\"s were found."
+ }
]
}
diff --git a/test cases/failing/43 project name colon/test.json b/test cases/failing/43 project name colon/test.json
new file mode 100644
index 0000000..7a55574
--- /dev/null
+++ b/test cases/failing/43 project name colon/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/43 project name colon/meson.build:1:0: ERROR: Project name 'name with :' must not contain ':'"
+ }
+ ]
+}
diff --git a/test cases/failing/44 abs subdir/test.json b/test cases/failing/44 abs subdir/test.json
new file mode 100644
index 0000000..0aa56f6
--- /dev/null
+++ b/test cases/failing/44 abs subdir/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/44 abs subdir/meson.build:5:0: ERROR: Subdir argument must be a relative path."
+ }
+ ]
+}
diff --git a/test cases/failing/45 abspath to srcdir/test.json b/test cases/failing/45 abspath to srcdir/test.json
new file mode 100644
index 0000000..b6a87fe
--- /dev/null
+++ b/test cases/failing/45 abspath to srcdir/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/45 abspath to srcdir/meson.build:3:0: ERROR: Tried to form an absolute path to a source dir. You should not do that but use relative paths instead."
+ }
+ ]
+}
diff --git a/test cases/failing/46 pkgconfig variables reserved/test.json b/test cases/failing/46 pkgconfig variables reserved/test.json
new file mode 100644
index 0000000..b92ee17
--- /dev/null
+++ b/test cases/failing/46 pkgconfig variables reserved/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/46 pkgconfig variables reserved/meson.build:8:5: ERROR: Variable \"prefix\" is reserved"
+ }
+ ]
+}
diff --git a/test cases/failing/47 pkgconfig variables zero length/test.json b/test cases/failing/47 pkgconfig variables zero length/test.json
new file mode 100644
index 0000000..097fee1
--- /dev/null
+++ b/test cases/failing/47 pkgconfig variables zero length/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/47 pkgconfig variables zero length/meson.build:8:5: ERROR: Invalid variable \"=value\". Variables must be in 'name=value' format"
+ }
+ ]
+}
diff --git a/test cases/failing/48 pkgconfig variables zero length value/test.json b/test cases/failing/48 pkgconfig variables zero length value/test.json
new file mode 100644
index 0000000..50a35ce
--- /dev/null
+++ b/test cases/failing/48 pkgconfig variables zero length value/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/48 pkgconfig variables zero length value/meson.build:8:5: ERROR: Invalid variable \"key=\". Variables must be in 'name=value' format"
+ }
+ ]
+}
diff --git a/test cases/failing/49 pkgconfig variables not key value/test.json b/test cases/failing/49 pkgconfig variables not key value/test.json
new file mode 100644
index 0000000..cf07e62
--- /dev/null
+++ b/test cases/failing/49 pkgconfig variables not key value/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/49 pkgconfig variables not key value/meson.build:8:5: ERROR: Invalid variable \"this_should_be_key_value\". Variables must be in 'name=value' format"
+ }
+ ]
+}
diff --git a/test cases/failing/5 misplaced option/test.json b/test cases/failing/5 misplaced option/test.json
new file mode 100644
index 0000000..12afdf0
--- /dev/null
+++ b/test cases/failing/5 misplaced option/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/5 misplaced option/meson.build:3:0: ERROR: Tried to call option() in build description file. All options must be in the option file."
+ }
+ ]
+}
diff --git a/test cases/failing/50 executable comparison/test.json b/test cases/failing/50 executable comparison/test.json
new file mode 100644
index 0000000..585b382
--- /dev/null
+++ b/test cases/failing/50 executable comparison/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/50 executable comparison/meson.build:6:0: ERROR: exe1 can only be compared for equality."
+ }
+ ]
+}
diff --git a/test cases/failing/51 inconsistent comparison/test.json b/test cases/failing/51 inconsistent comparison/test.json
new file mode 100644
index 0000000..5867f0a
--- /dev/null
+++ b/test cases/failing/51 inconsistent comparison/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/51 inconsistent comparison/meson.build:5:0: ERROR: Values of different types (list, str) cannot be compared using <."
+ }
+ ]
+}
diff --git a/test cases/failing/52 slashname/test.json b/test cases/failing/52 slashname/test.json
new file mode 100644
index 0000000..180400a
--- /dev/null
+++ b/test cases/failing/52 slashname/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/52 slashname/meson.build:11:0: ERROR: Problem encountered: Re-enable me once slash in name is finally prohibited."
+ }
+ ]
+}
diff --git a/test cases/failing/53 reserved meson prefix/test.json b/test cases/failing/53 reserved meson prefix/test.json
new file mode 100644
index 0000000..502d96a
--- /dev/null
+++ b/test cases/failing/53 reserved meson prefix/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/53 reserved meson prefix/meson.build:3:0: ERROR: The \"meson-\" prefix is reserved and cannot be used for top-level subdir()."
+ }
+ ]
+}
diff --git a/test cases/failing/54 wrong shared crate type/meson.build b/test cases/failing/54 wrong shared crate type/meson.build
index 69ac3da..b9fcad4 100644
--- a/test cases/failing/54 wrong shared crate type/meson.build
+++ b/test cases/failing/54 wrong shared crate type/meson.build
@@ -1,3 +1,7 @@
-project('test', 'rust')
+project('test')
+
+if not add_languages('rust', required: false)
+ error('MESON_SKIP_TEST test requires rust compiler')
+endif
shared_library('test', 'foo.rs', rust_crate_type : 'staticlib')
diff --git a/test cases/failing/54 wrong shared crate type/test.json b/test cases/failing/54 wrong shared crate type/test.json
new file mode 100644
index 0000000..5cced6f
--- /dev/null
+++ b/test cases/failing/54 wrong shared crate type/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/54 wrong shared crate type/meson.build:7:0: ERROR: Crate type \"staticlib\" invalid for dynamic libraries; must be \"dylib\" or \"cdylib\""
+ }
+ ]
+}
diff --git a/test cases/failing/55 wrong static crate type/meson.build b/test cases/failing/55 wrong static crate type/meson.build
index c094613..109907f 100644
--- a/test cases/failing/55 wrong static crate type/meson.build
+++ b/test cases/failing/55 wrong static crate type/meson.build
@@ -1,3 +1,7 @@
-project('test', 'rust')
+project('test')
+
+if not add_languages('rust', required: false)
+ error('MESON_SKIP_TEST test requires rust compiler')
+endif
static_library('test', 'foo.rs', rust_crate_type : 'cdylib')
diff --git a/test cases/failing/55 wrong static crate type/test.json b/test cases/failing/55 wrong static crate type/test.json
new file mode 100644
index 0000000..7073f7b
--- /dev/null
+++ b/test cases/failing/55 wrong static crate type/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/55 wrong static crate type/meson.build:7:0: ERROR: Crate type \"cdylib\" invalid for static libraries; must be \"rlib\" or \"staticlib\""
+ }
+ ]
+}
diff --git a/test cases/failing/56 or on new line/test.json b/test cases/failing/56 or on new line/test.json
new file mode 100644
index 0000000..c55cee6
--- /dev/null
+++ b/test cases/failing/56 or on new line/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/56 or on new line/meson.build:4:8: ERROR: Invalid or clause."
+ }
+ ]
+}
diff --git a/test cases/failing/57 kwarg in module/test.json b/test cases/failing/57 kwarg in module/test.json
new file mode 100644
index 0000000..cafb3ab
--- /dev/null
+++ b/test cases/failing/57 kwarg in module/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/57 kwarg in module/meson.build:3:0: ERROR: Function does not take keyword arguments."
+ }
+ ]
+}
diff --git a/test cases/failing/58 link with executable/test.json b/test cases/failing/58 link with executable/test.json
new file mode 100644
index 0000000..d3975c1
--- /dev/null
+++ b/test cases/failing/58 link with executable/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/58 link with executable/meson.build:4:0: ERROR: Link target 'prog' is not linkable."
+ }
+ ]
+}
diff --git a/test cases/failing/59 assign custom target index/test.json b/test cases/failing/59 assign custom target index/test.json
new file mode 100644
index 0000000..07ecb91
--- /dev/null
+++ b/test cases/failing/59 assign custom target index/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/59 assign custom target index/meson.build:24:0: ERROR: Assignment target must be an id."
+ }
+ ]
+}
diff --git a/test cases/failing/6 missing incdir/test.json b/test cases/failing/6 missing incdir/test.json
new file mode 100644
index 0000000..172d8a9
--- /dev/null
+++ b/test cases/failing/6 missing incdir/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/6 missing incdir/meson.build:3:0: ERROR: Include dir nosuchdir does not exist."
+ }
+ ]
+}
diff --git a/test cases/failing/60 getoption prefix/test.json b/test cases/failing/60 getoption prefix/test.json
new file mode 100644
index 0000000..03bf419
--- /dev/null
+++ b/test cases/failing/60 getoption prefix/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/60 getoption prefix/meson.build:5:0: ERROR: Having a colon in option name is forbidden, projects are not allowed to directly access options of other subprojects."
+ }
+ ]
+}
diff --git a/test cases/failing/61 bad option argument/test.json b/test cases/failing/61 bad option argument/test.json
new file mode 100644
index 0000000..4002005
--- /dev/null
+++ b/test cases/failing/61 bad option argument/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/61 bad option argument/meson_options.txt:1:0: ERROR: Invalid kwargs for option \"name\": \"vaule\""
+ }
+ ]
+}
diff --git a/test cases/failing/62 subproj filegrab/test.json b/test cases/failing/62 subproj filegrab/test.json
new file mode 100644
index 0000000..dd0d7bb
--- /dev/null
+++ b/test cases/failing/62 subproj filegrab/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/62 subproj filegrab/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file prog.c from a different subproject."
+ }
+ ]
+}
diff --git a/test cases/failing/63 grab subproj/test.json b/test cases/failing/63 grab subproj/test.json
new file mode 100644
index 0000000..8147905
--- /dev/null
+++ b/test cases/failing/63 grab subproj/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/63 grab subproj/meson.build:7:0: ERROR: Sandbox violation: Tried to grab file sub.c from a different subproject."
+ }
+ ]
+}
diff --git a/test cases/failing/64 grab sibling/test.json b/test cases/failing/64 grab sibling/test.json
new file mode 100644
index 0000000..1604d47
--- /dev/null
+++ b/test cases/failing/64 grab sibling/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/64 grab sibling/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file sneaky.c from a different subproject."
+ }
+ ]
+}
diff --git a/test cases/failing/65 string as link target/test.json b/test cases/failing/65 string as link target/test.json
new file mode 100644
index 0000000..e212482
--- /dev/null
+++ b/test cases/failing/65 string as link target/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/65 string as link target/meson.build:2:0: ERROR: '' is not a target."
+ }
+ ]
+}
diff --git a/test cases/failing/66 dependency not-found and required/test.json b/test cases/failing/66 dependency not-found and required/test.json
new file mode 100644
index 0000000..5b13316
--- /dev/null
+++ b/test cases/failing/66 dependency not-found and required/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/66 dependency not-found and required/meson.build:2:0: ERROR: Dependency is both required and not-found"
+ }
+ ]
+}
diff --git a/test cases/failing/67 subproj different versions/test.json b/test cases/failing/67 subproj different versions/test.json
new file mode 100644
index 0000000..d16daf9
--- /dev/null
+++ b/test cases/failing/67 subproj different versions/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/67 subproj different versions/subprojects/b/meson.build:3:0: ERROR: Dependency 'c' was already checked and was not found"
+ }
+ ]
+}
diff --git a/test cases/failing/68 wrong boost module/meson.build b/test cases/failing/68 wrong boost module/meson.build
index 7fb3a40..937e587 100644
--- a/test cases/failing/68 wrong boost module/meson.build
+++ b/test cases/failing/68 wrong boost module/meson.build
@@ -1,5 +1,9 @@
project('boosttest', 'cpp',
default_options : ['cpp_std=c++11'])
+if not dependency('boost', required: false).found()
+ error('MESON_SKIP_TEST test requires boost')
+endif
+
# abc doesn't exist
linkdep = dependency('boost', modules : ['thread', 'system', 'test', 'abc'])
diff --git a/test cases/failing/68 wrong boost module/test.json b/test cases/failing/68 wrong boost module/test.json
new file mode 100644
index 0000000..9ef1b0f
--- /dev/null
+++ b/test cases/failing/68 wrong boost module/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/68 wrong boost module/meson.build:9:0: ERROR: Dependency \"boost\" not found"
+ }
+ ]
+}
diff --git a/test cases/failing/69 install_data rename bad size/test.json b/test cases/failing/69 install_data rename bad size/test.json
new file mode 100644
index 0000000..1329fec
--- /dev/null
+++ b/test cases/failing/69 install_data rename bad size/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/69 install_data rename bad size/meson.build:3:0: ERROR: Size of rename argument is different from number of sources"
+ }
+ ]
+}
diff --git a/test cases/failing/7 go to subproject/test.json b/test cases/failing/7 go to subproject/test.json
new file mode 100644
index 0000000..c254757
--- /dev/null
+++ b/test cases/failing/7 go to subproject/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/7 go to subproject/meson.build:3:0: ERROR: Must not go into subprojects dir with subdir(), use subproject() instead."
+ }
+ ]
+}
diff --git a/test cases/failing/70 skip only subdir/test.json b/test cases/failing/70 skip only subdir/test.json
new file mode 100644
index 0000000..3b40b66
--- /dev/null
+++ b/test cases/failing/70 skip only subdir/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/70 skip only subdir/meson.build:8:0: ERROR: File main.cpp does not exist."
+ }
+ ]
+}
diff --git a/test cases/failing/71 dual override/test.json b/test cases/failing/71 dual override/test.json
new file mode 100644
index 0000000..66409e6
--- /dev/null
+++ b/test cases/failing/71 dual override/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/71 dual override/meson.build:5:6: ERROR: Tried to override executable \"override\" which has already been overridden."
+ }
+ ]
+}
diff --git a/test cases/failing/72 override used/test.json b/test cases/failing/72 override used/test.json
new file mode 100644
index 0000000..29a58f1
--- /dev/null
+++ b/test cases/failing/72 override used/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/72 override used/meson.build:5:6: ERROR: Tried to override finding of executable \"something.py\" which has already been found."
+ }
+ ]
+}
diff --git a/test cases/failing/73 run_command unclean exit/test.json b/test cases/failing/73 run_command unclean exit/test.json
new file mode 100644
index 0000000..beda187
--- /dev/null
+++ b/test cases/failing/73 run_command unclean exit/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "match": "re",
+ "line": "test cases/failing/73 run_command unclean exit/meson\\.build:4:0: ERROR: Command \".*[\\\\/]test cases[\\\\/]failing[\\\\/]73 run_command unclean exit[\\\\/]\\.[\\\\/]returncode\\.py 1\" failed with status 1\\."
+ }
+ ]
+}
diff --git a/test cases/failing/74 int literal leading zero/test.json b/test cases/failing/74 int literal leading zero/test.json
new file mode 100644
index 0000000..78a735e
--- /dev/null
+++ b/test cases/failing/74 int literal leading zero/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "comment": "this error message is not very informative",
+ "line": "test cases/failing/74 int literal leading zero/meson.build:5:13: ERROR: Expecting eof got number."
+ }
+ ]
+}
diff --git a/test cases/failing/75 configuration immutable/test.json b/test cases/failing/75 configuration immutable/test.json
new file mode 100644
index 0000000..3365aae
--- /dev/null
+++ b/test cases/failing/75 configuration immutable/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/75 configuration immutable/meson.build:12:16: ERROR: Can not set values on configuration object that has been used."
+ }
+ ]
+}
diff --git a/test cases/failing/76 link with shared module on osx/meson.build b/test cases/failing/76 link with shared module on osx/meson.build
index 2c714f9..bf18b36 100644
--- a/test cases/failing/76 link with shared module on osx/meson.build
+++ b/test cases/failing/76 link with shared module on osx/meson.build
@@ -1,7 +1,7 @@
project('link with shared module', 'c')
if host_machine.system() != 'darwin'
- error('Test only fails on OSX')
+ error('MESON_SKIP_TEST test only fails on OSX')
endif
m = shared_module('mymodule', 'module.c')
diff --git a/test cases/failing/76 link with shared module on osx/test.json b/test cases/failing/76 link with shared module on osx/test.json
new file mode 100644
index 0000000..4e2856f
--- /dev/null
+++ b/test cases/failing/76 link with shared module on osx/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/76 link with shared module on osx/meson.build:8:0: ERROR: target links against shared modules."
+ }
+ ]
+}
diff --git a/test cases/failing/77 non ascii in ascii encoded configure file/test.json b/test cases/failing/77 non ascii in ascii encoded configure file/test.json
new file mode 100644
index 0000000..e35b95b
--- /dev/null
+++ b/test cases/failing/77 non ascii in ascii encoded configure file/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "match": "re",
+ "line": "test cases/failing/77 non ascii in ascii encoded configure file/meson\\.build:5:0: ERROR: Could not write output file .*[\\\\/]config9\\.h: 'ascii' codec can't encode character '\\\\u0434' in position 17: ordinal not in range\\(128\\)"
+ }
+ ]
+}
diff --git a/test cases/failing/78 subproj dependency not-found and required/test.json b/test cases/failing/78 subproj dependency not-found and required/test.json
new file mode 100644
index 0000000..534b4f4
--- /dev/null
+++ b/test cases/failing/78 subproj dependency not-found and required/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/78 subproj dependency not-found and required/meson.build:2:0: ERROR: Subproject directory not found and missing.wrap file not found"
+ }
+ ]
+}
diff --git a/test cases/failing/79 unfound run/test.json b/test cases/failing/79 unfound run/test.json
new file mode 100644
index 0000000..6baafc0
--- /dev/null
+++ b/test cases/failing/79 unfound run/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/79 unfound run/meson.build:4:0: ERROR: Tried to use non-existing executable 'nonexisting_prog'"
+ }
+ ]
+}
diff --git a/test cases/failing/8 recursive/test.json b/test cases/failing/8 recursive/test.json
new file mode 100644
index 0000000..b4c964c
--- /dev/null
+++ b/test cases/failing/8 recursive/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/8 recursive/subprojects/b/meson.build:3:0: ERROR: Recursive include of subprojects: a => b => a."
+ }
+ ]
+}
diff --git a/test cases/failing/80 framework dependency with version/meson.build b/test cases/failing/80 framework dependency with version/meson.build
index 1ead388..b7e04ba 100644
--- a/test cases/failing/80 framework dependency with version/meson.build
+++ b/test cases/failing/80 framework dependency with version/meson.build
@@ -1,4 +1,8 @@
project('framework dependency with version', 'c')
+
+if host_machine.system() != 'darwin'
+ error('MESON_SKIP_TEST test only applicable on darwin')
+endif
+
# do individual frameworks have a meaningful version to test? And multiple frameworks might be listed...
-# otherwise we're not on OSX and this will definitely fail
dep = dependency('appleframeworks', modules: 'foundation', version: '>0')
diff --git a/test cases/failing/80 framework dependency with version/test.json b/test cases/failing/80 framework dependency with version/test.json
new file mode 100644
index 0000000..5cbc129
--- /dev/null
+++ b/test cases/failing/80 framework dependency with version/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/80 framework dependency with version/meson.build:8:0: ERROR: Unknown version of dependency 'appleframeworks', but need ['>0']."
+ }
+ ]
+}
diff --git a/test cases/failing/81 override exe config/test.json b/test cases/failing/81 override exe config/test.json
new file mode 100644
index 0000000..f19785b
--- /dev/null
+++ b/test cases/failing/81 override exe config/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/81 override exe config/meson.build:6:0: ERROR: Program 'bar' was overridden with the compiled executable 'foo' and therefore cannot be used during configuration"
+ }
+ ]
+}
diff --git a/test cases/failing/82 gl dependency with version/meson.build b/test cases/failing/82 gl dependency with version/meson.build
index 3014d43..0127093 100644
--- a/test cases/failing/82 gl dependency with version/meson.build
+++ b/test cases/failing/82 gl dependency with version/meson.build
@@ -2,7 +2,7 @@ project('gl dependency with version', 'c')
host_system = host_machine.system()
if host_system != 'windows' and host_system != 'darwin'
- error('Test only fails on Windows and OSX')
+ error('MESON_SKIP_TEST: test only fails on Windows and OSX')
endif
# gl dependency found via system method doesn't have a meaningful version to check
diff --git a/test cases/failing/82 gl dependency with version/test.json b/test cases/failing/82 gl dependency with version/test.json
new file mode 100644
index 0000000..2c63a2c
--- /dev/null
+++ b/test cases/failing/82 gl dependency with version/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/82 gl dependency with version/meson.build:9:0: ERROR: Unknown version of dependency 'gl', but need ['>0']."
+ }
+ ]
+}
diff --git a/test cases/failing/83 threads dependency with version/test.json b/test cases/failing/83 threads dependency with version/test.json
new file mode 100644
index 0000000..b131be4
--- /dev/null
+++ b/test cases/failing/83 threads dependency with version/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/83 threads dependency with version/meson.build:3:0: ERROR: Unknown version of dependency 'threads', but need ['>0']."
+ }
+ ]
+}
diff --git a/test cases/failing/84 gtest dependency with version/meson.build b/test cases/failing/84 gtest dependency with version/meson.build
index 3d90994..b43a047 100644
--- a/test cases/failing/84 gtest dependency with version/meson.build
+++ b/test cases/failing/84 gtest dependency with version/meson.build
@@ -1,3 +1,8 @@
project('gtest dependency with version', ['c', 'cpp'])
+
+if not dependency('gtest', method: 'system', required: false).found()
+ error('MESON_SKIP_TEST test requires gtest')
+endif
+
# discovering gtest version is not yet implemented
dep = dependency('gtest', method: 'system', version: '>0')
diff --git a/test cases/failing/84 gtest dependency with version/test.json b/test cases/failing/84 gtest dependency with version/test.json
new file mode 100644
index 0000000..e1bbcac
--- /dev/null
+++ b/test cases/failing/84 gtest dependency with version/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/84 gtest dependency with version/meson.build:8:0: ERROR: Dependency 'gtest' was already checked and was not found"
+ }
+ ]
+}
diff --git a/test cases/failing/85 dub libray/meson.build b/test cases/failing/85 dub libray/meson.build
index 5b0ccac..306d5b3 100644
--- a/test cases/failing/85 dub libray/meson.build
+++ b/test cases/failing/85 dub libray/meson.build
@@ -1,3 +1,11 @@
-project('dub', 'd')
+project('dub')
+
+if not add_languages('d', required: false)
+ error('MESON_SKIP_TEST test requires D compiler')
+endif
+
+if not find_program('dub', required: false).found()
+ error('MESON_SKIP_TEST test requires dub')
+endif
dependency('dubtestproject', method: 'dub') # Not library (none)
diff --git a/test cases/failing/85 dub libray/test.json b/test cases/failing/85 dub libray/test.json
new file mode 100644
index 0000000..a8b3e28
--- /dev/null
+++ b/test cases/failing/85 dub libray/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/85 dub libray/meson.build:11:0: ERROR: Dependency \"dubtestproject\" not found"
+ }
+ ]
+}
diff --git a/test cases/failing/86 dub executable/meson.build b/test cases/failing/86 dub executable/meson.build
index 63fd631..9a134ea 100644
--- a/test cases/failing/86 dub executable/meson.build
+++ b/test cases/failing/86 dub executable/meson.build
@@ -1,3 +1,11 @@
-project('dub', 'd')
+project('dub')
+
+if not add_languages('d', required: false)
+ error('MESON_SKIP_TEST test requires D compiler')
+endif
+
+if not find_program('dub', required: false).found()
+ error('MESON_SKIP_TEST test requires dub')
+endif
dependency('dubtestproject:test1', method: 'dub') # Not library (executable)
diff --git a/test cases/failing/86 dub executable/test.json b/test cases/failing/86 dub executable/test.json
new file mode 100644
index 0000000..f9944af
--- /dev/null
+++ b/test cases/failing/86 dub executable/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/86 dub executable/meson.build:11:0: ERROR: Dependency \"dubtestproject:test1\" not found"
+ }
+ ]
+}
diff --git a/test cases/failing/87 dub compiler/meson.build b/test cases/failing/87 dub compiler/meson.build
index c93ccbc..36f1849 100644
--- a/test cases/failing/87 dub compiler/meson.build
+++ b/test cases/failing/87 dub compiler/meson.build
@@ -1,4 +1,8 @@
-project('dub', 'd')
+project('dub')
+
+if not add_languages('d', required: false)
+ error('MESON_SKIP_TEST test requires D compiler')
+endif
if meson.get_compiler('d').get_id() == 'dmd'
if host_machine.system() == 'windows' or host_machine.system() == 'cygwin'
@@ -6,4 +10,8 @@ if meson.get_compiler('d').get_id() == 'dmd'
endif
endif
+if not find_program('dub', required: false).found()
+ error('MESON_SKIP_TEST test requires dub')
+endif
+
dependency('dubtestproject:test2', method: 'dub') # Compiler mismatch
diff --git a/test cases/failing/87 dub compiler/test.json b/test cases/failing/87 dub compiler/test.json
index acb7da8..f28312f 100644
--- a/test cases/failing/87 dub compiler/test.json
+++ b/test cases/failing/87 dub compiler/test.json
@@ -2,8 +2,18 @@
"matrix": {
"options": {
"warning_level": [
- { "val": "1", "skip_on_env": [ "SINGLE_DUB_COMPILER" ] }
+ {
+ "val": "1",
+ "skip_on_env": [
+ "SINGLE_DUB_COMPILER"
+ ]
+ }
]
}
- }
+ },
+ "stdout": [
+ {
+ "line": "test cases/failing/87 dub compiler/meson.build:17:0: ERROR: Dependency \"dubtestproject:test2\" not found"
+ }
+ ]
}
diff --git a/test cases/failing/88 subproj not-found dep/test.json b/test cases/failing/88 subproj not-found dep/test.json
new file mode 100644
index 0000000..a1c4231
--- /dev/null
+++ b/test cases/failing/88 subproj not-found dep/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/88 subproj not-found dep/meson.build:2:0: ERROR: Could not find dependency notfound_dep in subproject somesubproj"
+ }
+ ]
+}
diff --git a/test cases/failing/89 invalid configure file/test.json b/test cases/failing/89 invalid configure file/test.json
new file mode 100644
index 0000000..921ce61
--- /dev/null
+++ b/test cases/failing/89 invalid configure file/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/89 invalid configure file/meson.build:3:0: ERROR: \"install_dir\" must be specified when \"install\" in a configure_file is true"
+ }
+ ]
+}
diff --git a/test cases/failing/9 missing extra file/test.json b/test cases/failing/9 missing extra file/test.json
new file mode 100644
index 0000000..188b6a6
--- /dev/null
+++ b/test cases/failing/9 missing extra file/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/9 missing extra file/meson.build:3:0: ERROR: File missing.txt does not exist."
+ }
+ ]
+}
diff --git a/test cases/failing/90 kwarg dupe/test.json b/test cases/failing/90 kwarg dupe/test.json
new file mode 100644
index 0000000..a8df75d
--- /dev/null
+++ b/test cases/failing/90 kwarg dupe/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/90 kwarg dupe/meson.build:5:0: ERROR: Entry \"install\" defined both as a keyword argument and in a \"kwarg\" entry."
+ }
+ ]
+}
diff --git a/test cases/failing/91 missing pch file/test.json b/test cases/failing/91 missing pch file/test.json
new file mode 100644
index 0000000..166f627
--- /dev/null
+++ b/test cases/failing/91 missing pch file/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "comment": "literal 'pch/prog.h' from meson.build appears in output, irrespective of os.path.sep",
+ "line": "test cases/failing/91 missing pch file/meson.build:2:0: ERROR: File pch/prog.h does not exist."
+ }
+ ]
+}
diff --git a/test cases/failing/92 pch source different folder/test.json b/test cases/failing/92 pch source different folder/test.json
new file mode 100644
index 0000000..d94db50
--- /dev/null
+++ b/test cases/failing/92 pch source different folder/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/92 pch source different folder/meson.build:4:0: ERROR: PCH files must be stored in the same folder."
+ }
+ ]
+}
diff --git a/test cases/failing/93 vala without c/test.json b/test cases/failing/93 vala without c/test.json
new file mode 100644
index 0000000..6185b7e
--- /dev/null
+++ b/test cases/failing/93 vala without c/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/93 vala without c/meson.build:2:0: ERROR: Compiling Vala requires C. Add C to your project languages and rerun Meson."
+ }
+ ]
+}
diff --git a/test cases/failing/94 unknown config tool/test.json b/test cases/failing/94 unknown config tool/test.json
new file mode 100644
index 0000000..a001152
--- /dev/null
+++ b/test cases/failing/94 unknown config tool/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/94 unknown config tool/meson.build:2:0: ERROR: Dependency \"no-such-config-tool\" not found"
+ }
+ ]
+}
diff --git a/test cases/failing/95 custom target install data/test.json b/test cases/failing/95 custom target install data/test.json
new file mode 100644
index 0000000..64ef530
--- /dev/null
+++ b/test cases/failing/95 custom target install data/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/95 custom target install data/meson.build:11:0: ERROR: Argument must be string or file."
+ }
+ ]
+}
diff --git a/test cases/failing/96 add dict non string key/test.json b/test cases/failing/96 add dict non string key/test.json
new file mode 100644
index 0000000..5fd4033
--- /dev/null
+++ b/test cases/failing/96 add dict non string key/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/96 add dict non string key/meson.build:9:0: ERROR: Key must be a string"
+ }
+ ]
+}
diff --git a/test cases/failing/97 add dict duplicate keys/test.json b/test cases/failing/97 add dict duplicate keys/test.json
new file mode 100644
index 0000000..9d01551
--- /dev/null
+++ b/test cases/failing/97 add dict duplicate keys/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/97 add dict duplicate keys/meson.build:9:0: ERROR: Duplicate dictionary key: myKey"
+ }
+ ]
+}
diff --git a/test cases/failing/98 fallback consistency/test.json b/test cases/failing/98 fallback consistency/test.json
new file mode 100644
index 0000000..fd77bad
--- /dev/null
+++ b/test cases/failing/98 fallback consistency/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/98 fallback consistency/meson.build:4:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'"
+ }
+ ]
+}
diff --git a/test cases/failing/99 no native prop/test.json b/test cases/failing/99 no native prop/test.json
new file mode 100644
index 0000000..8c320d9
--- /dev/null
+++ b/test cases/failing/99 no native prop/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/failing/99 no native prop/meson.build:3:0: ERROR: Unknown native property: nonexisting."
+ }
+ ]
+}
diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build
index c2efe34..b555b17 100644
--- a/test cases/fortran/7 generated/meson.build
+++ b/test cases/fortran/7 generated/meson.build
@@ -1,11 +1,18 @@
# Tests whether fortran sources files created during configuration are properly
# scanned for dependency information
-project('generated', 'fortran')
+project('generated', 'fortran',
+ default_options : ['default_library=static'])
conf_data = configuration_data()
conf_data.set('ONE', 1)
conf_data.set('TWO', 2)
+conf_data.set('THREE', 3)
+
+configure_file(input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data)
+# Manually build absolute path to source file to test
+# https://github.com/mesonbuild/meson/issues/7265
+three = library('mod3', meson.current_build_dir() / 'mod3.f90')
templates_basenames = ['mod2', 'mod1']
generated_sources = []
@@ -18,5 +25,5 @@ foreach template_basename : templates_basenames
endforeach
sources = ['prog.f90'] + generated_sources
-exe = executable('generated', sources)
+exe = executable('generated', sources, link_with: three)
test('generated', exe)
diff --git a/test cases/fortran/7 generated/mod1.fpp b/test cases/fortran/7 generated/mod1.fpp
index 42d1fde..c4decf6 100644
--- a/test cases/fortran/7 generated/mod1.fpp
+++ b/test cases/fortran/7 generated/mod1.fpp
@@ -1,6 +1,6 @@
module mod1
- implicit none
+implicit none
- integer, parameter :: modval1 = @ONE@
+integer, parameter :: modval1 = @ONE@
end module mod1
diff --git a/test cases/fortran/7 generated/mod2.fpp b/test cases/fortran/7 generated/mod2.fpp
index 594e9df..78ceae4 100644
--- a/test cases/fortran/7 generated/mod2.fpp
+++ b/test cases/fortran/7 generated/mod2.fpp
@@ -1,7 +1,7 @@
module mod2
- use mod1
- implicit none
+use mod1, only : modval1
+implicit none
- integer, parameter :: modval2 = @TWO@
+integer, parameter :: modval2 = @TWO@
end module mod2
diff --git a/test cases/fortran/7 generated/mod3.fpp b/test cases/fortran/7 generated/mod3.fpp
new file mode 100644
index 0000000..ab3db65
--- /dev/null
+++ b/test cases/fortran/7 generated/mod3.fpp
@@ -0,0 +1,6 @@
+module mod3
+implicit none
+
+integer, parameter :: modval3 = @THREE@
+
+end module mod3
diff --git a/test cases/fortran/7 generated/prog.f90 b/test cases/fortran/7 generated/prog.f90
index 8a102c0..6ee0bca 100644
--- a/test cases/fortran/7 generated/prog.f90
+++ b/test cases/fortran/7 generated/prog.f90
@@ -1,7 +1,8 @@
-program prog
-use mod2
+program generated
+use mod2, only : modval1, modval2
+use mod3, only : modval3
implicit none
-if (modval1 + modval2 /= 3) stop 1
+if (modval1 + modval2 + modval3 /= 6) error stop
-end program prog
+end program generated
diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build
index 501ed29..6c23360 100644
--- a/test cases/frameworks/1 boost/meson.build
+++ b/test cases/frameworks/1 boost/meson.build
@@ -13,7 +13,7 @@ endif
# within one project. The need to be independent of each other.
# Use one without a library dependency and one with it.
-linkdep = dependency('boost', static: s, modules : ['thread', 'system'])
+linkdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time'])
testdep = dependency('boost', static: s, modules : ['unit_test_framework'])
nomoddep = dependency('boost', static: s)
extralibdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time', 'log_setup', 'log', 'filesystem', 'regex'])
diff --git a/test cases/frameworks/2 gtest/meson.build b/test cases/frameworks/2 gtest/meson.build
index 2d93b52..ea3ef48 100644
--- a/test cases/frameworks/2 gtest/meson.build
+++ b/test cases/frameworks/2 gtest/meson.build
@@ -8,7 +8,7 @@ endif
gtest_nomain = dependency('gtest', main : false, method : 'system')
e = executable('testprog', 'test.cc', dependencies : gtest)
-test('gtest test', e)
+test('gtest test', e, protocol : 'gtest')
e = executable('testprog_nomain', 'test_nomain.cc', dependencies : gtest_nomain)
-test('gtest nomain test', e)
+test('gtest nomain test', e, protocol : 'gtest')
diff --git a/test cases/frameworks/21 libwmf/meson.build b/test cases/frameworks/21 libwmf/meson.build
index 6952bf7..9dbab6a 100644
--- a/test cases/frameworks/21 libwmf/meson.build
+++ b/test cases/frameworks/21 libwmf/meson.build
@@ -1,7 +1,7 @@
project('libwmf test', 'c')
wm = find_program('libwmf-config', required : false)
-if not wm.found()
+if not wm.found() or meson.is_cross_build()
error('MESON_SKIP_TEST: libwmf-config not installed')
endif
diff --git a/test cases/frameworks/23 hotdoc/test.json b/test cases/frameworks/23 hotdoc/test.json
index e2d4992..8dd07e0 100644
--- a/test cases/frameworks/23 hotdoc/test.json
+++ b/test cases/frameworks/23 hotdoc/test.json
@@ -314,5 +314,8 @@
{"type": "file", "file": "usr/share/doc/foobar/html/assets/fonts/dumped.trie"},
{"type": "file", "file": "usr/share/doc/foobar/html/assets/images/home.svg"},
{"type": "file", "file": "usr/share/doc/foobar/html/assets/images/dumped.trie"}
- ]
+ ],
+ "tools": {
+ "hotdoc": ">=0.1.0"
+ }
}
diff --git a/test cases/frameworks/32 boost root/boost/include/boost/version.hpp b/test cases/frameworks/32 boost root/boost/include/boost/version.hpp
new file mode 100644
index 0000000..65e4fab
--- /dev/null
+++ b/test cases/frameworks/32 boost root/boost/include/boost/version.hpp
@@ -0,0 +1,3 @@
+#define BOOST_VERSION 100
+
+#error This is not a real version of boost
diff --git a/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib
diff --git a/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib
diff --git a/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0 b/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0
diff --git a/test cases/frameworks/32 boost root/meson.build b/test cases/frameworks/32 boost root/meson.build
new file mode 100644
index 0000000..50d2f0d
--- /dev/null
+++ b/test cases/frameworks/32 boost root/meson.build
@@ -0,0 +1,6 @@
+project('boosttest', 'cpp')
+
+dep = dependency('boost', modules : 'regex', required: false)
+
+assert(dep.found(), 'expected to find a fake version of boost')
+assert(dep.version() == '0.1.0', 'expected to find version 0.1.0')
diff --git a/test cases/frameworks/32 boost root/nativefile.ini.in b/test cases/frameworks/32 boost root/nativefile.ini.in
new file mode 100644
index 0000000..54510d7
--- /dev/null
+++ b/test cases/frameworks/32 boost root/nativefile.ini.in
@@ -0,0 +1,2 @@
+[properties]
+boost_root = '@MESON_TEST_ROOT@/boost'
diff --git a/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp b/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp
new file mode 100644
index 0000000..3ba19ee
--- /dev/null
+++ b/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp
@@ -0,0 +1,3 @@
+#define BOOST_VERSION 200
+
+#error This is not a real version of boost
diff --git a/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib
diff --git a/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib
diff --git a/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0 b/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0
diff --git a/test cases/frameworks/33 boost split root/meson.build b/test cases/frameworks/33 boost split root/meson.build
new file mode 100644
index 0000000..a2353bb
--- /dev/null
+++ b/test cases/frameworks/33 boost split root/meson.build
@@ -0,0 +1,6 @@
+project('boosttest', 'cpp')
+
+dep = dependency('boost', modules : 'regex', required: false)
+
+assert(dep.found(), 'expected to find a fake version of boost')
+assert(dep.version() == '0.2.0', 'expected to find version 0.2.0')
diff --git a/test cases/frameworks/33 boost split root/nativefile.ini.in b/test cases/frameworks/33 boost split root/nativefile.ini.in
new file mode 100644
index 0000000..7bd5ac2
--- /dev/null
+++ b/test cases/frameworks/33 boost split root/nativefile.ini.in
@@ -0,0 +1,3 @@
+[properties]
+boost_includedir = '@MESON_TEST_ROOT@/boost/extra-dir/include'
+boost_librarydir = '@MESON_TEST_ROOT@/boost/lib'
diff --git a/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c
new file mode 100644
index 0000000..ee5c5e1
--- /dev/null
+++ b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c
@@ -0,0 +1,124 @@
+#include "dep3.h"
+
+struct _MesonDep3
+{
+ GObject parent_instance;
+
+ gchar *msg;
+};
+
+G_DEFINE_TYPE (MesonDep3, meson_dep3, G_TYPE_OBJECT)
+
+enum {
+ PROP_0,
+ PROP_MSG,
+ LAST_PROP
+};
+
+static GParamSpec *gParamSpecs [LAST_PROP];
+
+/**
+ * meson_dep3_new:
+ * @msg: The message to set.
+ *
+ * Allocates a new #MesonDep3.
+ *
+ * Returns: (transfer full): a #MesonDep3.
+ */
+MesonDep3 *
+meson_dep3_new (const gchar *msg)
+{
+ g_return_val_if_fail (msg != NULL, NULL);
+
+ return g_object_new (MESON_TYPE_DEP3,
+ "message", msg,
+ NULL);
+}
+
+static void
+meson_dep3_finalize (GObject *object)
+{
+ MesonDep3 *self = (MesonDep3 *)object;
+
+ g_clear_pointer (&self->msg, g_free);
+
+ G_OBJECT_CLASS (meson_dep3_parent_class)->finalize (object);
+}
+
+static void
+meson_dep3_get_property (GObject *object,
+ guint prop_id,
+ GValue *value,
+ GParamSpec *pspec)
+{
+ MesonDep3 *self = MESON_DEP3 (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ g_value_set_string (value, self->msg);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_dep3_set_property (GObject *object,
+ guint prop_id,
+ const GValue *value,
+ GParamSpec *pspec)
+{
+ MesonDep3 *self = MESON_DEP3 (object);
+
+ switch (prop_id)
+ {
+ case PROP_MSG:
+ self->msg = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ }
+}
+
+static void
+meson_dep3_class_init (MesonDep3Class *klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->finalize = meson_dep3_finalize;
+ object_class->get_property = meson_dep3_get_property;
+ object_class->set_property = meson_dep3_set_property;
+
+ gParamSpecs [PROP_MSG] =
+ g_param_spec_string ("message",
+ "Message",
+ "The message to print.",
+ NULL,
+ (G_PARAM_READWRITE |
+ G_PARAM_CONSTRUCT_ONLY |
+ G_PARAM_STATIC_STRINGS));
+
+ g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs);
+}
+
+static void
+meson_dep3_init (MesonDep3 *self)
+{
+}
+
+/**
+ * meson_dep3_return_message:
+ * @self: a #MesonDep3.
+ *
+ * Returns the message.
+ *
+ * Returns: (transfer none): a const gchar*
+ */
+const gchar*
+meson_dep3_return_message (MesonDep3 *self)
+{
+ g_return_val_if_fail (MESON_IS_DEP3 (self), NULL);
+
+ return (const gchar*) self->msg;
+}
diff --git a/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h
new file mode 100644
index 0000000..9883d76
--- /dev/null
+++ b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h
@@ -0,0 +1,21 @@
+#ifndef MESON_DEP3_H
+#define MESON_DEP3_H
+
+#if !defined (MESON_TEST)
+#error "MESON_TEST not defined."
+#endif
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define MESON_TYPE_DEP3 (meson_dep3_get_type())
+
+G_DECLARE_FINAL_TYPE (MesonDep3, meson_dep3, MESON, DEP3, GObject)
+
+MesonDep3 *meson_dep3_new (const gchar *msg);
+const gchar *meson_dep3_return_message (MesonDep3 *self);
+
+G_END_DECLS
+
+#endif /* MESON_DEP3_H */
diff --git a/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build b/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build
new file mode 100644
index 0000000..1ef7765
--- /dev/null
+++ b/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build
@@ -0,0 +1,22 @@
+dep3sources = ['dep3.c', 'dep3.h']
+
+dep3lib = shared_library(
+ 'dep3lib',
+ sources : dep3sources,
+ dependencies : gobj,
+ install : true
+)
+
+dep3gir = gnome.generate_gir(
+ dep3lib,
+ sources : dep3sources,
+ nsversion : '1.0',
+ namespace : 'MesonDep3',
+ symbol_prefix : 'meson',
+ identifier_prefix : 'Meson',
+ includes : ['GObject-2.0'],
+ install : true
+)
+
+dep3_dep = declare_dependency(link_with : dep3lib,
+ sources : [dep3gir])
diff --git a/test cases/frameworks/7 gnome/gir/dep1/meson.build b/test cases/frameworks/7 gnome/gir/dep1/meson.build
index baa0b1d..2f03ede 100644
--- a/test cases/frameworks/7 gnome/gir/dep1/meson.build
+++ b/test cases/frameworks/7 gnome/gir/dep1/meson.build
@@ -1,4 +1,5 @@
subdir('dep2')
+subdir('dep3')
dep1sources = ['dep1.c', 'dep1.h']
@@ -20,11 +21,11 @@ dep1gir = gnome.generate_gir(
symbol_prefix : 'meson',
identifier_prefix : 'Meson',
header: 'dep1.h',
- includes : ['GObject-2.0', 'MesonDep2-1.0'],
+ includes : ['GObject-2.0', 'MesonDep2-1.0', dep3gir[0]],
dependencies : [dep2_dep],
install : true
)
dep1_dep = declare_dependency(link_with : dep1lib,
- dependencies : [dep2_dep],
+ dependencies : [dep2_dep, dep3_dep],
sources : [dep1gir])
diff --git a/test cases/frameworks/7 gnome/gir/meson.build b/test cases/frameworks/7 gnome/gir/meson.build
index 36bd09c..b1e0fa1 100644
--- a/test cases/frameworks/7 gnome/gir/meson.build
+++ b/test cases/frameworks/7 gnome/gir/meson.build
@@ -45,7 +45,7 @@ gnome.generate_gir(
)
test('gobject introspection/c', girexe)
-gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir()])
+gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir(), dep3lib.outdir()])
envdata = environment()
envdata.append('GI_TYPELIB_PATH', gir_paths, separator : ':')
envdata.append('LD_LIBRARY_PATH', gir_paths)
diff --git a/test cases/frameworks/7 gnome/mkenums/meson.build b/test cases/frameworks/7 gnome/mkenums/meson.build
index af4a901..3d7adf0 100644
--- a/test cases/frameworks/7 gnome/mkenums/meson.build
+++ b/test cases/frameworks/7 gnome/mkenums/meson.build
@@ -126,6 +126,14 @@ enums5 = gnome.mkenums_simple('enums5', sources : 'meson-sample.h',
install_header : true,
decorator : 'MESON_EXPORT',
header_prefix : '#include "meson-decls.h"')
+
+conf = configuration_data()
+conf.set('ENUM_FILE', 'enums5.h')
+main = configure_file(
+ input : 'main.c',
+ output : 'main5.c',
+ configuration : conf)
+
enumexe5 = executable('enumprog5', main, enums5, dependencies : gobj)
# Generate template then use as input to mkenums
diff --git a/test cases/frameworks/7 gnome/test.json b/test cases/frameworks/7 gnome/test.json
index e69c9f0..badf410 100644
--- a/test cases/frameworks/7 gnome/test.json
+++ b/test cases/frameworks/7 gnome/test.json
@@ -13,12 +13,16 @@
{"type": "file", "platform": "cygwin", "file": "usr/lib/libdep1lib.dll.a"},
{"type": "expr", "file": "usr/lib/?libdep2lib.so"},
{"type": "file", "platform": "cygwin", "file": "usr/lib/libdep2lib.dll.a"},
+ {"type": "expr", "file": "usr/lib/?libdep3lib.so"},
+ {"type": "file", "platform": "cygwin", "file": "usr/lib/libdep3lib.dll.a"},
{"type": "file", "file": "usr/lib/girepository-1.0/Meson-1.0.typelib"},
{"type": "file", "file": "usr/lib/girepository-1.0/MesonDep1-1.0.typelib"},
{"type": "file", "file": "usr/lib/girepository-1.0/MesonDep2-1.0.typelib"},
+ {"type": "file", "file": "usr/lib/girepository-1.0/MesonDep3-1.0.typelib"},
{"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"},
{"type": "file", "file": "usr/share/gir-1.0/MesonDep1-1.0.gir"},
{"type": "file", "file": "usr/share/gir-1.0/MesonDep2-1.0.gir"},
+ {"type": "file", "file": "usr/share/gir-1.0/MesonDep3-1.0.gir"},
{"type": "file", "file": "usr/share/glib-2.0/schemas/com.github.meson.gschema.xml"},
{"type": "file", "file": "usr/share/simple-resources.gresource"},
{"type": "file", "file": "usr/include/enums6.h"},
diff --git a/test cases/java/3 args/meson.build b/test cases/java/3 args/meson.build
index db9a35c..451e42d 100644
--- a/test cases/java/3 args/meson.build
+++ b/test cases/java/3 args/meson.build
@@ -1,9 +1,9 @@
project('simplejava', 'java')
-add_project_arguments('-target', '1.8', language : 'java')
+add_project_arguments('-target', '1.7', language : 'java')
javaprog = jar('myprog', 'com/mesonbuild/Simple.java',
main_class : 'com.mesonbuild.Simple',
- java_args : ['-source', '1.8'])
+ java_args : ['-source', '1.7'])
test('mytest', javaprog)
diff --git a/test cases/kconfig/1 basic/.config b/test cases/keyval/1 basic/.config
index 071d185..071d185 100644
--- a/test cases/kconfig/1 basic/.config
+++ b/test cases/keyval/1 basic/.config
diff --git a/test cases/kconfig/1 basic/meson.build b/test cases/keyval/1 basic/meson.build
index 5dc8d19..4207b8e 100644
--- a/test cases/kconfig/1 basic/meson.build
+++ b/test cases/keyval/1 basic/meson.build
@@ -1,6 +1,6 @@
-project('kconfig basic test')
+project('keyval basic test')
-k = import('unstable-kconfig')
+k = import('keyval')
conf = k.load('.config')
if not conf.has_key('CONFIG_VAL1')
@@ -14,3 +14,5 @@ endif
if conf.get('CONFIG_VAL_VAL').to_int() != 4
error('Expected CONFIG_VAL_VAL to be 4')
endif
+
+k = import('unstable-keyval')
diff --git a/test cases/keyval/1 basic/test.json b/test cases/keyval/1 basic/test.json
new file mode 100644
index 0000000..dbdc5af
--- /dev/null
+++ b/test cases/keyval/1 basic/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "WARNING: Module unstable-keyval is now stable, please use the keyval module instead."
+ }
+ ]
+}
diff --git a/test cases/kconfig/2 subdir/.config b/test cases/keyval/2 subdir/.config
index 0599d46..0599d46 100644
--- a/test cases/kconfig/2 subdir/.config
+++ b/test cases/keyval/2 subdir/.config
diff --git a/test cases/kconfig/2 subdir/dir/meson.build b/test cases/keyval/2 subdir/dir/meson.build
index 12f1502..291ad93 100644
--- a/test cases/kconfig/2 subdir/dir/meson.build
+++ b/test cases/keyval/2 subdir/dir/meson.build
@@ -1,5 +1,5 @@
-k = import('unstable-kconfig')
+k = import('keyval')
conf = k.load(meson.source_root() / '.config')
diff --git a/test cases/kconfig/3 load_config files/meson.build b/test cases/keyval/2 subdir/meson.build
index 1245b18..0651acf 100644
--- a/test cases/kconfig/3 load_config files/meson.build
+++ b/test cases/keyval/2 subdir/meson.build
@@ -1,4 +1,4 @@
-project('kconfig subdir test')
+project('keyval subdir test')
# Test into sub directory
subdir('dir')
diff --git a/test cases/kconfig/3 load_config files/dir/config b/test cases/keyval/3 load_config files/dir/config
index 0599d46..0599d46 100644
--- a/test cases/kconfig/3 load_config files/dir/config
+++ b/test cases/keyval/3 load_config files/dir/config
diff --git a/test cases/kconfig/3 load_config files/dir/meson.build b/test cases/keyval/3 load_config files/dir/meson.build
index d7b8d44..adc5289 100644
--- a/test cases/kconfig/3 load_config files/dir/meson.build
+++ b/test cases/keyval/3 load_config files/dir/meson.build
@@ -1,5 +1,5 @@
-k = import('unstable-kconfig')
+k = import('keyval')
conf = k.load(files('config'))
diff --git a/test cases/kconfig/2 subdir/meson.build b/test cases/keyval/3 load_config files/meson.build
index 1245b18..0651acf 100644
--- a/test cases/kconfig/2 subdir/meson.build
+++ b/test cases/keyval/3 load_config files/meson.build
@@ -1,4 +1,4 @@
-project('kconfig subdir test')
+project('keyval subdir test')
# Test into sub directory
subdir('dir')
diff --git a/test cases/kconfig/4 load_config builddir/config b/test cases/keyval/4 load_config builddir/config
index 0599d46..0599d46 100644
--- a/test cases/kconfig/4 load_config builddir/config
+++ b/test cases/keyval/4 load_config builddir/config
diff --git a/test cases/kconfig/4 load_config builddir/meson.build b/test cases/keyval/4 load_config builddir/meson.build
index 1924d23..6bd83db 100644
--- a/test cases/kconfig/4 load_config builddir/meson.build
+++ b/test cases/keyval/4 load_config builddir/meson.build
@@ -1,6 +1,6 @@
-project('kconfig builddir test')
+project('keyval builddir test')
-k = import('unstable-kconfig')
+k = import('keyval')
out_conf = configure_file(input: 'config', output: 'out-config', copy: true)
conf = k.load(out_conf)
diff --git a/test cases/linuxlike/13 cmake dependency/cmVers.sh b/test cases/linuxlike/13 cmake dependency/cmVers.sh
new file mode 100755
index 0000000..70809de
--- /dev/null
+++ b/test cases/linuxlike/13 cmake dependency/cmVers.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+VERS=$(cmake --version | grep "cmake version")
+VERS=${VERS//cmake version/}
+
+echo -n $VERS
diff --git a/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake b/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake
new file mode 100644
index 0000000..e12aeb9
--- /dev/null
+++ b/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+ set(cmMesonTestF1_FOUND ON)
+ set(cmMesonTestF1_LIBRARIES ${ZLIB_LIBRARY})
+ set(cmMesonTestF1_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+ set(cmMesonTestF1_FOUND OFF)
+endif()
diff --git a/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake b/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake
new file mode 100644
index 0000000..a7a55d8
--- /dev/null
+++ b/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake
@@ -0,0 +1,9 @@
+find_package(ZLIB)
+
+if(ZLIB_FOUND OR ZLIB_Found)
+ set(cmMesonTestF2_FOUND ON)
+ set(cmMesonTestF2_LIBRARIES ${ZLIB_LIBRARY})
+ set(cmMesonTestF2_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR})
+else()
+ set(cmMesonTestF2_FOUND OFF)
+endif()
diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build
index 79acc83..9918a71 100644
--- a/test cases/linuxlike/13 cmake dependency/meson.build
+++ b/test cases/linuxlike/13 cmake dependency/meson.build
@@ -6,6 +6,9 @@ if not find_program('cmake', required: false).found()
error('MESON_SKIP_TEST cmake binary not available.')
endif
+# CMake version
+cm_vers = run_command(find_program('./cmVers.sh')).stdout().strip()
+
# Zlib is probably on all dev machines.
dep = dependency('ZLIB', version : '>=1.2', method : 'cmake')
@@ -41,6 +44,8 @@ assert(depf2.found() == false, 'Invalid CMake targets should fail')
# Try to find cmMesonTestDep in a custom prefix
# setup_env.json is used by run_project_tests.py:_run_test to point to ./cmake_pref_env/
depPrefEnv = dependency('cmMesonTestDep', required : true, method : 'cmake')
+depPrefEnv1 = dependency('cmMesonTestF1', required : true, method : 'cmake')
+depPrefEnv2 = dependency('cmMesonTestF2', required : true, method : 'cmake')
# Try to find a dependency with a custom CMake module
@@ -48,14 +53,16 @@ depm1 = dependency('SomethingLikeZLIB', required : true, components : 'required_
depm2 = dependency('SomethingLikeZLIB', required : true, components : 'required_comp', method : 'cmake', cmake_module_path : ['cmake'])
depm3 = dependency('SomethingLikeZLIB', required : true, components : ['required_comp'], cmake_module_path : 'cmake')
-# Test some edge cases with spaces, etc.
+# Test some edge cases with spaces, etc. (but only for CMake >= 3.15)
-testDep1 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'mesonTestLibDefs')
-testDep2 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules : ['MesonTest::TestLibDefs'])
-testFlagSet1 = executable('testFlagSet1', ['testFlagSet.c'], dependencies: [testDep1])
-testFlagSet2 = executable('testFlagSet2', ['testFlagSet.c'], dependencies: [testDep2])
-test('testFlagSetTest1', testFlagSet1)
-test('testFlagSetTest2', testFlagSet2)
+if cm_vers.version_compare('>=3.15')
+ testDep1 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'mesonTestLibDefs')
+ testDep2 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules : ['MesonTest::TestLibDefs'])
+ testFlagSet1 = executable('testFlagSet1', ['testFlagSet.c'], dependencies: [testDep1])
+ testFlagSet2 = executable('testFlagSet2', ['testFlagSet.c'], dependencies: [testDep2])
+ test('testFlagSetTest1', testFlagSet1)
+ test('testFlagSetTest2', testFlagSet2)
+endif
# Try to compile a test that takes a dep and an include_directories
diff --git a/test cases/linuxlike/13 cmake dependency/test.json b/test cases/linuxlike/13 cmake dependency/test.json
index 565713e..fc29f72 100644
--- a/test cases/linuxlike/13 cmake dependency/test.json
+++ b/test cases/linuxlike/13 cmake dependency/test.json
@@ -1,5 +1,5 @@
{
"env": {
- "CMAKE_PREFIX_PATH": "@ROOT@/cmake_pref_env"
+ "CMAKE_PREFIX_PATH": "@ROOT@/cmake_fake1;@ROOT@/cmake_fake2:@ROOT@/cmake_pref_env"
}
}
diff --git a/test cases/linuxlike/3 linker script/meson.build b/test cases/linuxlike/3 linker script/meson.build
index 63765e7..5901bf7 100644
--- a/test cases/linuxlike/3 linker script/meson.build
+++ b/test cases/linuxlike/3 linker script/meson.build
@@ -1,5 +1,11 @@
project('linker script', 'c')
+# Solaris 11.4 ld supports --version-script only when you also specify
+# -z gnu-version-script-compat
+if meson.get_compiler('c').get_linker_id() == 'ld.solaris'
+ add_project_link_arguments('-Wl,-z,gnu-version-script-compat', language: 'C')
+endif
+
# Static map file
mapfile = 'bob.map'
vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile)
diff --git a/test cases/linuxlike/5 dependency versions/meson.build b/test cases/linuxlike/5 dependency versions/meson.build
index 94f424d..164e679 100644
--- a/test cases/linuxlike/5 dependency versions/meson.build
+++ b/test cases/linuxlike/5 dependency versions/meson.build
@@ -31,10 +31,10 @@ dependency('somebrokenlib', version : '>=1.0', required : false)
# Search for an external dependency that won't be found, but must later be
# found via fallbacks
-somelibnotfound = dependency('somelib', required : false)
+somelibnotfound = dependency('somelib1', required : false)
assert(somelibnotfound.found() == false, 'somelibnotfound was found?')
# Find internal dependency without version
-somelibver = dependency('somelib',
+somelibver = dependency('somelib1',
fallback : ['somelibnover', 'some_dep'])
assert(somelibver.type_name() == 'internal', 'somelibver should be of type "internal", not ' + somelibver.type_name())
# Find an internal dependency again with the same name and a specific version
diff --git a/test cases/python/1 basic/meson.build b/test cases/python/1 basic/meson.build
index 9c3af10..bd9a65c 100644
--- a/test cases/python/1 basic/meson.build
+++ b/test cases/python/1 basic/meson.build
@@ -1,4 +1,4 @@
-project('python sample', 'c')
+project('python sample')
py_mod = import('python')
py = py_mod.find_installation('python3')
@@ -12,6 +12,7 @@ py_purelib = py.get_path('purelib')
if not py_purelib.endswith('site-packages')
error('Python3 purelib path seems invalid? ' + py_purelib)
endif
+message('Python purelib path:', py_purelib)
# could be 'lib64' or 'Lib' on some systems
py_platlib = py.get_path('platlib')
diff --git a/test cases/python/1 basic/prog.py b/test cases/python/1 basic/prog.py
index 9d95aea..720fdb1 100755
--- a/test cases/python/1 basic/prog.py
+++ b/test cases/python/1 basic/prog.py
@@ -1,9 +1,8 @@
#!/usr/bin/env python3
from gluon import gluonator
-import sys
print('Running mainprog from root dir.')
if gluonator.gluoninate() != 42:
- sys.exit(1)
+ raise ValueError("!= 42")
diff --git a/test cases/python/1 basic/subdir/subprog.py b/test cases/python/1 basic/subdir/subprog.py
index 08652f0..54178e5 100755
--- a/test cases/python/1 basic/subdir/subprog.py
+++ b/test cases/python/1 basic/subdir/subprog.py
@@ -4,9 +4,8 @@
# point to source root.
from gluon import gluonator
-import sys
print('Running mainprog from subdir.')
if gluonator.gluoninate() != 42:
- sys.exit(1)
+ raise ValueError("!= 42")
diff --git a/test cases/python/2 extmodule/blaster.py b/test cases/python/2 extmodule/blaster.py
index 7e1eae6..1f01876 100755
--- a/test cases/python/2 extmodule/blaster.py
+++ b/test cases/python/2 extmodule/blaster.py
@@ -1,14 +1,11 @@
#!/usr/bin/env python3
import tachyon
-import sys
result = tachyon.phaserize('shoot')
if not isinstance(result, int):
- print('Returned result not an integer.')
- sys.exit(1)
+ raise SystemExit('Returned result not an integer.')
if result != 1:
- print('Returned result {} is not 1.'.format(result))
- sys.exit(1)
+ raise SystemExit('Returned result {} is not 1.'.format(result))
diff --git a/test cases/python/2 extmodule/meson.build b/test cases/python/2 extmodule/meson.build
index b4eb960..18d70c8 100644
--- a/test cases/python/2 extmodule/meson.build
+++ b/test cases/python/2 extmodule/meson.build
@@ -3,26 +3,33 @@ project('Python extension module', 'c',
# Because Windows Python ships only with optimized libs,
# we must build this project the same way.
+if meson.backend() != 'ninja'
+ error('MESON_SKIP_TEST: Ninja backend required')
+endif
+
+
py_mod = import('python')
py = py_mod.find_installation()
-py_dep = py.dependency()
+py_dep = py.dependency(required: false)
-if py_dep.found()
- subdir('ext')
+if not py_dep.found()
+ error('MESON_SKIP_TEST: Python libraries not found.')
+endif
- test('extmod',
- py,
- args : files('blaster.py'),
- env : ['PYTHONPATH=' + pypathdir])
+subdir('ext')
- # Check we can apply a version constraint
- dependency('python3', version: '>=@0@'.format(py_dep.version()))
+test('extmod',
+ py,
+ args : files('blaster.py'),
+ env : ['PYTHONPATH=' + pypathdir])
-else
- error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
-endif
py3_pkg_dep = dependency('python3', method: 'pkg-config', required : false)
if py3_pkg_dep.found()
python_lib_dir = py3_pkg_dep.get_pkgconfig_variable('libdir')
+
+ # Check we can apply a version constraint
+ dependency('python3', version: '>=@0@'.format(py_dep.version()))
+else
+ message('Skipped python3 pkg-config test')
endif
diff --git a/test cases/python/3 cython/cytest.py b/test cases/python/3 cython/cytest.py
index 43443dc..c08ffee 100755
--- a/test cases/python/3 cython/cytest.py
+++ b/test cases/python/3 cython/cytest.py
@@ -1,23 +1,19 @@
#!/usr/bin/env python3
from storer import Storer
-import sys
s = Storer()
if s.get_value() != 0:
- print('Initial value incorrect.')
- sys.exit(1)
+ raise SystemExit('Initial value incorrect.')
s.set_value(42)
if s.get_value() != 42:
- print('Setting value failed.')
- sys.exit(1)
+ raise SystemExit('Setting value failed.')
try:
s.set_value('not a number')
- print('Using wrong argument type did not fail.')
- sys.exit(1)
+ raise SystemExit('Using wrong argument type did not fail.')
except TypeError:
pass
diff --git a/test cases/python/3 cython/meson.build b/test cases/python/3 cython/meson.build
index 194920b..5fc07a8 100644
--- a/test cases/python/3 cython/meson.build
+++ b/test cases/python/3 cython/meson.build
@@ -1,20 +1,26 @@
project('cython', 'c',
default_options : ['warning_level=3'])
-cython = find_program('cython3', required : false)
-py3_dep = dependency('python3', required : false)
+if meson.backend() != 'ninja'
+ error('MESON_SKIP_TEST: Ninja backend required')
+endif
-if cython.found() and py3_dep.found()
- py_mod = import('python')
- py3 = py_mod.find_installation()
- py3_dep = py3.dependency()
- subdir('libdir')
+cython = find_program('cython', required : false)
+if not cython.found()
+ error('MESON_SKIP_TEST: Cython3 not found.')
+endif
- test('cython tester',
- py3,
- args : files('cytest.py'),
- env : ['PYTHONPATH=' + pydir]
- )
-else
- error('MESON_SKIP_TEST: Cython3 or Python3 libraries not found, skipping test.')
+py_mod = import('python')
+py3 = py_mod.find_installation()
+py3_dep = py3.dependency(required: false)
+if not py3_dep.found()
+ error('MESON_SKIP_TEST: Python library not found.')
endif
+
+subdir('libdir')
+
+test('cython tester',
+ py3,
+ args : files('cytest.py'),
+ env : ['PYTHONPATH=' + pydir]
+)
diff --git a/test cases/python/4 custom target depends extmodule/blaster.py b/test cases/python/4 custom target depends extmodule/blaster.py
index 6106f6b..09039cb 100644
--- a/test cases/python/4 custom target depends extmodule/blaster.py
+++ b/test cases/python/4 custom target depends extmodule/blaster.py
@@ -24,9 +24,7 @@ if options.output:
f.write('success')
if not isinstance(result, int):
- print('Returned result not an integer.')
- sys.exit(1)
+ raise SystemExit('Returned result not an integer.')
if result != 1:
- print('Returned result {} is not 1.'.format(result))
- sys.exit(1)
+ raise SystemExit('Returned result {} is not 1.'.format(result))
diff --git a/test cases/python/4 custom target depends extmodule/meson.build b/test cases/python/4 custom target depends extmodule/meson.build
index 3835377..d8a62ed 100644
--- a/test cases/python/4 custom target depends extmodule/meson.build
+++ b/test cases/python/4 custom target depends extmodule/meson.build
@@ -3,11 +3,19 @@ project('Python extension module', 'c',
# Because Windows Python ships only with optimized libs,
# we must build this project the same way.
+if meson.backend() != 'ninja'
+ error('MESON_SKIP_TEST: Ninja backend required')
+endif
+
py_mod = import('python')
py3 = py_mod.find_installation()
py3_dep = py3.dependency(required : false)
cc = meson.get_compiler('c')
+if not py3_dep.found()
+ error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+endif
+
# Copy to the builddir so that blaster.py can find the built tachyon module
# FIXME: We should automatically detect this case and append the correct paths
# to PYTHONLIBDIR
@@ -20,21 +28,18 @@ import os, sys
with open(sys.argv[1], 'rb') as f:
assert(f.read() == b'success')
'''
-if py3_dep.found()
- message('Detected Python version: ' + py3_dep.version())
- if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1')
- error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.')
- endif
- subdir('ext')
-
- out_txt = custom_target('tachyon flux',
- input : blaster_py,
- output : 'out.txt',
- command : [py3, '@INPUT@', '-o', '@OUTPUT@'],
- depends : pylib,
- build_by_default: true)
-
- test('flux', py3, args : ['-c', check_exists, out_txt])
-else
- error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.')
+
+message('Detected Python version: ' + py3_dep.version())
+if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1')
+ error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.')
endif
+subdir('ext')
+
+out_txt = custom_target('tachyon flux',
+ input : blaster_py,
+ output : 'out.txt',
+ command : [py3, '@INPUT@', '-o', '@OUTPUT@'],
+ depends : pylib,
+ build_by_default: true)
+
+test('flux', py3, args : ['-c', check_exists, out_txt])
diff --git a/test cases/python/5 modules kwarg/meson.build b/test cases/python/5 modules kwarg/meson.build
index 3c9d54f..9751ada 100644
--- a/test cases/python/5 modules kwarg/meson.build
+++ b/test cases/python/5 modules kwarg/meson.build
@@ -1,7 +1,7 @@
project('python kwarg')
py = import('python')
-prog_python = py.find_installation('python3', modules : ['setuptools'])
+prog_python = py.find_installation('python3', modules : ['distutils'])
assert(prog_python.found() == true, 'python not found when should be')
prog_python = py.find_installation('python3', modules : ['thisbetternotexistmod'], required : false)
assert(prog_python.found() == false, 'python not found but reported as found')
diff --git a/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap b/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap
index 09ba4e8..11b2178 100644
--- a/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap
+++ b/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap
@@ -1,2 +1 @@
-The contents of this wrap file are never evaluated so they
-can be anything.
+[wrap-file]
diff --git a/test cases/unit/35 dist script/meson.build b/test cases/unit/35 dist script/meson.build
index fd672a9..2ae9438 100644
--- a/test cases/unit/35 dist script/meson.build
+++ b/test cases/unit/35 dist script/meson.build
@@ -5,3 +5,4 @@ exe = executable('comparer', 'prog.c')
test('compare', exe)
meson.add_dist_script('replacer.py', '"incorrect"', '"correct"')
+meson.add_dist_script(find_program('replacer.py'), '"incorrect"', '"correct"')
diff --git a/test cases/unit/36 exe_wrapper behaviour/meson.build b/test cases/unit/36 exe_wrapper behaviour/meson.build
index 16a44d5..d0817ba 100644
--- a/test cases/unit/36 exe_wrapper behaviour/meson.build
+++ b/test cases/unit/36 exe_wrapper behaviour/meson.build
@@ -1,7 +1,7 @@
project('exe wrapper behaviour', 'c')
assert(meson.is_cross_build(), 'not setup as cross build')
-assert(meson.has_exe_wrapper(), 'exe wrapper not defined?')
+assert(meson.has_exe_wrapper(), 'exe wrapper not defined?') # intentionally not changed to can_run_host_binaries,
exe = executable('prog', 'prog.c')
diff --git a/test cases/unit/40 external, internal library rpath/built library/meson.build b/test cases/unit/40 external, internal library rpath/built library/meson.build
index f633996..07fe7bb 100644
--- a/test cases/unit/40 external, internal library rpath/built library/meson.build
+++ b/test cases/unit/40 external, internal library rpath/built library/meson.build
@@ -18,4 +18,9 @@ l = shared_library('bar_built', 'bar.c',
if host_machine.system() == 'darwin'
e = executable('prog', 'prog.c', link_with: l, install: true)
test('testprog', e)
+elif host_machine.system() == 'linux'
+ e = executable('prog', 'prog.c', link_with: l, install: true,
+ install_rpath: '$ORIGIN/..' / get_option('libdir'),
+ )
+ test('testprog', e)
endif
diff --git a/test cases/unit/40 external, internal library rpath/external library/meson.build b/test cases/unit/40 external, internal library rpath/external library/meson.build
index 3c311f5..06ffa0f 100644
--- a/test cases/unit/40 external, internal library rpath/external library/meson.build
+++ b/test cases/unit/40 external, internal library rpath/external library/meson.build
@@ -4,16 +4,16 @@ shared_library('foo_in_system', 'foo.c', install : true)
l = shared_library('faa_pkg', 'faa.c', install: true)
if host_machine.system() == 'darwin'
- frameworks = ['-framework', 'CoreFoundation', '-framework', 'CoreMedia']
+ ldflags = ['-framework', 'CoreFoundation', '-framework', 'CoreMedia']
allow_undef_args = ['-Wl,-undefined,dynamic_lookup']
else
- frameworks = []
+ ldflags = ['-Wl,-rpath,${libdir}']
allow_undef_args = []
endif
pkg = import('pkgconfig')
pkg.generate(name: 'faa_pkg',
- libraries: [l] + frameworks,
+ libraries: [l] + ldflags,
description: 'FAA, a pkg-config test library')
# cygwin DLLs can't have undefined symbols
diff --git a/test cases/unit/57 introspection/meson.build b/test cases/unit/57 introspection/meson.build
index 9716eae..5d4dd02 100644
--- a/test cases/unit/57 introspection/meson.build
+++ b/test cases/unit/57 introspection/meson.build
@@ -13,7 +13,7 @@ test_bool = not test_bool
set_variable('list_test_plusassign', [])
list_test_plusassign += ['bugs everywhere']
-if false
+if not true
vers_str = '<=99.9.9'
dependency('somethingthatdoesnotexist', required: true, version: '>=1.2.3')
dependency('look_i_have_a_fallback', version: ['>=1.0.0', vers_str], fallback: ['oh_no', 'the_subproject_does_not_exist'])
@@ -26,7 +26,7 @@ var1 = '1'
var2 = 2.to_string()
var3 = 'test3'
-t1 = executable('test' + var1, ['t1.cpp'], link_with: [sharedlib], install: true, build_by_default: get_option('test_opt2'))
+t1 = executable('test' + var1, ['t1.cpp'], link_with: [sharedlib], install: not false, build_by_default: get_option('test_opt2'))
t2 = executable('test@0@'.format('@0@'.format(var2)), sources: ['t2.cpp'], link_with: [staticlib])
t3 = executable(var3, 't3.cpp', link_with: [sharedlib, staticlib], dependencies: [dep1])
@@ -46,3 +46,16 @@ message(osmesa_lib_name) # Infinite recursion gets triggered here when the param
test('test case 1', t1)
test('test case 2', t2)
benchmark('benchmark 1', t3)
+
+### Stuff to test the AST JSON printer
+foreach x : ['a', 'b', 'c']
+ if x == 'a'
+ message('a')
+ elif x == 'b'
+ message('a')
+ else
+ continue
+ endif
+ break
+ continue
+endforeach
diff --git a/test cases/unit/61 identity cross/build_wrapper.py b/test cases/unit/61 identity cross/build_wrapper.py
index b5fe7bb..15d5c07 100755
--- a/test cases/unit/61 identity cross/build_wrapper.py
+++ b/test cases/unit/61 identity cross/build_wrapper.py
@@ -1,5 +1,11 @@
#!/usr/bin/env python3
-import subprocess, sys
+import subprocess, sys, platform
-subprocess.call(["cc", "-DEXTERNAL_BUILD"] + sys.argv[1:])
+# Meson does not yet support Studio cc on Solaris, only gcc or clang
+if platform.system() == 'SunOS':
+ cc = 'gcc'
+else:
+ cc = 'cc'
+
+subprocess.call([cc, "-DEXTERNAL_BUILD"] + sys.argv[1:])
diff --git a/test cases/unit/61 identity cross/host_wrapper.py b/test cases/unit/61 identity cross/host_wrapper.py
index e88577c..a3a694a 100755
--- a/test cases/unit/61 identity cross/host_wrapper.py
+++ b/test cases/unit/61 identity cross/host_wrapper.py
@@ -1,5 +1,11 @@
#!/usr/bin/env python3
-import subprocess, sys
+import subprocess, sys, platform
-subprocess.call(["cc", "-DEXTERNAL_HOST"] + sys.argv[1:])
+# Meson does not yet support Studio cc on Solaris, only gcc or clang
+if platform.system() == 'SunOS':
+ cc = 'gcc'
+else:
+ cc = 'cc'
+
+subprocess.call([cc, "-DEXTERNAL_HOST"] + sys.argv[1:])
diff --git a/test cases/unit/72 cross test passed/exewrapper.py b/test cases/unit/72 cross test passed/exewrapper.py
new file mode 100755
index 0000000..2c15ed6
--- /dev/null
+++ b/test cases/unit/72 cross test passed/exewrapper.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# Test that the MESON_EXE_WRAPPER environment variable is set
+
+import argparse
+import os
+import sys
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('binary') # unused, but needed for test behavior
+ parser.add_argument('--expected', action='store_true')
+ args = parser.parse_args()
+
+ defined = 'MESON_EXE_WRAPPER' in os.environ
+
+ if args.expected != defined:
+ print(os.environ, file=sys.stderr)
+ return 1
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/test cases/unit/72 cross test passed/meson.build b/test cases/unit/72 cross test passed/meson.build
new file mode 100644
index 0000000..4deb74b
--- /dev/null
+++ b/test cases/unit/72 cross test passed/meson.build
@@ -0,0 +1,19 @@
+project(
+ 'cross test passed',
+ 'c',
+ version : '>= 0.51'
+)
+
+e = executable('exec', 'src/main.c')
+
+py = import('python').find_installation()
+
+test('root', e)
+test('main', py, args : [meson.current_source_dir() / 'script.py', e])
+
+wrapper_args = []
+if get_option('expect')
+ wrapper_args += '--expected'
+endif
+
+test('exe_wrapper in env', py, args : [meson.current_source_dir() / 'exewrapper.py', e, wrapper_args])
diff --git a/test cases/unit/72 cross test passed/meson_options.txt b/test cases/unit/72 cross test passed/meson_options.txt
new file mode 100644
index 0000000..084c776
--- /dev/null
+++ b/test cases/unit/72 cross test passed/meson_options.txt
@@ -0,0 +1,5 @@
+option(
+ 'expect',
+ type : 'boolean',
+ value : false,
+)
diff --git a/test cases/unit/72 cross test passed/script.py b/test cases/unit/72 cross test passed/script.py
new file mode 100644
index 0000000..257cd30
--- /dev/null
+++ b/test cases/unit/72 cross test passed/script.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+
+import subprocess
+import sys
+
+if __name__ == "__main__":
+ sys.exit(subprocess.run(sys.argv[1:]).returncode)
diff --git a/test cases/unit/72 cross test passed/src/main.c b/test cases/unit/72 cross test passed/src/main.c
new file mode 100644
index 0000000..490b4a6
--- /dev/null
+++ b/test cases/unit/72 cross test passed/src/main.c
@@ -0,0 +1,6 @@
+#include <stdio.h>
+
+int main(int argc, char const *argv[])
+{
+ return 0;
+}
diff --git a/test cases/unit/72 summary/meson.build b/test cases/unit/73 summary/meson.build
index df4540d..1bc05ca 100644
--- a/test cases/unit/72 summary/meson.build
+++ b/test cases/unit/73 summary/meson.build
@@ -13,3 +13,4 @@ summary('A number', 1, section: 'Configuration')
summary('yes', true, bool_yn : true, section: 'Configuration')
summary('no', false, bool_yn : true, section: 'Configuration')
summary('coma list', ['a', 'b', 'c'], list_sep: ', ', section: 'Configuration')
+summary('long coma list', ['alpha', 'alphacolor', 'apetag', 'audiofx', 'audioparsers', 'auparse', 'autodetect', 'avi'], list_sep: ', ', section: 'Plugins')
diff --git a/test cases/unit/72 summary/subprojects/sub/meson.build b/test cases/unit/73 summary/subprojects/sub/meson.build
index e7d7833..e7d7833 100644
--- a/test cases/unit/72 summary/subprojects/sub/meson.build
+++ b/test cases/unit/73 summary/subprojects/sub/meson.build
diff --git a/test cases/unit/72 summary/subprojects/sub2/meson.build b/test cases/unit/73 summary/subprojects/sub2/meson.build
index 86b9cfd..86b9cfd 100644
--- a/test cases/unit/72 summary/subprojects/sub2/meson.build
+++ b/test cases/unit/73 summary/subprojects/sub2/meson.build
diff --git a/test cases/unit/73 wrap file url/meson.build b/test cases/unit/74 wrap file url/meson.build
index 3bd3b25..3bd3b25 100644
--- a/test cases/unit/73 wrap file url/meson.build
+++ b/test cases/unit/74 wrap file url/meson.build
diff --git a/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz b/test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz
index fdb026c..fdb026c 100644
--- a/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz
+++ b/test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz
Binary files differ
diff --git a/test cases/unit/73 wrap file url/subprojects/foo.tar.xz b/test cases/unit/74 wrap file url/subprojects/foo.tar.xz
index 2ed6ab4..2ed6ab4 100644
--- a/test cases/unit/73 wrap file url/subprojects/foo.tar.xz
+++ b/test cases/unit/74 wrap file url/subprojects/foo.tar.xz
Binary files differ
diff --git a/test cases/unit/75 dep files/foo.c b/test cases/unit/75 dep files/foo.c
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test cases/unit/75 dep files/foo.c
diff --git a/test cases/unit/74 dep files/meson.build b/test cases/unit/75 dep files/meson.build
index 4829f56..4829f56 100644
--- a/test cases/unit/74 dep files/meson.build
+++ b/test cases/unit/75 dep files/meson.build
diff --git a/test cases/unit/77 pkgconfig prefixes/client/client.c b/test cases/unit/77 pkgconfig prefixes/client/client.c
new file mode 100644
index 0000000..be9bead
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/client/client.c
@@ -0,0 +1,8 @@
+#include <val2.h>
+#include <stdio.h>
+
+int main(int argc, char **argv)
+{
+ printf("%d\n", val2());
+ return 0;
+}
diff --git a/test cases/unit/77 pkgconfig prefixes/client/meson.build b/test cases/unit/77 pkgconfig prefixes/client/meson.build
new file mode 100644
index 0000000..491937b
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/client/meson.build
@@ -0,0 +1,3 @@
+project('client', 'c')
+val2_dep = dependency('val2')
+executable('client', 'client.c', dependencies : [val2_dep], install: true)
diff --git a/test cases/unit/77 pkgconfig prefixes/val1/meson.build b/test cases/unit/77 pkgconfig prefixes/val1/meson.build
new file mode 100644
index 0000000..cc63e31
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/val1/meson.build
@@ -0,0 +1,5 @@
+project('val1', 'c')
+val1 = shared_library('val1', 'val1.c', install: true)
+install_headers('val1.h')
+pkgconfig = import('pkgconfig')
+pkgconfig.generate(val1, libraries : ['-Wl,-rpath,${libdir}'])
diff --git a/test cases/unit/77 pkgconfig prefixes/val1/val1.c b/test cases/unit/77 pkgconfig prefixes/val1/val1.c
new file mode 100644
index 0000000..591e521
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/val1/val1.c
@@ -0,0 +1,3 @@
+#include "val1.h"
+
+int val1(void) { return 1; }
diff --git a/test cases/unit/77 pkgconfig prefixes/val1/val1.h b/test cases/unit/77 pkgconfig prefixes/val1/val1.h
new file mode 100644
index 0000000..6bd435e
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/val1/val1.h
@@ -0,0 +1 @@
+int val1(void);
diff --git a/test cases/unit/77 pkgconfig prefixes/val2/meson.build b/test cases/unit/77 pkgconfig prefixes/val2/meson.build
new file mode 100644
index 0000000..ce69481
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/val2/meson.build
@@ -0,0 +1,8 @@
+project('val2', 'c')
+val1_dep = dependency('val1')
+val2 = shared_library('val2', 'val2.c',
+ dependencies : [val1_dep],
+ install: true)
+install_headers('val2.h')
+pkgconfig = import('pkgconfig')
+pkgconfig.generate(val2, libraries : ['-Wl,-rpath,${libdir}'])
diff --git a/test cases/unit/77 pkgconfig prefixes/val2/val2.c b/test cases/unit/77 pkgconfig prefixes/val2/val2.c
new file mode 100644
index 0000000..d7d4857
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/val2/val2.c
@@ -0,0 +1,4 @@
+#include "val1.h"
+#include "val2.h"
+
+int val2(void) { return val1() + 2; }
diff --git a/test cases/unit/77 pkgconfig prefixes/val2/val2.h b/test cases/unit/77 pkgconfig prefixes/val2/val2.h
new file mode 100644
index 0000000..995023d
--- /dev/null
+++ b/test cases/unit/77 pkgconfig prefixes/val2/val2.h
@@ -0,0 +1 @@
+int val2(void);
diff --git a/test cases/unit/78 subdir libdir/meson.build b/test cases/unit/78 subdir libdir/meson.build
new file mode 100644
index 0000000..5099c91
--- /dev/null
+++ b/test cases/unit/78 subdir libdir/meson.build
@@ -0,0 +1,2 @@
+project('toplevel', 'c')
+subproject('flub')
diff --git a/test cases/unit/78 subdir libdir/subprojects/flub/meson.build b/test cases/unit/78 subdir libdir/subprojects/flub/meson.build
new file mode 100644
index 0000000..7bfd2c5
--- /dev/null
+++ b/test cases/unit/78 subdir libdir/subprojects/flub/meson.build
@@ -0,0 +1 @@
+project('subflub', 'c')
diff --git a/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore b/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore
new file mode 100644
index 0000000..4976afc
--- /dev/null
+++ b/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore
@@ -0,0 +1 @@
+subprojects/*
diff --git a/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build b/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build
new file mode 100644
index 0000000..0bc395b
--- /dev/null
+++ b/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build
@@ -0,0 +1,3 @@
+project('user option for subproject')
+
+p = subproject('sub')
diff --git a/test cases/unit/80 global-rpath/meson.build b/test cases/unit/80 global-rpath/meson.build
new file mode 100644
index 0000000..c67d9e0
--- /dev/null
+++ b/test cases/unit/80 global-rpath/meson.build
@@ -0,0 +1,3 @@
+project('global-rpath', 'cpp')
+yonder_dep = dependency('yonder')
+executable('rpathified', 'rpathified.cpp', dependencies: [yonder_dep], install: true)
diff --git a/test cases/unit/80 global-rpath/rpathified.cpp b/test cases/unit/80 global-rpath/rpathified.cpp
new file mode 100644
index 0000000..3788906
--- /dev/null
+++ b/test cases/unit/80 global-rpath/rpathified.cpp
@@ -0,0 +1,6 @@
+#include <yonder.h>
+#include <string.h>
+int main(int argc, char **argv)
+{
+ return strcmp(yonder(), "AB54 6BR");
+}
diff --git a/test cases/unit/80 global-rpath/yonder/meson.build b/test cases/unit/80 global-rpath/yonder/meson.build
new file mode 100644
index 0000000..e32f383
--- /dev/null
+++ b/test cases/unit/80 global-rpath/yonder/meson.build
@@ -0,0 +1,5 @@
+project('yonder', 'cpp')
+yonder = shared_library('yonder', 'yonder.cpp', install: true)
+install_headers('yonder.h')
+pkgconfig = import('pkgconfig')
+pkgconfig.generate(yonder)
diff --git a/test cases/unit/80 global-rpath/yonder/yonder.cpp b/test cases/unit/80 global-rpath/yonder/yonder.cpp
new file mode 100644
index 0000000..b182d34
--- /dev/null
+++ b/test cases/unit/80 global-rpath/yonder/yonder.cpp
@@ -0,0 +1,3 @@
+#include "yonder.h"
+
+char *yonder(void) { return "AB54 6BR"; }
diff --git a/test cases/unit/80 global-rpath/yonder/yonder.h b/test cases/unit/80 global-rpath/yonder/yonder.h
new file mode 100644
index 0000000..9d9ad16
--- /dev/null
+++ b/test cases/unit/80 global-rpath/yonder/yonder.h
@@ -0,0 +1 @@
+char *yonder(void);
diff --git a/test cases/unit/81 wrap-git/meson.build b/test cases/unit/81 wrap-git/meson.build
new file mode 100644
index 0000000..b0af30a
--- /dev/null
+++ b/test cases/unit/81 wrap-git/meson.build
@@ -0,0 +1,4 @@
+project('test-wrap-git')
+
+exe = subproject('wrap_git').get_variable('exe')
+test('test1', exe)
diff --git a/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build b/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
new file mode 100644
index 0000000..2570f77
--- /dev/null
+++ b/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build
@@ -0,0 +1,3 @@
+project('foo', 'c')
+
+exe = executable('app', 'main.c')
diff --git a/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c b/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c
new file mode 100644
index 0000000..8488f4e
--- /dev/null
+++ b/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c
@@ -0,0 +1,4 @@
+int main(void)
+{
+ return 0;
+}
diff --git a/test cases/warning/1 version for string div/test.json b/test cases/warning/1 version for string div/test.json
new file mode 100644
index 0000000..c37931a
--- /dev/null
+++ b/test cases/warning/1 version for string div/test.json
@@ -0,0 +1,8 @@
+{
+ "stdout": [
+ {
+ "comment": "literal '/' appears in output, irrespective of os.path.sep, as that's the operator",
+ "line": "WARNING: Project targeting '>=0.48.0' but tried to use feature introduced in '0.49.0': / with string arguments."
+ }
+ ]
+}
diff --git a/test cases/warning/2 languages missing native/meson.build b/test cases/warning/2 languages missing native/meson.build
index f4aa956..e204715 100644
--- a/test cases/warning/2 languages missing native/meson.build
+++ b/test cases/warning/2 languages missing native/meson.build
@@ -1,2 +1,3 @@
-project('languages missing native')
+project('languages missing native',
+ meson_version : '>= 0.54')
add_languages('c')
diff --git a/test cases/warning/2 languages missing native/test.json b/test cases/warning/2 languages missing native/test.json
new file mode 100644
index 0000000..f929654
--- /dev/null
+++ b/test cases/warning/2 languages missing native/test.json
@@ -0,0 +1,7 @@
+{
+ "stdout": [
+ {
+ "line": "test cases/warning/2 languages missing native/meson.build:3: WARNING: add_languages is missing native:, assuming languages are wanted for both host and build."
+ }
+ ]
+}
diff --git a/test cases/windows/17 msvc ndebug/main.cpp b/test cases/windows/17 msvc ndebug/main.cpp
new file mode 100644
index 0000000..d647d71
--- /dev/null
+++ b/test cases/windows/17 msvc ndebug/main.cpp
@@ -0,0 +1,9 @@
+int main() {
+#ifdef NDEBUG
+ // NDEBUG is defined
+ return 0;
+#else
+ // NDEBUG is not defined
+ return 1;
+#endif
+} \ No newline at end of file
diff --git a/test cases/windows/17 msvc ndebug/meson.build b/test cases/windows/17 msvc ndebug/meson.build
new file mode 100644
index 0000000..78eaf89
--- /dev/null
+++ b/test cases/windows/17 msvc ndebug/meson.build
@@ -0,0 +1,7 @@
+project('msvc_ndebug', 'cpp',
+ default_options : [ 'b_ndebug=true' ]
+)
+
+exe = executable('exe', 'main.cpp')
+
+test('ndebug', exe)
diff --git a/tools/boost_names.py b/tools/boost_names.py
index d26d34b..b66c6cc 100755
--- a/tools/boost_names.py
+++ b/tools/boost_names.py
@@ -43,10 +43,10 @@ export_modules = False
class BoostLibrary():
def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
self.name = name
- self.shared = shared
- self.static = static
- self.single = single
- self.multi = multi
+ self.shared = sorted(set(shared))
+ self.static = sorted(set(static))
+ self.single = sorted(set(single))
+ self.multi = sorted(set(multi))
def __lt__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']:
if isinstance(other, BoostLibrary):
@@ -99,15 +99,35 @@ def get_libraries(jamfile: Path) -> T.List[BoostLibrary]:
cmds = raw.split(';') # Commands always terminate with a ; (I hope)
cmds = [x.strip() for x in cmds] # Some cleanup
+ project_usage_requirements: T.List[str] = []
+
# "Parse" the relevant sections
for i in cmds:
parts = i.split(' ')
- parts = [x for x in parts if x not in ['', ':']]
+ parts = [x for x in parts if x not in ['']]
if not parts:
continue
- # Parese libraries
- if parts[0] in ['lib', 'boost-lib']:
+ # Parse project
+ if parts[0] in ['project']:
+ attributes: T.Dict[str, T.List[str]] = {}
+ curr: T.Optional[str] = None
+
+ for j in parts:
+ if j == ':':
+ curr = None
+ elif curr is None:
+ curr = j
+ else:
+ if curr not in attributes:
+ attributes[curr] = []
+ attributes[curr] += [j]
+
+ if 'usage-requirements' in attributes:
+ project_usage_requirements = attributes['usage-requirements']
+
+ # Parse libraries
+ elif parts[0] in ['lib', 'boost-lib']:
assert len(parts) >= 2
# Get and check the library name
@@ -117,28 +137,36 @@ def get_libraries(jamfile: Path) -> T.List[BoostLibrary]:
if not lname.startswith('boost_'):
continue
+ # Count `:` to only select the 'usage-requirements'
+ # See https://boostorg.github.io/build/manual/master/index.html#bbv2.main-target-rule-syntax
+ colon_counter = 0
+ usage_requirements: T.List[str] = []
+ for j in parts:
+ if j == ':':
+ colon_counter += 1
+ elif colon_counter >= 4:
+ usage_requirements += [j]
+
# Get shared / static defines
shared: T.List[str] = []
static: T.List[str] = []
single: T.List[str] = []
multi: T.List[str] = []
- for j in parts:
+ for j in usage_requirements + project_usage_requirements:
m1 = re.match(r'<link>shared:<define>(.*)', j)
m2 = re.match(r'<link>static:<define>(.*)', j)
m3 = re.match(r'<threading>single:<define>(.*)', j)
m4 = re.match(r'<threading>multi:<define>(.*)', j)
if m1:
- shared += [m1.group(1)]
+ shared += [f'-D{m1.group(1)}']
if m2:
- static += [m2.group(1)]
+ static += [f'-D{m2.group(1)}']
if m3:
- single += [m3.group(1)]
+ single +=[f'-D{m3.group(1)}']
if m4:
- multi += [m4.group(1)]
+ multi += [f'-D{m4.group(1)}']
- shared = [f'-D{x}' for x in shared]
- static = [f'-D{x}' for x in static]
libs += [BoostLibrary(lname, shared, static, single, multi)]
return libs
diff --git a/tools/build_website.py b/tools/build_website.py
new file mode 100755
index 0000000..5486b69
--- /dev/null
+++ b/tools/build_website.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+
+import os, sys, subprocess, shutil
+
+assert(os.getcwd() == '/home/jpakkane')
+
+from glob import glob
+
+def purge(fname):
+ if not os.path.exists(fname):
+ return
+ if os.path.isdir(fname):
+ shutil.rmtree(fname)
+ os.unlink(fname)
+
+def update():
+ webdir = 'mesonweb'
+ repodir = 'mesonwebbuild'
+ docdir = os.path.join(repodir, 'docs')
+ builddir = os.path.join(docdir, 'builddir')
+ htmldir = os.path.join(builddir, 'Meson documentation-doc/html')
+# subprocess.check_call(['git', 'pull'], cwd=webdir)
+ subprocess.check_call(['git', 'fetch', '-a'], cwd=repodir)
+ subprocess.check_call(['git', 'reset', '--hard', 'origin/master'],
+ cwd=repodir)
+ if os.path.isdir(htmldir):
+ shutil.rmtree(htmldir)
+ if os.path.isdir(builddir):
+ shutil.rmtree(builddir)
+ env = os.environ.copy()
+ env['PATH'] = env['PATH'] + ':/home/jpakkane/.local/bin'
+ subprocess.check_call(['../meson.py', '.', 'builddir'], cwd=docdir, env=env)
+ subprocess.check_call(['ninja'], cwd=builddir)
+ old_files = glob(os.path.join(webdir, '*'))
+ for f in old_files:
+ base = f[len(webdir)+1:]
+ if base == 'CNAME' or base == 'favicon.png':
+ continue
+ subprocess.check_call(['git', 'rm', '-rf', base], cwd=webdir)
+ assert(os.path.isdir(webdir))
+ new_entries = glob(os.path.join(htmldir, '*'))
+ for e in new_entries:
+ shutil.move(e, webdir)
+ subprocess.check_call('git add *', shell=True, cwd=webdir)
+ subprocess.check_call(['git', 'commit', '-a', '-m', 'Bleep. Bloop. I am a bot.'],
+ cwd=webdir)
+ subprocess.check_call(['git', 'push'], cwd=webdir)
+ shutil.rmtree(builddir)
+
+if __name__ == '__main__':
+ update()
diff --git a/tools/copy_files.py b/tools/copy_files.py
new file mode 100644
index 0000000..39eaa0a
--- /dev/null
+++ b/tools/copy_files.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Copy files
+'''
+
+import argparse
+import shutil
+import typing as T
+from pathlib import Path
+
+PathLike = T.Union[Path,str]
+
+def copy_files(files: T.List[str], input_dir: PathLike, output_dir: PathLike) -> None:
+ if not input_dir:
+ raise ValueError(f'Input directory value is not set')
+ if not output_dir:
+ raise ValueError(f'Output directory value is not set')
+
+ input_dir = Path(input_dir).resolve()
+ output_dir = Path(output_dir).resolve()
+ output_dir.mkdir(parents=True, exist_ok=True)
+
+ for f in files:
+ if (input_dir/f).is_dir():
+ shutil.copytree(input_dir/f, output_dir/f)
+ else:
+ shutil.copy2(input_dir/f, output_dir/f)
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Copy files')
+ parser.add_argument('files', metavar='FILE', nargs='*')
+ parser.add_argument('-C', dest='input_dir', required=True)
+ parser.add_argument('--output-dir', required=True)
+
+ args = parser.parse_args()
+
+ copy_files(files=args.files,
+ input_dir=args.input_dir,
+ output_dir=args.output_dir)
diff --git a/tools/dircondenser.py b/tools/dircondenser.py
index 023c14e..0e28bec 100755
--- a/tools/dircondenser.py
+++ b/tools/dircondenser.py
@@ -74,6 +74,10 @@ def condense(dirname: str):
#print('git mv "%s" "%s"' % (old_name, new_name))
subprocess.check_call(['git', 'mv', old_name, new_name])
replacements.append((old_name, new_name))
+ # update any appearances of old_name in expected stdout in test.json
+ json = os.path.join(new_name, 'test.json')
+ if os.path.isfile(json):
+ replace_source(json, [(old_name, new_name)])
os.chdir(curdir)
replace_source('run_unittests.py', replacements)
replace_source('run_project_tests.py', replacements)
diff --git a/tools/gen_data.py b/tools/gen_data.py
new file mode 100755
index 0000000..2cc05a4
--- /dev/null
+++ b/tools/gen_data.py
@@ -0,0 +1,139 @@
+#!/usr/bin/env python3
+
+# Copyright 2020 Daniel Mensinger
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import hashlib
+import textwrap
+import re
+from pathlib import Path
+from datetime import datetime
+import typing as T
+
+class DataFile:
+ file_counter = 0
+
+ def __init__(self, path: Path, root: Path):
+ self.path = path
+ self.id = self.path.relative_to(root)
+ self.data_str = f'file_{DataFile.file_counter}_data_' + re.sub('[^a-zA-Z0-9]', '_', self.path.name)
+ DataFile.file_counter += 1
+
+ b = self.path.read_bytes()
+ self.data = b.decode()
+ self.sha256sum = hashlib.sha256(b).hexdigest()
+
+ def __repr__(self) -> str:
+ return f'<{type(self).__name__}: [{self.sha256sum}] {self.id}>'
+
+def main() -> int:
+ root_dir = Path(__file__).resolve().parents[1]
+ mesonbuild_dir = root_dir / 'mesonbuild'
+ out_file = mesonbuild_dir / 'mesondata.py'
+
+ data_dirs = mesonbuild_dir.glob('**/data')
+
+ data_files: T.List[DataFile] = []
+
+ for d in data_dirs:
+ for p in d.iterdir():
+ data_files += [DataFile(p, mesonbuild_dir)]
+
+ print(f'Found {len(data_files)} data files')
+
+ # Generate the data script
+ data = ''
+
+ data += textwrap.dedent(f'''\
+ # Copyright {datetime.today().year} The Meson development team
+
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+
+ # http://www.apache.org/licenses/LICENSE-2.0
+
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+
+ ####
+ #### WARNING: This is an automatically generated file! Do not edit!
+ #### Generated by {Path(__file__).resolve().relative_to(root_dir)}
+ ####
+
+
+ from pathlib import Path
+ import typing as T
+
+ if T.TYPE_CHECKING:
+ from .environment import Environment
+
+ ######################
+ # BEGIN Data section #
+ ######################
+
+ ''')
+
+ for i in data_files:
+ data += f"{i.data_str} = '''\\\n{i.data}'''\n\n"
+
+ data += textwrap.dedent(f'''
+ ####################
+ # END Data section #
+ ####################
+
+ class DataFile:
+ def __init__(self, path: Path, sha256sum: str, data: str) -> None:
+ self.path = path
+ self.sha256sum = sha256sum
+ self.data = data
+
+ def write_once(self, path: Path) -> None:
+ if not path.exists():
+ path.write_text(self.data)
+
+ def write_to_private(self, env: 'Environment') -> Path:
+ out_file = Path(env.scratch_dir) / 'data' / self.path.name
+ out_file.parent.mkdir(exist_ok=True)
+ self.write_once(out_file)
+ return out_file
+
+
+ mesondata = {{
+ ''')
+
+ for i in data_files:
+ data += textwrap.indent(textwrap.dedent(f"""\
+ '{i.id}': DataFile(
+ Path('{i.id}'),
+ '{i.sha256sum}',
+ {i.data_str},
+ ),
+ """), ' ')
+
+ data += textwrap.dedent('''\
+ }
+ ''')
+
+ print(f'Updating {out_file}')
+ out_file.write_text(data)
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/tools/regenerate_docs.py b/tools/regenerate_docs.py
new file mode 100755
index 0000000..d443570
--- /dev/null
+++ b/tools/regenerate_docs.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+
+
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+Regenerate markdown docs by using `meson.py` from the root dir
+'''
+
+import argparse
+import jinja2
+import os
+import re
+import subprocess
+import sys
+import textwrap
+import typing as T
+from pathlib import Path
+
+PathLike = T.Union[Path,str]
+
+def _get_meson_output(root_dir: Path, args: T.List):
+ env = os.environ.copy()
+ env['COLUMNS'] = '80'
+ return subprocess.run([str(sys.executable), str(root_dir/'meson.py')] + args, check=True, capture_output=True, text=True, env=env).stdout.strip()
+
+def get_commands_data(root_dir: Path):
+ usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE)
+ positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE)
+ options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE)
+ commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE)
+
+ def get_next_start(iterators, end):
+ return next((i.start() for i in iterators if i), end)
+
+ def normalize_text(text):
+ # clean up formatting
+ out = text
+ out = re.sub(r'\r\n', r'\r', out, flags=re.MULTILINE) # replace newlines with a linux EOL
+ out = re.sub(r'^ +$', '', out, flags=re.MULTILINE) # remove trailing whitespace
+ out = re.sub(r'(?:^\n+|\n+$)', '', out) # remove trailing empty lines
+ return out
+
+ def parse_cmd(cmd):
+ cmd_len = len(cmd)
+ usage = usage_start_pattern.search(cmd)
+ positionals = positional_start_pattern.search(cmd)
+ options = options_start_pattern.search(cmd)
+ commands = commands_start_pattern.search(cmd)
+
+ arguments_start = get_next_start([positionals, options, commands], None)
+ assert arguments_start
+
+ # replace `usage:` with `$` and dedent
+ dedent_size = (usage.end() - usage.start()) - len('$ ')
+ usage_text = textwrap.dedent(f'{dedent_size * " "}$ {normalize_text(cmd[usage.end():arguments_start])}')
+
+ return {
+ 'usage': usage_text,
+ 'arguments': normalize_text(cmd[arguments_start:cmd_len]),
+ }
+
+ def clean_dir_arguments(text):
+ # Remove platform specific defaults
+ args = [
+ 'prefix',
+ 'bindir',
+ 'datadir',
+ 'includedir',
+ 'infodir',
+ 'libdir',
+ 'libexecdir',
+ 'localedir',
+ 'localstatedir',
+ 'mandir',
+ 'sbindir',
+ 'sharedstatedir',
+ 'sysconfdir'
+ ]
+ out = text
+ for a in args:
+ out = re.sub(r'(--' + a + r' .+?)\s+\(default:.+?\)(\.)?', r'\1\2', out, flags=re.MULTILINE|re.DOTALL)
+ return out
+
+ output = _get_meson_output(root_dir, ['--help'])
+ commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', output, re.MULTILINE|re.DOTALL)[0].split(','))
+ commands.remove('help')
+
+ cmd_data = dict()
+
+ for cmd in commands:
+ cmd_output = _get_meson_output(root_dir, [cmd, '--help'])
+ cmd_data[cmd] = parse_cmd(cmd_output)
+ if cmd in ['setup', 'configure']:
+ cmd_data[cmd]['arguments'] = clean_dir_arguments(cmd_data[cmd]['arguments'])
+
+ return cmd_data
+
+def regenerate_commands(root_dir: Path, output_dir: Path) -> None:
+ with open(root_dir/'docs'/'markdown_dynamic'/'Commands.md') as f:
+ template = f.read()
+
+ cmd_data = get_commands_data(root_dir)
+
+ t = jinja2.Template(template, undefined=jinja2.StrictUndefined, keep_trailing_newline=True)
+ content = t.render(cmd_help=cmd_data)
+
+ output_file = output_dir/'Commands.md'
+ with open(output_file, 'w') as f:
+ f.write(content)
+
+ print(f'`{output_file}` was regenerated')
+
+def regenerate_docs(output_dir: PathLike,
+ dummy_output_file: T.Optional[PathLike]) -> None:
+ if not output_dir:
+ raise ValueError(f'Output directory value is not set')
+
+ output_dir = Path(output_dir).resolve()
+ output_dir.mkdir(parents=True, exist_ok=True)
+
+ root_dir = Path(__file__).resolve().parent.parent
+
+ regenerate_commands(root_dir, output_dir)
+
+ if dummy_output_file:
+ with open(output_dir/dummy_output_file, 'w') as f:
+ f.write('dummy file for custom_target output')
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description='Generate meson docs')
+ parser.add_argument('--output-dir', required=True)
+ parser.add_argument('--dummy-output-file', type=str)
+
+ args = parser.parse_args()
+
+ regenerate_docs(output_dir=args.output_dir,
+ dummy_output_file=args.dummy_output_file)