From bfea80677eef52271cde1c089268a6d544a79453 Mon Sep 17 00:00:00 2001 From: Alexandre Lavigne Date: Tue, 21 Apr 2020 16:08:45 +0200 Subject: Issue: 7009: CMake/Centos7 Unable to find CMake even though it is installed On some systems the binary 'cmake' for version 3 is named 'cmake3', therefor printing its version number prints: 'cmake3 version X.Y.Z' instead of 'cmake version X.Y.Z' This '3' digit in the middle breaks the regular expression extracting the version number. The following fix permit both way to work and the regexp to match the proper version number. Signed-off-by: Alexandre Lavigne --- mesonbuild/cmake/executor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index a41b293..66713a1 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -132,7 +132,7 @@ class CMakeExecutor: msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.' mlog.warning(msg) return None - cmvers = re.sub(r'\s*cmake version\s*', '', out.split('\n')[0]).strip() + cmvers = re.sub(r'\s*(cmake|cmake3) version\s*', '', out.split('\n')[0]).strip() return cmvers def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None: -- cgit v1.1 From 959c1becd63ff8ccbc106755574af9bb49392b08 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 22 Apr 2020 13:54:27 -0700 Subject: ci: install python3-lxml This will be used by the junit validation tests. --- ci/ciimage/arch/install.sh | 2 +- ci/ciimage/eoan/install.sh | 1 + ci/ciimage/fedora/install.sh | 2 +- ci/ciimage/opensuse/install.sh | 2 +- 4 files changed, 4 insertions(+), 3 deletions(-) diff --git a/ci/ciimage/arch/install.sh b/ci/ciimage/arch/install.sh index 7fe139e..6cbbb27 100755 --- a/ci/ciimage/arch/install.sh +++ b/ci/ciimage/arch/install.sh @@ -12,7 +12,7 @@ pkgs=( itstool gtk3 java-environment=8 gtk-doc llvm clang sdl2 graphviz doxygen vulkan-validation-layers openssh mercurial gtk-sharp-2 qt5-tools libwmf valgrind cmake netcdf-fortran openmpi nasm gnustep-base gettext - python-jsonschema + python-jsonschema python-lxml # cuda ) diff --git a/ci/ciimage/eoan/install.sh b/ci/ciimage/eoan/install.sh index 4b3b746..7d7a1fd 100755 --- a/ci/ciimage/eoan/install.sh +++ b/ci/ciimage/eoan/install.sh @@ -11,6 +11,7 @@ export DC=gdc pkgs=( python3-pytest-xdist python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev + python3-lxml wget unzip qt5-default clang pkg-config-arm-linux-gnueabihf diff --git a/ci/ciimage/fedora/install.sh b/ci/ciimage/fedora/install.sh index 242d677..f61d97e 100755 --- a/ci/ciimage/fedora/install.sh +++ b/ci/ciimage/fedora/install.sh @@ -13,7 +13,7 @@ pkgs=( doxygen vulkan-devel vulkan-validation-layers-devel openssh mercurial gtk-sharp2-devel libpcap-devel gpgme-devel qt5-qtbase-devel qt5-qttools-devel qt5-linguist qt5-qtbase-private-devel libwmf-devel valgrind cmake openmpi-devel nasm gnustep-base-devel gettext-devel ncurses-devel - libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel + libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel python3-lxml ) # Sys update diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh index c5dd6df..79be8a1 100755 --- a/ci/ciimage/opensuse/install.sh +++ b/ci/ciimage/opensuse/install.sh @@ -5,7 +5,7 @@ set -e source /ci/common.sh pkgs=( - python3-setuptools python3-wheel python3-pip python3-pytest-xdist python3 + python3-setuptools python3-wheel 
python3-pip python3-pytest-xdist python3 python3-lxml ninja make git autoconf automake patch python3-Cython python3-jsonschema elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel -- cgit v1.1 From 4dcbb9254a586f59616ddf02bfd508ab76c47f2b Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 21 Apr 2020 14:13:25 -0700 Subject: mtest: Use textrwap.dedent for large block This allows editors like vim to properly fold the code, and makes it generally easier to read. --- mesonbuild/mtest.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 23643c5..8ea8d5f 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -33,6 +33,7 @@ import signal import subprocess import sys import tempfile +import textwrap import time import typing as T @@ -775,14 +776,14 @@ class TestHarness: write_json_log(self.jsonlogfile, name, result) def print_summary(self) -> None: - msg = ''' -Ok: {:<4} -Expected Fail: {:<4} -Fail: {:<4} -Unexpected Pass: {:<4} -Skipped: {:<4} -Timeout: {:<4} -'''.format(self.success_count, self.expectedfail_count, self.fail_count, + msg = textwrap.dedent(''' + Ok: {:<4} + Expected Fail: {:<4} + Fail: {:<4} + Unexpected Pass: {:<4} + Skipped: {:<4} + Timeout: {:<4} + ''').format(self.success_count, self.expectedfail_count, self.fail_count, self.unexpectedpass_count, self.skip_count, self.timeout_count) print(msg) if self.logfile: -- cgit v1.1 From 61e53fbfd9fc05a96819da1ee1deef8dce713299 Mon Sep 17 00:00:00 2001 From: Michael Date: Tue, 21 Apr 2020 00:00:14 -0700 Subject: update MANIFEST.in --- MANIFEST.in | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 13f7949..11c804a 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -4,10 +4,9 @@ graft cross graft data graft graphics graft man -graft syntax-highlighting graft tools -include authors.txt -include contributing.txt + +include contributing.md include COPYING include README.md include run_cross_test.py @@ -15,7 +14,6 @@ include run_tests.py include run_unittests.py include run_meson_command_tests.py include run_project_tests.py -include mesonrewriter.py include ghwt.py include __main__.py include meson.py -- cgit v1.1 From 39ca178d21a72a8f7560b657b517fb73f81b4a89 Mon Sep 17 00:00:00 2001 From: Andrew Udvare Date: Thu, 23 Apr 2020 12:06:08 -0400 Subject: compilers: fix type issue mesonlib.darwin_get_object_archs() only accepts a string argument so convert it. 
--- mesonbuild/compilers/mixins/clike.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 24f4796..124c49c 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -919,7 +919,7 @@ class CLikeCompiler: for f in files: if not f.is_file(): continue - archs = mesonlib.darwin_get_object_archs(f) + archs = mesonlib.darwin_get_object_archs(str(f)) if archs and env.machines.host.cpu_family in archs: return f else: -- cgit v1.1 From 0c3bb15357419d3cc7a453da25b349a9c34e391d Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 21 Apr 2020 14:29:54 -0700 Subject: mtest: Store individual results for TAP tests This will be used by the Junit writer --- mesonbuild/mtest.py | 44 +++++++++++++++++++++++--------------------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 8ea8d5f..a3553dd 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -336,30 +336,29 @@ class TestRun: res = TestResult.EXPECTEDFAIL if bool(returncode) else TestResult.UNEXPECTEDPASS else: res = TestResult.FAIL if bool(returncode) else TestResult.OK - return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd) + return cls(test, test_env, res, [], returncode, starttime, duration, stdo, stde, cmd) @classmethod def make_tap(cls, test: 'TestSerialisation', test_env: T.Dict[str, str], returncode: int, starttime: float, duration: float, stdo: str, stde: str, cmd: T.Optional[T.List[str]]) -> 'TestRun': - res = None - num_tests = 0 + res = None # T.Optional[TestResult] + results = [] # T.List[TestResult] failed = False - num_skipped = 0 for i in TAPParser(io.StringIO(stdo)).parse(): if isinstance(i, TAPParser.Bailout): - res = TestResult.ERROR + results.append(TestResult.ERROR) + failed = True elif isinstance(i, TAPParser.Test): - if i.result == TestResult.SKIP: - num_skipped += 1 - elif i.result in (TestResult.FAIL, TestResult.UNEXPECTEDPASS): + results.append(i.result) + if i.result not in {TestResult.OK, TestResult.EXPECTEDFAIL}: failed = True - num_tests += 1 elif isinstance(i, TAPParser.Error): - res = TestResult.ERROR + results.append(TestResult.ERROR) stde += '\nTAP parsing error: ' + i.message + failed = True if returncode != 0: res = TestResult.ERROR @@ -367,7 +366,7 @@ class TestRun: if res is None: # Now determine the overall result of the test based on the outcome of the subcases - if num_skipped == num_tests: + if all(t is TestResult.SKIP for t in results): # This includes the case where num_tests is zero res = TestResult.SKIP elif test.should_fail: @@ -375,14 +374,16 @@ class TestRun: else: res = TestResult.FAIL if failed else TestResult.OK - return cls(test, test_env, res, returncode, starttime, duration, stdo, stde, cmd) + return cls(test, test_env, res, results, returncode, starttime, duration, stdo, stde, cmd) def __init__(self, test: 'TestSerialisation', test_env: T.Dict[str, str], - res: TestResult, returncode: int, starttime: float, duration: float, + res: TestResult, results: T.List[TestResult], returncode: + int, starttime: float, duration: float, stdo: T.Optional[str], stde: T.Optional[str], cmd: T.Optional[T.List[str]]): assert isinstance(res, TestResult) self.res = res + self.results = results # May be an empty list self.returncode = returncode self.starttime = starttime self.duration = duration @@ -391,6 +392,7 @@ class TestRun: self.cmd = cmd self.env = test_env self.should_fail = 
test.should_fail + self.project = test.project_name def get_log(self) -> str: res = '--- command ---\n' @@ -491,7 +493,7 @@ class SingleTestRunner: cmd = self._get_cmd() if cmd is None: skip_stdout = 'Not run because can not execute cross compiled binaries.' - return TestRun(self.test, self.test_env, TestResult.SKIP, GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None) + return TestRun(self.test, self.test_env, TestResult.SKIP, [], GNU_SKIP_RETURNCODE, time.time(), 0.0, skip_stdout, None, None) else: wrap = TestHarness.get_wrapper(self.options) if self.options.gdb: @@ -634,7 +636,7 @@ class SingleTestRunner: stdo = "" stde = additional_error if timed_out: - return TestRun(self.test, self.test_env, TestResult.TIMEOUT, p.returncode, starttime, duration, stdo, stde, cmd) + return TestRun(self.test, self.test_env, TestResult.TIMEOUT, [], p.returncode, starttime, duration, stdo, stde, cmd) else: if self.test.protocol == 'exitcode': return TestRun.make_exitcode(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd) @@ -656,6 +658,7 @@ class TestHarness: self.timeout_count = 0 self.is_run = False self.tests = None + self.results = [] # type: T.List[TestRun] self.logfilename = None # type: T.Optional[str] self.logfile = None # type: T.Optional[T.TextIO] self.jsonlogfile = None # type: T.Optional[T.TextIO] @@ -679,12 +682,11 @@ class TestHarness: self.close_logfiles() def close_logfiles(self) -> None: - if self.logfile: - self.logfile.close() - self.logfile = None - if self.jsonlogfile: - self.jsonlogfile.close() - self.jsonlogfile = None + for f in ['logfile', 'jsonlogfile']: + lfile = getattr(self, f) + if lfile: + lfile.close() + setattr(self, f, None) def merge_suite_options(self, options: argparse.Namespace, test: 'TestSerialisation') -> T.Dict[str, str]: if ':' in options.setup: -- cgit v1.1 From 7b7f93a09f90c93d94e4a78ddc1dd766f05bf4a9 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 22 Apr 2020 12:25:49 -0700 Subject: mtest: Generate a JUnit xml result file JUnit is pretty ubiquitous, lots of services and results viewers understand it, in particular gitlab and jenkins know how to consume JUnit xml. This means projects using CI services can have their test results consumed automatically. Fixes: #6972 --- data/schema.xsd | 96 ++++++++++++++++++ docs/markdown/snippets/junit_result_generation.md | 4 + mesonbuild/mtest.py | 113 ++++++++++++++++++++++ run_unittests.py | 25 +++++ 4 files changed, 238 insertions(+) create mode 100644 data/schema.xsd create mode 100644 docs/markdown/snippets/junit_result_generation.md diff --git a/data/schema.xsd b/data/schema.xsd new file mode 100644 index 0000000..58c6bfd --- /dev/null +++ b/data/schema.xsd @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/markdown/snippets/junit_result_generation.md b/docs/markdown/snippets/junit_result_generation.md new file mode 100644 index 0000000..fbe910b --- /dev/null +++ b/docs/markdown/snippets/junit_result_generation.md @@ -0,0 +1,4 @@ +## Meson test now produces JUnit xml from results + +Meson will now generate a JUnit compatible XML file from test results. it +will be in the meson-logs directory and is called testlog.junit.xml. 
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index a3553dd..3239736 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -36,6 +36,7 @@ import tempfile import textwrap import time import typing as T +import xml.etree.ElementTree as et from . import build from . import environment @@ -321,6 +322,110 @@ class TAPParser: yield self.Error('Too many tests run (expected {}, got {})'.format(plan.count, num_tests)) + +class JunitBuilder: + + """Builder for Junit test results. + + Junit is impossible to stream out, it requires attributes counting the + total number of tests, failures, skips, and errors in the root element + and in each test suite. As such, we use a builder class to track each + test case, and calculate all metadata before writing it out. + + For tests with multiple results (like from a TAP test), we record the + test as a suite with the project_name.test_name. This allows us to track + each result separately. For tests with only one result (such as exit-code + tests) we record each one into a suite with the name project_name. The use + of the project_name allows us to sort subproject tests separately from + the root project. + """ + + def __init__(self, filename: str) -> None: + self.filename = filename + self.root = et.Element( + 'testsuites', tests='0', errors='0', failures='0') + self.suites = {} # type: T.Dict[str, et.Element] + + def log(self, name: str, test: 'TestRun') -> None: + """Log a single test case.""" + # In this case we have a test binary with multiple results. + # We want to record this so that each result is recorded + # separately + if test.results: + suitename = '{}.{}'.format(test.project, name) + assert suitename not in self.suites, 'duplicate suite' + + suite = self.suites[suitename] = et.Element( + 'testsuite', + name=suitename, + tests=str(len(test.results)), + errors=str(sum(1 for r in test.results if r is TestResult.ERROR)), + failures=str(sum(1 for r in test.results if r in + {TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})), + skipped=str(sum(1 for r in test.results if r is TestResult.SKIP)), + ) + + for i, result in enumerate(test.results): + # Both name and classname are required. Set them both to the + # number of the test in a TAP test, as TAP doesn't give names. + testcase = et.SubElement(suite, 'testcase', name=str(i), classname=str(i)) + if result is TestResult.SKIP: + et.SubElement(testcase, 'skipped') + elif result is TestResult.ERROR: + et.SubElement(testcase, 'error') + elif result is TestResult.FAIL: + et.SubElement(testcase, 'failure') + elif result is TestResult.UNEXPECTEDPASS: + fail = et.SubElement(testcase, 'failure') + fail.text = 'Test unexpected passed.' + elif result is TestResult.TIMEOUT: + fail = et.SubElement(testcase, 'failure') + fail.text = 'Test did not finish before configured timeout.' 
+ if test.stdo: + out = et.SubElement(suite, 'system-out') + out.text = test.stdo.rstrip() + if test.stde: + err = et.SubElement(suite, 'system-err') + err.text = test.stde.rstrip() + else: + if test.project not in self.suites: + suite = self.suites[test.project] = et.Element( + 'testsuite', name=test.project, tests='1', errors='0', + failures='0', skipped='0') + else: + suite = self.suites[test.project] + suite.attrib['tests'] = str(int(suite.attrib['tests']) + 1) + + testcase = et.SubElement(suite, 'testcase', name=name, classname=name) + if test.res is TestResult.SKIP: + et.SubElement(testcase, 'skipped') + suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1) + elif test.res is TestResult.ERROR: + et.SubElement(testcase, 'error') + suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1) + elif test.res is TestResult.FAIL: + et.SubElement(testcase, 'failure') + suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1) + if test.stdo: + out = et.SubElement(testcase, 'system-out') + out.text = test.stdo.rstrip() + if test.stde: + err = et.SubElement(testcase, 'system-err') + err.text = test.stde.rstrip() + + def write(self) -> None: + """Calculate total test counts and write out the xml result.""" + for suite in self.suites.values(): + self.root.append(suite) + # Skipped is really not allowed in the "testsuits" element + for attr in ['tests', 'errors', 'failures']: + self.root.attrib[attr] = str(int(self.root.attrib[attr]) + int(suite.attrib[attr])) + + tree = et.ElementTree(self.root) + with open(self.filename, 'wb') as f: + tree.write(f, encoding='utf-8', xml_declaration=True) + + class TestRun: @classmethod @@ -662,6 +767,7 @@ class TestHarness: self.logfilename = None # type: T.Optional[str] self.logfile = None # type: T.Optional[T.TextIO] self.jsonlogfile = None # type: T.Optional[T.TextIO] + self.junit = None # type: T.Optional[JunitBuilder] if self.options.benchmark: self.tests = load_benchmarks(options.wd) else: @@ -776,6 +882,8 @@ class TestHarness: self.logfile.write(result_str) if self.jsonlogfile: write_json_log(self.jsonlogfile, name, result) + if self.junit: + self.junit.log(name, result) def print_summary(self) -> None: msg = textwrap.dedent(''' @@ -790,6 +898,8 @@ class TestHarness: print(msg) if self.logfile: self.logfile.write(msg) + if self.junit: + self.junit.write() def print_collected_logs(self) -> None: if len(self.collected_logs) > 0: @@ -906,6 +1016,9 @@ class TestHarness: if namebase: logfile_base += '-' + namebase.replace(' ', '_') + + self.junit = JunitBuilder(logfile_base + '.junit.xml') + self.logfilename = logfile_base + '.txt' self.jsonlogfilename = logfile_base + '.json' diff --git a/run_unittests.py b/run_unittests.py index 831e53f..da898a3 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4617,6 +4617,31 @@ recommended as it is not supported on some platforms''') out = self.build() self.assertNotIn('Project configured', out) + def _test_junit(self, case: str) -> None: + try: + import lxml.etree as et + except ImportError: + raise unittest.SkipTest('lxml required, but not found.') + + schema = et.XMLSchema(et.parse(str(Path(__file__).parent / 'data' / 'schema.xsd'))) + + testdir = os.path.join(self.common_test_dir, case) + self.init(testdir) + self.run_tests() + + junit = et.parse(str(Path(self.builddir) / 'meson-logs' / 'testlog.junit.xml')) + try: + schema.assertValid(junit) + except et.DocumentInvalid as e: + self.fail(e.error_log) + + def test_junit_valid_tap(self): + self._test_junit('213 tap tests') + + def 
test_junit_valid_exitcode(self): + self._test_junit('44 test args') + + class FailureTests(BasePlatformTests): ''' Tests that test failure conditions. Build files here should be dynamically -- cgit v1.1 From 97f7e3d83cdafbd94d4a164582ea71035c996baa Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 23 Apr 2020 10:36:13 -0700 Subject: ci: Try to fix opensuse image --- ci/ciimage/opensuse/install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh index 79be8a1..7c90ec3 100755 --- a/ci/ciimage/opensuse/install.sh +++ b/ci/ciimage/opensuse/install.sh @@ -17,7 +17,7 @@ pkgs=( libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel boost-devel libboost_date_time-devel libboost_filesystem-devel libboost_locale-devel libboost_system-devel libboost_test-devel libboost_log-devel libboost_regex-devel - libboost_python-devel libboost_python-py3-1_71_0-devel libboost_regex-devel + libboost_python-py3-1_71_0-devel libboost_regex-devel ) # Sys update -- cgit v1.1 From 8e397491b49062ca7a147ed5f8cfa19055263b7b Mon Sep 17 00:00:00 2001 From: georgev93 Date: Tue, 21 Apr 2020 21:21:41 -0400 Subject: Adjust regex to handle cases such as C:/Program Files/folder --- mesonbuild/cmake/traceparser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py index 432cd21..0aee3fa 100644 --- a/mesonbuild/cmake/traceparser.py +++ b/mesonbuild/cmake/traceparser.py @@ -655,7 +655,7 @@ class CMakeTraceParser: # Try joining file paths that contain spaces - reg_start = re.compile(r'^([A-Za-z]:)?/.*/[^./]+$') + reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$') reg_end = re.compile(r'^.*\.[a-zA-Z]+$') fixed_list = [] # type: T.List[str] -- cgit v1.1 From 7c68fe80083fd72a100998578417b223e3704af5 Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Thu, 23 Apr 2020 19:27:50 -0400 Subject: backend/vs: Fix build when not all languages have pch It is not unheard-of for a project to use pch for C++ but not for C (because C usually builds fast enough anyway, so it's not worth the developer overhead of maintaining the pch file). This code was trying to optimize the vcxproj file size by detecting "only one language", but it was only looking at the number of pch-languages defined. This is incorrect when pch is not defined for all languages in use. Instead of tweaking the optimization further, remove it. This makes the vs backend behave more like the ninja backend. --- mesonbuild/backend/vs2010backend.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index ef849e1..80ff910 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -591,10 +591,8 @@ class Vs2010Backend(backends.Backend): raise MesonException('Could not guess language from source file %s.' % src) def add_pch(self, pch_sources, lang, inc_cl): - if len(pch_sources) <= 1: - # We only need per file precompiled headers if we have more than 1 language. 
- return - self.use_pch(pch_sources, lang, inc_cl) + if lang in pch_sources: + self.use_pch(pch_sources, lang, inc_cl) def create_pch(self, pch_sources, lang, inc_cl): pch = ET.SubElement(inc_cl, 'PrecompiledHeader') @@ -602,6 +600,8 @@ class Vs2010Backend(backends.Backend): self.add_pch_files(pch_sources, lang, inc_cl) def use_pch(self, pch_sources, lang, inc_cl): + pch = ET.SubElement(inc_cl, 'PrecompiledHeader') + pch.text = 'Use' header = self.add_pch_files(pch_sources, lang, inc_cl) pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles') pch_include.text = header + ';%(ForcedIncludeFiles)' @@ -1046,12 +1046,10 @@ class Vs2010Backend(backends.Backend): # Note: SuppressStartupBanner is /NOLOGO and is 'true' by default pch_sources = {} if self.environment.coredata.base_options.get('b_pch', False): - pch_node = ET.SubElement(clconf, 'PrecompiledHeader') for lang in ['c', 'cpp']: pch = target.get_pch(lang) if not pch: continue - pch_node.text = 'Use' if compiler.id == 'msvc': if len(pch) == 1: # Auto generate PCH. @@ -1065,10 +1063,6 @@ class Vs2010Backend(backends.Backend): # I don't know whether its relevant but let's handle other compilers # used with a vs backend pch_sources[lang] = [pch[0], None, lang, None] - if len(pch_sources) == 1: - # If there is only 1 language with precompiled headers, we can use it for the entire project, which - # is cleaner than specifying it for each source file. - self.use_pch(pch_sources, list(pch_sources)[0], clconf) resourcecompile = ET.SubElement(compiles, 'ResourceCompile') ET.SubElement(resourcecompile, 'PreprocessorDefinitions') -- cgit v1.1 From ccdf7f6d34fbcf172ad5362a6ce2959f07d0e1bd Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sat, 21 Mar 2020 11:51:02 +0100 Subject: wrap: Add support for local files via only `*_filename` --- mesonbuild/interpreter.py | 2 +- mesonbuild/wrap/wrap.py | 38 +++++++++++++++++++++++++++----------- 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 48b6bd6..0303e6a 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2675,7 +2675,7 @@ external dependencies (including libraries) must go to "dependencies".''') return subproject subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) - r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode')) + r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'), current_subproject=self.subproject) try: resolved = r.resolve(dirname, method) except wrap.WrapException as e: diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 1715cd3..9d95bff 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -27,6 +27,7 @@ import sys import configparser import typing as T +from pathlib import Path from . 
import WrapMode from ..mesonlib import git, GIT, ProgressBar, MesonException @@ -126,7 +127,7 @@ class PackageDefinition: raise WrapException(m.format(key, self.basename)) def has_patch(self) -> bool: - return 'patch_url' in self.values + return 'patch_filename' in self.values def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition: fname = os.path.join(subdir_root, packagename + '.wrap') @@ -146,10 +147,12 @@ def get_directory(subdir_root: str, packagename: str): return wrap, directory class Resolver: - def __init__(self, subdir_root: str, wrap_mode=WrapMode.default): + def __init__(self, subdir_root: str, wrap_mode=WrapMode.default, current_subproject: str = ''): self.wrap_mode = wrap_mode self.subdir_root = subdir_root + self.current_subproject = current_subproject self.cachedir = os.path.join(self.subdir_root, 'packagecache') + self.filesdir = os.path.join(self.subdir_root, 'packagefiles') def resolve(self, packagename: str, method: str) -> str: self.packagename = packagename @@ -363,7 +366,9 @@ class Resolver: hashvalue = h.hexdigest() return hashvalue, tmpfile.name - def check_hash(self, what: str, path: str) -> None: + def check_hash(self, what: str, path: str, hash_required: bool = True) -> None: + if what + '_hash' not in self.wrap.values and not hash_required: + return expected = self.wrap.get(what + '_hash') h = hashlib.sha256() with open(path, 'rb') as f: @@ -393,17 +398,28 @@ class Resolver: def get_file_internal(self, what: str) -> str: filename = self.wrap.get(what + '_filename') - cache_path = os.path.join(self.cachedir, filename) + if what + '_url' in self.wrap.values: + cache_path = os.path.join(self.cachedir, filename) - if os.path.exists(cache_path): - self.check_hash(what, cache_path) - mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.') + if os.path.exists(cache_path): + self.check_hash(what, cache_path) + mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.') + return cache_path + + if not os.path.isdir(self.cachedir): + os.mkdir(self.cachedir) + self.download(what, cache_path) return cache_path + else: + from ..interpreterbase import FeatureNew + FeatureNew('Local wrap patch files without {}_url'.format(what), '0.55.0').use(self.current_subproject) + path = Path(self.filesdir) / filename + + if not path.exists(): + raise WrapException('File "{}" does not exist'.format(path)) + self.check_hash(what, path.as_posix(), hash_required=False) - if not os.path.isdir(self.cachedir): - os.mkdir(self.cachedir) - self.download(what, cache_path) - return cache_path + return path.as_posix() def apply_patch(self) -> None: path = self.get_file_internal('patch') -- cgit v1.1 From faf16545faca2c02e2462ecce9f8946d5a5790e6 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sat, 21 Mar 2020 11:52:11 +0100 Subject: wrap: Add test case for local source / patch files --- .../common/157 wrap file should not failed/meson.build | 6 +++++- .../157 wrap file should not failed/src/meson.build | 4 ++++ .../common/157 wrap file should not failed/src/test.c | 9 +++++++++ .../subprojects/.gitignore | 2 ++ .../subprojects/bar.wrap | 8 ++++++++ .../subprojects/foo-1.0/foo.c | 3 --- .../subprojects/foo-1.0/meson.build | 2 -- .../subprojects/foo.wrap | 4 ++-- .../subprojects/packagecache/foo-1.0-patch.tar.xz | Bin 232 -> 244 bytes .../subprojects/packagecache/foo-1.0.tar.xz | Bin 180 -> 196 bytes .../subprojects/packagefiles/bar-1.0-patch.tar.xz | Bin 0 -> 244 bytes .../subprojects/packagefiles/bar-1.0.tar.xz | Bin 0 -> 200 bytes 12 files changed, 30 
insertions(+), 8 deletions(-) create mode 100644 test cases/common/157 wrap file should not failed/src/test.c create mode 100644 test cases/common/157 wrap file should not failed/subprojects/.gitignore create mode 100644 test cases/common/157 wrap file should not failed/subprojects/bar.wrap delete mode 100644 test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c delete mode 100644 test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build create mode 100644 test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz create mode 100644 test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz diff --git a/test cases/common/157 wrap file should not failed/meson.build b/test cases/common/157 wrap file should not failed/meson.build index f4ec2a8..cffce2f 100644 --- a/test cases/common/157 wrap file should not failed/meson.build +++ b/test cases/common/157 wrap file should not failed/meson.build @@ -3,7 +3,11 @@ project('mainproj', 'c', ) subproject('zlib') -subproject('foo') +foo = subproject('foo') +bar = subproject('bar') + +libfoo = foo.get_variable('libfoo') +libbar = bar.get_variable('libbar') executable('grabprog', files('src/subprojects/prog.c')) executable('grabprog2', files('src/subprojects/foo/prog2.c')) diff --git a/test cases/common/157 wrap file should not failed/src/meson.build b/test cases/common/157 wrap file should not failed/src/meson.build index 69f666d..0c82165 100644 --- a/test cases/common/157 wrap file should not failed/src/meson.build +++ b/test cases/common/157 wrap file should not failed/src/meson.build @@ -1,2 +1,6 @@ executable('grabprog3', files('subprojects/prog.c')) executable('grabprog4', files('subprojects/foo/prog2.c')) + +texe = executable('testexe', files('test.c'), link_with: [libfoo, libbar]) + +test('t1', texe) diff --git a/test cases/common/157 wrap file should not failed/src/test.c b/test cases/common/157 wrap file should not failed/src/test.c new file mode 100644 index 0000000..34cf991 --- /dev/null +++ b/test cases/common/157 wrap file should not failed/src/test.c @@ -0,0 +1,9 @@ +#include + +int bar_dummy_func(void); +int dummy_func(void); + +int main(void) { + printf("Hello world %d\n", bar_dummy_func() + dummy_func()); + return 0; +} diff --git a/test cases/common/157 wrap file should not failed/subprojects/.gitignore b/test cases/common/157 wrap file should not failed/subprojects/.gitignore new file mode 100644 index 0000000..5550e2e --- /dev/null +++ b/test cases/common/157 wrap file should not failed/subprojects/.gitignore @@ -0,0 +1,2 @@ +/foo-1.0 +/bar-1.0 diff --git a/test cases/common/157 wrap file should not failed/subprojects/bar.wrap b/test cases/common/157 wrap file should not failed/subprojects/bar.wrap new file mode 100644 index 0000000..4e8f7e3 --- /dev/null +++ b/test cases/common/157 wrap file should not failed/subprojects/bar.wrap @@ -0,0 +1,8 @@ +[wrap-file] +directory = bar-1.0 +lead_directory_missing = true + +source_filename = bar-1.0.tar.xz +source_hash = f0f61948530dc0d33e3028cd71a9f8ee869f6b3665960d8f41d715cf4aed6467 + +patch_filename = bar-1.0-patch.tar.xz diff --git a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c b/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c deleted file mode 100644 index 267b43a..0000000 --- a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/foo.c +++ /dev/null @@ -1,3 +0,0 @@ -int dummy_func(void) { - return 42; 
-} diff --git a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build b/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build deleted file mode 100644 index 318e81d..0000000 --- a/test cases/common/157 wrap file should not failed/subprojects/foo-1.0/meson.build +++ /dev/null @@ -1,2 +0,0 @@ -project('shared lib', 'c') -libfoo = shared_library('foo', 'foo.c') diff --git a/test cases/common/157 wrap file should not failed/subprojects/foo.wrap b/test cases/common/157 wrap file should not failed/subprojects/foo.wrap index 90d6d40..c67c5e5 100644 --- a/test cases/common/157 wrap file should not failed/subprojects/foo.wrap +++ b/test cases/common/157 wrap file should not failed/subprojects/foo.wrap @@ -3,9 +3,9 @@ directory = foo-1.0 source_url = http://something.invalid source_filename = foo-1.0.tar.xz -source_hash = ae5fc03185654f76b459db16ca25809703f8821aeb39a433902244bb479c4b79 +source_hash = 9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1 lead_directory_missing = true patch_url = https://something.invalid/patch patch_filename = foo-1.0-patch.tar.xz -patch_hash = 8f2e286a4b190228d4e0c25ddc91195449cfb5e5c52006355838964b244037da +patch_hash = d0ddc5e60fdb27d808552f5ac8d0bb603ea2cba306538b4427b985535b26c9c5 diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz index 26d2927..e26b8e0 100644 Binary files a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz and b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0-patch.tar.xz differ diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz index 2647ef9..37eb6cc 100644 Binary files a/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz and b/test cases/common/157 wrap file should not failed/subprojects/packagecache/foo-1.0.tar.xz differ diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz new file mode 100644 index 0000000..f257a19 Binary files /dev/null and b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0-patch.tar.xz differ diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz new file mode 100644 index 0000000..d90a9e8 Binary files /dev/null and b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/bar-1.0.tar.xz differ -- cgit v1.1 From 5e5895814fab79c3f60ce406b0c6cf8d89a7970e Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sat, 21 Mar 2020 11:54:00 +0100 Subject: wrap: Updated docs --- docs/markdown/Wrap-dependency-system-manual.md | 18 ++++++++++++------ docs/markdown/snippets/wrap_patch.md | 6 ++++++ 2 files changed, 18 insertions(+), 6 deletions(-) create mode 100644 docs/markdown/snippets/wrap_patch.md diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index 6e47d58..868263c 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ 
b/docs/markdown/Wrap-dependency-system-manual.md @@ -28,16 +28,16 @@ itself in a way that makes it easy to use (usually this means as a static library). To use this kind of a project as a dependency you could just copy and -extract it inside your project's `subprojects` directory. +extract it inside your project's `subprojects` directory. However there is a simpler way. You can specify a Wrap file that tells Meson -how to download it for you. If you then use this subproject in your build, +how to download it for you. If you then use this subproject in your build, Meson will automatically download and extract it during build. This makes subproject embedding extremely easy. All wrap files must have a name of `.wrap` form and be in `subprojects` dir. -Currently Meson has four kinds of wraps: +Currently Meson has four kinds of wraps: - wrap-file - wrap-git - wrap-hg @@ -83,6 +83,12 @@ revision = head directory name. Needed when the source file does not have a leading directory. +Since *0.55.0* it is possible to use only the `source_filename` and +`patch_filename` value in a .wrap file (without `source_url` and `patch_url`) to +specify a local archive in the `subprojects/packagefiles` directory. The `*_hash` +entries are optional when using this method. This method should be prefered over +the old `packagecache` approach described below. + Since *0.49.0* if `source_filename` or `patch_filename` is found in the project's `subprojects/packagecache` directory, it will be used instead of downloading the file, even if `--wrap-mode` option is set to @@ -129,14 +135,14 @@ wrap-git, the repository must contain all Meson build definitions. ## Using wrapped projects -Wraps provide a convenient way of obtaining a project into your subproject directory. +Wraps provide a convenient way of obtaining a project into your subproject directory. Then you use it as a regular subproject (see [subprojects](Subprojects.md)). ## Getting wraps -Usually you don't want to write your wraps by hand. +Usually you don't want to write your wraps by hand. -There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides +There is an online repository called [WrapDB](https://wrapdb.mesonbuild.com) that provides many dependencies ready to use. You can read more about WrapDB [here](Using-the-WrapDB.md). There is also a Meson subcommand to get and manage wraps (see [using wraptool](Using-wraptool.md)). diff --git a/docs/markdown/snippets/wrap_patch.md b/docs/markdown/snippets/wrap_patch.md new file mode 100644 index 0000000..7d6d9c2 --- /dev/null +++ b/docs/markdown/snippets/wrap_patch.md @@ -0,0 +1,6 @@ +## Local wrap source and patch files + +It is now possible to use the `patch_filename` and `source_filename` value in a +`.wrap` file without `*_url` to specify a local source / patch file. All local +files must be located in the `subprojects/packagefiles` directory. The `*_hash` +entries are optional with this setup. -- cgit v1.1 From 39a69d1fb0e130fae9f64b81e0a992503869a97a Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Mon, 20 Apr 2020 16:23:33 -0400 Subject: find_program: Fixes when the program has been overridden by executable - ExternalProgramHolder has path() method while CustomTargetHolder and BuildTargetHolder have full_path(). - The returned ExternalProgramHolder's path() method was broken, because build.Executable object has no get_path() method, it needs the backend. 
- find_program('overridden_prog', version : '>=1.0') was broken because it needs to execute the exe that is not yet built. Now assume the program has the (sub)project version. - If the version check fails, interpreter uses ExternalProgramHolder.get_name() for the error message but build.Executable does not implement get_name() method. --- docs/markdown/Reference-manual.md | 10 ++++- docs/markdown/snippets/find_program.md | 20 +++++++++ mesonbuild/interpreter.py | 52 +++++++++++++++------- mesonbuild/modules/python.py | 10 ++--- .../common/201 override with exe/meson.build | 8 +++- .../subprojects/sub/meson.build | 2 +- 6 files changed, 79 insertions(+), 23 deletions(-) create mode 100644 docs/markdown/snippets/find_program.md diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 5156b5b..963af9d 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -1846,7 +1846,9 @@ the following methods. specifies that whenever `find_program` is used to find a program named `progname`, Meson should not look it up on the system but instead return `program`, which may either be the result of - `find_program`, `configure_file` or `executable`. + `find_program`, `configure_file` or `executable`. *Since 0.55.0* if a version + check is passed to `find_program` for a program that has been overridden with + an executable, the current project version is used. If `program` is an `executable`, it cannot be used during configure. @@ -2460,6 +2462,12 @@ and has the following methods: - `path()` which returns a string pointing to the script or executable **NOTE:** You should not need to use this method. Passing the object itself should work in all cases. For example: `run_command(obj, arg1, arg2)` + *Since 0.55.0* this method has been deprecated in favor of `full_path()` for + consistency with other returned objects. + +- `full_path()` *Since 0.55.0* which returns a string pointing to the script or + executable **NOTE:** You should not need to use this method. Passing the object + itself should work in all cases. For example: `run_command(obj, arg1, arg2)`. ### `environment` object diff --git a/docs/markdown/snippets/find_program.md b/docs/markdown/snippets/find_program.md new file mode 100644 index 0000000..d0bb64d --- /dev/null +++ b/docs/markdown/snippets/find_program.md @@ -0,0 +1,20 @@ +## find_program: Fixes when the program has been overridden by executable + +When a program has been overridden by an executable, the returned object of +find_program() had some issues: + +```meson +# In a subproject: +exe = executable('foo', ...) +meson.override_find_program('foo', exe) + +# In main project: +# The version check was crashing meson. +prog = find_program('foo', version : '>=1.0') + +# This was crashing meson. +message(prog.path()) + +# New method to be consistent with built objects. 
+message(prog.full_path()) +``` diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 48b6bd6..40f8b26 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -509,11 +509,14 @@ class DependencyHolder(InterpreterObject, ObjectHolder): return DependencyHolder(new_dep, self.subproject) class ExternalProgramHolder(InterpreterObject, ObjectHolder): - def __init__(self, ep): + def __init__(self, ep, subproject, backend=None): InterpreterObject.__init__(self) ObjectHolder.__init__(self, ep) + self.subproject = subproject + self.backend = backend self.methods.update({'found': self.found_method, - 'path': self.path_method}) + 'path': self.path_method, + 'full_path': self.full_path_method}) self.cached_version = None @noPosargs @@ -524,7 +527,20 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder): @noPosargs @permittedKwargs({}) def path_method(self, args, kwargs): - return self.held_object.get_path() + mlog.deprecation('path() method is deprecated and replaced by full_path()') + return self._full_path() + + @noPosargs + @permittedKwargs({}) + @FeatureNew('ExternalProgram.full_path', '0.55.0') + def full_path_method(self, args, kwargs): + return self._full_path() + + def _full_path(self): + exe = self.held_object + if isinstance(exe, build.Executable): + return self.backend.get_target_filename_abs(exe) + return exe.get_path() def found(self): return isinstance(self.held_object, build.Executable) or self.held_object.found() @@ -533,9 +549,14 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder): return self.held_object.get_command() def get_name(self): - return self.held_object.get_name() + exe = self.held_object + if isinstance(exe, build.Executable): + return exe.name + return exe.get_name() def get_version(self, interpreter): + if isinstance(self.held_object, build.Executable): + return self.held_object.project_version if not self.cached_version: raw_cmd = self.get_command() + ['--version'] cmd = [self, '--version'] @@ -2366,7 +2387,7 @@ class Interpreter(InterpreterBase): elif isinstance(item, dependencies.Dependency): return DependencyHolder(item, self.subproject) elif isinstance(item, dependencies.ExternalProgram): - return ExternalProgramHolder(item) + return ExternalProgramHolder(item, self.subproject) elif hasattr(item, 'held_object'): return item else: @@ -2389,7 +2410,7 @@ class Interpreter(InterpreterBase): elif isinstance(v, build.Data): self.build.data.append(v) elif isinstance(v, dependencies.ExternalProgram): - return ExternalProgramHolder(v) + return ExternalProgramHolder(v, self.subproject) elif isinstance(v, dependencies.InternalDependency): # FIXME: This is special cased and not ideal: # The first source is our new VapiTarget, the rest are deps @@ -3143,7 +3164,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Executable name must be a string') prog = ExternalProgram.from_bin_list(self.environment, for_machine, p) if prog.found(): - return ExternalProgramHolder(prog) + return ExternalProgramHolder(prog, self.subproject) return None def program_from_system(self, args, search_dirs, silent=False): @@ -3170,7 +3191,7 @@ external dependencies (including libraries) must go to "dependencies".''') extprog = dependencies.ExternalProgram(exename, search_dir=search_dir, extra_search_dirs=extra_search_dirs, silent=silent) - progobj = ExternalProgramHolder(extprog) + progobj = ExternalProgramHolder(extprog, self.subproject) if progobj.found(): return progobj @@ -3183,7 +3204,7 
@@ external dependencies (including libraries) must go to "dependencies".''') if not silent: mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'), '(overridden: %s)' % exe.description()) - return ExternalProgramHolder(exe) + return ExternalProgramHolder(exe, self.subproject, self.backend) return None def store_name_lookups(self, command_names): @@ -3214,11 +3235,11 @@ external dependencies (including libraries) must go to "dependencies".''') progobj = self.program_from_system(args, search_dirs, silent=silent) if progobj is None and args[0].endswith('python3'): prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True) - progobj = ExternalProgramHolder(prog) + progobj = ExternalProgramHolder(prog, self.subproject) if required and (progobj is None or not progobj.found()): raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args)) if progobj is None: - return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args))) + return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject) # Only store successful lookups self.store_name_lookups(args) if wanted: @@ -3231,7 +3252,7 @@ external dependencies (including libraries) must go to "dependencies".''') if required: m = 'Invalid version of program, need {!r} {!r} found {!r}.' raise InvalidArguments(m.format(progobj.get_name(), not_found, version)) - return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args))) + return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject) return progobj @FeatureNewKwargs('find_program', '0.53.0', ['dirs']) @@ -3246,7 +3267,7 @@ external dependencies (including libraries) must go to "dependencies".''') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled') - return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args))) + return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject) search_dirs = extract_search_dirs(kwargs) wanted = mesonlib.stringlistify(kwargs.get('version', [])) @@ -3321,7 +3342,7 @@ external dependencies (including libraries) must go to "dependencies".''') return dep = subi.get_variable_method([varname], {}) if dep.held_object != cached_dep: - m = 'Inconsistency: Subproject has overriden the dependency with another variable than {!r}' + m = 'Inconsistency: Subproject has overridden the dependency with another variable than {!r}' raise DependencyException(m.format(varname)) def get_subproject_dep(self, name, display_name, dirname, varname, kwargs): @@ -3333,7 +3354,7 @@ external dependencies (including libraries) must go to "dependencies".''') subproject = self.subprojects[dirname] _, cached_dep = self._find_cached_dep(name, kwargs) if varname is None: - # Assuming the subproject overriden the dependency we want + # Assuming the subproject overridden the dependency we want if cached_dep: if required and not cached_dep.found(): m = 'Dependency {!r} is not satisfied' @@ -4551,6 +4572,7 @@ Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_s kwargs['include_directories'] = self.extract_incdirs(kwargs) target = targetclass(name, self.subdir, self.subproject, for_machine, sources, objs, self.environment, kwargs) + target.project_version = self.project_version if not 
self.environment.machines.matches_build_machine(for_machine): self.add_cross_stdlib_info(target) diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index a5c58a2..79e1824 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -285,7 +285,7 @@ print (json.dumps ({ class PythonInstallation(ExternalProgramHolder): def __init__(self, interpreter, python, info): - ExternalProgramHolder.__init__(self, python) + ExternalProgramHolder.__init__(self, python, interpreter.subproject) self.interpreter = interpreter self.subproject = self.interpreter.subproject prefix = self.interpreter.environment.coredata.get_builtin_option('prefix') @@ -514,7 +514,7 @@ class PythonModule(ExtensionModule): if disabled: mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')') - return ExternalProgramHolder(NonExistingExternalProgram()) + return ExternalProgramHolder(NonExistingExternalProgram(), state.subproject) if not name_or_path: python = ExternalProgram('python3', mesonlib.python_command, silent=True) @@ -561,11 +561,11 @@ class PythonModule(ExtensionModule): if not python.found(): if required: raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python')) - res = ExternalProgramHolder(NonExistingExternalProgram()) + res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject) elif missing_modules: if required: raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules))) - res = ExternalProgramHolder(NonExistingExternalProgram()) + res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject) else: # Sanity check, we expect to have something that at least quacks in tune try: @@ -583,7 +583,7 @@ class PythonModule(ExtensionModule): if isinstance(info, dict) and 'version' in info and self._check_version(name_or_path, info['version']): res = PythonInstallation(interpreter, python, info) else: - res = ExternalProgramHolder(NonExistingExternalProgram()) + res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject) if required: raise mesonlib.MesonException('{} is not a valid python or it is missing setuptools'.format(python)) diff --git a/test cases/common/201 override with exe/meson.build b/test cases/common/201 override with exe/meson.build index 81f6c02..62d2f32 100644 --- a/test cases/common/201 override with exe/meson.build +++ b/test cases/common/201 override with exe/meson.build @@ -1,6 +1,10 @@ project('myexe', 'c') sub = subproject('sub') -prog = find_program('foobar') + +prog = find_program('foobar', version : '>= 2.0', required : false) +assert(not prog.found()) + +prog = find_program('foobar', version : '>= 1.0') custom1 = custom_target('custom1', build_by_default : true, input : [], @@ -11,5 +15,7 @@ gen = generator(prog, arguments : ['@OUTPUT@']) custom2 = gen.process('main2.input') +message(prog.full_path()) + executable('e1', custom1) executable('e2', custom2) diff --git a/test cases/common/201 override with exe/subprojects/sub/meson.build b/test cases/common/201 override with exe/subprojects/sub/meson.build index 1f186da..f0343b2 100644 --- a/test cases/common/201 override with exe/subprojects/sub/meson.build +++ b/test cases/common/201 override with exe/subprojects/sub/meson.build @@ -1,3 +1,3 @@ -project('sub', 'c') +project('sub', 'c', version : '1.0') foobar = executable('foobar', 'foobar.c', native : true) meson.override_find_program('foobar', foobar) -- cgit v1.1 From 
76c36a64c3e55692f5d37b1f1762fac448e845bd Mon Sep 17 00:00:00 2001 From: Eli Schwartz Date: Mon, 27 Apr 2020 19:06:39 -0400 Subject: dependency: log cached or skipped dependencies with reference to modules * dependency: log cached or skipped dependencies with reference to modules If the dependency is a special dependency which takes modules, the modules get cached separately and probably reference different pkg-config files. It's very plausible that we have multiple dependencies, using different modules. For example: Run-time dependency qt5 (modules: Core) found: YES 5.14.2 (pkg-config) Dependency qt5 skipped: feature gui disabled Obviously this makes no sense, because of course we found qt5 and even used it. The second line is a lot more readable if it shows this: Dependency qt5 (modules: Widgets) skipped: feature gui disabled Similar confusion abounds in the case where a module is found in the cache -- which module, exactly, has been found here: Dependency qt5 found: YES 5.14.2 (cached) Rewrite the dependency function to *consistently* pass around (and use!) the display_name even in cases where we know it isn't anonymous (this is just more correct anyway), and make it serve a dual purpose by also appending the list of modules just like we do for pretty-printing that a dependency has just been found for the first time. * fixup! dependency: log cached or skipped dependencies with reference to modules pointlessly cast modules to str, as they cannot be anything else. But we want to fail later on, with something more friendly than a stacktrace. boost/wx have special exceptions for people passing an integer there. --- mesonbuild/interpreter.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 40f8b26..5d9fcf7 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3282,7 +3282,7 @@ external dependencies (including libraries) must go to "dependencies".''') 'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n' ) - def _find_cached_dep(self, name, kwargs): + def _find_cached_dep(self, name, display_name, kwargs): # Check if we want this as a build-time / build machine or runt-time / # host machine dep. for_machine = self.machine_from_native_kwarg(kwargs) @@ -3297,7 +3297,7 @@ external dependencies (including libraries) must go to "dependencies".''') # have explicitly called meson.override_dependency() with a not-found # dep. 
if not cached_dep.found(): - mlog.log('Dependency', mlog.bold(name), + mlog.log('Dependency', mlog.bold(display_name), 'found:', mlog.red('NO'), *info) return identifier, cached_dep found_vers = cached_dep.get_version() @@ -3319,7 +3319,7 @@ external dependencies (including libraries) must go to "dependencies".''') if cached_dep: if found_vers: info = [mlog.normal_cyan(found_vers), *info] - mlog.log('Dependency', mlog.bold(name), + mlog.log('Dependency', mlog.bold(display_name), 'found:', mlog.green('YES'), *info) return identifier, cached_dep @@ -3352,7 +3352,7 @@ external dependencies (including libraries) must go to "dependencies".''') dep = self.notfound_dependency() try: subproject = self.subprojects[dirname] - _, cached_dep = self._find_cached_dep(name, kwargs) + _, cached_dep = self._find_cached_dep(name, display_name, kwargs) if varname is None: # Assuming the subproject overridden the dependency we want if cached_dep: @@ -3425,6 +3425,9 @@ external dependencies (including libraries) must go to "dependencies".''') self.validate_arguments(args, 1, [str]) name = args[0] display_name = name if name else '(anonymous)' + mods = extract_as_list(kwargs, 'modules') + if mods: + display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods)) not_found_message = kwargs.get('not_found_message', '') if not isinstance(not_found_message, str): raise InvalidArguments('The not_found_message must be a string.') @@ -3466,7 +3469,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify' 'version\n requirements use the \'version\' keyword argument instead.') - identifier, cached_dep = self._find_cached_dep(name, kwargs) + identifier, cached_dep = self._find_cached_dep(name, display_name, kwargs) if cached_dep: if has_fallback: dirname, varname = self.get_subproject_infos(kwargs) -- cgit v1.1 From 4640f13faa899a2de58ef4d835605f53b46550f3 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 5 Apr 2020 18:52:28 +0200 Subject: docs: Fix Contributing.md --- docs/markdown/Contributing.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index 5332938..554c3f5 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -174,7 +174,7 @@ contents of an additional file into the CI log on failure. Projects needed by unit tests are in the `test cases/unit` subdirectory. They are not run as part of `./run_project_tests.py`. -#### Configuring project tests +### Configuring project tests The (optional) `test.json` file, in the root of a test case, is used for configuring the test. All of the following root entries in the `test.json` @@ -213,13 +213,13 @@ Exanple `test.json`: } ``` -##### env +#### env The `env` key contains a dictionary which specifies additional environment variables to be set during the configure step of the test. `@ROOT@` is replaced with the absolute path of the source directory. -##### installed +#### installed The `installed` dict contains a list of dicts, describing which files are expected to be installed. Each dict contains the following keys: @@ -277,7 +277,7 @@ the platform matches. The following values for `platform` are currently supporte | `cygwin` | Matches when the platform is cygwin | | `!cygwin` | Not `cygwin` | -##### matrix +#### matrix The `matrix` section can be used to define a test matrix to run project tests with different meson options. 
@@ -318,7 +318,7 @@ The above example will produce the following matrix entries: - `opt1=qwert` - `opt1=qwert opt2=true` -##### do_not_set_opts +#### do_not_set_opts Currently supported values are: - `prefix` -- cgit v1.1 From 20bacf82eea4bd0d3732c398298c27e0d82a526d Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 5 Apr 2020 18:53:24 +0200 Subject: tests: Avoid a CMake error by checking the CMake version --- test cases/linuxlike/13 cmake dependency/cmVers.sh | 6 ++++++ test cases/linuxlike/13 cmake dependency/meson.build | 19 ++++++++++++------- 2 files changed, 18 insertions(+), 7 deletions(-) create mode 100755 test cases/linuxlike/13 cmake dependency/cmVers.sh diff --git a/test cases/linuxlike/13 cmake dependency/cmVers.sh b/test cases/linuxlike/13 cmake dependency/cmVers.sh new file mode 100755 index 0000000..70809de --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/cmVers.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +VERS=$(cmake --version | grep "cmake version") +VERS=${VERS//cmake version/} + +echo -n $VERS diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build index 79acc83..93824ab 100644 --- a/test cases/linuxlike/13 cmake dependency/meson.build +++ b/test cases/linuxlike/13 cmake dependency/meson.build @@ -6,6 +6,9 @@ if not find_program('cmake', required: false).found() error('MESON_SKIP_TEST cmake binary not available.') endif +# CMake version +cm_vers = run_command(find_program('./cmVers.sh')).stdout().strip() + # Zlib is probably on all dev machines. dep = dependency('ZLIB', version : '>=1.2', method : 'cmake') @@ -48,14 +51,16 @@ depm1 = dependency('SomethingLikeZLIB', required : true, components : 'required_ depm2 = dependency('SomethingLikeZLIB', required : true, components : 'required_comp', method : 'cmake', cmake_module_path : ['cmake']) depm3 = dependency('SomethingLikeZLIB', required : true, components : ['required_comp'], cmake_module_path : 'cmake') -# Test some edge cases with spaces, etc. +# Test some edge cases with spaces, etc. 
(but only for CMake >= 3.15) -testDep1 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'mesonTestLibDefs') -testDep2 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules : ['MesonTest::TestLibDefs']) -testFlagSet1 = executable('testFlagSet1', ['testFlagSet.c'], dependencies: [testDep1]) -testFlagSet2 = executable('testFlagSet2', ['testFlagSet.c'], dependencies: [testDep2]) -test('testFlagSetTest1', testFlagSet1) -test('testFlagSetTest2', testFlagSet2) +if cm_vers.version_compare('>=3.15') + testDep1 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules: 'mesonTestLibDefs') + testDep2 = dependency('ImportedTarget', required : true, method : 'cmake', cmake_module_path : 'cmake', modules : ['MesonTest::TestLibDefs']) + testFlagSet1 = executable('testFlagSet1', ['testFlagSet.c'], dependencies: [testDep1]) + testFlagSet2 = executable('testFlagSet2', ['testFlagSet.c'], dependencies: [testDep2]) + test('testFlagSetTest1', testFlagSet1) + test('testFlagSetTest2', testFlagSet2) +endif # Try to compile a test that takes a dep and an include_directories -- cgit v1.1 From 4a1f1977435c7153d97652984aa783c2cbd1e803 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 5 Apr 2020 19:03:06 +0200 Subject: tests: Add support for specifying tool requirements Adds the `tools` section to `tests.json` to specify requirements for the tools in the environment. All tests that fail at least one tool requirements check are skipped. --- docs/markdown/Contributing.md | 10 ++++++++++ run_project_tests.py | 15 +++++++++++++-- test cases/cmake/2 advanced/test.json | 5 ++++- test cases/cmake/3 advanced no dep/test.json | 5 ++++- 4 files changed, 31 insertions(+), 4 deletions(-) diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index 554c3f5..8a24e0b 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -209,6 +209,9 @@ Exanple `test.json`: { "opt1": "qwert", "opt2": "false" }, { "opt1": "bad" } ] + }, + "tools": { + "cmake": ">=3.11" } } ``` @@ -324,6 +327,13 @@ Currently supported values are: - `prefix` - `libdir` +#### tools + +This section specifies a list of tool requirements in a simple key-value format. +If a tool is specified, it has to be present in the environment, and the version +requirement must be fulfilled match. Otherwise, the entire test is skipped +(including every element in the test matrix). + ### Skipping integration tests Meson uses several continuous integration testing systems that have slightly diff --git a/run_project_tests.py b/run_project_tests.py index 875a522..cc8e333 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -190,7 +190,7 @@ class TestDef: self.skip = skip self.env = os.environ.copy() self.installed_files = [] # type: T.List[InstalledFile] - self.do_not_set_opts = [] # type: T.List[str] + self.do_not_set_opts = [] # type: T.List[str] def __repr__(self) -> str: return '<{}: {:<48} [{}: {}] -- {}>'.format(type(self).__name__, str(self.path), self.name, self.args, self.skip) @@ -233,6 +233,7 @@ no_meson_log_msg = 'No meson-log.txt found.' 
system_compiler = None compiler_id_map = {} # type: T.Dict[str, str] +tool_vers_map = {} # type: T.Dict[str, str] class StopException(Exception): def __init__(self): @@ -568,6 +569,15 @@ def gather_tests(testdir: Path) -> T.List[TestDef]: # Handle the do_not_set_opts list do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str] + # Skip tests if the tool requirements are not met + if 'tools' in test_def: + assert isinstance(test_def['tools'], dict) + for tool, vers_req in test_def['tools'].items(): + if tool not in tool_vers_map: + t.skip = True + elif not mesonlib.version_compare(tool_vers_map[tool], vers_req): + t.skip = True + # Skip the matrix code and just update the existing test if 'matrix' not in test_def: t.env.update(env) @@ -639,7 +649,7 @@ def gather_tests(testdir: Path) -> T.List[TestDef]: name = ' '.join([x[0] for x in i if x[0] is not None]) opts = ['-D' + x[0] for x in i if x[0] is not None] skip = any([x[1] for x in i]) - test = TestDef(t.path, name, opts, skip) + test = TestDef(t.path, name, opts, skip or t.skip) test.env.update(env) test.installed_files = installed test.do_not_set_opts = do_not_set_opts @@ -1123,6 +1133,7 @@ def print_tool_versions(): i = i.strip('\n\r\t ') m = t['regex'].match(i) if m is not None: + tool_vers_map[t['tool']] = m.group(t['match_group']) return '{} ({})'.format(exe, m.group(t['match_group'])) return '{} (unknown)'.format(exe) diff --git a/test cases/cmake/2 advanced/test.json b/test cases/cmake/2 advanced/test.json index 11aad94..e12f530 100644 --- a/test cases/cmake/2 advanced/test.json +++ b/test cases/cmake/2 advanced/test.json @@ -4,5 +4,8 @@ {"type": "implib", "platform": "cygwin", "file": "usr/lib/libcm_cmModLib"}, {"type": "implib", "platform": "!cygwin", "file": "usr/bin/libcm_cmModLib"}, {"type": "exe", "file": "usr/bin/cm_testEXE"} - ] + ], + "tools": { + "cmake": ">=3.11" + } } diff --git a/test cases/cmake/3 advanced no dep/test.json b/test cases/cmake/3 advanced no dep/test.json index 24c89c4..98a1719 100644 --- a/test cases/cmake/3 advanced no dep/test.json +++ b/test cases/cmake/3 advanced no dep/test.json @@ -8,5 +8,8 @@ {"type": "exe", "file": "usr/bin/cm_testEXE"}, {"type": "pdb", "file": "usr/bin/cm_testEXE2"}, {"type": "exe", "file": "usr/bin/cm_testEXE2"} - ] + ], + "tools": { + "cmake": ">=3.11" + } } -- cgit v1.1 From afffb9e57d6f4194398493aeecdb1ba6a0f42622 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 5 Apr 2020 20:54:17 +0200 Subject: ci: Added bionic image --- .github/workflows/images.yml | 1 + .github/workflows/os_comp.yml | 1 + ci/ciimage/bionic/image.json | 8 ++++++ ci/ciimage/bionic/install.sh | 58 +++++++++++++++++++++++++++++++++++++++++++ ci/ciimage/build.py | 5 +++- 5 files changed, 72 insertions(+), 1 deletion(-) create mode 100644 ci/ciimage/bionic/image.json create mode 100755 ci/ciimage/bionic/install.sh diff --git a/.github/workflows/images.yml b/.github/workflows/images.yml index bde2223..0cf9156 100644 --- a/.github/workflows/images.yml +++ b/.github/workflows/images.yml @@ -31,6 +31,7 @@ jobs: - { name: CUDA (on Arch), id: cuda } - { name: Fedora, id: fedora } - { name: OpenSUSE, id: opensuse } + - { name: Ubuntu Bionic, id: bionic } - { name: Ubuntu Eoan, id: eoan } steps: - uses: actions/checkout@v2 diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml index 7f3437e..4a6d7cd 100644 --- a/.github/workflows/os_comp.yml +++ b/.github/workflows/os_comp.yml @@ -39,6 +39,7 @@ jobs: - { name: CUDA (on Arch), id: cuda } - { name: Fedora, id: fedora } - 
{ name: OpenSUSE, id: opensuse } + - { name: Ubuntu Bionic, id: bionic } container: mesonbuild/${{ matrix.cfg.id }}:latest steps: - uses: actions/checkout@v1 diff --git a/ci/ciimage/bionic/image.json b/ci/ciimage/bionic/image.json new file mode 100644 index 0000000..6a3d723 --- /dev/null +++ b/ci/ciimage/bionic/image.json @@ -0,0 +1,8 @@ +{ + "base_image": "ubuntu:bionic", + "env": { + "CI": "1", + "SKIP_SCIENTIFIC": "1", + "DC": "gdc" + } +} diff --git a/ci/ciimage/bionic/install.sh b/ci/ciimage/bionic/install.sh new file mode 100755 index 0000000..47deb2a --- /dev/null +++ b/ci/ciimage/bionic/install.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +set -e + +export DEBIAN_FRONTEND=noninteractive +export LANG='C.UTF-8' +export DC=gdc + +pkgs=( + python3-pytest-xdist + python3-pip libxml2-dev libxslt1-dev libyaml-dev libjson-glib-dev + wget unzip cmake doxygen + clang + pkg-config-arm-linux-gnueabihf + qt4-linguist-tools qt5-default qtbase5-private-dev + python-dev + libomp-dev + ldc + libclang-dev + libgcrypt20-dev + libgpgme-dev + libhdf5-dev openssh-server + libboost-python-dev libboost-regex-dev + libblocksruntime-dev + libperl-dev libscalapack-mpi-dev libncurses-dev +) + +boost_pkgs=(atomic chrono date-time filesystem log regex serialization system test thread) + +sed -i '/^#\sdeb-src /s/^#//' "/etc/apt/sources.list" +apt-get -y update +apt-get -y upgrade +apt-get -y install eatmydata + +# Base stuff +eatmydata apt-get -y build-dep meson + +# Add boost packages +for i in "${boost_pkgs[@]}"; do + for j in "1.62.0" "1.65.1"; do + pkgs+=("libboost-${i}${j}") + done +done + +# packages +eatmydata apt-get -y install "${pkgs[@]}" + +eatmydata python3 -m pip install codecov jsonschema + +# Install the ninja 0.10 +wget https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-linux.zip +unzip ninja-linux.zip -d /ci + +# cleanup +apt-get -y remove ninja-build +apt-get -y clean +apt-get -y autoclean +rm ninja-linux.zip diff --git a/ci/ciimage/build.py b/ci/ciimage/build.py index 34a92fa..e623a7e 100755 --- a/ci/ciimage/build.py +++ b/ci/ciimage/build.py @@ -71,6 +71,9 @@ class Builder(BuilderBase): for key, val in self.image_def.env.items(): out_data += f'export {key}="{val}"\n' + # Also add /ci to PATH + out_data += 'export PATH="/ci:$PATH"\n' + out_file.write_text(out_data) # make it executable @@ -157,7 +160,7 @@ class ImageTester(BuilderBase): test_cmd = [ self.docker, 'run', '--rm', '-t', 'meson_test_image', - '/usr/bin/bash', '-c', 'source /ci/env_vars.sh; cd meson; ./run_tests.py $CI_ARGS' + '/bin/bash', '-c', 'source /ci/env_vars.sh; cd meson; ./run_tests.py $CI_ARGS' ] if subprocess.run(test_cmd).returncode != 0: raise RuntimeError('Running tests failed') -- cgit v1.1 From 2a586f00b7d92d688f5742d94ae6916bef69869e Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Tue, 28 Apr 2020 17:36:02 +0200 Subject: tests: Skip hotdoc test if hotdoc is not installed --- run_project_tests.py | 12 ++++++++++++ test cases/frameworks/23 hotdoc/test.json | 5 ++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/run_project_tests.py b/run_project_tests.py index cc8e333..8cbf989 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -1113,11 +1113,23 @@ def detect_system_compiler(options): def print_tool_versions(): tools = [ { + 'tool': 'ninja', + 'args': ['--version'], + 'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'), + 'match_group': 1, + }, + { 'tool': 'cmake', 'args': ['--version'], 'regex': re.compile(r'^cmake version ([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'), 
'match_group': 1, }, + { + 'tool': 'hotdoc', + 'args': ['--version'], + 'regex': re.compile(r'^([0-9]+(\.[0-9]+)*(-[a-z0-9]+)?)$'), + 'match_group': 1, + }, ] def get_version(t: dict) -> str: diff --git a/test cases/frameworks/23 hotdoc/test.json b/test cases/frameworks/23 hotdoc/test.json index e2d4992..8dd07e0 100644 --- a/test cases/frameworks/23 hotdoc/test.json +++ b/test cases/frameworks/23 hotdoc/test.json @@ -314,5 +314,8 @@ {"type": "file", "file": "usr/share/doc/foobar/html/assets/fonts/dumped.trie"}, {"type": "file", "file": "usr/share/doc/foobar/html/assets/images/home.svg"}, {"type": "file", "file": "usr/share/doc/foobar/html/assets/images/dumped.trie"} - ] + ], + "tools": { + "hotdoc": ">=0.1.0" + } } -- cgit v1.1 From ade5962e0a77259661e98ead58891e87c11cbce0 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Tue, 28 Apr 2020 17:37:48 +0200 Subject: ci: Fix OpenSUSE immage boost_python --- ci/ciimage/opensuse/install.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh index 7c90ec3..b9e440d 100755 --- a/ci/ciimage/opensuse/install.sh +++ b/ci/ciimage/opensuse/install.sh @@ -17,7 +17,7 @@ pkgs=( libxml2-devel libxslt-devel libyaml-devel glib2-devel json-glib-devel boost-devel libboost_date_time-devel libboost_filesystem-devel libboost_locale-devel libboost_system-devel libboost_test-devel libboost_log-devel libboost_regex-devel - libboost_python-py3-1_71_0-devel libboost_regex-devel + libboost_python3-devel libboost_regex-devel ) # Sys update -- cgit v1.1 From eefc7d450c7839db63df2d8a6e4bcc991205a058 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Tue, 28 Apr 2020 18:47:00 +0200 Subject: ci: Upgrade to checkout@v2 --- .github/workflows/os_comp.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml index 4a6d7cd..a5abf7d 100644 --- a/.github/workflows/os_comp.yml +++ b/.github/workflows/os_comp.yml @@ -11,7 +11,7 @@ jobs: name: Ubuntu 16.04 runs-on: ubuntu-16.04 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Install Dependencies run: | sudo apt update -yq @@ -42,7 +42,7 @@ jobs: - { name: Ubuntu Bionic, id: bionic } container: mesonbuild/${{ matrix.cfg.id }}:latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v2 - name: Run tests # All environment variables are stored inside the docker image in /ci/env_vars.sh # They are defined in the `env` section in each image.json. CI_ARGS should be set -- cgit v1.1 From 30b89ea57307f09563db862b307e873cc1e7cfc3 Mon Sep 17 00:00:00 2001 From: georgev93 Date: Sat, 25 Apr 2020 16:42:52 -0400 Subject: Adding a conditional case in _guess_files to confirm that the complete path is put together in even if a portion of the path is a location that exists. For instance if C:/Program Files (x86)/folder is passed to _guess_files, it would resolve to ['C:/Program Files', '(x86)/folder'] since C:/Program Files is an actual file location that can exist. 
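For readers unfamiliar with the underlying problem: CMake's trace output loses the original quoting, so a path containing spaces arrives as several whitespace-separated fragments and has to be glued back together heuristically. The toy sketch below is not the real `_guess_files()` logic (which also tracks regex matches and the new `path_found` flag, as the diff that follows shows); it only illustrates the "keep joining while the prefix exists on disk" idea and why it can stop too early, and the example paths are hypothetical.

```python
# Toy illustration of reassembling a whitespace-split path. NOT the
# traceparser implementation; it only demonstrates the heuristic.
import os

def rejoin_fragments(fragments):
    fixed = []
    current = None
    for frag in fragments:
        if current is None:
            current = frag
        elif os.path.exists(current + ' ' + frag):
            # The joined string names something that really exists on disk,
            # so assume the space belonged to a single path.
            current = current + ' ' + frag
        else:
            fixed.append(current)
            current = frag
    if current is not None:
        fixed.append(current)
    return fixed

# On a machine where these (hypothetical) paths exist the fragments collapse
# back into one entry; everywhere else they stay separate.
print(rejoin_fragments(['C:/Program', 'Files', '(x86)/folder', '-DFOO=1']))
```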
--- mesonbuild/cmake/traceparser.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py index 0aee3fa..f20bcc8 100644 --- a/mesonbuild/cmake/traceparser.py +++ b/mesonbuild/cmake/traceparser.py @@ -660,25 +660,40 @@ class CMakeTraceParser: fixed_list = [] # type: T.List[str] curr_str = None # type: T.Optional[str] + path_found = False # type: bool for i in broken_list: if curr_str is None: curr_str = i + path_found = False elif os.path.isfile(curr_str): # Abort concatenation if curr_str is an existing file fixed_list += [curr_str] curr_str = i + path_found = False elif not reg_start.match(curr_str): # Abort concatenation if curr_str no longer matches the regex fixed_list += [curr_str] curr_str = i - elif reg_end.match(i) or os.path.exists('{} {}'.format(curr_str, i)): + path_found = False + elif reg_end.match(i): # File detected curr_str = '{} {}'.format(curr_str, i) fixed_list += [curr_str] curr_str = None + path_found = False + elif os.path.exists('{} {}'.format(curr_str, i)): + # Path detected + curr_str = '{} {}'.format(curr_str, i) + path_found = True + elif path_found: + # Add path to fixed_list after ensuring the whole path is in curr_str + fixed_list += [curr_str] + curr_str = i + path_found = False else: curr_str = '{} {}'.format(curr_str, i) + path_found = False if curr_str: fixed_list += [curr_str] -- cgit v1.1 From 707d3a2e20611466fcb6a8af7a977e5c3c989c25 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 22 Apr 2020 17:57:48 -0400 Subject: gnome: Fix usage of gobject-introspection as subproject --- mesonbuild/interpreter.py | 5 +++ mesonbuild/modules/gnome.py | 96 +++++++++++++++++---------------------------- 2 files changed, 42 insertions(+), 59 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 5d9fcf7..6575868 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1801,6 +1801,11 @@ class ModuleHolder(InterpreterObject, ObjectHolder): target_machine=self.interpreter.builtin['target_machine'].held_object, current_node=self.current_node ) + # Many modules do for example self.interpreter.find_program_impl(), + # so we have to ensure they use the current interpreter and not the one + # that first imported that module, otherwise it will use outdated + # overrides. 
+ self.held_object.interpreter = self.interpreter if self.held_object.is_snippet(method_name): value = fn(self.interpreter, state, args, kwargs) return self.interpreter.holderify(value) diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 8317629..14cb4c4 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -32,7 +32,7 @@ from ..mesonlib import ( MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list, join_args, unholder, ) -from ..dependencies import Dependency, PkgConfigDependency, InternalDependency +from ..dependencies import Dependency, PkgConfigDependency, InternalDependency, ExternalProgram from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs # gresource compilation is broken due to the way @@ -44,20 +44,6 @@ gresource_dep_needed_version = '>= 2.51.1' native_glib_version = None -@functools.lru_cache(maxsize=None) -def gir_has_option(intr_obj, option): - try: - g_ir_scanner = intr_obj.find_program_impl('g-ir-scanner') - # Handle overridden g-ir-scanner - if isinstance(getattr(g_ir_scanner, "held_object", g_ir_scanner), interpreter.OverrideProgram): - assert option in ['--extra-library', '--sources-top-dirs'] - return True - - opts = Popen_safe(g_ir_scanner.get_command() + ['--help'], stderr=subprocess.STDOUT)[1] - return option in opts - except (MesonException, FileNotFoundError, subprocess.CalledProcessError): - return False - class GnomeModule(ExtensionModule): gir_dep = None @@ -303,7 +289,7 @@ class GnomeModule(ExtensionModule): link_command.append('-L' + d) if include_rpath: link_command.append('-Wl,-rpath,' + d) - if gir_has_option(self.interpreter, '--extra-library') and use_gir_args: + if use_gir_args and self._gir_has_option('--extra-library'): link_command.append('--extra-library=' + lib.name) else: link_command.append('-l' + lib.name) @@ -321,6 +307,10 @@ class GnomeModule(ExtensionModule): deps = mesonlib.unholder(mesonlib.listify(deps)) for dep in deps: + if isinstance(dep, Dependency): + girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='') + if girdir: + gi_includes.update([girdir]) if isinstance(dep, InternalDependency): cflags.update(dep.get_compile_args()) cflags.update(get_include_args(dep.include_directories)) @@ -371,11 +361,6 @@ class GnomeModule(ExtensionModule): external_ldflags_nodedup += [lib, next(ldflags)] else: external_ldflags.update([lib]) - - if isinstance(dep, PkgConfigDependency): - girdir = dep.get_pkgconfig_variable("girdir", {'default': ''}) - if girdir: - gi_includes.update([girdir]) elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): cflags.update(get_include_args(dep.get_include_dirs())) depends.append(dep) @@ -383,7 +368,7 @@ class GnomeModule(ExtensionModule): mlog.log('dependency {!r} not handled to build gir files'.format(dep)) continue - if gir_has_option(self.interpreter, '--extra-library') and use_gir_args: + if use_gir_args and self._gir_has_option('--extra-library'): def fix_ldflags(ldflags): fixed_ldflags = OrderedSet() for ldflag in ldflags: @@ -417,15 +402,29 @@ class GnomeModule(ExtensionModule): return girtarget def _get_gir_dep(self, state): - try: - gir_dep = self.gir_dep or PkgConfigDependency('gobject-introspection-1.0', - state.environment, - {'native': True}) - pkgargs = gir_dep.get_compile_args() - except Exception: - raise MesonException('gobject-introspection dependency was not found, gir cannot be generated.') - - return gir_dep, pkgargs + if not self.gir_dep: + kwargs = {'native': 
True, 'required': True} + holder = self.interpreter.func_dependency(state.current_node, ['gobject-introspection-1.0'], kwargs) + self.gir_dep = holder.held_object + if self.gir_dep.type_name == 'pkgconfig': + self.giscanner = ExternalProgram('g_ir_scanner', self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {})) + self.gicompiler = ExternalProgram('g_ir_compiler', self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {})) + else: + self.giscanner = self.interpreter.find_program_impl('g-ir-scanner') + self.gicompiler = self.interpreter.find_program_impl('g-ir-compiler') + return self.gir_dep, self.giscanner, self.gicompiler + + @functools.lru_cache(maxsize=None) + def _gir_has_option(self, option): + exe = self.giscanner + if hasattr(exe, 'held_object'): + exe = exe.held_object + if isinstance(exe, interpreter.OverrideProgram): + # Handle overridden g-ir-scanner + assert option in ['--extra-library', '--sources-top-dirs'] + return True + p, o, e = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT) + return p.returncode == 0 and option in o def _scan_header(self, kwargs): ret = [] @@ -731,29 +730,7 @@ class GnomeModule(ExtensionModule): if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]): raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable') - self.gir_dep, pkgargs = self._get_gir_dep(state) - # find_program is needed in the case g-i is built as subproject. - # In that case it uses override_find_program so the gobject utilities - # can be used from the build dir instead of from the system. - # However, GObject-introspection provides the appropriate paths to - # these utilities via pkg-config, so it would be best to use the - # results from pkg-config when possible. 
- gi_util_dirs_check = [state.environment.get_build_dir(), state.environment.get_source_dir()] - giscanner = self.interpreter.find_program_impl('g-ir-scanner') - if giscanner.found(): - giscanner_path = giscanner.get_command()[0] - if not any(x in giscanner_path for x in gi_util_dirs_check): - giscanner = self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {}) - else: - giscanner = self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {}) - - gicompiler = self.interpreter.find_program_impl('g-ir-compiler') - if gicompiler.found(): - gicompiler_path = gicompiler.get_command()[0] - if not any(x in gicompiler_path for x in gi_util_dirs_check): - gicompiler = self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {}) - else: - gicompiler = self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {}) + gir_dep, giscanner, gicompiler = self._get_gir_dep(state) ns = kwargs.pop('namespace') nsversion = kwargs.pop('nsversion') @@ -761,12 +738,13 @@ class GnomeModule(ExtensionModule): girfile = '%s-%s.gir' % (ns, nsversion) srcdir = os.path.join(state.environment.get_source_dir(), state.subdir) builddir = os.path.join(state.environment.get_build_dir(), state.subdir) - depends = [] + girtargets + depends = gir_dep.sources + girtargets gir_inc_dirs = [] langs_compilers = self._get_girtargets_langs_compilers(girtargets) cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers) deps = self._get_gir_targets_deps(girtargets) deps += mesonlib.unholder(extract_as_list(kwargs, 'dependencies', pop=True)) + deps += [gir_dep] typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends) # ldflags will be misinterpreted by gir scanner (showing # spurious dependencies) but building GStreamer fails if they @@ -779,9 +757,9 @@ class GnomeModule(ExtensionModule): external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags)) girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets) inc_dirs = self._scan_inc_dirs(kwargs) + gi_includes.update(gir_inc_dirs + inc_dirs) scan_command = [giscanner] - scan_command += pkgargs scan_command += ['--no-libtool'] scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion] scan_command += ['--warn-all'] @@ -800,13 +778,13 @@ class GnomeModule(ExtensionModule): scan_command += cflags scan_command += ['--cflags-end'] scan_command += get_include_args(inc_dirs) - scan_command += get_include_args(list(gi_includes) + gir_inc_dirs + inc_dirs, prefix='--add-include-path=') + scan_command += get_include_args(list(gi_includes), prefix='--add-include-path=') scan_command += list(internal_ldflags) scan_command += self._scan_gir_targets(state, girtargets) scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers]) scan_command += list(external_ldflags) - if gir_has_option(self.interpreter, '--sources-top-dirs'): + if self._gir_has_option('--sources-top-dirs'): scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)] scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)] @@ -814,7 +792,7 @@ class GnomeModule(ExtensionModule): typelib_output = '%s-%s.typelib' % (ns, nsversion) typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@'] - typelib_cmd += get_include_args(gir_inc_dirs, prefix='--includedir=') + typelib_cmd += get_include_args(list(gi_includes), prefix='--includedir=') for incdir in typelib_includes: typelib_cmd += 
["--includedir=" + incdir] -- cgit v1.1 From ad426547e8f8a3c20d6479af86cd2b1de8dbaa15 Mon Sep 17 00:00:00 2001 From: Benjamin Frye Date: Wed, 29 Apr 2020 14:28:20 +1000 Subject: Clarified error message for test(). (#7040) Changes the error message for test() so it now reports the expected and actual number of parameters. Fixes: #7029 --- mesonbuild/interpreter.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 6575868..dd1e57b 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3790,7 +3790,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self def add_test(self, node, args, kwargs, is_base_test): if len(args) != 2: - raise InterpreterException('Incorrect number of arguments') + raise InterpreterException('test expects 2 arguments, {} given'.format(len(args))) if not isinstance(args[0], str): raise InterpreterException('First argument of test must be a string.') exe = args[1] -- cgit v1.1 From 1cf7f40e8542acf3b65ec18e1414edb65113fe42 Mon Sep 17 00:00:00 2001 From: Brendan Simon Date: Wed, 29 Apr 2020 14:29:03 +1000 Subject: Meson Cmake Wrapper unmaintained [skip ci] The Meson CMake Wrapper project is currently unmaintained. --- docs/markdown/IDE-integration.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index a6c6f4b..73737e8 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -280,6 +280,6 @@ This API can also work without a build directory for the `--projectinfo` command - [Gnome Builder](https://wiki.gnome.org/Apps/Builder) - [KDevelop](https://www.kdevelop.org) - [Eclipse CDT](https://www.eclipse.org/cdt/) (experimental) -- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs) +- [Meson Cmake Wrapper](https://github.com/prozum/meson-cmake-wrapper) (for cmake IDEs) (currently unmaintained !!) - [Meson-UI](https://github.com/michaelbadcrumble/meson-ui) (Meson build GUI) - [Meson Syntax Highlighter](https://plugins.jetbrains.com/plugin/13269-meson-syntax-highlighter) plugin for JetBrains IDEs. -- cgit v1.1 From c5d2299caca3f3ca9b8e568c6b6045aed2f00124 Mon Sep 17 00:00:00 2001 From: Laurent Pinchart Date: Wed, 29 Apr 2020 01:34:56 +0300 Subject: Fix symlink deletion with --wipe option When wiping a build tree with --wipe, every entry in the build directory is removed with mesonlib.windows_proof_rmtree() for directories and mesonlib.windows_proof_rm() for other files. Symlinks to directories are considered directories, resulting in the former being called. This causes an exception to be raised, as the implementation calls shutil.rmtree(), which isn't allowed on symlinks. Fix this by using mesonlib.windows_proof_rm() for symlinks. 
Signed-off-by: Laurent Pinchart --- mesonbuild/msetup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py index 77d8377..2521511 100644 --- a/mesonbuild/msetup.py +++ b/mesonbuild/msetup.py @@ -86,7 +86,7 @@ class MesonApp: # will cause a crash for l in os.listdir(self.build_dir): l = os.path.join(self.build_dir, l) - if os.path.isdir(l): + if os.path.isdir(l) and not os.path.islink(l): mesonlib.windows_proof_rmtree(l) else: mesonlib.windows_proof_rm(l) -- cgit v1.1 From 9ddbcd301a601ffb7a271ff583f0ef16464114f5 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 29 Apr 2020 09:58:51 -0400 Subject: ExternalProgram: Do special windows tricks even when name is provided Closes: #7051 --- mesonbuild/dependencies/base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index c0ec089..95a3956 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -1800,6 +1800,10 @@ class ExternalProgram: self.name = name if command is not None: self.command = listify(command) + if mesonlib.is_windows(): + cmd = self.command[0] + args = self.command[1:] + self.command = self._search_windows_special_cases(name, cmd) + args else: all_search_dirs = [search_dir] if extra_search_dirs: -- cgit v1.1 From 4929547b535462f976c88912f7e941195d533183 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 29 Apr 2020 10:01:09 -0400 Subject: gnome: Print proper error when 'nsversion' or 'namespace' are missing --- mesonbuild/modules/gnome.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 14cb4c4..7629f18 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -732,8 +732,12 @@ class GnomeModule(ExtensionModule): gir_dep, giscanner, gicompiler = self._get_gir_dep(state) - ns = kwargs.pop('namespace') - nsversion = kwargs.pop('nsversion') + ns = kwargs.get('namespace') + if not ns: + raise MesonException('Missing "namespace" keyword argument') + nsversion = kwargs.get('nsversion') + if not nsversion: + raise MesonException('Missing "nsversion" keyword argument') libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True) girfile = '%s-%s.gir' % (ns, nsversion) srcdir = os.path.join(state.environment.get_source_dir(), state.subdir) -- cgit v1.1 From cd566d2bd5f2f1faa3576b51b6b47c74a7ed0392 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Tue, 28 Apr 2020 15:02:26 +0100 Subject: CI: pin pylint Pin pylint version to workaround https://github.com/PyCQA/pylint/issues/3524 --- .github/workflows/lint_mypy.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lint_mypy.yml b/.github/workflows/lint_mypy.yml index c826729..7afee2e 100644 --- a/.github/workflows/lint_mypy.yml +++ b/.github/workflows/lint_mypy.yml @@ -19,7 +19,8 @@ jobs: - uses: actions/setup-python@v1 with: python-version: '3.x' - - run: python -m pip install pylint + # pylint version constraint can be removed when https://github.com/PyCQA/pylint/issues/3524 is resolved + - run: python -m pip install pylint==2.4.4 - run: pylint mesonbuild mypy: -- cgit v1.1 From 2c0eaf5c4f4493146355eeb8521c17a3c2ef5acd Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 16 Apr 2020 21:42:38 -0700 Subject: interpreter: Allow install_script to use additional input types This adds support for Files, CustomTarget, Indexs of CustomTargets, ConfigureFiles, 
ExternalPrograms, and Executables. Fixes: #1234 Fixes: #3552 Fixes: #6175 --- mesonbuild/interpreter.py | 70 ++++++++++++++++++---- .../common/56 install script/customtarget.py | 19 ++++++ test cases/common/56 install script/meson.build | 26 ++++++++ test cases/common/56 install script/myinstall.py | 29 +++++++-- test cases/common/56 install script/src/a file.txt | 0 test cases/common/56 install script/src/exe.c | 24 ++++++++ .../common/56 install script/src/meson.build | 4 ++ .../common/56 install script/src/myinstall.py | 4 +- test cases/common/56 install script/test.json | 10 +++- test cases/common/56 install script/wrap.py | 6 ++ 10 files changed, 174 insertions(+), 18 deletions(-) create mode 100755 test cases/common/56 install script/customtarget.py create mode 100644 test cases/common/56 install script/src/a file.txt create mode 100644 test cases/common/56 install script/src/exe.c create mode 100755 test cases/common/56 install script/wrap.py diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index dd1e57b..2b699f8 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1893,31 +1893,79 @@ class MesonMain(InterpreterObject): 'backend': self.backend_method, }) - def _find_source_script(self, name, args): + def _find_source_script(self, prog: T.Union[str, ExecutableHolder], args): + if isinstance(prog, ExecutableHolder): + prog_path = self.interpreter.backend.get_target_filename(prog.held_object) + return build.RunScript([prog_path], args) + elif isinstance(prog, ExternalProgramHolder): + return build.RunScript(prog.get_command(), args) + # Prefer scripts in the current source directory search_dir = os.path.join(self.interpreter.environment.source_dir, self.interpreter.subdir) - key = (name, search_dir) + key = (prog, search_dir) if key in self._found_source_scripts: found = self._found_source_scripts[key] else: - found = dependencies.ExternalProgram(name, search_dir=search_dir) + found = dependencies.ExternalProgram(prog, search_dir=search_dir) if found.found(): self._found_source_scripts[key] = found else: m = 'Script or command {!r} not found or not executable' - raise InterpreterException(m.format(name)) + raise InterpreterException(m.format(prog)) return build.RunScript(found.get_command(), args) - @permittedKwargs({}) - def add_install_script_method(self, args, kwargs): + def _process_script_args( + self, name: str, args: T.List[T.Union[ + str, mesonlib.File, CustomTargetHolder, + CustomTargetIndexHolder, ConfigureFileHolder, + ExternalProgramHolder, ExecutableHolder, + ]]) -> T.List[str]: + script_args = [] # T.List[str] + new = False + for a in args: + a = unholder(a) + if isinstance(a, str): + script_args.append(a) + elif isinstance(a, mesonlib.File): + new = True + script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir)) + elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)): + new = True + script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()]) + + # This feels really hacky, but I'm not sure how else to fix + # this without completely rewriting install script handling. + # This is complicated by the fact that the install target + # depends on all. 
+ if isinstance(a, build.CustomTargetIndex): + a.target.build_by_default = True + else: + a.build_by_default = True + elif isinstance(a, build.ConfigureFile): + new = True + script_args.append(os.path.join(a.subdir, a.targetname)) + elif isinstance(a, dependencies.ExternalProgram): + script_args.extend(a.command) + new = True + else: + raise InterpreterException( + 'Arguments to {} must be strings, Files, CustomTargets, ' + 'Indexes of CustomTargets, or ConfigureFiles'.format(name)) + if new: + FeatureNew('Calling "{}" with File, CustomTaget, Index of CustomTarget, ConfigureFile, Executable, or ExternalProgram'.format(name), '0.55.0').use( + self.interpreter.subproject) + return script_args + + @permittedKwargs(set()) + def add_install_script_method(self, args: 'T.Tuple[T.Union[str, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder, ConfigureFileHolder], ...]', kwargs): if len(args) < 1: raise InterpreterException('add_install_script takes one or more arguments') - check_stringlist(args, 'add_install_script args must be strings') - script = self._find_source_script(args[0], args[1:]) + script_args = self._process_script_args('add_install_script', args[1:]) + script = self._find_source_script(args[0], script_args) self.build.install_scripts.append(script) - @permittedKwargs({}) + @permittedKwargs(set()) def add_postconf_script_method(self, args, kwargs): if len(args) < 1: raise InterpreterException('add_postconf_script takes one or more arguments') @@ -1925,13 +1973,13 @@ class MesonMain(InterpreterObject): script = self._find_source_script(args[0], args[1:]) self.build.postconf_scripts.append(script) - @permittedKwargs({}) + @permittedKwargs(set()) def add_dist_script_method(self, args, kwargs): if len(args) < 1: raise InterpreterException('add_dist_script takes one or more arguments') if len(args) > 1: FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject) - check_stringlist(args, 'add_dist_script argument must be a string') + check_stringlist(args, 'add_dist_script argumetn must be a string') if self.interpreter.subproject != '': raise InterpreterException('add_dist_script may not be used in a subproject.') script = self._find_source_script(args[0], args[1:]) diff --git a/test cases/common/56 install script/customtarget.py b/test cases/common/56 install script/customtarget.py new file mode 100755 index 0000000..e28373a --- /dev/null +++ b/test cases/common/56 install script/customtarget.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 + +import argparse +import os + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument('dirname') + args = parser.parse_args() + + with open(os.path.join(args.dirname, '1.txt'), 'w') as f: + f.write('') + with open(os.path.join(args.dirname, '2.txt'), 'w') as f: + f.write('') + + +if __name__ == "__main__": + main() diff --git a/test cases/common/56 install script/meson.build b/test cases/common/56 install script/meson.build index 6351518..e80e666 100644 --- a/test cases/common/56 install script/meson.build +++ b/test cases/common/56 install script/meson.build @@ -5,3 +5,29 @@ meson.add_install_script('myinstall.py', 'diiba/daaba', 'file.dat') meson.add_install_script('myinstall.py', 'this/should', 'also-work.dat') subdir('src') + +meson.add_install_script('myinstall.py', 'dir', afile, '--mode=copy') + +data = configuration_data() +data.set10('foo', true) +conf = configure_file( + configuration : data, + output : 'conf.txt' +) + 
+meson.add_install_script('myinstall.py', 'dir', conf, '--mode=copy') + +t = custom_target( + 'ct', + command : [find_program('customtarget.py'), '@OUTDIR@'], + output : ['1.txt', '2.txt'], +) + +meson.add_install_script('myinstall.py', 'customtarget', t, '--mode=copy') +meson.add_install_script('myinstall.py', 'customtargetindex', t[0], '--mode=copy') + +meson.add_install_script(exe, 'generated.txt') +wrap = find_program('wrap.py') +# Yes, these are getting silly +meson.add_install_script(wrap, exe, 'wrapped.txt') +meson.add_install_script(wrap, wrap, exe, 'wrapped2.txt') diff --git a/test cases/common/56 install script/myinstall.py b/test cases/common/56 install script/myinstall.py index 812561e..a573342 100644 --- a/test cases/common/56 install script/myinstall.py +++ b/test cases/common/56 install script/myinstall.py @@ -1,12 +1,31 @@ #!/usr/bin/env python3 +import argparse import os -import sys +import shutil prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX'] -dirname = os.path.join(prefix, sys.argv[1]) -os.makedirs(dirname) -with open(os.path.join(dirname, sys.argv[2]), 'w') as f: - f.write('') +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument('dirname') + parser.add_argument('files', nargs='+') + parser.add_argument('--mode', action='store', default='create', choices=['create', 'copy']) + args = parser.parse_args() + + dirname = os.path.join(prefix, args.dirname) + if not os.path.exists(dirname): + os.makedirs(dirname) + + if args.mode == 'create': + for name in args.files: + with open(os.path.join(dirname, name), 'w') as f: + f.write('') + else: + for name in args.files: + shutil.copy(name, dirname) + + +if __name__ == "__main__": + main() diff --git a/test cases/common/56 install script/src/a file.txt b/test cases/common/56 install script/src/a file.txt new file mode 100644 index 0000000..e69de29 diff --git a/test cases/common/56 install script/src/exe.c b/test cases/common/56 install script/src/exe.c new file mode 100644 index 0000000..b573b91 --- /dev/null +++ b/test cases/common/56 install script/src/exe.c @@ -0,0 +1,24 @@ +#include +#include +#include + +int main(int argc, char * argv[]) { + if (argc != 2) { + fprintf(stderr, "Takes exactly 2 arguments\n"); + return 1; + } + + char * dirname = getenv("MESON_INSTALL_DESTDIR_PREFIX"); + char * fullname = malloc(strlen(dirname) + 1 + strlen(argv[1]) + 1); + strcpy(fullname, dirname); + strcat(fullname, "/"); + strcat(fullname, argv[1]); + + FILE * fp = fopen(fullname, "w"); + fputs("Some text\n", fp); + fclose(fp); + + free(fullname); + + return 0; +} diff --git a/test cases/common/56 install script/src/meson.build b/test cases/common/56 install script/src/meson.build index b23574a..1db424f 100644 --- a/test cases/common/56 install script/src/meson.build +++ b/test cases/common/56 install script/src/meson.build @@ -1 +1,5 @@ meson.add_install_script('myinstall.py', 'this/does', 'something-different.dat') + +afile = files('a file.txt') + +exe = executable('exe', 'exe.c', install : false, native : true) diff --git a/test cases/common/56 install script/src/myinstall.py b/test cases/common/56 install script/src/myinstall.py index 3b7ce37..3a9d89b 100644 --- a/test cases/common/56 install script/src/myinstall.py +++ b/test cases/common/56 install script/src/myinstall.py @@ -7,6 +7,8 @@ prefix = os.environ['MESON_INSTALL_DESTDIR_PREFIX'] dirname = os.path.join(prefix, sys.argv[1]) -os.makedirs(dirname) +if not os.path.exists(dirname): + os.makedirs(dirname) + with open(os.path.join(dirname, sys.argv[2] + 
'.in'), 'w') as f: f.write('') diff --git a/test cases/common/56 install script/test.json b/test cases/common/56 install script/test.json index d17625f..b2a5971 100644 --- a/test cases/common/56 install script/test.json +++ b/test cases/common/56 install script/test.json @@ -4,6 +4,14 @@ {"type": "pdb", "file": "usr/bin/prog"}, {"type": "file", "file": "usr/diiba/daaba/file.dat"}, {"type": "file", "file": "usr/this/should/also-work.dat"}, - {"type": "file", "file": "usr/this/does/something-different.dat.in"} + {"type": "file", "file": "usr/this/does/something-different.dat.in"}, + {"type": "file", "file": "usr/dir/a file.txt"}, + {"type": "file", "file": "usr/dir/conf.txt"}, + {"type": "file", "file": "usr/customtarget/1.txt"}, + {"type": "file", "file": "usr/customtarget/2.txt"}, + {"type": "file", "file": "usr/customtargetindex/1.txt"}, + {"type": "file", "file": "usr/generated.txt"}, + {"type": "file", "file": "usr/wrapped.txt"}, + {"type": "file", "file": "usr/wrapped2.txt"} ] } diff --git a/test cases/common/56 install script/wrap.py b/test cases/common/56 install script/wrap.py new file mode 100755 index 0000000..87508e0 --- /dev/null +++ b/test cases/common/56 install script/wrap.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import subprocess +import sys + +subprocess.run(sys.argv[1:]) -- cgit v1.1 From c239ce31f55579cfe1e29b769a8bda97deca2166 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 29 Apr 2020 13:00:41 -0700 Subject: allow postconf and dist scripts to use Files, ExternalPrograms, and ConfigureFiles These things are all known to be ready when these scripts are run, and thus they can safely consume them. --- mesonbuild/interpreter.py | 14 ++++++++------ test cases/common/104 postconf with args/meson.build | 7 ++++++- test cases/unit/35 dist script/meson.build | 1 + 3 files changed, 15 insertions(+), 7 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 2b699f8..b525690 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1920,7 +1920,7 @@ class MesonMain(InterpreterObject): str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder, ConfigureFileHolder, ExternalProgramHolder, ExecutableHolder, - ]]) -> T.List[str]: + ]], allow_built: bool = False) -> T.List[str]: script_args = [] # T.List[str] new = False for a in args: @@ -1931,6 +1931,8 @@ class MesonMain(InterpreterObject): new = True script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir)) elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)): + if not allow_built: + raise InterpreterException('Arguments to {} cannot be built'.format(name)) new = True script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()]) @@ -1961,7 +1963,7 @@ class MesonMain(InterpreterObject): def add_install_script_method(self, args: 'T.Tuple[T.Union[str, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder, ConfigureFileHolder], ...]', kwargs): if len(args) < 1: raise InterpreterException('add_install_script takes one or more arguments') - script_args = self._process_script_args('add_install_script', args[1:]) + script_args = self._process_script_args('add_install_script', args[1:], allow_built=True) script = self._find_source_script(args[0], script_args) self.build.install_scripts.append(script) @@ -1969,8 +1971,8 @@ class MesonMain(InterpreterObject): def add_postconf_script_method(self, args, kwargs): if len(args) < 1: raise InterpreterException('add_postconf_script 
takes one or more arguments') - check_stringlist(args, 'add_postconf_script arguments must be strings') - script = self._find_source_script(args[0], args[1:]) + script_args = self._process_script_args('add_postconf_script', args[1:], allow_built=True) + script = self._find_source_script(args[0], script_args) self.build.postconf_scripts.append(script) @permittedKwargs(set()) @@ -1979,10 +1981,10 @@ class MesonMain(InterpreterObject): raise InterpreterException('add_dist_script takes one or more arguments') if len(args) > 1: FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject) - check_stringlist(args, 'add_dist_script argumetn must be a string') if self.interpreter.subproject != '': raise InterpreterException('add_dist_script may not be used in a subproject.') - script = self._find_source_script(args[0], args[1:]) + script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True) + script = self._find_source_script(args[0], script_args) self.build.dist_scripts.append(script) @noPosargs diff --git a/test cases/common/104 postconf with args/meson.build b/test cases/common/104 postconf with args/meson.build index 8510c5b..a34502c 100644 --- a/test cases/common/104 postconf with args/meson.build +++ b/test cases/common/104 postconf with args/meson.build @@ -1,5 +1,10 @@ project('postconf script', 'c') -meson.add_postconf_script('postconf.py', '5', '33') +conf = configure_file( + configuration : configuration_data(), + output : 'out' +) + +meson.add_postconf_script(find_program('postconf.py'), '5', '33', conf) test('post', executable('prog', 'prog.c')) diff --git a/test cases/unit/35 dist script/meson.build b/test cases/unit/35 dist script/meson.build index fd672a9..2ae9438 100644 --- a/test cases/unit/35 dist script/meson.build +++ b/test cases/unit/35 dist script/meson.build @@ -5,3 +5,4 @@ exe = executable('comparer', 'prog.c') test('compare', exe) meson.add_dist_script('replacer.py', '"incorrect"', '"correct"') +meson.add_dist_script(find_program('replacer.py'), '"incorrect"', '"correct"') -- cgit v1.1 From 81f35f15492a35bcb294f7a516459141b1bd73ab Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 29 Apr 2020 13:14:10 -0700 Subject: docs: Update documentation for add_*_script --- docs/markdown/Reference-manual.md | 7 +++++++ .../snippets/add_foo_script_type_additions.md | 24 ++++++++++++++++++++++ 2 files changed, 31 insertions(+) create mode 100644 docs/markdown/snippets/add_foo_script_type_additions.md diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 963af9d..48e165a 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -1735,6 +1735,8 @@ the following methods. 0.49.0, the function only accepted a single argument. Since 0.54.0 the `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` environment variables are set when dist scripts are run. + *(Since 0.55.0)* The output of `configure_file`, `files`, and `find_program` + as well as strings. - `add_install_script(script_name, arg1, arg2, ...)` causes the script given as an argument to be run during the install step, this script @@ -1742,6 +1744,9 @@ the following methods. `MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`, `MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set. All positional arguments are passed as parameters. 
+ *(Since 0.55.0)* The output of `configure_file`, `files`, `find_program`, + `custom_target`, indexes of `custom_target`, `executable`, `library`, and + other built targets as well as strings. *(added 0.54)* If `meson install` is called with the `--quiet` option, the environment variable `MESON_INSTALL_QUIET` will be set. @@ -1772,6 +1777,8 @@ the following methods. executable given as an argument after all project files have been generated. This script will have the environment variables `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` set. + *(Since 0.55.0)* The output of `configure_file`, `files`, and `find_program` + as well as strings. - `backend()` *(added 0.37.0)* returns a string representing the current backend: `ninja`, `vs2010`, `vs2015`, `vs2017`, `vs2019`, diff --git a/docs/markdown/snippets/add_foo_script_type_additions.md b/docs/markdown/snippets/add_foo_script_type_additions.md new file mode 100644 index 0000000..88a88b2 --- /dev/null +++ b/docs/markdown/snippets/add_foo_script_type_additions.md @@ -0,0 +1,24 @@ +## meson.add_*_script methods accept new types + +All three (`add_install_script`, `add_dist_script`, and +`add_postconf_script`) now accept ExternalPrograms (as returned by +`find_program`), Files, and the output of `configure_file`. The dist and +postconf methods cannot accept other types because of when they are run. +While dist could, in theory, take other dependencies, it would require more +extensive changes, particularly to the backend. + +```meson +meson.add_install_script(find_program('foo'), files('bar')) +meson.add_dist_script(find_program('foo'), files('bar')) +meson.add_postconf_script(find_program('foo'), files('bar')) +``` + +The install script variant is also able to accept custom_targets, +custom_target indexes, and build targets (executables, libraries), and can +use built executables a the script to run + +```meson +installer = executable('installer', ...) +meson.add_install_script(installer, ...) +meson.add_install_script('foo.py', installer) +``` -- cgit v1.1 From 7d4af2ab7c0f3d7cf98aada7da21dc5dccf5002d Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 24 Mar 2020 11:09:13 -0700 Subject: build: add missing type annotation --- mesonbuild/build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index c200261..8cef017 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -1217,7 +1217,7 @@ You probably should put it in link_with instead.''') See: https://github.com/mesonbuild/meson/issues/1653 ''' - langs = [] + langs = [] # type: T.List[str] # User specified link_language of target (for multi-language targets) if self.link_language: -- cgit v1.1 From 90883fa35d48bf4c4b522ba76e3de4f32456a22d Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 24 Mar 2020 11:15:00 -0700 Subject: tests: Add common test for link_language It's cool we have a fortran test, but we should have a C/C++ test, especially for C++ extern "C". 
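For context, which linker a target ends up using can be verified from the outside by inspecting the generated `build.ninja` for the link rule name; the unit test added in a later patch in this series does exactly that. The sketch below is only an illustration of that kind of check, with the file path and target name as placeholders; the `c_LINKER` pattern mirrors the assertion used in `run_unittests.py`, not a documented API.

```python
# Rough sketch: check whether a target's link edge in build.ninja uses the
# C linker rule, mirroring the regex asserted in run_unittests.py.
import re

def linked_with_c_linker(build_ninja_path, target='main'):
    with open(build_ninja_path, encoding='utf-8') as f:
        contents = f.read()
    pattern = r'build {}(\.exe)?.*: c_LINKER'.format(re.escape(target))
    return re.search(pattern, contents) is not None
```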
--- test cases/common/232 link language/c_linkage.cpp | 5 +++++ test cases/common/232 link language/c_linkage.h | 10 ++++++++++ test cases/common/232 link language/main.c | 5 +++++ test cases/common/232 link language/meson.build | 12 ++++++++++++ 4 files changed, 32 insertions(+) create mode 100644 test cases/common/232 link language/c_linkage.cpp create mode 100644 test cases/common/232 link language/c_linkage.h create mode 100644 test cases/common/232 link language/main.c create mode 100644 test cases/common/232 link language/meson.build diff --git a/test cases/common/232 link language/c_linkage.cpp b/test cases/common/232 link language/c_linkage.cpp new file mode 100644 index 0000000..dc006b9 --- /dev/null +++ b/test cases/common/232 link language/c_linkage.cpp @@ -0,0 +1,5 @@ +extern "C" { + int makeInt(void) { + return 0; + } +} diff --git a/test cases/common/232 link language/c_linkage.h b/test cases/common/232 link language/c_linkage.h new file mode 100644 index 0000000..1609f47 --- /dev/null +++ b/test cases/common/232 link language/c_linkage.h @@ -0,0 +1,10 @@ + +#ifdef __cplusplus +extern "C" { +#endif + +int makeInt(void); + +#ifdef __cplusplus +} +#endif diff --git a/test cases/common/232 link language/main.c b/test cases/common/232 link language/main.c new file mode 100644 index 0000000..5a167e7 --- /dev/null +++ b/test cases/common/232 link language/main.c @@ -0,0 +1,5 @@ +#include "c_linkage.h" + +int main(void) { + return makeInt(); +} diff --git a/test cases/common/232 link language/meson.build b/test cases/common/232 link language/meson.build new file mode 100644 index 0000000..fdb3deb --- /dev/null +++ b/test cases/common/232 link language/meson.build @@ -0,0 +1,12 @@ +project( + 'link_language', + ['c', 'cpp'], +) + +exe = executable( + 'main', + ['main.c', 'c_linkage.cpp'], + link_language : 'c', +) + +test('main', exe) -- cgit v1.1 From 650023f0ccfe2ff8ed8134ab3b26d50c891b0f74 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 24 Mar 2020 11:54:00 -0700 Subject: unittests: Add test to show that link_language is broken --- run_unittests.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index da898a3..35f5f9c 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4641,6 +4641,21 @@ recommended as it is not supported on some platforms''') def test_junit_valid_exitcode(self): self._test_junit('44 test args') + def test_link_language_linker(self): + # TODO: there should be some way to query how we're linking things + # without resorting to reading the ninja.build file + if self.backend is not Backend.ninja: + raise unittest.SkipTest('This test reads the ninja file') + + testdir = os.path.join(self.common_test_dir, '230 link language') + self.init(testdir) + + build_ninja = os.path.join(self.builddir, 'build.ninja') + with open(build_ninja, 'r', encoding='utf-8') as f: + contents = f.read() + + self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER') + class FailureTests(BasePlatformTests): ''' -- cgit v1.1 From d8a3c777a62b18d33bbdbca0fd4954399e1ebc04 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 24 Mar 2020 12:02:14 -0700 Subject: build: Fix link_language selection Currently it does nothing, as the field is read too late, and additional languages have already been considered. As such if the language requested is closer to C (for example you want C but have a C++ source with only extern C functions) then link_langauge is ignored. 
Fixes #6453 --- mesonbuild/build.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 8cef017..4a8e9cc 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -1219,10 +1219,6 @@ You probably should put it in link_with instead.''') ''' langs = [] # type: T.List[str] - # User specified link_language of target (for multi-language targets) - if self.link_language: - return [self.link_language] - # Check if any of the external libraries were written in this language for dep in self.external_deps: if dep.language is None: @@ -1253,6 +1249,12 @@ You probably should put it in link_with instead.''') # Populate list of all compilers, not just those being used to compile # sources in this target all_compilers = self.environment.coredata.compilers[self.for_machine] + + # If the user set the link_language, just return that. + if self.link_language: + comp = all_compilers[self.link_language] + return comp, comp.language_stdlib_only_link_flags() + # Languages used by dependencies dep_langs = self.get_langs_used_by_deps() # Pick a compiler based on the language priority-order -- cgit v1.1 From fdd60225301731d5b696479eb091c01a2ea121e0 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 24 Mar 2020 12:09:54 -0700 Subject: interpreter: Add link_language to all build targets If the feature hadn't been broken in the first place it would have worked on them anyway, so we might as well expose it. I'm loathe to do it because one of the best features of meson in a mixed C/C++ code base is that meson figures out the right linker every time, but there are cases people have where they want to force a linker. We'll let them keep the pieces. --- mesonbuild/build.py | 3 ++- run_unittests.py | 3 ++- test cases/common/232 link language/lib.cpp | 5 +++++ test cases/common/232 link language/meson.build | 6 ++++++ 4 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 test cases/common/232 link language/lib.cpp diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 4a8e9cc..aff1d5f 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -82,6 +82,7 @@ buildtarget_kwargs = set([ 'override_options', 'sources', 'gnu_symbol_visibility', + 'link_language', ]) known_build_target_kwargs = ( @@ -92,7 +93,7 @@ known_build_target_kwargs = ( rust_kwargs | cs_kwargs) -known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'link_language', 'pie'} +known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'} known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'} known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'} known_stlib_kwargs = known_build_target_kwargs | {'pic'} diff --git a/run_unittests.py b/run_unittests.py index 35f5f9c..c77c9c0 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4647,7 +4647,7 @@ recommended as it is not supported on some platforms''') if self.backend is not Backend.ninja: raise unittest.SkipTest('This test reads the ninja file') - testdir = os.path.join(self.common_test_dir, '230 link language') + testdir = os.path.join(self.common_test_dir, '232 link language') self.init(testdir) build_ninja = os.path.join(self.builddir, 'build.ninja') @@ -4655,6 +4655,7 @@ recommended as it is not supported on some platforms''') contents = f.read() self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER') + self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER') class 
FailureTests(BasePlatformTests): diff --git a/test cases/common/232 link language/lib.cpp b/test cases/common/232 link language/lib.cpp new file mode 100644 index 0000000..ab43828 --- /dev/null +++ b/test cases/common/232 link language/lib.cpp @@ -0,0 +1,5 @@ +extern "C" { + int makeInt(void) { + return 1; + } +} diff --git a/test cases/common/232 link language/meson.build b/test cases/common/232 link language/meson.build index fdb3deb..f9af6cd 100644 --- a/test cases/common/232 link language/meson.build +++ b/test cases/common/232 link language/meson.build @@ -9,4 +9,10 @@ exe = executable( link_language : 'c', ) +lib = library( + 'mylib', + ['lib.cpp'], + link_language : 'c', +) + test('main', exe) -- cgit v1.1 From 1bfeaadd6d1dc87532a1c0ad4be615eb34044495 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 24 Mar 2020 12:20:20 -0700 Subject: Docs: Update link_language docs to explain when it should be used --- docs/markdown/Reference-manual.md | 8 ++++++-- docs/markdown/snippets/link_language_all_targets.md | 8 ++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 docs/markdown/snippets/link_language_all_targets.md diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 963af9d..5c5f56a 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -600,8 +600,12 @@ be passed to [shared and static libraries](#library). depends on such as a symbol visibility map. The purpose is to automatically trigger a re-link (but not a re-compile) of the target when this file changes. -- `link_language` since 0.51.0 makes the linker for this target - be for the specified language. This is helpful for multi-language targets. +- `link_language` since 0.51.0 (broken until 0.55.0) makes the linker for this + target be for the specified language. It is generally unnecessary to set + this, as meson will detect the right linker to use in most cases. There are + only two cases where this is needed. One, your main function in an + executable is not in the language meson picked, or second you want to force + a library to use only one ABI. - `link_whole` links all contents of the given static libraries whether they are used by not, equivalent to the `-Wl,--whole-archive` argument flag of GCC, available since 0.40.0. diff --git a/docs/markdown/snippets/link_language_all_targets.md b/docs/markdown/snippets/link_language_all_targets.md new file mode 100644 index 0000000..9019d50 --- /dev/null +++ b/docs/markdown/snippets/link_language_all_targets.md @@ -0,0 +1,8 @@ +## link_language argument added to all targets + +Previously the `link_language` argument was only supposed to be allowed in +executables, because the linker used needs to be the linker for the language +that implements the main function. Unfortunately it didn't work in that case, +and, even worse, if it had been implemented properly it would have worked for +*all* targets. In 0.55.0 this restriction has been removed, and the bug fixed. +It now is valid for `executable` and all derivative of `library`. -- cgit v1.1 From f107f9b3962167e1ff5014951aa89fdc176ea683 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 12 Feb 2020 00:03:51 +0000 Subject: Make colourize_console() a function Currently, colourize_console is a constant, set at process initialization. 
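(Editor's sketch: the per-file-object caching that the notes below describe, reduced to a few lines. Names and details are illustrative; the actual change is in the mesonbuild/mlog.py hunk that follows.)

```python
import os
import sys

# Illustrative only: cache the colourization decision on the stream object
# itself, so a test driver that swaps sys.stdout for a pipe or StringIO gets
# a fresh decision instead of a value frozen at process start-up.
def colorize_console() -> bool:
    cached = getattr(sys.stdout, 'colorize_console', None)
    if cached is not None:
        return cached
    try:
        result = sys.stdout.isatty() and os.environ.get('TERM', 'dumb') != 'dumb'
    except Exception:
        result = False
    sys.stdout.colorize_console = result  # per-file-object cache
    return result
```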
To allow the actual stdout to be easily compared with the expected when running tests, we want to allow colourization to be on for the test driver, but not for the in-process configure done by run_configure, which has stdout redirected from a tty to a pipe. v2: Cache _colorize_console per file object v3: Reset cache on setup_console() --- mesonbuild/mlog.py | 29 +++++++++++++++++++++++------ run_project_tests.py | 8 ++++---- run_tests.py | 6 +++--- 3 files changed, 30 insertions(+), 13 deletions(-) diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index 8cbd248..7b8aec7 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -40,15 +40,32 @@ def _windows_ansi() -> bool: # original behavior return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON')) -def setup_console() -> bool: +def colorize_console() -> bool: + _colorize_console = getattr(sys.stdout, 'colorize_console', None) # type: bool + if _colorize_console is not None: + return _colorize_console + try: if platform.system().lower() == 'windows': - return os.isatty(sys.stdout.fileno()) and _windows_ansi() - return os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb' + _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi() + else: + _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb' except Exception: - return False + _colorize_console = False + + sys.stdout.colorize_console = _colorize_console # type: ignore[attr-defined] + return _colorize_console + +def setup_console(): + # on Windows, a subprocess might call SetConsoleMode() on the console + # connected to stdout and turn off ANSI escape processing. Call this after + # running a subprocess to ensure we turn it on again. + if platform.system().lower() == 'windows': + try: + delattr(sys.stdout, 'colorize_console') + except AttributeError: + pass -colorize_console = setup_console() log_dir = None # type: T.Optional[str] log_file = None # type: T.Optional[T.TextIO] log_fname = 'meson-log.txt' # type: str @@ -204,7 +221,7 @@ def log(*args: T.Union[str, AnsiDecorator], is_error: bool = False, if log_file is not None: print(*arr, file=log_file, **kwargs) log_file.flush() - if colorize_console: + if colorize_console(): arr = process_markup(args, True) if not log_errors_only or is_error: force_print(*arr, **kwargs) diff --git a/run_project_tests.py b/run_project_tests.py index 8cbf989..f636d63 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -341,19 +341,19 @@ def log_text_file(logfile, testdir, stdo, stde): def bold(text): - return mlog.bold(text).get_text(mlog.colorize_console) + return mlog.bold(text).get_text(mlog.colorize_console()) def green(text): - return mlog.green(text).get_text(mlog.colorize_console) + return mlog.green(text).get_text(mlog.colorize_console()) def red(text): - return mlog.red(text).get_text(mlog.colorize_console) + return mlog.red(text).get_text(mlog.colorize_console()) def yellow(text): - return mlog.yellow(text).get_text(mlog.colorize_console) + return mlog.yellow(text).get_text(mlog.colorize_console()) def _run_ci_include(args: T.List[str]) -> str: diff --git a/run_tests.py b/run_tests.py index 005d9a0..44dcf82 100755 --- a/run_tests.py +++ b/run_tests.py @@ -303,7 +303,7 @@ def run_configure(commandlist, env=None): return run_configure_inprocess(commandlist, env=env) def print_system_info(): - print(mlog.bold('System information.').get_text(mlog.colorize_console)) + print(mlog.bold('System 
information.').get_text(mlog.colorize_console())) print('Architecture:', platform.architecture()) print('Machine:', platform.machine()) print('Platform:', platform.system()) @@ -377,7 +377,7 @@ def main(): print(flush=True) returncode = 0 else: - print(mlog.bold('Running unittests.').get_text(mlog.colorize_console)) + print(mlog.bold('Running unittests.').get_text(mlog.colorize_console())) print(flush=True) cmd = mesonlib.python_command + ['run_unittests.py', '-v'] if options.failfast: @@ -390,7 +390,7 @@ def main(): else: cross_test_args = mesonlib.python_command + ['run_cross_test.py'] for cf in options.cross: - print(mlog.bold('Running {} cross tests.'.format(cf)).get_text(mlog.colorize_console)) + print(mlog.bold('Running {} cross tests.'.format(cf)).get_text(mlog.colorize_console())) print(flush=True) cmd = cross_test_args + ['cross/' + cf] if options.failfast: -- cgit v1.1 From d1920886a1e3f0e50969399a836a76ecf01d703e Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 12 Feb 2020 16:12:18 +0000 Subject: Skip failing tests when they won't fail in the expected way --- test cases/failing/19 target clash/meson.build | 2 +- test cases/failing/32 exe static shared/meson.build | 2 +- .../36 pkgconfig dependency impossible conditions/meson.build | 4 ++++ .../meson.build | 2 +- test cases/failing/54 wrong shared crate type/meson.build | 6 +++++- test cases/failing/55 wrong static crate type/meson.build | 6 +++++- test cases/failing/68 wrong boost module/meson.build | 4 ++++ .../failing/76 link with shared module on osx/meson.build | 2 +- .../failing/80 framework dependency with version/meson.build | 6 +++++- test cases/failing/82 gl dependency with version/meson.build | 2 +- .../failing/84 gtest dependency with version/meson.build | 5 +++++ test cases/failing/85 dub libray/meson.build | 10 +++++++++- test cases/failing/86 dub executable/meson.build | 10 +++++++++- test cases/failing/87 dub compiler/meson.build | 10 +++++++++- 14 files changed, 60 insertions(+), 11 deletions(-) diff --git a/test cases/failing/19 target clash/meson.build b/test cases/failing/19 target clash/meson.build index ca09fb5..4fd0934 100644 --- a/test cases/failing/19 target clash/meson.build +++ b/test cases/failing/19 target clash/meson.build @@ -8,7 +8,7 @@ project('clash', 'c') # output location is redirected. 
if host_machine.system() == 'windows' or host_machine.system() == 'cygwin' - error('This is expected.') + error('MESON_SKIP_TEST test only works on platforms where executables have no suffix.') endif executable('clash', 'clash.c') diff --git a/test cases/failing/32 exe static shared/meson.build b/test cases/failing/32 exe static shared/meson.build index b102764..2ae5125 100644 --- a/test cases/failing/32 exe static shared/meson.build +++ b/test cases/failing/32 exe static shared/meson.build @@ -2,7 +2,7 @@ project('statchain', 'c') host_system = host_machine.system() if host_system == 'windows' or host_system == 'darwin' - error('Test only fails on Linux and BSD') + error('MESON_SKIP_TEST test only fails on Linux and BSD') endif statlib = static_library('stat', 'stat.c', pic : false) diff --git a/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build b/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build index 54d434c..874b581 100644 --- a/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build +++ b/test cases/failing/36 pkgconfig dependency impossible conditions/meson.build @@ -1,3 +1,7 @@ project('impossible-dep-test', 'c', version : '1.0') +if not dependency('zlib', required: false).found() + error('MESON_SKIP_TEST test requires zlib') +endif + dependency('zlib', version : ['>=1.0', '<1.0']) diff --git a/test cases/failing/42 custom target outputs not matching install_dirs/meson.build b/test cases/failing/42 custom target outputs not matching install_dirs/meson.build index 45bd7b3..765e237 100644 --- a/test cases/failing/42 custom target outputs not matching install_dirs/meson.build +++ b/test cases/failing/42 custom target outputs not matching install_dirs/meson.build @@ -3,7 +3,7 @@ project('outputs not matching install_dirs', 'c') gen = find_program('generator.py') if meson.backend() != 'ninja' - error('Failing manually, test is only for the ninja backend') + error('MESON_SKIP_TEST test is only for the ninja backend') endif custom_target('too-few-install-dirs', diff --git a/test cases/failing/54 wrong shared crate type/meson.build b/test cases/failing/54 wrong shared crate type/meson.build index 69ac3da..b9fcad4 100644 --- a/test cases/failing/54 wrong shared crate type/meson.build +++ b/test cases/failing/54 wrong shared crate type/meson.build @@ -1,3 +1,7 @@ -project('test', 'rust') +project('test') + +if not add_languages('rust', required: false) + error('MESON_SKIP_TEST test requires rust compiler') +endif shared_library('test', 'foo.rs', rust_crate_type : 'staticlib') diff --git a/test cases/failing/55 wrong static crate type/meson.build b/test cases/failing/55 wrong static crate type/meson.build index c094613..109907f 100644 --- a/test cases/failing/55 wrong static crate type/meson.build +++ b/test cases/failing/55 wrong static crate type/meson.build @@ -1,3 +1,7 @@ -project('test', 'rust') +project('test') + +if not add_languages('rust', required: false) + error('MESON_SKIP_TEST test requires rust compiler') +endif static_library('test', 'foo.rs', rust_crate_type : 'cdylib') diff --git a/test cases/failing/68 wrong boost module/meson.build b/test cases/failing/68 wrong boost module/meson.build index 7fb3a40..937e587 100644 --- a/test cases/failing/68 wrong boost module/meson.build +++ b/test cases/failing/68 wrong boost module/meson.build @@ -1,5 +1,9 @@ project('boosttest', 'cpp', default_options : ['cpp_std=c++11']) +if not dependency('boost', required: false).found() + error('MESON_SKIP_TEST test requires 
boost') +endif + # abc doesn't exist linkdep = dependency('boost', modules : ['thread', 'system', 'test', 'abc']) diff --git a/test cases/failing/76 link with shared module on osx/meson.build b/test cases/failing/76 link with shared module on osx/meson.build index 2c714f9..bf18b36 100644 --- a/test cases/failing/76 link with shared module on osx/meson.build +++ b/test cases/failing/76 link with shared module on osx/meson.build @@ -1,7 +1,7 @@ project('link with shared module', 'c') if host_machine.system() != 'darwin' - error('Test only fails on OSX') + error('MESON_SKIP_TEST test only fails on OSX') endif m = shared_module('mymodule', 'module.c') diff --git a/test cases/failing/80 framework dependency with version/meson.build b/test cases/failing/80 framework dependency with version/meson.build index 1ead388..b7e04ba 100644 --- a/test cases/failing/80 framework dependency with version/meson.build +++ b/test cases/failing/80 framework dependency with version/meson.build @@ -1,4 +1,8 @@ project('framework dependency with version', 'c') + +if host_machine.system() != 'darwin' + error('MESON_SKIP_TEST test only applicable on darwin') +endif + # do individual frameworks have a meaningful version to test? And multiple frameworks might be listed... -# otherwise we're not on OSX and this will definitely fail dep = dependency('appleframeworks', modules: 'foundation', version: '>0') diff --git a/test cases/failing/82 gl dependency with version/meson.build b/test cases/failing/82 gl dependency with version/meson.build index 3014d43..0127093 100644 --- a/test cases/failing/82 gl dependency with version/meson.build +++ b/test cases/failing/82 gl dependency with version/meson.build @@ -2,7 +2,7 @@ project('gl dependency with version', 'c') host_system = host_machine.system() if host_system != 'windows' and host_system != 'darwin' - error('Test only fails on Windows and OSX') + error('MESON_SKIP_TEST: test only fails on Windows and OSX') endif # gl dependency found via system method doesn't have a meaningful version to check diff --git a/test cases/failing/84 gtest dependency with version/meson.build b/test cases/failing/84 gtest dependency with version/meson.build index 3d90994..b43a047 100644 --- a/test cases/failing/84 gtest dependency with version/meson.build +++ b/test cases/failing/84 gtest dependency with version/meson.build @@ -1,3 +1,8 @@ project('gtest dependency with version', ['c', 'cpp']) + +if not dependency('gtest', method: 'system', required: false).found() + error('MESON_SKIP_TEST test requires gtest') +endif + # discovering gtest version is not yet implemented dep = dependency('gtest', method: 'system', version: '>0') diff --git a/test cases/failing/85 dub libray/meson.build b/test cases/failing/85 dub libray/meson.build index 5b0ccac..306d5b3 100644 --- a/test cases/failing/85 dub libray/meson.build +++ b/test cases/failing/85 dub libray/meson.build @@ -1,3 +1,11 @@ -project('dub', 'd') +project('dub') + +if not add_languages('d', required: false) + error('MESON_SKIP_TEST test requires D compiler') +endif + +if not find_program('dub', required: false).found() + error('MESON_SKIP_TEST test requires dub') +endif dependency('dubtestproject', method: 'dub') # Not library (none) diff --git a/test cases/failing/86 dub executable/meson.build b/test cases/failing/86 dub executable/meson.build index 63fd631..9a134ea 100644 --- a/test cases/failing/86 dub executable/meson.build +++ b/test cases/failing/86 dub executable/meson.build @@ -1,3 +1,11 @@ -project('dub', 'd') +project('dub') + +if not 
add_languages('d', required: false) + error('MESON_SKIP_TEST test requires D compiler') +endif + +if not find_program('dub', required: false).found() + error('MESON_SKIP_TEST test requires dub') +endif dependency('dubtestproject:test1', method: 'dub') # Not library (executable) diff --git a/test cases/failing/87 dub compiler/meson.build b/test cases/failing/87 dub compiler/meson.build index c93ccbc..36f1849 100644 --- a/test cases/failing/87 dub compiler/meson.build +++ b/test cases/failing/87 dub compiler/meson.build @@ -1,4 +1,8 @@ -project('dub', 'd') +project('dub') + +if not add_languages('d', required: false) + error('MESON_SKIP_TEST test requires D compiler') +endif if meson.get_compiler('d').get_id() == 'dmd' if host_machine.system() == 'windows' or host_machine.system() == 'cygwin' @@ -6,4 +10,8 @@ if meson.get_compiler('d').get_id() == 'dmd' endif endif +if not find_program('dub', required: false).found() + error('MESON_SKIP_TEST test requires dub') +endif + dependency('dubtestproject:test2', method: 'dub') # Compiler mismatch -- cgit v1.1 From 6a5c6fb439c9351d513c63ff7eb028dde4c3f1c0 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 12 Feb 2020 18:47:51 +0000 Subject: Be more careful about the use of repr() in error messages Generally, we'd want to use str() rather than repr() in error messages anyhow, as that explicitly gives something designed to be read by humans. Sometimes {!r} is being used as a shortcut to avoid writing the quotes in '{!s}'. Unfortunately, these things aren't quite the same, as the repr of a string containing '\' (the path separator on Windows) will have those escaped. We don't have a good string representation to use for the arbitrary internal object used as an argument for install_data() when it's neither a string nor file (which doesn't lead to a good error message), so drop that for the moment. --- mesonbuild/build.py | 5 ++++- mesonbuild/coredata.py | 2 +- mesonbuild/interpreter.py | 4 ++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index c200261..98930b3 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -531,6 +531,9 @@ class BuildTarget(Target): repr_str = "<{0} {1}: {2}>" return repr_str.format(self.__class__.__name__, self.get_id(), self.filename) + def __str__(self): + return "{}".format(self.name) + def validate_install(self, environment): if self.for_machine is MachineChoice.BUILD and self.need_install: if environment.is_cross_build(): @@ -1104,7 +1107,7 @@ You probably should put it in link_with instead.''') if not isinstance(t, (Target, CustomTargetIndex)): raise InvalidArguments('{!r} is not a target.'.format(t)) if not t.is_linkable_target(): - raise InvalidArguments('Link target {!r} is not linkable.'.format(t)) + raise InvalidArguments("Link target '{!s}' is not linkable.".format(t)) if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic: msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name) msg += "Use the 'pic' option to static_library to build with PIC." diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 0b79084..c4fe8db 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -490,7 +490,7 @@ class CoreData: # commonpath will always return a path in the native format, so we # must use pathlib.PurePath to do the same conversion before # comparing. 
- msg = ('The value of the {!r} option is {!r} which must be a ' + msg = ('The value of the {!r} option is \'{!s}\' which must be a ' 'subdir of the prefix {!r}.\nNote that if you pass a ' 'relative path, it is assumed to be a subdir of prefix.') # os.path.commonpath doesn't understand case-insensitive filesystems, diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index dd1e57b..11eceae 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3897,7 +3897,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self absname = os.path.join(self.environment.get_source_dir(), buildfilename) if not os.path.isfile(absname): self.subdir = prev_subdir - raise InterpreterException('Non-existent build file {!r}'.format(buildfilename)) + raise InterpreterException("Non-existent build file '{!s}'".format(buildfilename)) with open(absname, encoding='utf8') as f: code = f.read() assert(isinstance(code, str)) @@ -3945,7 +3945,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self elif isinstance(s, str): source_strings.append(s) else: - raise InvalidArguments('Argument {!r} must be string or file.'.format(s)) + raise InvalidArguments('Argument must be string or file.') sources += self.source_strings_to_files(source_strings) install_dir = kwargs.get('install_dir', None) if not isinstance(install_dir, (str, type(None))): -- cgit v1.1 From 10817381131ba58b243e13d76f97be59d65a5a8a Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 12 Feb 2020 19:13:43 +0000 Subject: Check before compiler detection if 'c' language is present when adding 'vala' For the sake of a consistent error message (irrespective of if 'valac' is present or not), check if the 'c' language is present if we are adding 'vala' before (rather than after) we do compiler detection. --- mesonbuild/interpreter.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 11eceae..9811b0a 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3117,6 +3117,12 @@ external dependencies (including libraries) must go to "dependencies".''') return should def add_languages_for(self, args, required, for_machine: MachineChoice): + langs = set(self.coredata.compilers[for_machine].keys()) + langs.update(args) + if 'vala' in langs: + if 'c' not in langs: + raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.') + success = True for lang in sorted(args, key=compilers.sort_clink): lang = lang.lower() @@ -3154,11 +3160,6 @@ external dependencies (including libraries) must go to "dependencies".''') mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version) self.build.ensure_static_linker(comp) - langs = self.coredata.compilers[for_machine].keys() - if 'vala' in langs: - if 'c' not in langs: - raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.') - return success def program_from_file_for(self, for_machine, prognames, silent): -- cgit v1.1 From b647ce1b63961a76a6de63a3586015a4c5d56b44 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 12 Feb 2020 14:17:50 +0000 Subject: Cosmetic tweak to error message for incdir() with an absolute path Cosmetic tweak to the error message for incdir() with an absolute path. Don't split the message in the middle of a sentence at a point which may or may not correspond to the terminal width. 
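(A quick illustration of the `{!r}` versus `'{!s}'` point from the repr() commit above, since the difference only shows up on Windows-style paths; the path below is just an example.)

```python
# repr() escapes backslashes, so a Windows path in an error message comes out
# with doubled separators, while str() inside explicit quotes reads naturally.
path = 'C:\\Users\\dev\\project'        # i.e. C:\Users\dev\project
print('option is {!r}'.format(path))    # option is 'C:\\Users\\dev\\project'
print("option is '{!s}'".format(path))  # option is 'C:\Users\dev\project'
```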
--- mesonbuild/interpreter.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 9811b0a..0dfb616 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -4220,8 +4220,9 @@ This will become a hard error in the future.''' % kwargs['input'], location=self for a in incdir_strings: if a.startswith(src_root): - raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use -relative paths instead. + raise InvalidArguments('Tried to form an absolute path to a source dir. ' + 'You should not do that but use relative paths instead.' + ''' To get include path to any directory relative to the current dir do -- cgit v1.1 From f867bfbce07aae6ed7a5b38c583490af3ea13af9 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Tue, 21 Jan 2020 00:37:29 +0000 Subject: Add a mechanism for validating meson output in tests Expected stdout lines must match lines from the actual stdout, in the same order. Lines with match type 're' are regex matched. v2: Ignore comment lines in expected_stdout v3: Automatically adjust path separators for location in expected output v4: Put expected stdout in test.json, rather than a separate file --- docs/markdown/Contributing.md | 21 +++++++++++++++ run_project_tests.py | 59 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 80 insertions(+) diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index 8a24e0b..34cd8ca 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -334,6 +334,27 @@ If a tool is specified, it has to be present in the environment, and the version requirement must be fulfilled match. Otherwise, the entire test is skipped (including every element in the test matrix). +#### stdout + +The `stdout` key contains a list of dicts, describing the expected stdout. + +Each dict contains the following keys: + +- `line` +- `match` (optional) + +Each item in the list is matched, in order, against the remaining actual stdout +lines, after any previous matches. If the actual stdout is exhausted before +every item in the list is matched, the expected output has not been seen, and +the test has failed. 
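(Editor's example of how one such expected-stdout entry is evaluated against an actual line; the entry shown is hypothetical, and this is only a sketch of the literal-versus-`re` distinction, not the real test driver, which walks every expected line in order.)

```python
import re

# Hypothetical expected entry and actual output line, for illustration only.
expected = {'line': r'.* ERROR: Dependency "zlib" not found.*', 'match': 're'}
actual = 'meson.build:4:0: ERROR: Dependency "zlib" not found, tried pkgconfig'

how = expected.get('match', 'literal')   # 'literal' is the documented default
if how == 're':
    matched = bool(re.match(expected['line'], actual))
else:
    matched = expected['line'] == actual
print(matched)  # True
```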
+ +The `match` element of the dict determines how the `line` element is matched: + +| Type | Description | +| -------- | ----------------------- | +| `literal` | Literal match (default) | +| `re` | regex match | + ### Skipping integration tests Meson uses several continuous integration testing systems that have slightly diff --git a/run_project_tests.py b/run_project_tests.py index f636d63..3abe88c 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -191,6 +191,7 @@ class TestDef: self.env = os.environ.copy() self.installed_files = [] # type: T.List[InstalledFile] self.do_not_set_opts = [] # type: T.List[str] + self.stdout = [] # type: T.List[T.Dict[str, str]] def __repr__(self) -> str: return '<{}: {:<48} [{}: {}] -- {}>'.format(type(self).__name__, str(self.path), self.name, self.args, self.skip) @@ -381,6 +382,54 @@ def run_ci_commands(raw_log: str) -> T.List[str]: res += ['CI COMMAND {}:\n{}\n'.format(cmd[0], ci_commands[cmd[0]](cmd[1:]))] return res +def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) -> str: + if expected: + i = iter(expected) + + def next_expected(i): + # Get the next expected line + item = next(i) + how = item.get('match', 'literal') + expected = item.get('line') + + # Simple heuristic to automatically convert path separators for + # Windows: + # + # Any '/' appearing before 'WARNING' or 'ERROR' (i.e. a path in a + # filename part of a location) is replaced with '\' (in a re: '\\' + # which matches a literal '\') + # + # (There should probably be a way to turn this off for more complex + # cases which don't fit this) + if mesonlib.is_windows(): + if how != "re": + sub = r'\\' + else: + sub = r'\\\\' + expected = re.sub(r'/(?=.*(WARNING|ERROR))', sub, expected) + + return how, expected + + try: + how, expected = next_expected(i) + for actual in output.splitlines(): + if how == "re": + match = bool(re.match(expected, actual)) + else: + match = (expected == actual) + if match: + how, expected = next_expected(i) + + # reached the end of output without finding expected + return 'expected "{}" not found in {}'.format(expected, desc) + except StopIteration: + # matched all expected lines + pass + + return '' + +def validate_output(test: TestDef, stdo: str, stde: str) -> str: + return _compare_output(test.stdout, stdo, 'stdout') def run_test_inprocess(testdir): old_stdout = sys.stdout @@ -452,6 +501,11 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, cicmds = run_ci_commands(mesonlog) testresult = TestResult(cicmds) testresult.add_step(BuildStep.configure, stdo, stde, mesonlog, time.time() - gen_start) + output_msg = validate_output(test, stdo, stde) + testresult.mlog += output_msg + if output_msg: + testresult.fail('Unexpected output while configuring.') + return testresult if should_fail == 'meson': if returncode == 1: return testresult @@ -566,6 +620,9 @@ def gather_tests(testdir: Path) -> T.List[TestDef]: if 'installed' in test_def: installed = [InstalledFile(x) for x in test_def['installed']] + # Handle expected output + stdout = test_def.get('stdout', []) + # Handle the do_not_set_opts list do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str] @@ -583,6 +640,7 @@ def gather_tests(testdir: Path) -> T.List[TestDef]: t.env.update(env) t.installed_files = installed t.do_not_set_opts = do_not_set_opts + t.stdout = stdout all_tests += [t] continue @@ -653,6 +711,7 @@ def gather_tests(testdir: Path) -> T.List[TestDef]: test.env.update(env) test.installed_files = installed 
test.do_not_set_opts = do_not_set_opts + test.stdout = stdout all_tests += [test] return sorted(all_tests) -- cgit v1.1 From 3cff11a75b4af8824281b473bda1f8c45a4add7d Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 12 Feb 2020 00:31:57 +0000 Subject: Add expected stdout for failing-meson and warning-meson tests Initially produced using: for d in "test cases/failing/"* ; do rm -r _build ; ./meson.py setup "$d" _build | grep ERROR >"$d"/expected_stdout.txt; done then converted to json with jq using: jq --raw-input --slurp 'split("\n") | {stdout: map({line: select(. != "")})}' expected_stdout.txt >test.json or merged with existing json using: jq --slurp '.[0] + .[1]' test.json expected.json >test.json.new v2: Add some comments to explain the match when it isn't totally obvious v3: Add or adjust existing re: in expected output to handle '/' or '\' path separators appearing in message, not location. v4: Put expected stdout in test.json, rather than a separate expected_stdout.txt file Park comments in an unused 'comments' key, as JSON doesn't have a syntax for comments --- test cases/failing/1 project not first/test.json | 7 +++++ test cases/failing/10 out of bounds/test.json | 7 +++++ test cases/failing/11 object arithmetic/test.json | 8 +++++ test cases/failing/12 string arithmetic/test.json | 8 +++++ test cases/failing/13 array arithmetic/test.json | 7 +++++ .../failing/14 invalid option name/test.json | 7 +++++ test cases/failing/15 kwarg before arg/test.json | 7 +++++ .../failing/16 extract from subproject/test.json | 7 +++++ test cases/failing/17 same target/test.json | 7 +++++ test cases/failing/18 wrong plusassign/test.json | 7 +++++ test cases/failing/19 target clash/test.json | 7 +++++ test cases/failing/2 missing file/test.json | 7 +++++ test cases/failing/20 version/test.json | 8 +++++ test cases/failing/21 subver/test.json | 7 +++++ test cases/failing/22 assert/test.json | 7 +++++ test cases/failing/23 rel testdir/test.json | 7 +++++ test cases/failing/24 int conversion/test.json | 7 +++++ test cases/failing/25 badlang/test.json | 7 +++++ test cases/failing/26 output subdir/test.json | 7 +++++ test cases/failing/27 noprog use/test.json | 7 +++++ test cases/failing/28 no crossprop/test.json | 7 +++++ test cases/failing/29 nested ternary/test.json | 7 +++++ test cases/failing/3 missing subdir/test.json | 9 ++++++ .../failing/30 invalid man extension/test.json | 7 +++++ test cases/failing/31 no man extension/test.json | 7 +++++ test cases/failing/32 exe static shared/test.json | 7 +++++ .../failing/33 non-root subproject/test.json | 7 +++++ .../test.json | 8 +++++ .../35 project argument after target/test.json | 7 +++++ .../37 has function external dependency/test.json | 7 +++++ .../38 libdir must be inside prefix/test.json | 9 +++++- test cases/failing/39 prefix absolute/test.json | 10 ++++++- test cases/failing/4 missing meson.build/test.json | 9 ++++++ test cases/failing/40 kwarg assign/test.json | 7 +++++ .../test.json | 7 +++++ .../test.json | 35 ++++++++++++++++++---- test cases/failing/43 project name colon/test.json | 7 +++++ test cases/failing/44 abs subdir/test.json | 7 +++++ test cases/failing/45 abspath to srcdir/test.json | 7 +++++ .../46 pkgconfig variables reserved/test.json | 7 +++++ .../47 pkgconfig variables zero length/test.json | 7 +++++ .../test.json | 7 +++++ .../49 pkgconfig variables not key value/test.json | 7 +++++ test cases/failing/5 misplaced option/test.json | 7 +++++ .../failing/50 executable comparison/test.json | 7 +++++ .../failing/51 inconsistent 
comparison/test.json | 7 +++++ test cases/failing/52 slashname/test.json | 7 +++++ .../failing/53 reserved meson prefix/test.json | 7 +++++ .../failing/54 wrong shared crate type/test.json | 7 +++++ .../failing/55 wrong static crate type/test.json | 7 +++++ test cases/failing/56 or on new line/test.json | 7 +++++ test cases/failing/57 kwarg in module/test.json | 7 +++++ .../failing/58 link with executable/test.json | 7 +++++ .../59 assign custom target index/test.json | 7 +++++ test cases/failing/6 missing incdir/test.json | 7 +++++ test cases/failing/60 getoption prefix/test.json | 7 +++++ .../failing/61 bad option argument/test.json | 7 +++++ test cases/failing/62 subproj filegrab/test.json | 7 +++++ test cases/failing/63 grab subproj/test.json | 7 +++++ test cases/failing/64 grab sibling/test.json | 7 +++++ .../failing/65 string as link target/test.json | 7 +++++ .../66 dependency not-found and required/test.json | 7 +++++ test cases/failing/68 wrong boost module/test.json | 7 +++++ .../69 install_data rename bad size/test.json | 7 +++++ test cases/failing/7 go to subproject/test.json | 7 +++++ test cases/failing/70 skip only subdir/test.json | 7 +++++ test cases/failing/71 dual override/test.json | 7 +++++ test cases/failing/72 override used/test.json | 7 +++++ .../failing/73 run_command unclean exit/test.json | 8 +++++ .../failing/74 int literal leading zero/test.json | 8 +++++ .../failing/75 configuration immutable/test.json | 7 +++++ .../76 link with shared module on osx/test.json | 7 +++++ .../test.json | 8 +++++ .../test.json | 7 +++++ test cases/failing/79 unfound run/test.json | 7 +++++ test cases/failing/8 recursive/test.json | 7 +++++ .../80 framework dependency with version/test.json | 7 +++++ .../failing/81 override exe config/test.json | 7 +++++ .../82 gl dependency with version/test.json | 7 +++++ .../83 threads dependency with version/test.json | 7 +++++ test cases/failing/85 dub libray/test.json | 7 +++++ test cases/failing/86 dub executable/test.json | 7 +++++ test cases/failing/87 dub compiler/test.json | 14 +++++++-- .../failing/88 subproj not-found dep/test.json | 7 +++++ .../failing/89 invalid configure file/test.json | 7 +++++ test cases/failing/9 missing extra file/test.json | 7 +++++ test cases/failing/90 kwarg dupe/test.json | 7 +++++ test cases/failing/91 missing pch file/test.json | 8 +++++ .../92 pch source different folder/test.json | 7 +++++ test cases/failing/93 vala without c/test.json | 7 +++++ .../failing/94 unknown config tool/test.json | 7 +++++ .../95 custom target install data/test.json | 7 +++++ .../failing/96 add dict non string key/test.json | 7 +++++ .../failing/97 add dict duplicate keys/test.json | 7 +++++ test cases/failing/99 no native prop/test.json | 7 +++++ .../warning/1 version for string div/test.json | 8 +++++ .../warning/2 languages missing native/test.json | 7 +++++ 97 files changed, 722 insertions(+), 10 deletions(-) create mode 100644 test cases/failing/1 project not first/test.json create mode 100644 test cases/failing/10 out of bounds/test.json create mode 100644 test cases/failing/11 object arithmetic/test.json create mode 100644 test cases/failing/12 string arithmetic/test.json create mode 100644 test cases/failing/13 array arithmetic/test.json create mode 100644 test cases/failing/14 invalid option name/test.json create mode 100644 test cases/failing/15 kwarg before arg/test.json create mode 100644 test cases/failing/16 extract from subproject/test.json create mode 100644 test cases/failing/17 same target/test.json create mode 
100644 test cases/failing/18 wrong plusassign/test.json create mode 100644 test cases/failing/19 target clash/test.json create mode 100644 test cases/failing/2 missing file/test.json create mode 100644 test cases/failing/20 version/test.json create mode 100644 test cases/failing/21 subver/test.json create mode 100644 test cases/failing/22 assert/test.json create mode 100644 test cases/failing/23 rel testdir/test.json create mode 100644 test cases/failing/24 int conversion/test.json create mode 100644 test cases/failing/25 badlang/test.json create mode 100644 test cases/failing/26 output subdir/test.json create mode 100644 test cases/failing/27 noprog use/test.json create mode 100644 test cases/failing/28 no crossprop/test.json create mode 100644 test cases/failing/29 nested ternary/test.json create mode 100644 test cases/failing/3 missing subdir/test.json create mode 100644 test cases/failing/30 invalid man extension/test.json create mode 100644 test cases/failing/31 no man extension/test.json create mode 100644 test cases/failing/32 exe static shared/test.json create mode 100644 test cases/failing/33 non-root subproject/test.json create mode 100644 test cases/failing/34 dependency not-required then required/test.json create mode 100644 test cases/failing/35 project argument after target/test.json create mode 100644 test cases/failing/37 has function external dependency/test.json create mode 100644 test cases/failing/4 missing meson.build/test.json create mode 100644 test cases/failing/40 kwarg assign/test.json create mode 100644 test cases/failing/41 custom target plainname many inputs/test.json create mode 100644 test cases/failing/43 project name colon/test.json create mode 100644 test cases/failing/44 abs subdir/test.json create mode 100644 test cases/failing/45 abspath to srcdir/test.json create mode 100644 test cases/failing/46 pkgconfig variables reserved/test.json create mode 100644 test cases/failing/47 pkgconfig variables zero length/test.json create mode 100644 test cases/failing/48 pkgconfig variables zero length value/test.json create mode 100644 test cases/failing/49 pkgconfig variables not key value/test.json create mode 100644 test cases/failing/5 misplaced option/test.json create mode 100644 test cases/failing/50 executable comparison/test.json create mode 100644 test cases/failing/51 inconsistent comparison/test.json create mode 100644 test cases/failing/52 slashname/test.json create mode 100644 test cases/failing/53 reserved meson prefix/test.json create mode 100644 test cases/failing/54 wrong shared crate type/test.json create mode 100644 test cases/failing/55 wrong static crate type/test.json create mode 100644 test cases/failing/56 or on new line/test.json create mode 100644 test cases/failing/57 kwarg in module/test.json create mode 100644 test cases/failing/58 link with executable/test.json create mode 100644 test cases/failing/59 assign custom target index/test.json create mode 100644 test cases/failing/6 missing incdir/test.json create mode 100644 test cases/failing/60 getoption prefix/test.json create mode 100644 test cases/failing/61 bad option argument/test.json create mode 100644 test cases/failing/62 subproj filegrab/test.json create mode 100644 test cases/failing/63 grab subproj/test.json create mode 100644 test cases/failing/64 grab sibling/test.json create mode 100644 test cases/failing/65 string as link target/test.json create mode 100644 test cases/failing/66 dependency not-found and required/test.json create mode 100644 test cases/failing/68 wrong boost 
module/test.json create mode 100644 test cases/failing/69 install_data rename bad size/test.json create mode 100644 test cases/failing/7 go to subproject/test.json create mode 100644 test cases/failing/70 skip only subdir/test.json create mode 100644 test cases/failing/71 dual override/test.json create mode 100644 test cases/failing/72 override used/test.json create mode 100644 test cases/failing/73 run_command unclean exit/test.json create mode 100644 test cases/failing/74 int literal leading zero/test.json create mode 100644 test cases/failing/75 configuration immutable/test.json create mode 100644 test cases/failing/76 link with shared module on osx/test.json create mode 100644 test cases/failing/77 non ascii in ascii encoded configure file/test.json create mode 100644 test cases/failing/78 subproj dependency not-found and required/test.json create mode 100644 test cases/failing/79 unfound run/test.json create mode 100644 test cases/failing/8 recursive/test.json create mode 100644 test cases/failing/80 framework dependency with version/test.json create mode 100644 test cases/failing/81 override exe config/test.json create mode 100644 test cases/failing/82 gl dependency with version/test.json create mode 100644 test cases/failing/83 threads dependency with version/test.json create mode 100644 test cases/failing/85 dub libray/test.json create mode 100644 test cases/failing/86 dub executable/test.json create mode 100644 test cases/failing/88 subproj not-found dep/test.json create mode 100644 test cases/failing/89 invalid configure file/test.json create mode 100644 test cases/failing/9 missing extra file/test.json create mode 100644 test cases/failing/90 kwarg dupe/test.json create mode 100644 test cases/failing/91 missing pch file/test.json create mode 100644 test cases/failing/92 pch source different folder/test.json create mode 100644 test cases/failing/93 vala without c/test.json create mode 100644 test cases/failing/94 unknown config tool/test.json create mode 100644 test cases/failing/95 custom target install data/test.json create mode 100644 test cases/failing/96 add dict non string key/test.json create mode 100644 test cases/failing/97 add dict duplicate keys/test.json create mode 100644 test cases/failing/99 no native prop/test.json create mode 100644 test cases/warning/1 version for string div/test.json create mode 100644 test cases/warning/2 languages missing native/test.json diff --git a/test cases/failing/1 project not first/test.json b/test cases/failing/1 project not first/test.json new file mode 100644 index 0000000..70f3c41 --- /dev/null +++ b/test cases/failing/1 project not first/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "ERROR: First statement must be a call to project" + } + ] +} diff --git a/test cases/failing/10 out of bounds/test.json b/test cases/failing/10 out of bounds/test.json new file mode 100644 index 0000000..e27d990 --- /dev/null +++ b/test cases/failing/10 out of bounds/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/10 out of bounds/meson.build:4:0: ERROR: Index 0 out of bounds of array of size 0." 
+ } + ] +} diff --git a/test cases/failing/11 object arithmetic/test.json b/test cases/failing/11 object arithmetic/test.json new file mode 100644 index 0000000..5339fac --- /dev/null +++ b/test cases/failing/11 object arithmetic/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/11 object arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*" + } + ] +} diff --git a/test cases/failing/12 string arithmetic/test.json b/test cases/failing/12 string arithmetic/test.json new file mode 100644 index 0000000..476f9bb --- /dev/null +++ b/test cases/failing/12 string arithmetic/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/12 string arithmetic/meson\\.build:3:0: ERROR: Invalid use of addition: .*" + } + ] +} diff --git a/test cases/failing/13 array arithmetic/test.json b/test cases/failing/13 array arithmetic/test.json new file mode 100644 index 0000000..55056ce --- /dev/null +++ b/test cases/failing/13 array arithmetic/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/13 array arithmetic/meson.build:3:0: ERROR: Multiplication works only with integers." + } + ] +} diff --git a/test cases/failing/14 invalid option name/test.json b/test cases/failing/14 invalid option name/test.json new file mode 100644 index 0000000..71e685d --- /dev/null +++ b/test cases/failing/14 invalid option name/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/14 invalid option name/meson_options.txt:1:0: ERROR: Option names can only contain letters, numbers or dashes." + } + ] +} diff --git a/test cases/failing/15 kwarg before arg/test.json b/test cases/failing/15 kwarg before arg/test.json new file mode 100644 index 0000000..c7f72c3 --- /dev/null +++ b/test cases/failing/15 kwarg before arg/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/15 kwarg before arg/meson.build:3:0: ERROR: All keyword arguments must be after positional arguments." + } + ] +} diff --git a/test cases/failing/16 extract from subproject/test.json b/test cases/failing/16 extract from subproject/test.json new file mode 100644 index 0000000..78d45a5 --- /dev/null +++ b/test cases/failing/16 extract from subproject/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/16 extract from subproject/meson.build:6:0: ERROR: Tried to extract objects from a subproject target." + } + ] +} diff --git a/test cases/failing/17 same target/test.json b/test cases/failing/17 same target/test.json new file mode 100644 index 0000000..0005ba4 --- /dev/null +++ b/test cases/failing/17 same target/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/17 same target/meson.build:4:0: ERROR: Tried to create target \"foo\", but a target of that name already exists." + } + ] +} diff --git a/test cases/failing/18 wrong plusassign/test.json b/test cases/failing/18 wrong plusassign/test.json new file mode 100644 index 0000000..c698f85 --- /dev/null +++ b/test cases/failing/18 wrong plusassign/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/18 wrong plusassign/meson.build:3:0: ERROR: Plusassignment target must be an id." 
+ } + ] +} diff --git a/test cases/failing/19 target clash/test.json b/test cases/failing/19 target clash/test.json new file mode 100644 index 0000000..d22b894 --- /dev/null +++ b/test cases/failing/19 target clash/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "ERROR: Multiple producers for Ninja target \"clash\". Please rename your targets." + } + ] +} diff --git a/test cases/failing/2 missing file/test.json b/test cases/failing/2 missing file/test.json new file mode 100644 index 0000000..b95b8b0 --- /dev/null +++ b/test cases/failing/2 missing file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/2 missing file/meson.build:3:0: ERROR: File missing.c does not exist." + } + ] +} diff --git a/test cases/failing/20 version/test.json b/test cases/failing/20 version/test.json new file mode 100644 index 0000000..f330624 --- /dev/null +++ b/test cases/failing/20 version/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/20 version/meson\\.build:1:0: ERROR: Meson version is .* but project requires >100\\.0\\.0" + } + ] +} diff --git a/test cases/failing/21 subver/test.json b/test cases/failing/21 subver/test.json new file mode 100644 index 0000000..f8cfd3a --- /dev/null +++ b/test cases/failing/21 subver/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/21 subver/meson.build:3:0: ERROR: Subproject foo version is 1.0.0 but >1.0.0 required." + } + ] +} diff --git a/test cases/failing/22 assert/test.json b/test cases/failing/22 assert/test.json new file mode 100644 index 0000000..edae999 --- /dev/null +++ b/test cases/failing/22 assert/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/22 assert/meson.build:3:0: ERROR: Assert failed: I am fail." + } + ] +} diff --git a/test cases/failing/23 rel testdir/test.json b/test cases/failing/23 rel testdir/test.json new file mode 100644 index 0000000..ba983ab --- /dev/null +++ b/test cases/failing/23 rel testdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/23 rel testdir/meson.build:4:0: ERROR: Workdir keyword argument must be an absolute path." + } + ] +} diff --git a/test cases/failing/24 int conversion/test.json b/test cases/failing/24 int conversion/test.json new file mode 100644 index 0000000..e749928 --- /dev/null +++ b/test cases/failing/24 int conversion/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/24 int conversion/meson.build:3:13: ERROR: String 'notanumber' cannot be converted to int" + } + ] +} diff --git a/test cases/failing/25 badlang/test.json b/test cases/failing/25 badlang/test.json new file mode 100644 index 0000000..0b23fd7 --- /dev/null +++ b/test cases/failing/25 badlang/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/25 badlang/meson.build:3:0: ERROR: Tried to use unknown language \"nonexisting\"." + } + ] +} diff --git a/test cases/failing/26 output subdir/test.json b/test cases/failing/26 output subdir/test.json new file mode 100644 index 0000000..796468d --- /dev/null +++ b/test cases/failing/26 output subdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/26 output subdir/meson.build:3:0: ERROR: Output file name must not contain a subdirectory." 
+ } + ] +} diff --git a/test cases/failing/27 noprog use/test.json b/test cases/failing/27 noprog use/test.json new file mode 100644 index 0000000..b84562e --- /dev/null +++ b/test cases/failing/27 noprog use/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/27 noprog use/meson.build:5:0: ERROR: Tried to use not-found external program in \"command\"" + } + ] +} diff --git a/test cases/failing/28 no crossprop/test.json b/test cases/failing/28 no crossprop/test.json new file mode 100644 index 0000000..a186a68 --- /dev/null +++ b/test cases/failing/28 no crossprop/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/28 no crossprop/meson.build:3:0: ERROR: Unknown cross property: nonexisting." + } + ] +} diff --git a/test cases/failing/29 nested ternary/test.json b/test cases/failing/29 nested ternary/test.json new file mode 100644 index 0000000..ba05013 --- /dev/null +++ b/test cases/failing/29 nested ternary/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/29 nested ternary/meson.build:3:12: ERROR: Nested ternary operators are not allowed." + } + ] +} diff --git a/test cases/failing/3 missing subdir/test.json b/test cases/failing/3 missing subdir/test.json new file mode 100644 index 0000000..562de25 --- /dev/null +++ b/test cases/failing/3 missing subdir/test.json @@ -0,0 +1,9 @@ +{ + "stdout": [ + { + "comment": "'missing/meson.build' gets transformed with os.path.sep separators", + "match": "re", + "line": "test cases/failing/3 missing subdir/meson\\.build:3:0: ERROR: Non\\-existent build file 'missing[\\\\/]meson\\.build'" + } + ] +} diff --git a/test cases/failing/30 invalid man extension/test.json b/test cases/failing/30 invalid man extension/test.json new file mode 100644 index 0000000..3f77a04 --- /dev/null +++ b/test cases/failing/30 invalid man extension/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/30 invalid man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 8" + } + ] +} diff --git a/test cases/failing/31 no man extension/test.json b/test cases/failing/31 no man extension/test.json new file mode 100644 index 0000000..6e1f542 --- /dev/null +++ b/test cases/failing/31 no man extension/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/31 no man extension/meson.build:2:0: ERROR: Man file must have a file extension of a number between 1 and 8" + } + ] +} diff --git a/test cases/failing/32 exe static shared/test.json b/test cases/failing/32 exe static shared/test.json new file mode 100644 index 0000000..51d3804 --- /dev/null +++ b/test cases/failing/32 exe static shared/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/32 exe static shared/meson.build:9:0: ERROR: Can't link non-PIC static library 'stat' into shared library 'shr2'. Use the 'pic' option to static_library to build with PIC." 
+ } + ] +} diff --git a/test cases/failing/33 non-root subproject/test.json b/test cases/failing/33 non-root subproject/test.json new file mode 100644 index 0000000..a14cece --- /dev/null +++ b/test cases/failing/33 non-root subproject/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/33 non-root subproject/some/meson.build:1:0: ERROR: Subproject directory not found and someproj.wrap file not found" + } + ] +} diff --git a/test cases/failing/34 dependency not-required then required/test.json b/test cases/failing/34 dependency not-required then required/test.json new file mode 100644 index 0000000..bed1a45 --- /dev/null +++ b/test cases/failing/34 dependency not-required then required/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/34 dependency not\\-required then required/meson\\.build:4:0: ERROR: Dependency \"foo\\-bar\\-xyz\\-12\\.3\" not found, tried .*" + } + ] +} diff --git a/test cases/failing/35 project argument after target/test.json b/test cases/failing/35 project argument after target/test.json new file mode 100644 index 0000000..f5efd9b --- /dev/null +++ b/test cases/failing/35 project argument after target/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/35 project argument after target/meson.build:7:0: ERROR: Tried to use 'add_project_arguments' after a build target has been declared." + } + ] +} diff --git a/test cases/failing/37 has function external dependency/test.json b/test cases/failing/37 has function external dependency/test.json new file mode 100644 index 0000000..81d6f91 --- /dev/null +++ b/test cases/failing/37 has function external dependency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/37 has function external dependency/meson.build:8:3: ERROR: Dependencies must be external dependencies" + } + ] +} diff --git a/test cases/failing/38 libdir must be inside prefix/test.json b/test cases/failing/38 libdir must be inside prefix/test.json index 1cd893c..d9256d1 100644 --- a/test cases/failing/38 libdir must be inside prefix/test.json +++ b/test cases/failing/38 libdir must be inside prefix/test.json @@ -1,3 +1,10 @@ { - "do_not_set_opts": ["libdir"] + "do_not_set_opts": [ + "libdir" + ], + "stdout": [ + { + "line": "test cases/failing/38 libdir must be inside prefix/meson.build:1:0: ERROR: The value of the 'libdir' option is '/opt/lib' which must be a subdir of the prefix '/usr'." 
+ } + ] } diff --git a/test cases/failing/39 prefix absolute/test.json b/test cases/failing/39 prefix absolute/test.json index 4e0f6cd..2770243 100644 --- a/test cases/failing/39 prefix absolute/test.json +++ b/test cases/failing/39 prefix absolute/test.json @@ -1,3 +1,11 @@ { - "do_not_set_opts": ["prefix"] + "do_not_set_opts": [ + "prefix" + ], + "stdout": [ + { + "comment": "literal 'some/path/notabs' appears in output, irrespective of os.path.sep, as that's the prefix", + "line": "test cases/failing/39 prefix absolute/meson.build:1:0: ERROR: prefix value 'some/path/notabs' must be an absolute path" + } + ] } diff --git a/test cases/failing/4 missing meson.build/test.json b/test cases/failing/4 missing meson.build/test.json new file mode 100644 index 0000000..3857090 --- /dev/null +++ b/test cases/failing/4 missing meson.build/test.json @@ -0,0 +1,9 @@ +{ + "stdout": [ + { + "match": "re", + "comment": "'subdir/meson.build' gets transformed with os.path.sep separators", + "line": "test cases/failing/4 missing meson\\.build/meson\\.build:3:0: ERROR: Non\\-existent build file 'subdir[\\\\/]meson\\.build'" + } + ] +} diff --git a/test cases/failing/40 kwarg assign/test.json b/test cases/failing/40 kwarg assign/test.json new file mode 100644 index 0000000..671eb3f --- /dev/null +++ b/test cases/failing/40 kwarg assign/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/40 kwarg assign/meson.build:3:0: ERROR: Tried to assign values inside an argument list." + } + ] +} diff --git a/test cases/failing/41 custom target plainname many inputs/test.json b/test cases/failing/41 custom target plainname many inputs/test.json new file mode 100644 index 0000000..8c15cda --- /dev/null +++ b/test cases/failing/41 custom target plainname many inputs/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/41 custom target plainname many inputs/meson.build:5:0: ERROR: Output cannot contain @PLAINNAME@ or @BASENAME@ when there is more than one input (we can't know which to use)" + } + ] +} diff --git a/test cases/failing/42 custom target outputs not matching install_dirs/test.json b/test cases/failing/42 custom target outputs not matching install_dirs/test.json index e59cb9f..f9e2ba7 100644 --- a/test cases/failing/42 custom target outputs not matching install_dirs/test.json +++ b/test cases/failing/42 custom target outputs not matching install_dirs/test.json @@ -1,10 +1,33 @@ { "installed": [ - {"type": "file", "file": "usr/include/diff.h"}, - {"type": "file", "file": "usr/include/first.h"}, - {"type": "file", "file": "usr/bin/diff.sh"}, - {"type": "file", "file": "usr/bin/second.sh"}, - {"type": "file", "file": "opt/same.h"}, - {"type": "file", "file": "opt/same.sh"} + { + "type": "file", + "file": "usr/include/diff.h" + }, + { + "type": "file", + "file": "usr/include/first.h" + }, + { + "type": "file", + "file": "usr/bin/diff.sh" + }, + { + "type": "file", + "file": "usr/bin/second.sh" + }, + { + "type": "file", + "file": "opt/same.h" + }, + { + "type": "file", + "file": "opt/same.sh" + } + ], + "stdout": [ + { + "line": "ERROR: Target 'too-few-install-dirs' has 3 outputs: ['toofew.h', 'toofew.c', 'toofew.sh'], but only 2 \"install_dir\"s were found." 
+ } ] } diff --git a/test cases/failing/43 project name colon/test.json b/test cases/failing/43 project name colon/test.json new file mode 100644 index 0000000..7a55574 --- /dev/null +++ b/test cases/failing/43 project name colon/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/43 project name colon/meson.build:1:0: ERROR: Project name 'name with :' must not contain ':'" + } + ] +} diff --git a/test cases/failing/44 abs subdir/test.json b/test cases/failing/44 abs subdir/test.json new file mode 100644 index 0000000..0aa56f6 --- /dev/null +++ b/test cases/failing/44 abs subdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/44 abs subdir/meson.build:5:0: ERROR: Subdir argument must be a relative path." + } + ] +} diff --git a/test cases/failing/45 abspath to srcdir/test.json b/test cases/failing/45 abspath to srcdir/test.json new file mode 100644 index 0000000..b6a87fe --- /dev/null +++ b/test cases/failing/45 abspath to srcdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/45 abspath to srcdir/meson.build:3:0: ERROR: Tried to form an absolute path to a source dir. You should not do that but use relative paths instead." + } + ] +} diff --git a/test cases/failing/46 pkgconfig variables reserved/test.json b/test cases/failing/46 pkgconfig variables reserved/test.json new file mode 100644 index 0000000..b92ee17 --- /dev/null +++ b/test cases/failing/46 pkgconfig variables reserved/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/46 pkgconfig variables reserved/meson.build:8:5: ERROR: Variable \"prefix\" is reserved" + } + ] +} diff --git a/test cases/failing/47 pkgconfig variables zero length/test.json b/test cases/failing/47 pkgconfig variables zero length/test.json new file mode 100644 index 0000000..097fee1 --- /dev/null +++ b/test cases/failing/47 pkgconfig variables zero length/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/47 pkgconfig variables zero length/meson.build:8:5: ERROR: Invalid variable \"=value\". Variables must be in 'name=value' format" + } + ] +} diff --git a/test cases/failing/48 pkgconfig variables zero length value/test.json b/test cases/failing/48 pkgconfig variables zero length value/test.json new file mode 100644 index 0000000..50a35ce --- /dev/null +++ b/test cases/failing/48 pkgconfig variables zero length value/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/48 pkgconfig variables zero length value/meson.build:8:5: ERROR: Invalid variable \"key=\". Variables must be in 'name=value' format" + } + ] +} diff --git a/test cases/failing/49 pkgconfig variables not key value/test.json b/test cases/failing/49 pkgconfig variables not key value/test.json new file mode 100644 index 0000000..cf07e62 --- /dev/null +++ b/test cases/failing/49 pkgconfig variables not key value/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/49 pkgconfig variables not key value/meson.build:8:5: ERROR: Invalid variable \"this_should_be_key_value\". Variables must be in 'name=value' format" + } + ] +} diff --git a/test cases/failing/5 misplaced option/test.json b/test cases/failing/5 misplaced option/test.json new file mode 100644 index 0000000..12afdf0 --- /dev/null +++ b/test cases/failing/5 misplaced option/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/5 misplaced option/meson.build:3:0: ERROR: Tried to call option() in build description file. 
All options must be in the option file." + } + ] +} diff --git a/test cases/failing/50 executable comparison/test.json b/test cases/failing/50 executable comparison/test.json new file mode 100644 index 0000000..585b382 --- /dev/null +++ b/test cases/failing/50 executable comparison/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/50 executable comparison/meson.build:6:0: ERROR: exe1 can only be compared for equality." + } + ] +} diff --git a/test cases/failing/51 inconsistent comparison/test.json b/test cases/failing/51 inconsistent comparison/test.json new file mode 100644 index 0000000..5867f0a --- /dev/null +++ b/test cases/failing/51 inconsistent comparison/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/51 inconsistent comparison/meson.build:5:0: ERROR: Values of different types (list, str) cannot be compared using <." + } + ] +} diff --git a/test cases/failing/52 slashname/test.json b/test cases/failing/52 slashname/test.json new file mode 100644 index 0000000..180400a --- /dev/null +++ b/test cases/failing/52 slashname/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/52 slashname/meson.build:11:0: ERROR: Problem encountered: Re-enable me once slash in name is finally prohibited." + } + ] +} diff --git a/test cases/failing/53 reserved meson prefix/test.json b/test cases/failing/53 reserved meson prefix/test.json new file mode 100644 index 0000000..502d96a --- /dev/null +++ b/test cases/failing/53 reserved meson prefix/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/53 reserved meson prefix/meson.build:3:0: ERROR: The \"meson-\" prefix is reserved and cannot be used for top-level subdir()." + } + ] +} diff --git a/test cases/failing/54 wrong shared crate type/test.json b/test cases/failing/54 wrong shared crate type/test.json new file mode 100644 index 0000000..5cced6f --- /dev/null +++ b/test cases/failing/54 wrong shared crate type/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/54 wrong shared crate type/meson.build:7:0: ERROR: Crate type \"staticlib\" invalid for dynamic libraries; must be \"dylib\" or \"cdylib\"" + } + ] +} diff --git a/test cases/failing/55 wrong static crate type/test.json b/test cases/failing/55 wrong static crate type/test.json new file mode 100644 index 0000000..7073f7b --- /dev/null +++ b/test cases/failing/55 wrong static crate type/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/55 wrong static crate type/meson.build:7:0: ERROR: Crate type \"cdylib\" invalid for static libraries; must be \"rlib\" or \"staticlib\"" + } + ] +} diff --git a/test cases/failing/56 or on new line/test.json b/test cases/failing/56 or on new line/test.json new file mode 100644 index 0000000..c55cee6 --- /dev/null +++ b/test cases/failing/56 or on new line/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/56 or on new line/meson.build:4:8: ERROR: Invalid or clause." + } + ] +} diff --git a/test cases/failing/57 kwarg in module/test.json b/test cases/failing/57 kwarg in module/test.json new file mode 100644 index 0000000..cafb3ab --- /dev/null +++ b/test cases/failing/57 kwarg in module/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/57 kwarg in module/meson.build:3:0: ERROR: Function does not take keyword arguments." 
+ } + ] +} diff --git a/test cases/failing/58 link with executable/test.json b/test cases/failing/58 link with executable/test.json new file mode 100644 index 0000000..d3975c1 --- /dev/null +++ b/test cases/failing/58 link with executable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/58 link with executable/meson.build:4:0: ERROR: Link target 'prog' is not linkable." + } + ] +} diff --git a/test cases/failing/59 assign custom target index/test.json b/test cases/failing/59 assign custom target index/test.json new file mode 100644 index 0000000..07ecb91 --- /dev/null +++ b/test cases/failing/59 assign custom target index/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/59 assign custom target index/meson.build:24:0: ERROR: Assignment target must be an id." + } + ] +} diff --git a/test cases/failing/6 missing incdir/test.json b/test cases/failing/6 missing incdir/test.json new file mode 100644 index 0000000..172d8a9 --- /dev/null +++ b/test cases/failing/6 missing incdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/6 missing incdir/meson.build:3:0: ERROR: Include dir nosuchdir does not exist." + } + ] +} diff --git a/test cases/failing/60 getoption prefix/test.json b/test cases/failing/60 getoption prefix/test.json new file mode 100644 index 0000000..03bf419 --- /dev/null +++ b/test cases/failing/60 getoption prefix/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/60 getoption prefix/meson.build:5:0: ERROR: Having a colon in option name is forbidden, projects are not allowed to directly access options of other subprojects." + } + ] +} diff --git a/test cases/failing/61 bad option argument/test.json b/test cases/failing/61 bad option argument/test.json new file mode 100644 index 0000000..4002005 --- /dev/null +++ b/test cases/failing/61 bad option argument/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/61 bad option argument/meson_options.txt:1:0: ERROR: Invalid kwargs for option \"name\": \"vaule\"" + } + ] +} diff --git a/test cases/failing/62 subproj filegrab/test.json b/test cases/failing/62 subproj filegrab/test.json new file mode 100644 index 0000000..dd0d7bb --- /dev/null +++ b/test cases/failing/62 subproj filegrab/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/62 subproj filegrab/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file prog.c from a different subproject." + } + ] +} diff --git a/test cases/failing/63 grab subproj/test.json b/test cases/failing/63 grab subproj/test.json new file mode 100644 index 0000000..8147905 --- /dev/null +++ b/test cases/failing/63 grab subproj/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/63 grab subproj/meson.build:7:0: ERROR: Sandbox violation: Tried to grab file sub.c from a different subproject." + } + ] +} diff --git a/test cases/failing/64 grab sibling/test.json b/test cases/failing/64 grab sibling/test.json new file mode 100644 index 0000000..1604d47 --- /dev/null +++ b/test cases/failing/64 grab sibling/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/64 grab sibling/subprojects/a/meson.build:3:0: ERROR: Sandbox violation: Tried to grab file sneaky.c from a different subproject." 
+ } + ] +} diff --git a/test cases/failing/65 string as link target/test.json b/test cases/failing/65 string as link target/test.json new file mode 100644 index 0000000..e212482 --- /dev/null +++ b/test cases/failing/65 string as link target/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/65 string as link target/meson.build:2:0: ERROR: '' is not a target." + } + ] +} diff --git a/test cases/failing/66 dependency not-found and required/test.json b/test cases/failing/66 dependency not-found and required/test.json new file mode 100644 index 0000000..5b13316 --- /dev/null +++ b/test cases/failing/66 dependency not-found and required/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/66 dependency not-found and required/meson.build:2:0: ERROR: Dependency is both required and not-found" + } + ] +} diff --git a/test cases/failing/68 wrong boost module/test.json b/test cases/failing/68 wrong boost module/test.json new file mode 100644 index 0000000..9ef1b0f --- /dev/null +++ b/test cases/failing/68 wrong boost module/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/68 wrong boost module/meson.build:9:0: ERROR: Dependency \"boost\" not found" + } + ] +} diff --git a/test cases/failing/69 install_data rename bad size/test.json b/test cases/failing/69 install_data rename bad size/test.json new file mode 100644 index 0000000..1329fec --- /dev/null +++ b/test cases/failing/69 install_data rename bad size/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/69 install_data rename bad size/meson.build:3:0: ERROR: Size of rename argument is different from number of sources" + } + ] +} diff --git a/test cases/failing/7 go to subproject/test.json b/test cases/failing/7 go to subproject/test.json new file mode 100644 index 0000000..c254757 --- /dev/null +++ b/test cases/failing/7 go to subproject/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/7 go to subproject/meson.build:3:0: ERROR: Must not go into subprojects dir with subdir(), use subproject() instead." + } + ] +} diff --git a/test cases/failing/70 skip only subdir/test.json b/test cases/failing/70 skip only subdir/test.json new file mode 100644 index 0000000..3b40b66 --- /dev/null +++ b/test cases/failing/70 skip only subdir/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/70 skip only subdir/meson.build:8:0: ERROR: File main.cpp does not exist." + } + ] +} diff --git a/test cases/failing/71 dual override/test.json b/test cases/failing/71 dual override/test.json new file mode 100644 index 0000000..66409e6 --- /dev/null +++ b/test cases/failing/71 dual override/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/71 dual override/meson.build:5:6: ERROR: Tried to override executable \"override\" which has already been overridden." + } + ] +} diff --git a/test cases/failing/72 override used/test.json b/test cases/failing/72 override used/test.json new file mode 100644 index 0000000..29a58f1 --- /dev/null +++ b/test cases/failing/72 override used/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/72 override used/meson.build:5:6: ERROR: Tried to override finding of executable \"something.py\" which has already been found." 
+ } + ] +} diff --git a/test cases/failing/73 run_command unclean exit/test.json b/test cases/failing/73 run_command unclean exit/test.json new file mode 100644 index 0000000..beda187 --- /dev/null +++ b/test cases/failing/73 run_command unclean exit/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/73 run_command unclean exit/meson\\.build:4:0: ERROR: Command \".*[\\\\/]test cases[\\\\/]failing[\\\\/]73 run_command unclean exit[\\\\/]\\.[\\\\/]returncode\\.py 1\" failed with status 1\\." + } + ] +} diff --git a/test cases/failing/74 int literal leading zero/test.json b/test cases/failing/74 int literal leading zero/test.json new file mode 100644 index 0000000..78a735e --- /dev/null +++ b/test cases/failing/74 int literal leading zero/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "comment": "this error message is not very informative", + "line": "test cases/failing/74 int literal leading zero/meson.build:5:13: ERROR: Expecting eof got number." + } + ] +} diff --git a/test cases/failing/75 configuration immutable/test.json b/test cases/failing/75 configuration immutable/test.json new file mode 100644 index 0000000..3365aae --- /dev/null +++ b/test cases/failing/75 configuration immutable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/75 configuration immutable/meson.build:12:16: ERROR: Can not set values on configuration object that has been used." + } + ] +} diff --git a/test cases/failing/76 link with shared module on osx/test.json b/test cases/failing/76 link with shared module on osx/test.json new file mode 100644 index 0000000..4e2856f --- /dev/null +++ b/test cases/failing/76 link with shared module on osx/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/76 link with shared module on osx/meson.build:8:0: ERROR: target links against shared modules." 
+ } + ] +} diff --git a/test cases/failing/77 non ascii in ascii encoded configure file/test.json b/test cases/failing/77 non ascii in ascii encoded configure file/test.json new file mode 100644 index 0000000..e35b95b --- /dev/null +++ b/test cases/failing/77 non ascii in ascii encoded configure file/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "match": "re", + "line": "test cases/failing/77 non ascii in ascii encoded configure file/meson\\.build:5:0: ERROR: Could not write output file .*[\\\\/]config9\\.h: 'ascii' codec can't encode character '\\\\u0434' in position 17: ordinal not in range\\(128\\)" + } + ] +} diff --git a/test cases/failing/78 subproj dependency not-found and required/test.json b/test cases/failing/78 subproj dependency not-found and required/test.json new file mode 100644 index 0000000..534b4f4 --- /dev/null +++ b/test cases/failing/78 subproj dependency not-found and required/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/78 subproj dependency not-found and required/meson.build:2:0: ERROR: Subproject directory not found and missing.wrap file not found" + } + ] +} diff --git a/test cases/failing/79 unfound run/test.json b/test cases/failing/79 unfound run/test.json new file mode 100644 index 0000000..6baafc0 --- /dev/null +++ b/test cases/failing/79 unfound run/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/79 unfound run/meson.build:4:0: ERROR: Tried to use non-existing executable 'nonexisting_prog'" + } + ] +} diff --git a/test cases/failing/8 recursive/test.json b/test cases/failing/8 recursive/test.json new file mode 100644 index 0000000..b4c964c --- /dev/null +++ b/test cases/failing/8 recursive/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/8 recursive/subprojects/b/meson.build:3:0: ERROR: Recursive include of subprojects: a => b => a." + } + ] +} diff --git a/test cases/failing/80 framework dependency with version/test.json b/test cases/failing/80 framework dependency with version/test.json new file mode 100644 index 0000000..5cbc129 --- /dev/null +++ b/test cases/failing/80 framework dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/80 framework dependency with version/meson.build:8:0: ERROR: Unknown version of dependency 'appleframeworks', but need ['>0']." + } + ] +} diff --git a/test cases/failing/81 override exe config/test.json b/test cases/failing/81 override exe config/test.json new file mode 100644 index 0000000..f19785b --- /dev/null +++ b/test cases/failing/81 override exe config/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/81 override exe config/meson.build:6:0: ERROR: Program 'bar' was overridden with the compiled executable 'foo' and therefore cannot be used during configuration" + } + ] +} diff --git a/test cases/failing/82 gl dependency with version/test.json b/test cases/failing/82 gl dependency with version/test.json new file mode 100644 index 0000000..2c63a2c --- /dev/null +++ b/test cases/failing/82 gl dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/82 gl dependency with version/meson.build:9:0: ERROR: Unknown version of dependency 'gl', but need ['>0']." 
+ } + ] +} diff --git a/test cases/failing/83 threads dependency with version/test.json b/test cases/failing/83 threads dependency with version/test.json new file mode 100644 index 0000000..b131be4 --- /dev/null +++ b/test cases/failing/83 threads dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/83 threads dependency with version/meson.build:3:0: ERROR: Unknown version of dependency 'threads', but need ['>0']." + } + ] +} diff --git a/test cases/failing/85 dub libray/test.json b/test cases/failing/85 dub libray/test.json new file mode 100644 index 0000000..a8b3e28 --- /dev/null +++ b/test cases/failing/85 dub libray/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/85 dub libray/meson.build:11:0: ERROR: Dependency \"dubtestproject\" not found" + } + ] +} diff --git a/test cases/failing/86 dub executable/test.json b/test cases/failing/86 dub executable/test.json new file mode 100644 index 0000000..f9944af --- /dev/null +++ b/test cases/failing/86 dub executable/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/86 dub executable/meson.build:11:0: ERROR: Dependency \"dubtestproject:test1\" not found" + } + ] +} diff --git a/test cases/failing/87 dub compiler/test.json b/test cases/failing/87 dub compiler/test.json index acb7da8..f28312f 100644 --- a/test cases/failing/87 dub compiler/test.json +++ b/test cases/failing/87 dub compiler/test.json @@ -2,8 +2,18 @@ "matrix": { "options": { "warning_level": [ - { "val": "1", "skip_on_env": [ "SINGLE_DUB_COMPILER" ] } + { + "val": "1", + "skip_on_env": [ + "SINGLE_DUB_COMPILER" + ] + } ] } - } + }, + "stdout": [ + { + "line": "test cases/failing/87 dub compiler/meson.build:17:0: ERROR: Dependency \"dubtestproject:test2\" not found" + } + ] } diff --git a/test cases/failing/88 subproj not-found dep/test.json b/test cases/failing/88 subproj not-found dep/test.json new file mode 100644 index 0000000..a1c4231 --- /dev/null +++ b/test cases/failing/88 subproj not-found dep/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/88 subproj not-found dep/meson.build:2:0: ERROR: Could not find dependency notfound_dep in subproject somesubproj" + } + ] +} diff --git a/test cases/failing/89 invalid configure file/test.json b/test cases/failing/89 invalid configure file/test.json new file mode 100644 index 0000000..921ce61 --- /dev/null +++ b/test cases/failing/89 invalid configure file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/89 invalid configure file/meson.build:3:0: ERROR: \"install_dir\" must be specified when \"install\" in a configure_file is true" + } + ] +} diff --git a/test cases/failing/9 missing extra file/test.json b/test cases/failing/9 missing extra file/test.json new file mode 100644 index 0000000..188b6a6 --- /dev/null +++ b/test cases/failing/9 missing extra file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/9 missing extra file/meson.build:3:0: ERROR: File missing.txt does not exist." + } + ] +} diff --git a/test cases/failing/90 kwarg dupe/test.json b/test cases/failing/90 kwarg dupe/test.json new file mode 100644 index 0000000..a8df75d --- /dev/null +++ b/test cases/failing/90 kwarg dupe/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/90 kwarg dupe/meson.build:5:0: ERROR: Entry \"install\" defined both as a keyword argument and in a \"kwarg\" entry." 
+ } + ] +} diff --git a/test cases/failing/91 missing pch file/test.json b/test cases/failing/91 missing pch file/test.json new file mode 100644 index 0000000..166f627 --- /dev/null +++ b/test cases/failing/91 missing pch file/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "comment": "literal 'pch/prog.h' from meson.build appears in output, irrespective of os.path.sep", + "line": "test cases/failing/91 missing pch file/meson.build:2:0: ERROR: File pch/prog.h does not exist." + } + ] +} diff --git a/test cases/failing/92 pch source different folder/test.json b/test cases/failing/92 pch source different folder/test.json new file mode 100644 index 0000000..d94db50 --- /dev/null +++ b/test cases/failing/92 pch source different folder/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/92 pch source different folder/meson.build:4:0: ERROR: PCH files must be stored in the same folder." + } + ] +} diff --git a/test cases/failing/93 vala without c/test.json b/test cases/failing/93 vala without c/test.json new file mode 100644 index 0000000..6185b7e --- /dev/null +++ b/test cases/failing/93 vala without c/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/93 vala without c/meson.build:2:0: ERROR: Compiling Vala requires C. Add C to your project languages and rerun Meson." + } + ] +} diff --git a/test cases/failing/94 unknown config tool/test.json b/test cases/failing/94 unknown config tool/test.json new file mode 100644 index 0000000..a001152 --- /dev/null +++ b/test cases/failing/94 unknown config tool/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/94 unknown config tool/meson.build:2:0: ERROR: Dependency \"no-such-config-tool\" not found" + } + ] +} diff --git a/test cases/failing/95 custom target install data/test.json b/test cases/failing/95 custom target install data/test.json new file mode 100644 index 0000000..64ef530 --- /dev/null +++ b/test cases/failing/95 custom target install data/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/95 custom target install data/meson.build:11:0: ERROR: Argument must be string or file." + } + ] +} diff --git a/test cases/failing/96 add dict non string key/test.json b/test cases/failing/96 add dict non string key/test.json new file mode 100644 index 0000000..5fd4033 --- /dev/null +++ b/test cases/failing/96 add dict non string key/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/96 add dict non string key/meson.build:9:0: ERROR: Key must be a string" + } + ] +} diff --git a/test cases/failing/97 add dict duplicate keys/test.json b/test cases/failing/97 add dict duplicate keys/test.json new file mode 100644 index 0000000..9d01551 --- /dev/null +++ b/test cases/failing/97 add dict duplicate keys/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/97 add dict duplicate keys/meson.build:9:0: ERROR: Duplicate dictionary key: myKey" + } + ] +} diff --git a/test cases/failing/99 no native prop/test.json b/test cases/failing/99 no native prop/test.json new file mode 100644 index 0000000..8c320d9 --- /dev/null +++ b/test cases/failing/99 no native prop/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/99 no native prop/meson.build:3:0: ERROR: Unknown native property: nonexisting." 
+ } + ] +} diff --git a/test cases/warning/1 version for string div/test.json b/test cases/warning/1 version for string div/test.json new file mode 100644 index 0000000..ce1af59 --- /dev/null +++ b/test cases/warning/1 version for string div/test.json @@ -0,0 +1,8 @@ +{ + "stdout": [ + { + "comment": "literal '/' appears in output, irrespective of os.path.sep, as that's the operator", + "line": "WARNING: Project targeting '>=0.48.0' but tried to use feature introduced in '0.49.0': / with string arguments" + } + ] +} diff --git a/test cases/warning/2 languages missing native/test.json b/test cases/warning/2 languages missing native/test.json new file mode 100644 index 0000000..36da0a7 --- /dev/null +++ b/test cases/warning/2 languages missing native/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/warning/2 languages missing native/meson.build:2: WARNING: add_languages is missing native:, assuming languages are wanted for both host and build." + } + ] +} -- cgit v1.1 From 630cfd84ad9fd924ea56ccbf69cf37b040b2b3d3 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Mon, 27 Apr 2020 22:10:30 +0100 Subject: Update dircondenser.py tool to update paths in test.json as well Update dircondenser.py tool to update paths appearing in the expected stdout in test.json when the containing directory is renamed. --- tools/dircondenser.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tools/dircondenser.py b/tools/dircondenser.py index 023c14e..0e28bec 100755 --- a/tools/dircondenser.py +++ b/tools/dircondenser.py @@ -74,6 +74,10 @@ def condense(dirname: str): #print('git mv "%s" "%s"' % (old_name, new_name)) subprocess.check_call(['git', 'mv', old_name, new_name]) replacements.append((old_name, new_name)) + # update any appearances of old_name in expected stdout in test.json + json = os.path.join(new_name, 'test.json') + if os.path.isfile(json): + replace_source(json, [(old_name, new_name)]) os.chdir(curdir) replace_source('run_unittests.py', replacements) replace_source('run_project_tests.py', replacements) -- cgit v1.1 From 1dee6c618d879c20bf0a70eebc54bc8caf47716f Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Tue, 28 Apr 2020 00:45:08 +0100 Subject: Update test.json schema to add stdout Update the test.json schema, adding the 'stdout' property. Also amend the test.json schema so the presence of an unexpected property on the root object causes a validation error. v2: Also add 'tools' property to json schema. Amend the documentation not to use the word 'list' to describe a dict. 
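For reference, a minimal test.json fragment exercising the new 'stdout' property (i.e. the shape this schema change is written to validate) is sketched below. It mirrors the entries added to the failing test cases earlier in this series; the test-case path and error text are hypothetical placeholders, while the keys "stdout", "line" and "match" (with values "literal" or "re") are the ones defined by the schema.

{
  "stdout": [
    {
      "line": "test cases/failing/NN hypothetical case/meson.build:1:0: ERROR: literal error text expected on stdout"
    },
    {
      "match": "re",
      "line": "test cases/failing/NN hypothetical case/meson\\.build:2:0: ERROR: .*"
    }
  ]
}
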
--- data/test.schema.json | 25 +++++++++++++++++++++++++ docs/markdown/Contributing.md | 6 +++--- 2 files changed, 28 insertions(+), 3 deletions(-) diff --git a/data/test.schema.json b/data/test.schema.json index 72f160f..d3b80d0 100644 --- a/data/test.schema.json +++ b/data/test.schema.json @@ -1,5 +1,6 @@ { "type": "object", + "additionalProperties": false, "properties": { "env": { "type": "object", @@ -100,6 +101,30 @@ "prefix" ] } + }, + "tools": { + "type": "object" + }, + "stdout": { + "type": "array", + "items": { + "type": "object", + "properties": { + "line": { + "type": "string" + }, + "match": { + "type": "string", + "enum": [ + "literal", + "re" + ] + } + }, + "required": [ + "line" + ] + } } } } diff --git a/docs/markdown/Contributing.md b/docs/markdown/Contributing.md index 34cd8ca..b16f615 100644 --- a/docs/markdown/Contributing.md +++ b/docs/markdown/Contributing.md @@ -329,10 +329,10 @@ Currently supported values are: #### tools -This section specifies a list of tool requirements in a simple key-value format. +This section specifies a dict of tool requirements in a simple key-value format. If a tool is specified, it has to be present in the environment, and the version -requirement must be fulfilled match. Otherwise, the entire test is skipped -(including every element in the test matrix). +requirement must be fulfilled. Otherwise, the entire test is skipped (including +every element in the test matrix). #### stdout -- cgit v1.1 From d7c24ccddd13b4b36d63df1908cfa886f9fb7324 Mon Sep 17 00:00:00 2001 From: James Hilliard Date: Sun, 26 Apr 2020 19:30:42 -0600 Subject: Allow get_variable to still function when the fallback is a disabler. --- mesonbuild/interpreter.py | 2 ++ mesonbuild/interpreterbase.py | 2 +- test cases/common/163 disabler/meson.build | 28 ++++++++++++++++++++++++++++ 3 files changed, 31 insertions(+), 1 deletion(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index dd1e57b..441f3c4 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -4661,6 +4661,8 @@ This will become a hard error in the future.''', location=self.current_node) if len(args) < 1 or len(args) > 2: raise InvalidCode('Get_variable takes one or two arguments.') varname = args[0] + if isinstance(varname, Disabler): + return varname if not isinstance(varname, str): raise InterpreterException('First argument must be a string.') try: diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 1a7aa38..6246a06 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -819,7 +819,7 @@ The result of this is undefined and will become a hard error in a future Meson r def function_call(self, node: mparser.FunctionNode) -> T.Optional[TYPE_var]: func_name = node.func_name (posargs, kwargs) = self.reduce_arguments(node.args) - if is_disabled(posargs, kwargs) and func_name != 'set_variable' and func_name != 'is_disabler': + if is_disabled(posargs, kwargs) and func_name not in {'get_variable', 'set_variable', 'is_disabler'}: return Disabler() if func_name in self.funcs: func = self.funcs[func_name] diff --git a/test cases/common/163 disabler/meson.build b/test cases/common/163 disabler/meson.build index 5554f14..d132e2b 100644 --- a/test cases/common/163 disabler/meson.build +++ b/test cases/common/163 disabler/meson.build @@ -9,6 +9,7 @@ d2 = dependency(d) d3 = (d == d2) d4 = d + 0 d5 = d2 or true +set_variable('d6', disabler()) has_not_changed = false if is_disabler(d) @@ -23,12 +24,14 @@ assert(is_disabler(d2), 'Function 
laundered disabler was not identified correctl assert(is_disabler(d3), 'Disabler comparison should yield disabler.') assert(is_disabler(d4), 'Disabler addition should yield disabler.') assert(is_disabler(d5), 'Disabler logic op should yield disabler.') +assert(is_disabler(d6), 'set_variable with a disabler should set variable to disabler.') assert(d, 'Disabler did not cause this to be skipped.') assert(d2, 'Function laundered disabler did not cause this to be skipped.') assert(d3, 'Disabler comparison should yield disabler and thus this would not be called.') assert(d4, 'Disabler addition should yield disabler and thus this would not be called.') assert(d5, 'Disabler logic op should yield disabler and thus this would not be called.') +assert(d6, 'set_variable with a disabler did not cause this to be skipped.') number = 0 @@ -80,6 +83,31 @@ else endif assert(has_not_changed, 'App has changed.') +assert(not is_disabler(is_variable('d6')), 'is_variable should not return a disabler') +assert(is_variable('d6'), 'is_variable for a disabler should return true') + +if_is_not_disabled = false +if is_variable('d6') + if_is_not_disabled = true +else + if_is_not_disabled = true +endif +assert(if_is_not_disabled, 'Disabler in is_variable should not skip blocks') + +get_d = get_variable('d6') +assert(is_disabler(get_d), 'get_variable should yield a disabler') + +get_fallback_d = get_variable('nonexistant', disabler()) +assert(is_disabler(get_fallback_d), 'get_variable fallback should yield a disabler') + +var_true = true +get_no_fallback_d = get_variable('var_true', disabler()) +assert(not is_disabler(get_no_fallback_d), 'get_variable should not fallback to disabler') +assert(get_no_fallback_d, 'get_variable should yield true') + +assert(is_disabler(get_variable(disabler())), 'get_variable should yield a disabler') +assert(is_disabler(get_variable(disabler(), var_true)), 'get_variable should yield a disabler') + if_is_disabled = true if disabler() if_is_disabled = false -- cgit v1.1 From 542255993cce730b01d8bf79bf48aa8f5ad36fc9 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Apr 2020 13:29:16 -0700 Subject: mtest: Replace if (bool) { return bool; } with return bool; --- mesonbuild/mtest.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 3239736..b90a65d 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -544,9 +544,7 @@ def write_json_log(jsonlogfile: T.TextIO, test_name: str, result: TestRun) -> No jsonlogfile.write(json.dumps(jresult) + '\n') def run_with_mono(fname: str) -> bool: - if fname.endswith('.exe') and not (is_windows() or is_cygwin()): - return True - return False + return fname.endswith('.exe') and not (is_windows() or is_cygwin()) def load_benchmarks(build_dir: str) -> T.List['TestSerialisation']: datafile = Path(build_dir) / 'meson-private' / 'meson_benchmark_setup.dat' -- cgit v1.1 From fe46515630a9ec8da23fea3f9940302815385119 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Apr 2020 13:30:37 -0700 Subject: mtest: use argparse.type to simplify some code --- mesonbuild/mtest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index b90a65d..846b474 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -93,7 +93,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help='List available tests.') parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args, help='wrapper to run tests with (e.g. 
Valgrind)') - parser.add_argument('-C', default='.', dest='wd', + parser.add_argument('-C', default='.', dest='wd', type=os.path.abspath, help='directory to cd into before running') parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE', help='Only run tests belonging to the given suite.') @@ -1160,7 +1160,6 @@ def run(options: argparse.Namespace) -> int: if not exe.found(): print('Could not find requested program: {!r}'.format(check_bin)) return 1 - options.wd = os.path.abspath(options.wd) if not options.list and not options.no_rebuild: if not rebuild_all(options.wd): -- cgit v1.1 From 793c3d706e81ff639db1a1314b773b0cfdebd7db Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Apr 2020 13:35:55 -0700 Subject: backends/backends: sort and cleanup imports --- mesonbuild/backend/backends.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 31ddfb4..774764d 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -12,23 +12,27 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os, pickle, re +from collections import OrderedDict +from functools import lru_cache +import json +import os +import pickle +import re +import shlex +import subprocess import textwrap +import typing as T + from .. import build from .. import dependencies from .. import mesonlib from .. import mlog -import json -import subprocess -from ..mesonlib import MachineChoice, MesonException, OrderedSet, OptionOverrideProxy -from ..mesonlib import classify_unity_sources, unholder -from ..mesonlib import File from ..compilers import CompilerArgs, VisualStudioLikeCompiler from ..interpreter import Interpreter -from collections import OrderedDict -import shlex -from functools import lru_cache -import typing as T +from ..mesonlib import ( + File, MachineChoice, MesonException, OrderedSet, OptionOverrideProxy, + classify_unity_sources, unholder +) class CleanTrees: -- cgit v1.1 From c2a4474b582fb98bd81c0babd1056eeb51d0f1ce Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Apr 2020 13:39:36 -0700 Subject: build: cleanup and sort imports --- mesonbuild/build.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index c200261..b95988e 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -12,12 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -import copy, os, re from collections import OrderedDict, defaultdict -import itertools, pathlib +from functools import lru_cache +import copy import hashlib +import itertools, pathlib +import os import pickle -from functools import lru_cache +import re import typing as T from . 
import environment -- cgit v1.1 From 28e3ce67ae49494d57372f27b6f91580656f77a7 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Apr 2020 13:54:46 -0700 Subject: Convert test protocol into an enum This gives us better type safety, and will be important as we add more test methods --- mesonbuild/backend/backends.py | 30 ++++++++++++++++++++++++++---- mesonbuild/interpreter.py | 3 ++- mesonbuild/mintro.py | 2 +- mesonbuild/mtest.py | 5 +++-- 4 files changed, 32 insertions(+), 8 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 774764d..ecdf330 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -14,6 +14,7 @@ from collections import OrderedDict from functools import lru_cache +import enum import json import os import pickle @@ -28,12 +29,33 @@ from .. import dependencies from .. import mesonlib from .. import mlog from ..compilers import CompilerArgs, VisualStudioLikeCompiler -from ..interpreter import Interpreter from ..mesonlib import ( File, MachineChoice, MesonException, OrderedSet, OptionOverrideProxy, classify_unity_sources, unholder ) +if T.TYPE_CHECKING: + from ..interpreter import Interpreter + + +class TestProtocol(enum.Enum): + + EXITCODE = 0 + TAP = 1 + + @classmethod + def from_str(cls, string: str) -> 'TestProtocol': + if string == 'exitcode': + return cls.EXITCODE + elif string == 'tap': + return cls.TAP + raise MesonException('unknown test format {}'.format(string)) + + def __str__(self) -> str: + if self is self.EXITCODE: + return 'exitcode' + return 'tap' + class CleanTrees: ''' @@ -91,7 +113,7 @@ class TestSerialisation: needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables, should_fail: bool, timeout: T.Optional[int], workdir: T.Optional[str], - extra_paths: T.List[str], protocol: str, priority: int): + extra_paths: T.List[str], protocol: TestProtocol, priority: int): self.name = name self.project_name = project self.suite = suite @@ -111,7 +133,7 @@ class TestSerialisation: self.priority = priority self.needs_exe_wrapper = needs_exe_wrapper -def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional[Interpreter] = None) -> T.Optional['Backend']: +def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']: if backend == 'ninja': from . import ninjabackend return ninjabackend.NinjaBackend(build, interpreter) @@ -138,7 +160,7 @@ def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, i # This class contains the basic functionality that is needed by all backends. # Feel free to move stuff in and out of it as you see fit. 
class Backend: - def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]): + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']): # Make it possible to construct a dummy backend # This is used for introspection without a build directory if build is None: diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index dd1e57b..7b8ca63 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -33,6 +33,7 @@ from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs from .interpreterbase import ObjectHolder from .modules import ModuleReturnValue from .cmake import CMakeInterpreter +from .backend.backends import TestProtocol from pathlib import Path, PurePath import os @@ -979,7 +980,7 @@ class Test(InterpreterObject): self.should_fail = should_fail self.timeout = timeout self.workdir = workdir - self.protocol = protocol + self.protocol = TestProtocol.from_str(protocol) self.priority = priority def get_exe(self): diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index d5516d4..54e302b 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -328,7 +328,7 @@ def get_test_list(testdata) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], to['suite'] = t.suite to['is_parallel'] = t.is_parallel to['priority'] = t.priority - to['protocol'] = t.protocol + to['protocol'] = str(t.protocol) result.append(to) return result diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 846b474..69da400 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -43,6 +43,7 @@ from . import environment from . import mlog from .dependencies import ExternalProgram from .mesonlib import MesonException, get_wine_shortpath, split_args +from .backend.backends import TestProtocol if T.TYPE_CHECKING: from .backend.backends import TestSerialisation @@ -631,7 +632,7 @@ class SingleTestRunner: if not self.options.verbose: stdout = tempfile.TemporaryFile("wb+") stderr = tempfile.TemporaryFile("wb+") if self.options.split else stdout - if self.test.protocol == 'tap' and stderr is stdout: + if self.test.protocol is TestProtocol.TAP and stderr is stdout: stdout = tempfile.TemporaryFile("wb+") # Let gdb handle ^C instead of us @@ -741,7 +742,7 @@ class SingleTestRunner: if timed_out: return TestRun(self.test, self.test_env, TestResult.TIMEOUT, [], p.returncode, starttime, duration, stdo, stde, cmd) else: - if self.test.protocol == 'exitcode': + if self.test.protocol is TestProtocol.EXITCODE: return TestRun.make_exitcode(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd) else: if self.options.verbose: -- cgit v1.1 From 0c51762463abd72526ac84f3cfeaa286186ae1d7 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Apr 2020 13:59:49 -0700 Subject: backend/backends: Fix type annotation --- mesonbuild/backend/backends.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index ecdf330..ad01011 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -109,7 +109,7 @@ class ExecutableSerialisation: class TestSerialisation: def __init__(self, name: str, project: str, suite: str, fname: T.List[str], - is_cross_built: bool, exe_wrapper: T.Optional[build.Executable], + is_cross_built: bool, exe_wrapper: T.Optional[dependencies.ExternalProgram], needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables, should_fail: bool, timeout: 
T.Optional[int], workdir: T.Optional[str], -- cgit v1.1 From a124624c0e18a44cd51e49d020b30393dbe8bc9d Mon Sep 17 00:00:00 2001 From: Flow-It Date: Fri, 1 May 2020 21:02:17 +0200 Subject: Document formal Meson grammar [skip ci] * WIP: Document formal Meson grammar * Various little fixes [skip ci] 1) Add missing logical_not_expr 2) 'in' and 'not in' are valid relational operators at least for dicts 3) dictionary keys can be expressions, but kwarg names cannot 4) typo logical_end_expression -> logical_and_expression 5) Make jump statements only allowed inside an iteration statement * Rework EBNF style [skip ci] As there is no good order for the productions, just go alphabetically. The EBNF style was changed to match the one the Python lark project uses, that is colons for productions and terminals enclosed in double quotes. * Add missing production for unary operators [skip ci] * Add production for multiline strings [skip ci] * Properly define terminal symbols [skip ci] Depending on the EBNF flavor, regex can be used to describe the terminal symbols. Lark allows this, and as it was mentioned as a possible user of this grammar, let's follow its flavor here. Most regexes used are easily human-readable, and we can always add comments to more complicated ones. * Small changes to which expressions can be used where [skip ci] Let the grammar be very general. The type system then has to check, that the used expression really evaluates to the correct type. Even if we know today that assignment expressions always evaluate to None (and can therefore only be used as a toplevel expression in an expression statement), this needn't be the case forever. So this way, the grammar stays stable even if such changes were made. * Rework function argument list production [skip ci] * Be more verbose for production names [skip ci] Rename expr -> expression, stmt -> statement, op -> operator, program -> build_definition. Also adjust some list productions. * Add paragraph about syntax stability promises [skip ci] --- docs/markdown/Syntax.md | 70 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index cf0516c..666d50e 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -588,3 +588,73 @@ FAQ](FAQ.md#why-is-meson-not-just-a-python-module-so-i-could-code-my-build-setup because of this limitation you find yourself copying and pasting code a lot you may be able to use a [`foreach` loop instead](#foreach-statements). + +Stability Promises +-- + +Meson is very actively developed and continuously improved. There is a +possibility that future enhancements to the Meson build system will require +changes to the syntax. Such changes might be the addition of new reserved +keywords, changing the meaning of existing keywords or additions around the +basic building blocks like statements and fundamental types. It is planned +to stabilize the syntax with the 1.0 release. 
+ +Grammar +-- + +This is the full Meson grammar, as it is used to parse Meson build definition files: + +``` +additive_expression: multiplicative_expression | (additive_expression additive_operator multiplicative_expression) +additive_operator: "+" | "-" +argument_list: positional_arguments ["," keyword_arguments] | keyword_arguments +array_literal: "[" [expression_list] "]" +assignment_expression: conditional_expression | (logical_or_expression assignment_operator assignment_expression) +assignment_operator: "=" | "*=" | "/=" | "%=" | "+=" | "-=" +boolean_literal: "true" | "false" +build_definition: (NEWLINE | statement)* +condition: expression +conditional_expression: logical_or_expression | (logical_or_expression "?" expression ":" assignment_expression +decimal_literal: DECIMAL_NUMBER +DECIMAL_NUMBER: /[1-9][0-9]*/ +dictionary_literal: "{" [key_value_list] "}" +equality_expression: relational_expression | (equality_expression equality_operator relational_expression) +equality_operator: "==" | "!=" +expression: assignment_expression +expression_list: expression ("," expression)* +expression_statememt: expression +function_expression: id_expression "(" [argument_list] ")" +hex_literal: "0x" HEX_NUMBER +HEX_NUMBER: /[a-fA-F0-9]+/ +id_expression: IDENTIFIER +IDENTIFIER: /[a-zA-Z_][a-zA-Z_0-9]*/ +identifier_list: id_expression ("," id_expression)* +integer_literal: decimal_literal | octal_literal | hex_literal +iteration_statement: "foreach" identifier_list ":" id_expression NEWLINE (statement | jump_statement)* "endforeach" +jump_statement: ("break" | "continue") NEWLINE +key_value_item: expression ":" expression +key_value_list: key_value_item ("," key_value_item)* +keyword_item: id_expression ":" expression +keyword_arguments: keyword_item ("," keyword_item)* +literal: integer_literal | string_literal | boolean_literal | array_literal | dictionary_literal +logical_and_expression: equality_expression | (logical_and_expression "and" equality_expression) +logical_or_expression: logical_and_expression | (logical_or_expression "or" logical_and_expression) +method_expression: postfix_expression "." function_expression +multiplicative_expression: unary_expression | (multiplicative_expression multiplicative_operator unary_expression) +multiplicative_operator: "*" | "/" | "%" +octal_literal: "0o" OCTAL_NUMBER +OCTAL_NUMBER: /[0-7]+/ +positional_arguments: expression ("," expression)* +postfix_expression: primary_expression | subscript_expression | function_expression | method_expression +primary_expression: literal | ("(" expression ")") | id_expression +relational_expression: additive_expression | (relational_expression relational_operator additive_expression) +relational_operator: ">" | "<" | ">=" | "<=" | "in" | ("not" "in") +selection_statement: "if" condition NEWLINE (statement)* ("elif" condition NEWLINE (statement)*)* ["else" (statement)*] "endif" +statement: (expression_statement | selection_statement | iteration_statement) NEWLINE +string_literal: ("'" STRING_SIMPLE_VALUE "'") | ("'''" STRING_MULTILINE_VALUE "'''") +STRING_MULTILINE_VALUE: \.*?(''')\ +STRING_SIMPLE_VALUE: \.*?(? 
Date: Mon, 27 Apr 2020 17:48:19 +0200 Subject: boost: Only use usage-requirements defines (fixes #7046) --- mesonbuild/dependencies/boost.py | 93 +++++++++++++++++-------------- test cases/frameworks/1 boost/meson.build | 2 +- tools/boost_names.py | 56 ++++++++++++++----- 3 files changed, 93 insertions(+), 58 deletions(-) diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 13054f5..33408c1 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -665,9 +665,9 @@ boost_arch_map = { #### ---- BEGIN GENERATED ---- #### # # # Generated with tools/boost_names.py: -# - boost version: 1.72.0 -# - modules found: 158 -# - libraries found: 42 +# - boost version: 1.73.0 +# - modules found: 159 +# - libraries found: 43 # class BoostLibrary(): @@ -690,16 +690,16 @@ class BoostModule(): boost_libraries = { 'boost_atomic': BoostLibrary( name='boost_atomic', - shared=[], - static=[], + shared=['-DBOOST_ATOMIC_DYN_LINK=1'], + static=['-DBOOST_ATOMIC_STATIC_LINK=1'], single=[], multi=[], ), 'boost_chrono': BoostLibrary( name='boost_chrono', - shared=['-DBOOST_ALL_DYN_LINK=1'], - static=['-DBOOST_All_STATIC_LINK=1'], - single=[], + shared=['-DBOOST_CHRONO_DYN_LINK=1'], + static=['-DBOOST_CHRONO_STATIC_LINK=1'], + single=['-DBOOST_CHRONO_THREAD_DISABLED'], multi=[], ), 'boost_container': BoostLibrary( @@ -711,28 +711,28 @@ boost_libraries = { ), 'boost_context': BoostLibrary( name='boost_context', - shared=[], + shared=['-DBOOST_CONTEXT_DYN_LINK=1'], static=[], single=[], multi=[], ), 'boost_contract': BoostLibrary( name='boost_contract', - shared=[], - static=[], - single=[], + shared=['-DBOOST_CONTRACT_DYN_LINK'], + static=['-DBOOST_CONTRACT_STATIC_LINK'], + single=['-DBOOST_CONTRACT_DISABLE_THREADS'], multi=[], ), 'boost_coroutine': BoostLibrary( name='boost_coroutine', - shared=[], + shared=['-DBOOST_COROUTINES_DYN_LINK=1'], static=[], single=[], multi=[], ), 'boost_date_time': BoostLibrary( name='boost_date_time', - shared=[], + shared=['-DBOOST_DATE_TIME_DYN_LINK=1'], static=[], single=[], multi=[], @@ -746,14 +746,14 @@ boost_libraries = { ), 'boost_fiber': BoostLibrary( name='boost_fiber', - shared=[], + shared=['-DBOOST_FIBERS_DYN_LINK=1'], static=[], single=[], multi=[], ), 'boost_fiber_numa': BoostLibrary( name='boost_fiber_numa', - shared=[], + shared=['-DBOOST_FIBERS_DYN_LINK=1'], static=[], single=[], multi=[], @@ -767,84 +767,91 @@ boost_libraries = { ), 'boost_graph': BoostLibrary( name='boost_graph', - shared=['-DBOOST_GRAPH_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_iostreams': BoostLibrary( name='boost_iostreams', - shared=['-DBOOST_IOSTREAMS_DYN_LINK=1', '-DBOOST_IOSTREAMS_DYN_LINK=1'], + shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'], static=[], single=[], multi=[], ), 'boost_locale': BoostLibrary( name='boost_locale', - shared=['-DBOOST_LOCALE_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_log': BoostLibrary( name='boost_log', - shared=['-DBOOST_LOG_DLL', '-DBOOST_LOG_DYN_LINK=1'], + shared=['-DBOOST_LOG_DYN_LINK=1'], static=[], - single=['BOOST_LOG_NO_THREADS'], + single=['-DBOOST_LOG_NO_THREADS'], multi=[], ), 'boost_log_setup': BoostLibrary( name='boost_log_setup', - shared=['-DBOOST_LOG_DYN_LINK=1', '-DBOOST_LOG_SETUP_DLL', '-DBOOST_LOG_SETUP_DYN_LINK=1'], + shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'], static=[], - single=['BOOST_LOG_NO_THREADS'], + single=['-DBOOST_LOG_NO_THREADS'], multi=[], ), 'boost_math_c99': BoostLibrary( name='boost_math_c99', - 
shared=['-DBOOST_MATH_TR1_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_math_c99f': BoostLibrary( name='boost_math_c99f', - shared=['-DBOOST_MATH_TR1_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_math_c99l': BoostLibrary( name='boost_math_c99l', - shared=['-DBOOST_MATH_TR1_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_math_tr1': BoostLibrary( name='boost_math_tr1', - shared=['-DBOOST_MATH_TR1_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_math_tr1f': BoostLibrary( name='boost_math_tr1f', - shared=['-DBOOST_MATH_TR1_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_math_tr1l': BoostLibrary( name='boost_math_tr1l', - shared=['-DBOOST_MATH_TR1_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_mpi': BoostLibrary( name='boost_mpi', - shared=['-DBOOST_MPI_DYN_LINK=1'], + shared=[], + static=[], + single=[], + multi=[], + ), + 'boost_nowide': BoostLibrary( + name='boost_nowide', + shared=['-DBOOST_NOWIDE_DYN_LINK=1'], static=[], single=[], multi=[], @@ -865,63 +872,63 @@ boost_libraries = { ), 'boost_random': BoostLibrary( name='boost_random', - shared=[], + shared=['-DBOOST_RANDOM_DYN_LINK'], static=[], single=[], multi=[], ), 'boost_regex': BoostLibrary( name='boost_regex', - shared=['-DBOOST_REGEX_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_serialization': BoostLibrary( name='boost_serialization', - shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_stacktrace_addr2line': BoostLibrary( name='boost_stacktrace_addr2line', - shared=['-DBOOST_STACKTRACE_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_stacktrace_backtrace': BoostLibrary( name='boost_stacktrace_backtrace', - shared=['-DBOOST_STACKTRACE_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_stacktrace_basic': BoostLibrary( name='boost_stacktrace_basic', - shared=['-DBOOST_STACKTRACE_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_stacktrace_noop': BoostLibrary( name='boost_stacktrace_noop', - shared=['-DBOOST_STACKTRACE_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_stacktrace_windbg': BoostLibrary( name='boost_stacktrace_windbg', - shared=['-DBOOST_STACKTRACE_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], ), 'boost_stacktrace_windbg_cached': BoostLibrary( name='boost_stacktrace_windbg_cached', - shared=['-DBOOST_STACKTRACE_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], @@ -942,8 +949,8 @@ boost_libraries = { ), 'boost_thread': BoostLibrary( name='boost_thread', - shared=['-DBOOST_THREAD_USE_DLL=1'], - static=['-DBOOST_THREAD_USE_LIB=1'], + shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'], + static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'], single=[], multi=[], ), @@ -956,7 +963,7 @@ boost_libraries = { ), 'boost_type_erasure': BoostLibrary( name='boost_type_erasure', - shared=[], + shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'], static=[], single=[], multi=[], @@ -977,7 +984,7 @@ boost_libraries = { ), 'boost_wserialization': BoostLibrary( name='boost_wserialization', - shared=['-DBOOST_SERIALIZATION_DYN_LINK=1'], + shared=[], static=[], single=[], multi=[], diff --git a/test cases/frameworks/1 boost/meson.build b/test cases/frameworks/1 boost/meson.build index 501ed29..6c23360 100644 --- a/test cases/frameworks/1 boost/meson.build +++ b/test cases/frameworks/1 boost/meson.build @@ -13,7 +13,7 @@ endif # 
within one project. The need to be independent of each other. # Use one without a library dependency and one with it. -linkdep = dependency('boost', static: s, modules : ['thread', 'system']) +linkdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time']) testdep = dependency('boost', static: s, modules : ['unit_test_framework']) nomoddep = dependency('boost', static: s) extralibdep = dependency('boost', static: s, modules : ['thread', 'system', 'date_time', 'log_setup', 'log', 'filesystem', 'regex']) diff --git a/tools/boost_names.py b/tools/boost_names.py index d26d34b..b66c6cc 100755 --- a/tools/boost_names.py +++ b/tools/boost_names.py @@ -43,10 +43,10 @@ export_modules = False class BoostLibrary(): def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]): self.name = name - self.shared = shared - self.static = static - self.single = single - self.multi = multi + self.shared = sorted(set(shared)) + self.static = sorted(set(static)) + self.single = sorted(set(single)) + self.multi = sorted(set(multi)) def __lt__(self, other: T.Any) -> T.Union[bool, 'NotImplemented']: if isinstance(other, BoostLibrary): @@ -99,15 +99,35 @@ def get_libraries(jamfile: Path) -> T.List[BoostLibrary]: cmds = raw.split(';') # Commands always terminate with a ; (I hope) cmds = [x.strip() for x in cmds] # Some cleanup + project_usage_requirements: T.List[str] = [] + # "Parse" the relevant sections for i in cmds: parts = i.split(' ') - parts = [x for x in parts if x not in ['', ':']] + parts = [x for x in parts if x not in ['']] if not parts: continue - # Parese libraries - if parts[0] in ['lib', 'boost-lib']: + # Parse project + if parts[0] in ['project']: + attributes: T.Dict[str, T.List[str]] = {} + curr: T.Optional[str] = None + + for j in parts: + if j == ':': + curr = None + elif curr is None: + curr = j + else: + if curr not in attributes: + attributes[curr] = [] + attributes[curr] += [j] + + if 'usage-requirements' in attributes: + project_usage_requirements = attributes['usage-requirements'] + + # Parse libraries + elif parts[0] in ['lib', 'boost-lib']: assert len(parts) >= 2 # Get and check the library name @@ -117,28 +137,36 @@ def get_libraries(jamfile: Path) -> T.List[BoostLibrary]: if not lname.startswith('boost_'): continue + # Count `:` to only select the 'usage-requirements' + # See https://boostorg.github.io/build/manual/master/index.html#bbv2.main-target-rule-syntax + colon_counter = 0 + usage_requirements: T.List[str] = [] + for j in parts: + if j == ':': + colon_counter += 1 + elif colon_counter >= 4: + usage_requirements += [j] + # Get shared / static defines shared: T.List[str] = [] static: T.List[str] = [] single: T.List[str] = [] multi: T.List[str] = [] - for j in parts: + for j in usage_requirements + project_usage_requirements: m1 = re.match(r'shared:(.*)', j) m2 = re.match(r'static:(.*)', j) m3 = re.match(r'single:(.*)', j) m4 = re.match(r'multi:(.*)', j) if m1: - shared += [m1.group(1)] + shared += [f'-D{m1.group(1)}'] if m2: - static += [m2.group(1)] + static += [f'-D{m2.group(1)}'] if m3: - single += [m3.group(1)] + single +=[f'-D{m3.group(1)}'] if m4: - multi += [m4.group(1)] + multi += [f'-D{m4.group(1)}'] - shared = [f'-D{x}' for x in shared] - static = [f'-D{x}' for x in static] libs += [BoostLibrary(lname, shared, static, single, multi)] return libs -- cgit v1.1 From 30c4a7744f00c0393e4f261a6337d65b4c4d469d Mon Sep 17 00:00:00 2001 From: Princeton Ferro Date: Fri, 1 May 2020 15:11:56 -0400 Subject: 
docs/Users: add Vala Language Server See https://github.com/benwaffle/vala-language-server --- docs/markdown/Users.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index bfc8a7a..41d8dfa 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -124,6 +124,7 @@ format files - [Terminology](https://github.com/billiob/terminology), a terminal emulator based on the Enlightenment Foundation Libraries - [Tilix](https://github.com/gnunn1/tilix), a tiling terminal emulator for Linux using GTK+ 3 - [Tizonia](https://github.com/tizonia/tizonia-openmax-il), a command-line cloud music player for Linux with support for Spotify, Google Play Music, YouTube, SoundCloud, TuneIn, Plex servers and Chromecast devices + - [Vala Language Server](https://github.com/benwaffle/vala-language-server), code intelligence engine for the Vala and Genie programming languages - [Valum](https://github.com/valum-framework/valum), a micro web framework written in Vala - [Venom](https://github.com/naxuroqa/Venom), a modern Tox client for the GNU/Linux desktop - [VMAF](https://github.com/Netflix/vmaf) (by Netflix), a perceptual video quality assessment based on multi-method fusion -- cgit v1.1 From b75dcd05c536a43bd8b568ca742ea2319542185c Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 1 May 2020 21:29:53 +0200 Subject: boost: Do not set BOOST_ALL_DYN_LINK (fixes #7056) --- mesonbuild/dependencies/boost.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 33408c1..fb9d573 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -637,11 +637,8 @@ class BoostDependency(ExternalDependency): return BoostIncludeDir(hfile.parents[1], int(m.group(1))) def _extra_compile_args(self) -> T.List[str]: - args = [] # type: T.List[str] - args += ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking - if not self.static: - args += ['-DBOOST_ALL_DYN_LINK'] - return args + # BOOST_ALL_DYN_LINK should not be required with the known defines below + return ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking # See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming -- cgit v1.1 From 3ac437cecfdbf46400263e144beac375408c623b Mon Sep 17 00:00:00 2001 From: Michael Brockus <55331536+michaelbadcrumble@users.noreply.github.com> Date: Sun, 3 May 2020 04:00:15 -0700 Subject: rm python2 %s from backends.py and ninjabackend.py --- mesonbuild/backend/backends.py | 12 ++++---- mesonbuild/backend/ninjabackend.py | 61 ++++++++++++++++++-------------------- 2 files changed, 35 insertions(+), 38 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 31ddfb4..0aaf66a 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -196,7 +196,7 @@ class Backend: return os.path.join(self.get_target_dir(target), target.get_filename()) elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)): if not target.is_linkable_target(): - raise MesonException('Tried to link against custom target "%s", which is not linkable.' 
% target.name) + raise MesonException('Tried to link against custom target "{}", which is not linkable.'.format(target.name)) return os.path.join(self.get_target_dir(target), target.get_filename()) elif isinstance(target, build.Executable): if target.import_filename: @@ -282,7 +282,7 @@ class Backend: ofile = init_language_file(comp.get_default_suffix(), unity_file_number) unity_file_number += 1 files_in_current = 0 - ofile.write('#include<%s>\n' % src) + ofile.write('#include<{}>\n'.format(src)) files_in_current += 1 if ofile: ofile.close() @@ -537,14 +537,14 @@ class Backend: def create_msvc_pch_implementation(self, target, lang, pch_header): # We have to include the language in the file name, otherwise # pch.c and pch.cpp will both end up as pch.obj in VS backends. - impl_name = 'meson_pch-%s.%s' % (lang, lang) + impl_name = 'meson_pch-{}.{}'.format(lang, lang) pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name) # Make sure to prepend the build dir, since the working directory is # not defined. Otherwise, we might create the file in the wrong path. pch_file = os.path.join(self.build_dir, pch_rel_to_build) os.makedirs(os.path.dirname(pch_file), exist_ok=True) - content = '#include "%s"' % os.path.basename(pch_header) + content = '#include "{}"'.format(os.path.basename(pch_header)) pch_file_tmp = pch_file + '.tmp' with open(pch_file_tmp, 'w') as f: f.write(content) @@ -664,7 +664,7 @@ class Backend: args = [] for d in deps: if not (d.is_linkable_target()): - raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename()) + raise RuntimeError('Tried to link with a non-library target "{}".'.format(d.get_basename())) arg = self.get_target_filename_for_linking(d) if not arg: continue @@ -853,7 +853,7 @@ class Backend: m = regex.search(arg) while m is not None: index = int(m.group(1)) - src = '@OUTPUT%d@' % index + src = '@OUTPUT{}@'.format(index) arg = arg.replace(src, os.path.join(private_dir, output_list[index])) m = regex.search(arg) newargs.append(arg) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index e765466..9b895c9 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -67,9 +67,9 @@ def ninja_quote(text, is_build_line=False): if '\n' in text: errmsg = '''Ninja does not support newlines in rules. 
The content was: -%s +{} -Please report this error with a test case to the Meson bug tracker.''' % text +Please report this error with a test case to the Meson bug tracker.'''.format(text) raise MesonException(errmsg) return text @@ -101,18 +101,18 @@ class NinjaRule: if not self.refcount: return - outfile.write('rule %s\n' % self.name) + outfile.write('rule {}\n'.format(self.name)) if self.rspable: - outfile.write(' command = %s @$out.rsp\n' % ' '.join(self.command)) + outfile.write(' command = {} @$out.rsp\n'.format(' '.join(self.command))) outfile.write(' rspfile = $out.rsp\n') - outfile.write(' rspfile_content = %s\n' % ' '.join(self.args)) + outfile.write(' rspfile_content = {}\n'.format(' '.join(self.args))) else: - outfile.write(' command = %s\n' % ' '.join(self.command + self.args)) + outfile.write(' command = {}\n'.format(' '.join(self.command + self.args))) if self.deps: - outfile.write(' deps = %s\n' % self.deps) + outfile.write(' deps = {}\n'.format(self.deps)) if self.depfile: - outfile.write(' depfile = %s\n' % self.depfile) - outfile.write(' description = %s\n' % self.description) + outfile.write(' depfile = {}\n'.format(self.depfile)) + outfile.write(' description = {}\n'.format(self.description)) if self.extra: for l in self.extra.split('\n'): outfile.write(' ') @@ -185,7 +185,7 @@ class NinjaBuildElement: for e in self.elems: (name, elems) = e should_quote = name not in raw_names - line = ' %s = ' % name + line = ' {} = '.format(name) newelems = [] for i in elems: if not should_quote or i == '&&': # Hackety hack hack @@ -204,7 +204,7 @@ class NinjaBuildElement: def check_outputs(self): for n in self.outfilenames: if n in self.all_outputs: - raise MesonException('Multiple producers for Ninja target "%s". Please rename your targets.' % n) + raise MesonException('Multiple producers for Ninja target "{}". 
Please rename your targets.'.format(n)) self.all_outputs[n] = True class NinjaBackend(backends.Backend): @@ -299,8 +299,7 @@ int dummy; outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename) tempfilename = outfilename + '~' with open(tempfilename, 'w', encoding='utf-8') as outfile: - outfile.write('# This is the build file for project "%s"\n' % - self.build.get_project()) + outfile.write('# This is the build file for project "{}"\n'.format(self.build.get_project())) outfile.write('# It is autogenerated by the Meson build system.\n') outfile.write('# Do not edit by hand.\n\n') outfile.write('ninja_required_version = 1.7.1\n\n') @@ -308,9 +307,9 @@ int dummy; num_pools = self.environment.coredata.backend_options['backend_max_links'].value if num_pools > 0: outfile.write('''pool link_pool - depth = %d + depth = {} -''' % num_pools) +'''.format(num_pools)) with self.detect_vs_dep_prefix(tempfilename) as outfile: self.generate_rules() @@ -765,7 +764,7 @@ int dummy; target_name = 'meson-{}'.format(self.build_run_target_name(target)) elem = NinjaBuildElement(self.all_outputs, target_name, 'CUSTOM_COMMAND', []) elem.add_item('COMMAND', cmd) - elem.add_item('description', 'Running external command %s' % target.name) + elem.add_item('description', 'Running external command {}'.format(target.name)) elem.add_item('pool', 'console') # Alias that runs the target defined above with the name the user specified self.create_target_alias(target_name) @@ -980,12 +979,12 @@ int dummy; ofilename = os.path.join(self.get_target_private_dir(target), ofilebase) elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile) elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename]) - elem.add_item('DESC', 'Compiling resource %s' % rel_sourcefile) + elem.add_item('DESC', 'Compiling resource {}'.format(rel_sourcefile)) self.add_build(elem) deps.append(ofilename) a = '-resource:' + ofilename else: - raise InvalidArguments('Unknown resource file %s.' % r) + raise InvalidArguments('Unknown resource file {}.'.format(r)) args.append(a) return args, deps @@ -1278,7 +1277,7 @@ int dummy; main_rust_file = None for i in target.get_sources(): if not rustc.can_compile(i): - raise InvalidArguments('Rust target %s contains a non-rust source file.' % target.get_basename()) + raise InvalidArguments('Rust target {} contains a non-rust source file.'.format(target.get_basename())) if main_rust_file is None: main_rust_file = i.rel_to_builddir(self.build_to_src) if main_rust_file is None: @@ -1377,11 +1376,11 @@ int dummy; @classmethod def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice) -> str: - return '%s_COMPILER%s' % (lang, cls.get_rule_suffix(for_machine)) + return '{}_COMPILER{}'.format(lang, cls.get_rule_suffix(for_machine)) @classmethod def get_pch_rule_name(cls, lang: str, for_machine: MachineChoice) -> str: - return '%s_PCH%s' % (lang, cls.get_rule_suffix(for_machine)) + return '{}_PCH{}'.format(lang, cls.get_rule_suffix(for_machine)) @classmethod def compiler_to_rule_name(cls, compiler: Compiler) -> str: @@ -1453,7 +1452,7 @@ int dummy; abs_headers.append(absh) header_imports += swiftc.get_header_import_args(absh) else: - raise InvalidArguments('Swift target %s contains a non-swift source file.' 
% target.get_basename()) + raise InvalidArguments('Swift target {} contains a non-swift source file.'.format(target.get_basename())) os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True) compile_args = swiftc.get_compile_only_args() compile_args += swiftc.get_optimization_args(self.get_option_for_target('optimization', target)) @@ -1540,7 +1539,7 @@ int dummy; static_linker = self.build.static_linker[for_machine] if static_linker is None: return - rule = 'STATIC_LINKER%s' % self.get_rule_suffix(for_machine) + rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine)) cmdlist = [] args = ['$in'] # FIXME: Must normalize file names with pathlib.Path before writing @@ -1574,7 +1573,7 @@ int dummy; or langname == 'rust' \ or langname == 'cs': continue - rule = '%s_LINKER%s' % (langname, self.get_rule_suffix(for_machine)) + rule = '{}_LINKER{}'.format(langname, self.get_rule_suffix(for_machine)) command = compiler.get_linker_exelist() args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS'] description = 'Linking target $out' @@ -1645,7 +1644,7 @@ int dummy; self.add_rule(NinjaRule(rule, command, [], description)) def generate_fortran_dep_hack(self, crstr): - rule = 'FORTRAN_DEP_HACK%s' % (crstr) + rule = 'FORTRAN_DEP_HACK{}'.format(crstr) if mesonlib.is_windows(): cmd = ['cmd', '/C'] else: @@ -1698,7 +1697,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) command = [ninja_quote(i) for i in compiler.get_exelist()] args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in'] - description = 'Compiling %s object $out' % compiler.get_display_language() + description = 'Compiling {} object $out'.format(compiler.get_display_language()) if isinstance(compiler, VisualStudioLikeCompiler): deps = 'msvc' depfile = None @@ -1859,9 +1858,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) modname = modmatch.group(1).lower() if modname in module_files: raise InvalidArguments( - 'Namespace collision: module %s defined in ' - 'two files %s and %s.' % - (modname, module_files[modname], s)) + 'Namespace collision: module {} defined in ' + 'two files {} and {}.'.format(modname, module_files[modname], s)) module_files[modname] = s else: submodmatch = submodre.match(line) @@ -1872,9 +1870,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) if submodname in submodule_files: raise InvalidArguments( - 'Namespace collision: submodule %s defined in ' - 'two files %s and %s.' 
% - (submodname, submodule_files[submodname], s)) + 'Namespace collision: submodule {} defined in ' + 'two files {} and {}.'.format(submodname, submodule_files[submodname], s)) submodule_files[submodname] = s self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files} -- cgit v1.1 From 27bbf37cf016d138c287278272a058950f15e7db Mon Sep 17 00:00:00 2001 From: GustavoLCR Date: Sun, 3 May 2020 20:21:20 -0300 Subject: Fix incremental debug builds in VS --- mesonbuild/backend/vs2010backend.py | 3 ++- run_unittests.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 80ff910..b5803bf 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -1192,7 +1192,8 @@ class Vs2010Backend(backends.Backend): # /nologo ET.SubElement(link, 'SuppressStartupBanner').text = 'true' # /release - ET.SubElement(link, 'SetChecksum').text = 'true' + if not self.environment.coredata.get_builtin_option('debug'): + ET.SubElement(link, 'SetChecksum').text = 'true' meson_file_group = ET.SubElement(root, 'ItemGroup') ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename)) diff --git a/run_unittests.py b/run_unittests.py index c77c9c0..39191a3 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -5175,7 +5175,7 @@ class WindowsTests(BasePlatformTests): raise raise unittest.SkipTest('pefile module not found') testdir = os.path.join(self.common_test_dir, '6 linkshared') - self.init(testdir) + self.init(testdir, extra_args=['--buildtype=release']) self.build() # Test that binaries have a non-zero checksum env = get_fake_env() -- cgit v1.1 From dee10c9151c7233d20e7e142ee9b7ac6f93a45e8 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Mon, 4 May 2020 18:11:39 +0300 Subject: Revert "coredata: init_builtins should always call libdir_cross_fixup" This reverts commit cc4e9e79be54f59a09d8e5ca96c6a2946245a88d. --- mesonbuild/coredata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 0b79084..c8061a7 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -385,6 +385,7 @@ class CoreData: # Only to print a warning if it changes between Meson invocations. self.config_files = self.__load_config_files(options, scratch_dir, 'native') self.init_builtins('') + self.libdir_cross_fixup() @staticmethod def __load_config_files(options: argparse.Namespace, scratch_dir: str, ftype: str) -> T.List[str]: @@ -510,7 +511,6 @@ class CoreData: for for_machine in iter(MachineChoice): for key, opt in builtin_options_per_machine.items(): self.add_builtin_option(self.builtins_per_machine[for_machine], key, opt, subproject) - self.libdir_cross_fixup() def add_builtin_option(self, opts_map, key, opt, subproject): if subproject: -- cgit v1.1 From d61f7a1e849e953a5478cd0f1b93cd5b7f4193a2 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Mon, 4 May 2020 14:37:05 +0300 Subject: Add regression test for libdir reset. 
--- run_unittests.py | 11 +++++++++++ test cases/unit/75 subdir libdir/meson.build | 2 ++ test cases/unit/75 subdir libdir/subprojects/flub/meson.build | 1 + 3 files changed, 14 insertions(+) create mode 100644 test cases/unit/75 subdir libdir/meson.build create mode 100644 test cases/unit/75 subdir libdir/subprojects/flub/meson.build diff --git a/run_unittests.py b/run_unittests.py index 39191a3..0cd52d1 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -6683,6 +6683,17 @@ class LinuxCrossArmTests(BasePlatformTests): return self.assertTrue(False, 'Option libdir not in introspect data.') + def test_cross_libdir_subproject(self): + # Guard against a regression where calling "subproject" + # would reset the value of libdir to its default value. + testdir = os.path.join(self.unit_test_dir, '75 subdir libdir') + self.init(testdir, extra_args=['--libdir=fuf']) + for i in self.introspect('--buildoptions'): + if i['name'] == 'libdir': + self.assertEqual(i['value'], 'fuf') + return + self.assertTrue(False, 'Libdir specified on command line gets reset.') + def test_std_remains(self): # C_std defined in project options must be in effect also when cross compiling. testdir = os.path.join(self.unit_test_dir, '51 noncross options') diff --git a/test cases/unit/75 subdir libdir/meson.build b/test cases/unit/75 subdir libdir/meson.build new file mode 100644 index 0000000..5099c91 --- /dev/null +++ b/test cases/unit/75 subdir libdir/meson.build @@ -0,0 +1,2 @@ +project('toplevel', 'c') +subproject('flub') diff --git a/test cases/unit/75 subdir libdir/subprojects/flub/meson.build b/test cases/unit/75 subdir libdir/subprojects/flub/meson.build new file mode 100644 index 0000000..7bfd2c5 --- /dev/null +++ b/test cases/unit/75 subdir libdir/subprojects/flub/meson.build @@ -0,0 +1 @@ +project('subflub', 'c') -- cgit v1.1 From 8ce4952890c9ed25ee471d811990eb1fc3fb13b1 Mon Sep 17 00:00:00 2001 From: Eric Engestrom Date: Sun, 3 May 2020 21:54:37 +0200 Subject: coredata: init IntegerOption choices to the correct value It's not a boolean, so let's avoid initializing it to invalid choices followed by assigning it valid ones. --- mesonbuild/coredata.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 0b79084..d3ca4f6 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -99,16 +99,16 @@ class UserBooleanOption(UserOption[bool]): class UserIntegerOption(UserOption[int]): def __init__(self, description, value, yielding=None): min_value, max_value, default_value = value - super().__init__(description, [True, False], yielding) self.min_value = min_value self.max_value = max_value - self.set_value(default_value) c = [] if min_value is not None: c.append('>=' + str(min_value)) if max_value is not None: c.append('<=' + str(max_value)) - self.choices = ', '.join(c) + choices = ', '.join(c) + super().__init__(description, choices, yielding) + self.set_value(default_value) def validate_value(self, value) -> int: if isinstance(value, str): -- cgit v1.1 From 083c5f635741a29f93f95c817601dbc66207699d Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Apr 2020 15:36:17 -0700 Subject: Add native support for gtest tests Gtest can output junit results with a command line switch. We can parse this to get more detailed results than the returncode, and put those in our own Junit output. We basically just throw away the top level 'testsuites' object, then fixup the names of the tests, and shove that into our junit. 
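As a reviewer aid, a minimal sketch of the JUnit-merging step described above; it is illustrative only (the real change lives in mesonbuild/mtest.py further down in this patch), and the function name, file path and project/test names are made up for the example:

```python
import xml.etree.ElementTree as et

def merge_gtest_junit(junit_root: et.Element, gtest_xml: str,
                      project: str, test_name: str) -> None:
    # Parse the XML that gtest wrote via --gtest_output=xml:<path>.
    tree = et.parse(gtest_xml)
    for suite in tree.findall('.//testsuite'):
        # Re-root each suite under a "<project>.<test>.<suite>" name,
        # matching how the harness names its own JUnit entries.
        suite.attrib['name'] = '{}.{}.{}'.format(project, test_name,
                                                 suite.attrib['name'])
        # Drop attributes gtest injects that the JUnit schema used by the
        # test suite does not allow.
        for case in suite.findall('.//testcase[@result]'):
            del case.attrib['result']
        for case in suite.findall('.//testcase[@timestamp]'):
            del case.attrib['timestamp']
        junit_root.append(suite)

# Example use, assuming an existing <testsuites> root element:
# root = et.Element('testsuites')
# merge_gtest_junit(root, 'gtest test.xml', 'myproject', 'gtest test')
```

The runner side (see the mtest.py hunk below) appends --gtest_output=xml:<test name>.xml to the test command line, so that this file exists by the time the result is logged.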
--- docs/markdown/Reference-manual.md | 13 +++++--- docs/markdown/snippets/gtest_protocol.md | 6 ++++ mesonbuild/backend/backends.py | 5 +++ mesonbuild/interpreter.py | 6 ++-- mesonbuild/mtest.py | 54 +++++++++++++++++++++++++++---- run_unittests.py | 10 +++--- test cases/frameworks/2 gtest/meson.build | 4 +-- 7 files changed, 78 insertions(+), 20 deletions(-) create mode 100644 docs/markdown/snippets/gtest_protocol.md diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 963af9d..15a438b 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -1662,11 +1662,14 @@ test(..., env: nomalloc, ...) before test is executed even if they have `build_by_default : false`. Since 0.46.0 -- `protocol` specifies how the test results are parsed and can be one - of `exitcode` (the executable's exit code is used by the test harness - to record the outcome of the test) or `tap` ([Test Anything - Protocol](https://www.testanything.org/)). For more on the Meson test - harness protocol read [Unit Tests](Unit-tests.md). Since 0.50.0 +- `protocol` *(Since 0.50.0)* specifies how the test results are parsed and can + be one of `exitcode`, `tap`, or `gtest`. For more information about test + harness protocol read [Unit Tests](Unit-tests.md). The following values are + accepted: + - `exitcode`: the executable's exit code is used by the test harness + to record the outcome of the test) + - `tap` ([Test Anything Protocol](https://www.testanything.org/)) + - `gtest`. *(Since 0.55.0)* for Google Tests. - `priority` specifies the priority of a test. Tests with a higher priority are *started* before tests with a lower priority. diff --git a/docs/markdown/snippets/gtest_protocol.md b/docs/markdown/snippets/gtest_protocol.md new file mode 100644 index 0000000..14f3af9 --- /dev/null +++ b/docs/markdown/snippets/gtest_protocol.md @@ -0,0 +1,6 @@ +## Test protocol for gtest + +Due to the popularity of Gtest (google test) among C and C++ developers meson +now supports a special protocol for gtest. With this protocol meson injects +arguments to gtests to output JUnit, reads that JUnit, and adds the output to +the JUnit it generates. 
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index ad01011..d41cef1 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -42,6 +42,7 @@ class TestProtocol(enum.Enum): EXITCODE = 0 TAP = 1 + GTEST = 2 @classmethod def from_str(cls, string: str) -> 'TestProtocol': @@ -49,11 +50,15 @@ class TestProtocol(enum.Enum): return cls.EXITCODE elif string == 'tap': return cls.TAP + elif string == 'gtest': + return cls.GTEST raise MesonException('unknown test format {}'.format(string)) def __str__(self) -> str: if self is self.EXITCODE: return 'exitcode' + elif self is self.GTEST: + return 'gtest' return 'tap' diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 7b8ca63..c0be92a 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3772,6 +3772,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self @FeatureNewKwargs('test', '0.52.0', ['priority']) @permittedKwargs(permitted_kwargs['test']) def func_test(self, node, args, kwargs): + if kwargs.get('protocol') == 'gtest': + FeatureNew('"gtest" protocol for tests', '0.55.0').use(self.subproject) self.add_test(node, args, kwargs, True) def unpack_env_kwarg(self, kwargs) -> build.EnvironmentVariables: @@ -3823,8 +3825,8 @@ This will become a hard error in the future.''' % kwargs['input'], location=self if not isinstance(timeout, int): raise InterpreterException('Timeout must be an integer.') protocol = kwargs.get('protocol', 'exitcode') - if protocol not in ('exitcode', 'tap'): - raise InterpreterException('Protocol must be "exitcode" or "tap".') + if protocol not in {'exitcode', 'tap', 'gtest'}: + raise InterpreterException('Protocol must be "exitcode", "tap", or "gtest".') suite = [] prj = self.subproject if self.is_subproject() else self.build.project_name for s in mesonlib.stringlistify(kwargs.get('suite', '')): diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 69da400..4592c90 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -94,7 +94,10 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: help='List available tests.') parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args, help='wrapper to run tests with (e.g. Valgrind)') - parser.add_argument('-C', default='.', dest='wd', type=os.path.abspath, + parser.add_argument('-C', default='.', dest='wd', + # https://github.com/python/typeshed/issues/3107 + # https://github.com/python/mypy/issues/7177 + type=os.path.abspath, # type: ignore help='directory to cd into before running') parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE', help='Only run tests belonging to the given suite.') @@ -349,6 +352,19 @@ class JunitBuilder: def log(self, name: str, test: 'TestRun') -> None: """Log a single test case.""" + if test.junit is not None: + for suite in test.junit.findall('.//testsuite'): + # Assume that we don't need to merge anything here... + suite.attrib['name'] = '{}.{}.{}'.format(test.project, name, suite.attrib['name']) + + # GTest can inject invalid attributes + for case in suite.findall('.//testcase[@result]'): + del case.attrib['result'] + for case in suite.findall('.//testcase[@timestamp]'): + del case.attrib['timestamp'] + self.root.append(suite) + return + # In this case we have a test binary with multiple results. 
# We want to record this so that each result is recorded # separately @@ -430,10 +446,24 @@ class JunitBuilder: class TestRun: @classmethod + def make_gtest(cls, test: 'TestSerialisation', test_env: T.Dict[str, str], + returncode: int, starttime: float, duration: float, + stdo: T.Optional[str], stde: T.Optional[str], + cmd: T.Optional[T.List[str]]) -> 'TestRun': + filename = '{}.xml'.format(test.name) + if test.workdir: + filename = os.path.join(test.workdir, filename) + tree = et.parse(filename) + + return cls.make_exitcode( + test, test_env, returncode, starttime, duration, stdo, stde, cmd, + junit=tree) + + @classmethod def make_exitcode(cls, test: 'TestSerialisation', test_env: T.Dict[str, str], returncode: int, starttime: float, duration: float, stdo: T.Optional[str], stde: T.Optional[str], - cmd: T.Optional[T.List[str]]) -> 'TestRun': + cmd: T.Optional[T.List[str]], **kwargs) -> 'TestRun': if returncode == GNU_SKIP_RETURNCODE: res = TestResult.SKIP elif returncode == GNU_ERROR_RETURNCODE: @@ -442,15 +472,15 @@ class TestRun: res = TestResult.EXPECTEDFAIL if bool(returncode) else TestResult.UNEXPECTEDPASS else: res = TestResult.FAIL if bool(returncode) else TestResult.OK - return cls(test, test_env, res, [], returncode, starttime, duration, stdo, stde, cmd) + return cls(test, test_env, res, [], returncode, starttime, duration, stdo, stde, cmd, **kwargs) @classmethod def make_tap(cls, test: 'TestSerialisation', test_env: T.Dict[str, str], returncode: int, starttime: float, duration: float, stdo: str, stde: str, cmd: T.Optional[T.List[str]]) -> 'TestRun': - res = None # T.Optional[TestResult] - results = [] # T.List[TestResult] + res = None # type: T.Optional[TestResult] + results = [] # type: T.List[TestResult] failed = False for i in TAPParser(io.StringIO(stdo)).parse(): @@ -486,7 +516,7 @@ class TestRun: res: TestResult, results: T.List[TestResult], returncode: int, starttime: float, duration: float, stdo: T.Optional[str], stde: T.Optional[str], - cmd: T.Optional[T.List[str]]): + cmd: T.Optional[T.List[str]], *, junit: T.Optional[et.ElementTree] = None): assert isinstance(res, TestResult) self.res = res self.results = results # May be an empty list @@ -499,6 +529,7 @@ class TestRun: self.env = test_env self.should_fail = test.should_fail self.project = test.project_name + self.junit = junit def get_log(self) -> str: res = '--- command ---\n' @@ -652,7 +683,14 @@ class SingleTestRunner: # errors avoid not being able to use the terminal. 
os.setsid() # type: ignore - p = subprocess.Popen(cmd, + extra_cmd = [] # type: T.List[str] + if self.test.protocol is TestProtocol.GTEST: + gtestname = '{}.xml'.format(self.test.name) + if self.test.workdir: + gtestname = '{}:{}'.format(self.test.workdir, self.test.name) + extra_cmd.append('--gtest_output=xml:{}'.format(gtestname)) + + p = subprocess.Popen(cmd + extra_cmd, stdout=stdout, stderr=stderr, env=self.env, @@ -744,6 +782,8 @@ class SingleTestRunner: else: if self.test.protocol is TestProtocol.EXITCODE: return TestRun.make_exitcode(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd) + elif self.test.protocol is TestProtocol.GTEST: + return TestRun.make_gtest(self.test, self.test_env, p.returncode, starttime, duration, stdo, stde, cmd) else: if self.options.verbose: print(stdo, end='') diff --git a/run_unittests.py b/run_unittests.py index da898a3..3826762 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4625,8 +4625,7 @@ recommended as it is not supported on some platforms''') schema = et.XMLSchema(et.parse(str(Path(__file__).parent / 'data' / 'schema.xsd'))) - testdir = os.path.join(self.common_test_dir, case) - self.init(testdir) + self.init(case) self.run_tests() junit = et.parse(str(Path(self.builddir) / 'meson-logs' / 'testlog.junit.xml')) @@ -4636,10 +4635,13 @@ recommended as it is not supported on some platforms''') self.fail(e.error_log) def test_junit_valid_tap(self): - self._test_junit('213 tap tests') + self._test_junit(os.path.join(self.common_test_dir, '213 tap tests')) def test_junit_valid_exitcode(self): - self._test_junit('44 test args') + self._test_junit(os.path.join(self.common_test_dir, '44 test args')) + + def test_junit_valid_gtest(self): + self._test_junit(os.path.join(self.framework_test_dir, '2 gtest')) class FailureTests(BasePlatformTests): diff --git a/test cases/frameworks/2 gtest/meson.build b/test cases/frameworks/2 gtest/meson.build index 2d93b52..ea3ef48 100644 --- a/test cases/frameworks/2 gtest/meson.build +++ b/test cases/frameworks/2 gtest/meson.build @@ -8,7 +8,7 @@ endif gtest_nomain = dependency('gtest', main : false, method : 'system') e = executable('testprog', 'test.cc', dependencies : gtest) -test('gtest test', e) +test('gtest test', e, protocol : 'gtest') e = executable('testprog_nomain', 'test_nomain.cc', dependencies : gtest_nomain) -test('gtest nomain test', e) +test('gtest nomain test', e, protocol : 'gtest') -- cgit v1.1 From bbbfccf0af26fd6f3b3970d6d9452ba8acd13128 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 1 May 2020 09:59:56 -0700 Subject: docs: Reformat unit-tests to be ~80 characters per line --- docs/markdown/Unit-tests.md | 125 +++++++++++++++++++++++++++++--------------- 1 file changed, 84 insertions(+), 41 deletions(-) diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index 0785549..b1f4cff 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -4,20 +4,24 @@ short-description: Meson's own unit-test system # Unit tests -Meson comes with a fully functional unit test system. To use it simply build an executable and then use it in a test. +Meson comes with a fully functional unit test system. To use it simply build +an executable and then use it in a test. ```meson e = executable('prog', 'testprog.c') test('name of test', e) ``` -You can add as many tests as you want. They are run with the command `ninja test`. +You can add as many tests as you want. They are run with the command `ninja +test`. 
-Meson captures the output of all tests and writes it in the log file `meson-logs/testlog.txt`. +Meson captures the output of all tests and writes it in the log file +`meson-logs/testlog.txt`. ## Test parameters -Some tests require the use of command line arguments or environment variables. These are simple to define. +Some tests require the use of command line arguments or environment +variables. These are simple to define. ```meson test('command line test', exe, args : ['first', 'second']) @@ -29,38 +33,46 @@ Note how you need to specify multiple values as an array. ### MALLOC_PERTURB_ By default, environment variable -[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) -is set to a random value between 1..255. This can help find memory -leaks on configurations using glibc, including with non-GCC compilers. -This feature can be disabled as discussed in [test()](Reference-manual.md#test). +[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) is +set to a random value between 1..255. This can help find memory leaks on +configurations using glibc, including with non-GCC compilers. This feature +can be disabled as discussed in [test()](Reference-manual.md#test). ## Coverage If you enable coverage measurements by giving Meson the command line flag -`-Db_coverage=true`, you can generate coverage reports after running the tests -(running the tests is required to gather the list of functions that get -called). Meson will autodetect what coverage generator tools you have installed -and will generate the corresponding targets. These targets are `coverage-xml` -and `coverage-text` which are both provided by [Gcovr](http://gcovr.com) -(version 3.3 or higher) and `coverage-html`, which requires -[Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and -[GenHTML](https://linux.die.net/man/1/genhtml) or -[Gcovr](http://gcovr.com). As a convenience, a high-level `coverage` target is -also generated which will produce all 3 coverage report types, if possible. - -The output of these commands is written to the log directory `meson-logs` in your build directory. +`-Db_coverage=true`, you can generate coverage reports after running the +tests (running the tests is required to gather the list of functions that get +called). Meson will autodetect what coverage generator tools you have +installed and will generate the corresponding targets. These targets are +`coverage-xml` and `coverage-text` which are both provided by +[Gcovr](http://gcovr.com) (version 3.3 or higher) and `coverage-html`, which +requires [Lcov](https://ltp.sourceforge.io/coverage/lcov.php) and +[GenHTML](https://linux.die.net/man/1/genhtml) or [Gcovr](http://gcovr.com). +As a convenience, a high-level `coverage` target is also generated which will +produce all 3 coverage report types, if possible. + +The output of these commands is written to the log directory `meson-logs` in +your build directory. ## Parallelism -To reduce test times, Meson will by default run multiple unit tests in parallel. It is common to have some tests which can not be run in parallel because they require unique hold on some resource such as a file or a D-Bus name. You have to specify these tests with a keyword argument. +To reduce test times, Meson will by default run multiple unit tests in +parallel. It is common to have some tests which can not be run in parallel +because they require unique hold on some resource such as a file or a D-Bus +name. You have to specify these tests with a keyword argument. 
```meson test('unique test', t, is_parallel : false) ``` -Meson will then make sure that no other unit test is running at the same time. Non-parallel tests take longer to run so it is recommended that you write your unit tests to be parallel executable whenever possible. +Meson will then make sure that no other unit test is running at the same +time. Non-parallel tests take longer to run so it is recommended that you +write your unit tests to be parallel executable whenever possible. -By default Meson uses as many concurrent processes as there are cores on the test machine. You can override this with the environment variable `MESON_TESTTHREADS` like this. +By default Meson uses as many concurrent processes as there are cores on the +test machine. You can override this with the environment variable +`MESON_TESTTHREADS` like this. ```console $ MESON_TESTTHREADS=5 ninja test @@ -70,7 +82,10 @@ $ MESON_TESTTHREADS=5 ninja test *(added in version 0.52.0)* -Tests can be assigned a priority that determines when a test is *started*. Tests with higher priority are started first, tests with lower priority started later. The default priority is 0, meson makes no guarantee on the ordering of tests with identical priority. +Tests can be assigned a priority that determines when a test is *started*. +Tests with higher priority are started first, tests with lower priority +started later. The default priority is 0, meson makes no guarantee on the +ordering of tests with identical priority. ```meson test('started second', t, priority : 0) @@ -78,23 +93,37 @@ test('started third', t, priority : -50) test('started first', t, priority : 1000) ``` -Note that the test priority only affects the starting order of tests and subsequent tests are affected by how long it takes previous tests to complete. It is thus possible that a higher-priority test is still running when lower-priority tests with a shorter runtime have completed. +Note that the test priority only affects the starting order of tests and +subsequent tests are affected by how long it takes previous tests to +complete. It is thus possible that a higher-priority test is still running +when lower-priority tests with a shorter runtime have completed. ## Skipped tests and hard errors Sometimes a test can only determine at runtime that it can not be run. -For the default `exitcode` testing protocol, the GNU standard approach in this case is to exit the program with error code 77. Meson will detect this and report these tests as skipped rather than failed. This behavior was added in version 0.37.0. +For the default `exitcode` testing protocol, the GNU standard approach in +this case is to exit the program with error code 77. Meson will detect this +and report these tests as skipped rather than failed. This behavior was added +in version 0.37.0. -For TAP-based tests, skipped tests should print a single line starting with `1..0 # SKIP`. +For TAP-based tests, skipped tests should print a single line starting with +`1..0 # SKIP`. -In addition, sometimes a test fails set up so that it should fail even if it is marked as an expected failure. The GNU standard approach in this case is to exit the program with error code 99. Again, Meson will detect this and report these tests as `ERROR`, ignoring the setting of `should_fail`. This behavior was added in version 0.50.0. +In addition, sometimes a test fails set up so that it should fail even if it +is marked as an expected failure. The GNU standard approach in this case is +to exit the program with error code 99. 
Again, Meson will detect this and +report these tests as `ERROR`, ignoring the setting of `should_fail`. This +behavior was added in version 0.50.0. ## Testing tool -The goal of the meson test tool is to provide a simple way to run tests in a variety of different ways. The tool is designed to be run in the build directory. +The goal of the meson test tool is to provide a simple way to run tests in a +variety of different ways. The tool is designed to be run in the build +directory. -The simplest thing to do is just to run all tests, which is equivalent to running `ninja test`. +The simplest thing to do is just to run all tests, which is equivalent to +running `ninja test`. ```console $ meson test @@ -125,7 +154,8 @@ Tests belonging to a suite `suite` can be run as follows $ meson test --suite (sub)project_name:suite ``` -Since version *0.46*, `(sub)project_name` can be omitted if it is the top-level project. +Since version *0.46*, `(sub)project_name` can be omitted if it is the +top-level project. Multiple suites are specified like: @@ -145,7 +175,8 @@ Sometimes you need to run the tests multiple times, which is done like this: $ meson test --repeat=10 ``` -Invoking tests via a helper executable such as Valgrind can be done with the `--wrap` argument +Invoking tests via a helper executable such as Valgrind can be done with the +`--wrap` argument ```console $ meson test --wrap=valgrind testname @@ -163,17 +194,25 @@ Meson also supports running the tests under GDB. Just doing this: $ meson test --gdb testname ``` -Meson will launch `gdb` all set up to run the test. Just type `run` in the GDB command prompt to start the program. +Meson will launch `gdb` all set up to run the test. Just type `run` in the +GDB command prompt to start the program. -The second use case is a test that segfaults only rarely. In this case you can invoke the following command: +The second use case is a test that segfaults only rarely. In this case you +can invoke the following command: ```console $ meson test --gdb --repeat=10000 testname ``` -This runs the test up to 10 000 times under GDB automatically. If the program crashes, GDB will halt and the user can debug the application. Note that testing timeouts are disabled in this case so `meson test` will not kill `gdb` while the developer is still debugging it. The downside is that if the test binary freezes, the test runner will wait forever. +This runs the test up to 10 000 times under GDB automatically. If the program +crashes, GDB will halt and the user can debug the application. Note that +testing timeouts are disabled in this case so `meson test` will not kill +`gdb` while the developer is still debugging it. The downside is that if the +test binary freezes, the test runner will wait forever. -Sometimes, the GDB binary is not in the PATH variable or the user wants to use a GDB replacement. Therefore, the invoked GDB program can be specified *(added 0.52.0)*: +Sometimes, the GDB binary is not in the PATH variable or the user wants to +use a GDB replacement. Therefore, the invoked GDB program can be specified +*(added 0.52.0)*: ```console $ meson test --gdb --gdb-path /path/to/gdb testname @@ -183,12 +222,16 @@ $ meson test --gdb --gdb-path /path/to/gdb testname $ meson test --print-errorlogs ``` -Meson will report the output produced by the failing tests along with other useful information as the environmental variables. This is useful, for example, when you run the tests on Travis-CI, Jenkins and the like. 
+Meson will report the output produced by the failing tests along with other +useful information as the environmental variables. This is useful, for +example, when you run the tests on Travis-CI, Jenkins and the like. -For further information see the command line help of Meson by running `meson test -h`. +For further information see the command line help of Meson by running `meson +test -h`. ## Legacy notes -If `meson test` does not work for you, you likely have a old version of Meson. -In that case you should call `mesontest` instead. If `mesontest` doesn't work -either you have a very old version prior to 0.37.0 and should upgrade. +If `meson test` does not work for you, you likely have a old version of +Meson. In that case you should call `mesontest` instead. If `mesontest` +doesn't work either you have a very old version prior to 0.37.0 and should +upgrade. -- cgit v1.1 From dbe00dfe95e9630bd733a45f32076bab6ff80226 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 1 May 2020 10:07:30 -0700 Subject: docs/unit-tests: Add information about output files --- docs/markdown/Unit-tests.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index b1f4cff..bd91dbb 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -235,3 +235,28 @@ If `meson test` does not work for you, you likely have a old version of Meson. In that case you should call `mesontest` instead. If `mesontest` doesn't work either you have a very old version prior to 0.37.0 and should upgrade. + +## Test outputs + +Meson will write several different files with detailed results of running +tests. These will be written into $builddir/meson-logs/ + +### testlog.json + +This is not a proper json file, but a file containing one valid json object +per line. This is file is designed so each line is streamed out as each test +is run, so it can be read as a stream while the test harness is running + +### testlog.junit.xml + +This is a valid JUnit XML description of all tests run. It is not streamed +out, and is written only once all tests complete running. + +When tests use the `tap` protocol each test will be recorded as a testsuite +container, with each case named by the number of the result. + +When tests use the `gtest` protocol meson will inject arguments to the test +to generate it's own JUnit XML, which meson will include as part of this XML +file. + +*New in 0.55.0* -- cgit v1.1 From 247eecc8fc61822e10ccbef54641fd326484a80a Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 4 May 2020 12:02:12 -0700 Subject: envconfig: Always honor PKG_CONFIG_PATH The comment for this code is correct, but the code itself isn't. The way it's implemented in a cross compile we don't look at PKG_CONFIG_PATH at all. Fixes: #7062 --- mesonbuild/envconfig.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index 25b3c7f..a4af39a 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -121,7 +121,7 @@ def get_env_var_pair(for_machine: MachineChoice, # ones. 
([var_name + '_FOR_BUILD'] if is_cross else [var_name]), # Always just the unprefixed host verions - ([] if is_cross else [var_name]), + [var_name] )[for_machine] for var in candidates: value = os.environ.get(var) -- cgit v1.1 From 1e073c4c1bd7de06bc74d84e3807c9b210e57a22 Mon Sep 17 00:00:00 2001 From: James Hilliard Date: Sat, 2 May 2020 20:43:36 -0600 Subject: Allow overriding g-ir-scanner and g-ir-compiler binaries. This is useful when one needs to force meson to use wrappers for cross compilation. Signed-off-by: James Hilliard --- mesonbuild/modules/gnome.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 7629f18..a97fffa 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -406,11 +406,19 @@ class GnomeModule(ExtensionModule): kwargs = {'native': True, 'required': True} holder = self.interpreter.func_dependency(state.current_node, ['gobject-introspection-1.0'], kwargs) self.gir_dep = holder.held_object - if self.gir_dep.type_name == 'pkgconfig': + giscanner = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-scanner') + if giscanner is not None: + self.giscanner = ExternalProgram.from_entry('g-ir-scanner', giscanner) + elif self.gir_dep.type_name == 'pkgconfig': self.giscanner = ExternalProgram('g_ir_scanner', self.gir_dep.get_pkgconfig_variable('g_ir_scanner', {})) - self.gicompiler = ExternalProgram('g_ir_compiler', self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {})) else: self.giscanner = self.interpreter.find_program_impl('g-ir-scanner') + gicompiler = state.environment.lookup_binary_entry(MachineChoice.HOST, 'g-ir-compiler') + if gicompiler is not None: + self.gicompiler = ExternalProgram.from_entry('g-ir-compiler', gicompiler) + elif self.gir_dep.type_name == 'pkgconfig': + self.gicompiler = ExternalProgram('g_ir_compiler', self.gir_dep.get_pkgconfig_variable('g_ir_compiler', {})) + else: self.gicompiler = self.interpreter.find_program_impl('g-ir-compiler') return self.gir_dep, self.giscanner, self.gicompiler -- cgit v1.1 From a535ef6719816b23085da492dbcdcc4b7bfa8d2b Mon Sep 17 00:00:00 2001 From: Wenjian He Date: Thu, 7 May 2020 20:16:37 +0800 Subject: docs/Precompiled-headers: minor spell check. [skip ci] a give target -> a given target Proposed by #7081 . --- docs/markdown/Precompiled-headers.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Precompiled-headers.md b/docs/markdown/Precompiled-headers.md index d9ac7a4..05b50bc 100644 --- a/docs/markdown/Precompiled-headers.md +++ b/docs/markdown/Precompiled-headers.md @@ -51,7 +51,7 @@ Using precompiled headers with GCC and derivatives -- Once you have a file to precompile, you can enable the use of pch for -a give target with a *pch* keyword argument. As an example, let's assume +a given target with a *pch* keyword argument. As an example, let's assume you want to build a small C binary with precompiled headers. Let's say the source files of the binary use the system headers `stdio.h` and `string.h`. Then you create a header file `pch/myexe_pch.h` with this -- cgit v1.1 From 7e1529501883ce8741d8689c150f589ab68a814f Mon Sep 17 00:00:00 2001 From: Paolo Bonzini Date: Fri, 7 Feb 2020 01:55:27 +0100 Subject: rename unstable-kconfig to unstable-keyval Discussions in #6524 have shown that there are various possible uses of the kconfig module and even disagreements in the exact file format between Python-based kconfiglib and the tools in Linux. 
Instead of trying to reconcile them, just rename the module to something less suggestive and leave any policy to meson.build files. In the future it may be possible to add some kind of parsing through keyword arguments such as bool_true, quoted_strings, etc. and possibly creation of key-value lists too. For now, configuration_data objects provide an easy way to access quoted strings. Note that Kconfig stores false as "absent" so it was already necessary to write "x.has_key('abc')" rather than the more compact "x['abc']". Therefore, having to use configuration_data does not make things much more verbose. --- docs/markdown/Kconfig-module.md | 52 --------------- docs/markdown/Keyval-module.md | 55 ++++++++++++++++ docs/markdown/snippets/keyval_kobject.md | 6 ++ docs/sitemap.txt | 2 +- docs/theme/extra/templates/navbar_links.html | 2 +- mesonbuild/modules/unstable_kconfig.py | 73 ---------------------- mesonbuild/modules/unstable_keyval.py | 71 +++++++++++++++++++++ run_project_tests.py | 4 +- .../222 source set realistic example/meson.build | 6 +- test cases/kconfig/1 basic/.config | 3 - test cases/kconfig/1 basic/meson.build | 16 ----- test cases/kconfig/2 subdir/.config | 2 - test cases/kconfig/2 subdir/dir/meson.build | 13 ---- test cases/kconfig/2 subdir/meson.build | 4 -- test cases/kconfig/3 load_config files/dir/config | 2 - .../kconfig/3 load_config files/dir/meson.build | 13 ---- test cases/kconfig/3 load_config files/meson.build | 4 -- test cases/kconfig/4 load_config builddir/config | 2 - .../kconfig/4 load_config builddir/meson.build | 14 ----- test cases/keyval/1 basic/.config | 3 + test cases/keyval/1 basic/meson.build | 16 +++++ test cases/keyval/2 subdir/.config | 2 + test cases/keyval/2 subdir/dir/meson.build | 13 ++++ test cases/keyval/2 subdir/meson.build | 4 ++ test cases/keyval/3 load_config files/dir/config | 2 + .../keyval/3 load_config files/dir/meson.build | 13 ++++ test cases/keyval/3 load_config files/meson.build | 4 ++ test cases/keyval/4 load_config builddir/config | 2 + .../keyval/4 load_config builddir/meson.build | 14 +++++ 29 files changed, 212 insertions(+), 205 deletions(-) delete mode 100644 docs/markdown/Kconfig-module.md create mode 100644 docs/markdown/Keyval-module.md create mode 100644 docs/markdown/snippets/keyval_kobject.md delete mode 100644 mesonbuild/modules/unstable_kconfig.py create mode 100644 mesonbuild/modules/unstable_keyval.py delete mode 100644 test cases/kconfig/1 basic/.config delete mode 100644 test cases/kconfig/1 basic/meson.build delete mode 100644 test cases/kconfig/2 subdir/.config delete mode 100644 test cases/kconfig/2 subdir/dir/meson.build delete mode 100644 test cases/kconfig/2 subdir/meson.build delete mode 100644 test cases/kconfig/3 load_config files/dir/config delete mode 100644 test cases/kconfig/3 load_config files/dir/meson.build delete mode 100644 test cases/kconfig/3 load_config files/meson.build delete mode 100644 test cases/kconfig/4 load_config builddir/config delete mode 100644 test cases/kconfig/4 load_config builddir/meson.build create mode 100644 test cases/keyval/1 basic/.config create mode 100644 test cases/keyval/1 basic/meson.build create mode 100644 test cases/keyval/2 subdir/.config create mode 100644 test cases/keyval/2 subdir/dir/meson.build create mode 100644 test cases/keyval/2 subdir/meson.build create mode 100644 test cases/keyval/3 load_config files/dir/config create mode 100644 test cases/keyval/3 load_config files/dir/meson.build create mode 100644 test cases/keyval/3 load_config 
files/meson.build create mode 100644 test cases/keyval/4 load_config builddir/config create mode 100644 test cases/keyval/4 load_config builddir/meson.build diff --git a/docs/markdown/Kconfig-module.md b/docs/markdown/Kconfig-module.md deleted file mode 100644 index 5807f8d..0000000 --- a/docs/markdown/Kconfig-module.md +++ /dev/null @@ -1,52 +0,0 @@ ---- -short-description: Unstable kconfig module -authors: - - name: Mark Schulte, Paolo Bonzini - years: [2017, 2019] - has-copyright: false -... - -# Unstable kconfig module - -This module parses Kconfig output files to allow use of kconfig -configurations in meson projects. - -**Note**: this does not provide kconfig frontend tooling to generate a -configuration. You still need something such as kconfig frontends (see -link below) to parse your Kconfig files, and then (after you've -chosen the configuration options), output a ".config" file. - - [kconfig-frontends]: http://ymorin.is-a-geek.org/projects/kconfig-frontends - -## Usage - -The module may be imported as follows: - -``` meson -kconfig = import('unstable-kconfig') -``` - -The following functions will then be available as methods on the object -with the name `kconfig`. You can, of course, replace the name -`kconfig` with anything else. - -### kconfig.load() - -This function loads a kconfig output file and returns a dictionary object. - -`kconfig.load()` makes no attempt at parsing the values in the -file. Therefore, true boolean values will be represented as the string "y" -and integer values will have to be converted with `.to_int()`. - -Kconfig frontends usually have ".config" as the default name for the -configuration file. However, placing the configuration file in the source -directory limits the user to one configuration per source directory. -In order to allow separate configurations for each build directory, as is -the Meson standard, `meson.build` should not hardcode ".config" as the -argument to `kconfig.load()`, and should instead make the argument to -`kconfig.load()` a [project build option](Build-options.md). - -* The first (and only) argument is the path to the configuration file to - load (usually ".config"). - -**Returns**: a [dictionary object](Reference-manual.md#dictionary-object). diff --git a/docs/markdown/Keyval-module.md b/docs/markdown/Keyval-module.md new file mode 100644 index 0000000..643265e --- /dev/null +++ b/docs/markdown/Keyval-module.md @@ -0,0 +1,55 @@ +--- +short-description: Unstable keyval module +authors: + - name: Mark Schulte, Paolo Bonzini + years: [2017, 2019] + has-copyright: false +... + +# keyval module + +This module parses files consisting of a series of `key=value` lines. One use +of this module is to load kconfig configurations in meson projects. + +**Note**: this does not provide kconfig frontend tooling to generate a +configuration. You still need something such as kconfig frontends (see +link below) to parse your Kconfig files, and then (after you've +chosen the configuration options), output a ".config" file. + + [kconfig-frontends]: http://ymorin.is-a-geek.org/projects/kconfig-frontends + +## Usage + +The module may be imported as follows: + +``` meson +keyval = import('unstable-keyval') +``` + +The following functions will then be available as methods on the object +with the name `keyval`. You can, of course, replace the name +`keyval` with anything else. + +### keyval.load() + +This function loads a file consisting of a series of `key=value` lines +and returns a dictionary object. 
+ +`keyval.load()` makes no attempt at parsing the values in the file. +In particular boolean and integer values will be represented as strings, +and strings will keep any quoting that is present in the input file. It +can be useful to create a [`configuration_data()`](#configuration_data) +object from the dictionary and use methods such as `get_unquoted()`. + +Kconfig frontends usually have ".config" as the default name for the +configuration file. However, placing the configuration file in the source +directory limits the user to one configuration per source directory. +In order to allow separate configurations for each build directory, as is +the Meson standard, `meson.build` should not hardcode ".config" as the +argument to `kconfig.load()`, and should instead make the argument to +`kconfig.load()` a [project build option](Build-options.md). + +* The first (and only) argument is the path to the configuration file to + load (usually ".config"). + +**Returns**: a [dictionary object](Reference-manual.md#dictionary-object). diff --git a/docs/markdown/snippets/keyval_kobject.md b/docs/markdown/snippets/keyval_kobject.md new file mode 100644 index 0000000..4add23c --- /dev/null +++ b/docs/markdown/snippets/keyval_kobject.md @@ -0,0 +1,6 @@ +## `unstable-kconfig` module renamed to `unstable-keyval` + +The `unstable-kconfig` module is now renamed to `unstable-keyval`. +We expect this module to become stable once it has some usage experience, +specifically in the next or the following release + diff --git a/docs/sitemap.txt b/docs/sitemap.txt index 3ac138e..4029a60 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -48,7 +48,7 @@ index.md SourceSet-module.md Windows-module.md Cuda-module.md - Kconfig-module.md + Keyval-module.md Java.md Vala.md D.md diff --git a/docs/theme/extra/templates/navbar_links.html b/docs/theme/extra/templates/navbar_links.html index 6980f81..832bd2c 100644 --- a/docs/theme/extra/templates/navbar_links.html +++ b/docs/theme/extra/templates/navbar_links.html @@ -14,7 +14,7 @@ ("Hotdoc-module.html","Hotdoc"), \ ("i18n-module.html","i18n"), \ ("Icestorm-module.html","Icestorm"), \ - ("Kconfig-module.html","kconfig"), \ + ("Keyval-module.html","Keyval"), \ ("Pkgconfig-module.html","Pkgconfig"), \ ("Python-module.html","Python"), \ ("Python-3-module.html","Python 3"), \ diff --git a/mesonbuild/modules/unstable_kconfig.py b/mesonbuild/modules/unstable_kconfig.py deleted file mode 100644 index 6685710..0000000 --- a/mesonbuild/modules/unstable_kconfig.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2017, 2019 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from . import ExtensionModule - -from .. 
import mesonlib -from ..mesonlib import typeslistify -from ..interpreterbase import FeatureNew, noKwargs -from ..interpreter import InvalidCode - -import os - -class KconfigModule(ExtensionModule): - - @FeatureNew('Kconfig Module', '0.51.0') - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.snippets.add('load') - - def _load_file(self, path_to_config): - result = dict() - try: - with open(path_to_config) as f: - for line in f: - if '#' in line: - comment_idx = line.index('#') - line = line[:comment_idx] - line = line.strip() - try: - name, val = line.split('=', 1) - except ValueError: - continue - result[name.strip()] = val.strip() - except IOError as e: - raise mesonlib.MesonException('Failed to load {}: {}'.format(path_to_config, e)) - - return result - - @noKwargs - def load(self, interpreter, state, args, kwargs): - sources = typeslistify(args, (str, mesonlib.File)) - if len(sources) != 1: - raise InvalidCode('load takes only one file input.') - - s = sources[0] - is_built = False - if isinstance(s, mesonlib.File): - if s.is_built: - FeatureNew('kconfig.load() of built files', '0.52.0').use(state.subproject) - is_built = True - s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir) - else: - s = os.path.join(interpreter.environment.source_dir, s) - - if s not in interpreter.build_def_files and not is_built: - interpreter.build_def_files.append(s) - - return self._load_file(s) - - -def initialize(*args, **kwargs): - return KconfigModule(*args, **kwargs) diff --git a/mesonbuild/modules/unstable_keyval.py b/mesonbuild/modules/unstable_keyval.py new file mode 100644 index 0000000..3da2992 --- /dev/null +++ b/mesonbuild/modules/unstable_keyval.py @@ -0,0 +1,71 @@ +# Copyright 2017, 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import ExtensionModule + +from .. 
import mesonlib +from ..mesonlib import typeslistify +from ..interpreterbase import FeatureNew, noKwargs +from ..interpreter import InvalidCode + +import os + +class KeyvalModule(ExtensionModule): + + @FeatureNew('Keyval Module', '0.55.0') + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.snippets.add('load') + + def _load_file(self, path_to_config): + result = dict() + try: + with open(path_to_config) as f: + for line in f: + if '#' in line: + comment_idx = line.index('#') + line = line[:comment_idx] + line = line.strip() + try: + name, val = line.split('=', 1) + except ValueError: + continue + result[name.strip()] = val.strip() + except IOError as e: + raise mesonlib.MesonException('Failed to load {}: {}'.format(path_to_config, e)) + + return result + + @noKwargs + def load(self, interpreter, state, args, kwargs): + sources = typeslistify(args, (str, mesonlib.File)) + if len(sources) != 1: + raise InvalidCode('load takes only one file input.') + + s = sources[0] + is_built = False + if isinstance(s, mesonlib.File): + is_built = is_built or s.is_built + s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir) + else: + s = os.path.join(interpreter.environment.source_dir, s) + + if s not in interpreter.build_def_files and not is_built: + interpreter.build_def_files.append(s) + + return self._load_file(s) + + +def initialize(*args, **kwargs): + return KeyvalModule(*args, **kwargs) diff --git a/run_project_tests.py b/run_project_tests.py index 8cbf989..9da67b2 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -50,7 +50,7 @@ from run_tests import ensure_backend_detects_changes from run_tests import guess_backend ALL_TESTS = ['cmake', 'common', 'warning-meson', 'failing-meson', 'failing-build', 'failing-test', - 'kconfig', 'platform-osx', 'platform-windows', 'platform-linux', + 'keyval', 'platform-osx', 'platform-windows', 'platform-linux', 'java', 'C#', 'vala', 'rust', 'd', 'objective c', 'objective c++', 'fortran', 'swift', 'cuda', 'python3', 'python', 'fpga', 'frameworks', 'nasm', 'wasm' ] @@ -845,7 +845,7 @@ def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, ('failing-meson', 'failing', False), ('failing-build', 'failing build', False), ('failing-test', 'failing test', False), - ('kconfig', 'kconfig', False), + ('keyval', 'keyval', False), ('platform-osx', 'osx', not mesonlib.is_osx()), ('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), diff --git a/test cases/common/222 source set realistic example/meson.build b/test cases/common/222 source set realistic example/meson.build index 5b0e495..106b81d 100644 --- a/test cases/common/222 source set realistic example/meson.build +++ b/test cases/common/222 source set realistic example/meson.build @@ -1,4 +1,4 @@ -# a sort-of realistic example that combines the sourceset and kconfig +# a sort-of realistic example that combines the sourceset and keyval # modules, inspired by QEMU's build system project('sourceset-example', 'cpp', default_options: ['cpp_std=c++11']) @@ -9,7 +9,7 @@ if cppid == 'pgi' endif ss = import('sourceset') -kconfig = import('unstable-kconfig') +keyval = import('unstable-keyval') zlib = declare_dependency(compile_args: '-DZLIB=1') another = declare_dependency(compile_args: '-DANOTHER=1') @@ -39,7 +39,7 @@ targets = [ 'arm', 'aarch64', 'x86' ] target_dirs = { 'arm' : 'arm', 'aarch64' : 'arm', 'x86': 'x86' } foreach x : targets - config = kconfig.load('config' / x) + config = 
keyval.load('config' / x) target_specific = specific.apply(config, strict: false) target_common = common.apply(config, strict: false) target_deps = target_specific.dependencies() + target_common.dependencies() diff --git a/test cases/kconfig/1 basic/.config b/test cases/kconfig/1 basic/.config deleted file mode 100644 index 071d185..0000000 --- a/test cases/kconfig/1 basic/.config +++ /dev/null @@ -1,3 +0,0 @@ -CONFIG_VAL1=y -# CONFIG_VAL2 is not set -CONFIG_VAL_VAL=4 diff --git a/test cases/kconfig/1 basic/meson.build b/test cases/kconfig/1 basic/meson.build deleted file mode 100644 index 5dc8d19..0000000 --- a/test cases/kconfig/1 basic/meson.build +++ /dev/null @@ -1,16 +0,0 @@ -project('kconfig basic test') - -k = import('unstable-kconfig') -conf = k.load('.config') - -if not conf.has_key('CONFIG_VAL1') - error('Expected CONFIG_VAL1 to be set, but it wasn\'t') -endif - -if conf.has_key('CONFIG_VAL2') - error('Expected CONFIG_VAL2 not be set, but it was') -endif - -if conf.get('CONFIG_VAL_VAL').to_int() != 4 - error('Expected CONFIG_VAL_VAL to be 4') -endif diff --git a/test cases/kconfig/2 subdir/.config b/test cases/kconfig/2 subdir/.config deleted file mode 100644 index 0599d46..0000000 --- a/test cases/kconfig/2 subdir/.config +++ /dev/null @@ -1,2 +0,0 @@ -CONFIG_IS_SET=y -# CONFIG_NOT_IS_SET is not set diff --git a/test cases/kconfig/2 subdir/dir/meson.build b/test cases/kconfig/2 subdir/dir/meson.build deleted file mode 100644 index 12f1502..0000000 --- a/test cases/kconfig/2 subdir/dir/meson.build +++ /dev/null @@ -1,13 +0,0 @@ - -k = import('unstable-kconfig') - -conf = k.load(meson.source_root() / '.config') - -if not conf.has_key('CONFIG_IS_SET') - error('Expected CONFIG_IS_SET to be set, but it wasn\'t') -endif - -if conf.has_key('CONFIG_NOT_IS_SET') - error('Expected CONFIG_NOT_IS_SET not be set, but it was') -endif - diff --git a/test cases/kconfig/2 subdir/meson.build b/test cases/kconfig/2 subdir/meson.build deleted file mode 100644 index 1245b18..0000000 --- a/test cases/kconfig/2 subdir/meson.build +++ /dev/null @@ -1,4 +0,0 @@ -project('kconfig subdir test') - -# Test into sub directory -subdir('dir') diff --git a/test cases/kconfig/3 load_config files/dir/config b/test cases/kconfig/3 load_config files/dir/config deleted file mode 100644 index 0599d46..0000000 --- a/test cases/kconfig/3 load_config files/dir/config +++ /dev/null @@ -1,2 +0,0 @@ -CONFIG_IS_SET=y -# CONFIG_NOT_IS_SET is not set diff --git a/test cases/kconfig/3 load_config files/dir/meson.build b/test cases/kconfig/3 load_config files/dir/meson.build deleted file mode 100644 index d7b8d44..0000000 --- a/test cases/kconfig/3 load_config files/dir/meson.build +++ /dev/null @@ -1,13 +0,0 @@ - -k = import('unstable-kconfig') - -conf = k.load(files('config')) - -if not conf.has_key('CONFIG_IS_SET') - error('Expected CONFIG_IS_SET to be set, but it wasn\'t') -endif - -if conf.has_key('CONFIG_NOT_IS_SET') - error('Expected CONFIG_NOT_IS_SET not be set, but it was') -endif - diff --git a/test cases/kconfig/3 load_config files/meson.build b/test cases/kconfig/3 load_config files/meson.build deleted file mode 100644 index 1245b18..0000000 --- a/test cases/kconfig/3 load_config files/meson.build +++ /dev/null @@ -1,4 +0,0 @@ -project('kconfig subdir test') - -# Test into sub directory -subdir('dir') diff --git a/test cases/kconfig/4 load_config builddir/config b/test cases/kconfig/4 load_config builddir/config deleted file mode 100644 index 0599d46..0000000 --- a/test cases/kconfig/4 load_config builddir/config 
+++ /dev/null @@ -1,2 +0,0 @@ -CONFIG_IS_SET=y -# CONFIG_NOT_IS_SET is not set diff --git a/test cases/kconfig/4 load_config builddir/meson.build b/test cases/kconfig/4 load_config builddir/meson.build deleted file mode 100644 index 1924d23..0000000 --- a/test cases/kconfig/4 load_config builddir/meson.build +++ /dev/null @@ -1,14 +0,0 @@ -project('kconfig builddir test') - -k = import('unstable-kconfig') - -out_conf = configure_file(input: 'config', output: 'out-config', copy: true) -conf = k.load(out_conf) - -if not conf.has_key('CONFIG_IS_SET') - error('Expected CONFIG_IS_SET to be set, but it wasn\'t') -endif - -if conf.has_key('CONFIG_NOT_IS_SET') - error('Expected CONFIG_NOT_IS_SET not be set, but it was') -endif diff --git a/test cases/keyval/1 basic/.config b/test cases/keyval/1 basic/.config new file mode 100644 index 0000000..071d185 --- /dev/null +++ b/test cases/keyval/1 basic/.config @@ -0,0 +1,3 @@ +CONFIG_VAL1=y +# CONFIG_VAL2 is not set +CONFIG_VAL_VAL=4 diff --git a/test cases/keyval/1 basic/meson.build b/test cases/keyval/1 basic/meson.build new file mode 100644 index 0000000..fc7ddb3 --- /dev/null +++ b/test cases/keyval/1 basic/meson.build @@ -0,0 +1,16 @@ +project('keyval basic test') + +k = import('unstable-keyval') +conf = k.load('.config') + +if not conf.has_key('CONFIG_VAL1') + error('Expected CONFIG_VAL1 to be set, but it wasn\'t') +endif + +if conf.has_key('CONFIG_VAL2') + error('Expected CONFIG_VAL2 not be set, but it was') +endif + +if conf.get('CONFIG_VAL_VAL').to_int() != 4 + error('Expected CONFIG_VAL_VAL to be 4') +endif diff --git a/test cases/keyval/2 subdir/.config b/test cases/keyval/2 subdir/.config new file mode 100644 index 0000000..0599d46 --- /dev/null +++ b/test cases/keyval/2 subdir/.config @@ -0,0 +1,2 @@ +CONFIG_IS_SET=y +# CONFIG_NOT_IS_SET is not set diff --git a/test cases/keyval/2 subdir/dir/meson.build b/test cases/keyval/2 subdir/dir/meson.build new file mode 100644 index 0000000..dc1b478 --- /dev/null +++ b/test cases/keyval/2 subdir/dir/meson.build @@ -0,0 +1,13 @@ + +k = import('unstable-keyval') + +conf = k.load(meson.source_root() / '.config') + +if not conf.has_key('CONFIG_IS_SET') + error('Expected CONFIG_IS_SET to be set, but it wasn\'t') +endif + +if conf.has_key('CONFIG_NOT_IS_SET') + error('Expected CONFIG_NOT_IS_SET not be set, but it was') +endif + diff --git a/test cases/keyval/2 subdir/meson.build b/test cases/keyval/2 subdir/meson.build new file mode 100644 index 0000000..0651acf --- /dev/null +++ b/test cases/keyval/2 subdir/meson.build @@ -0,0 +1,4 @@ +project('keyval subdir test') + +# Test into sub directory +subdir('dir') diff --git a/test cases/keyval/3 load_config files/dir/config b/test cases/keyval/3 load_config files/dir/config new file mode 100644 index 0000000..0599d46 --- /dev/null +++ b/test cases/keyval/3 load_config files/dir/config @@ -0,0 +1,2 @@ +CONFIG_IS_SET=y +# CONFIG_NOT_IS_SET is not set diff --git a/test cases/keyval/3 load_config files/dir/meson.build b/test cases/keyval/3 load_config files/dir/meson.build new file mode 100644 index 0000000..43fba13 --- /dev/null +++ b/test cases/keyval/3 load_config files/dir/meson.build @@ -0,0 +1,13 @@ + +k = import('unstable-keyval') + +conf = k.load(files('config')) + +if not conf.has_key('CONFIG_IS_SET') + error('Expected CONFIG_IS_SET to be set, but it wasn\'t') +endif + +if conf.has_key('CONFIG_NOT_IS_SET') + error('Expected CONFIG_NOT_IS_SET not be set, but it was') +endif + diff --git a/test cases/keyval/3 load_config files/meson.build b/test 
cases/keyval/3 load_config files/meson.build new file mode 100644 index 0000000..0651acf --- /dev/null +++ b/test cases/keyval/3 load_config files/meson.build @@ -0,0 +1,4 @@ +project('keyval subdir test') + +# Test into sub directory +subdir('dir') diff --git a/test cases/keyval/4 load_config builddir/config b/test cases/keyval/4 load_config builddir/config new file mode 100644 index 0000000..0599d46 --- /dev/null +++ b/test cases/keyval/4 load_config builddir/config @@ -0,0 +1,2 @@ +CONFIG_IS_SET=y +# CONFIG_NOT_IS_SET is not set diff --git a/test cases/keyval/4 load_config builddir/meson.build b/test cases/keyval/4 load_config builddir/meson.build new file mode 100644 index 0000000..1bb0285 --- /dev/null +++ b/test cases/keyval/4 load_config builddir/meson.build @@ -0,0 +1,14 @@ +project('keyval builddir test') + +k = import('unstable-keyval') + +out_conf = configure_file(input: 'config', output: 'out-config', copy: true) +conf = k.load(out_conf) + +if not conf.has_key('CONFIG_IS_SET') + error('Expected CONFIG_IS_SET to be set, but it wasn\'t') +endif + +if conf.has_key('CONFIG_NOT_IS_SET') + error('Expected CONFIG_NOT_IS_SET not be set, but it was') +endif -- cgit v1.1 From 9492eec55fb8a3dc8c5cc11182621e98d15e8729 Mon Sep 17 00:00:00 2001 From: Andrei Alexeyev Date: Thu, 7 May 2020 04:43:36 +0300 Subject: Do not pass rpath flags to wasm-ld --- mesonbuild/linkers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 44c720f..db735e7 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -761,6 +761,11 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna def get_asneeded_args(self) -> T.List[str]: return [] + def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, + rpath_paths: str, build_rpath: str, + install_rpath: str) -> T.List[str]: + return [] + class CcrxDynamicLinker(DynamicLinker): -- cgit v1.1 From 10dc8f3c7cea6f236171e9eb2f3c3e123e7fec85 Mon Sep 17 00:00:00 2001 From: Reza Housseini Date: Thu, 7 May 2020 15:20:41 +0200 Subject: More robust cmake version detection --- mesonbuild/cmake/executor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index 66713a1..adc028c 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -132,7 +132,7 @@ class CMakeExecutor: msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.' mlog.warning(msg) return None - cmvers = re.sub(r'\s*(cmake|cmake3) version\s*', '', out.split('\n')[0]).strip() + cmvers = re.search(r'(cmake|cmake3)\s*version\s*([\d.]+)', out).group(2) return cmvers def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None: -- cgit v1.1 From d87925dc3a45fce97603b6165fb489c30e004b4d Mon Sep 17 00:00:00 2001 From: Szabi Tolnai Date: Fri, 8 May 2020 12:34:49 +0100 Subject: Allow indexed custom target to be used in executable's depends. 
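For illustration, a rough sketch of why calling the accessor matters for an indexed custom target output: the index object typically wraps the real target and only exposes the subdirectory through a method, so reading a plain `.subdir` attribute on it would fail. The classes below are hypothetical stand-ins, not Meson's actual implementation.

```python
# Hypothetical stand-ins for a custom target and an index into one of its
# outputs; only the accessor methods are common to both shapes.
class FakeCustomTarget:
    def __init__(self, subdir, outputs):
        self.subdir = subdir
        self._outputs = outputs

    def get_subdir(self):
        return self.subdir

    def get_outputs(self):
        return self._outputs


class FakeCustomTargetIndex:
    def __init__(self, target, index):
        self.target = target
        self.index = index

    def get_subdir(self):
        # Delegates to the wrapped target; there is no plain .subdir here.
        return self.target.get_subdir()

    def get_outputs(self):
        return [self.target.get_outputs()[self.index]]


gen = FakeCustomTarget('some/subdir', ['dep_file1', 'dep_file2'])
indexed = FakeCustomTargetIndex(gen, 1)
assert indexed.get_subdir() == 'some/subdir'   # works for both shapes
# indexed.subdir would raise AttributeError, which is what the fix avoids.
```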
Change-Id: I7f3e0e0dd9c413d7f6e3267de9664b89f2294e27 --- mesonbuild/build.py | 2 +- .../233 link depends indexed custom target/foo.c | 15 +++++++++++++++ .../make_file.py | 8 ++++++++ .../meson.build | 19 +++++++++++++++++++ 4 files changed, 43 insertions(+), 1 deletion(-) create mode 100644 test cases/common/233 link depends indexed custom target/foo.c create mode 100644 test cases/common/233 link depends indexed custom target/make_file.py create mode 100644 test cases/common/233 link depends indexed custom target/meson.build diff --git a/mesonbuild/build.py b/mesonbuild/build.py index fbf2b17..2b4b1b9 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -729,7 +729,7 @@ class BuildTarget(Target): File.from_source_file(environment.source_dir, self.subdir, s)) elif hasattr(s, 'get_outputs'): self.link_depends.extend( - [File.from_built_file(s.subdir, p) for p in s.get_outputs()]) + [File.from_built_file(s.get_subdir(), p) for p in s.get_outputs()]) else: raise InvalidArguments( 'Link_depends arguments must be strings, Files, ' diff --git a/test cases/common/233 link depends indexed custom target/foo.c b/test cases/common/233 link depends indexed custom target/foo.c new file mode 100644 index 0000000..58c86a6 --- /dev/null +++ b/test cases/common/233 link depends indexed custom target/foo.c @@ -0,0 +1,15 @@ +#include + +int main(void) { + const char *fn = DEPFILE; + FILE *f = fopen(fn, "r"); + if (!f) { + printf("could not open %s", fn); + return 1; + } + else { + printf("successfully opened %s", fn); + } + + return 0; +} diff --git a/test cases/common/233 link depends indexed custom target/make_file.py b/test cases/common/233 link depends indexed custom target/make_file.py new file mode 100644 index 0000000..6a43b7d --- /dev/null +++ b/test cases/common/233 link depends indexed custom target/make_file.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +import sys + +with open(sys.argv[1], 'w') as f: + print('# this file does nothing', file=f) + +with open(sys.argv[2], 'w') as f: + print('# this file does nothing', file=f) diff --git a/test cases/common/233 link depends indexed custom target/meson.build b/test cases/common/233 link depends indexed custom target/meson.build new file mode 100644 index 0000000..5c066e9 --- /dev/null +++ b/test cases/common/233 link depends indexed custom target/meson.build @@ -0,0 +1,19 @@ +project('link_depends_indexed_custom_target', 'c') + +if meson.backend().startswith('vs') + # FIXME: Broken on the VS backends + error('MESON_SKIP_TEST see https://github.com/mesonbuild/meson/issues/1799') +endif + +cmd = find_program('make_file.py') + +dep_files = custom_target('gen_dep', + command: [cmd, '@OUTPUT@'], + output: ['dep_file1', 'dep_file2']) + +exe = executable('foo', 'foo.c', + link_depends: dep_files[1], + c_args: ['-DDEPFILE="' + dep_files[0].full_path()+ '"']) + +# check that dep_file1 exists, which means that link_depends target ran +test('runtest', exe) -- cgit v1.1 From d298a00afb4286ce7dfae231c311500e0d3de728 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 8 May 2020 22:13:39 +0200 Subject: boost: Do not be strict about static if not specified (fixes #7057) --- mesonbuild/dependencies/boost.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index fb9d573..3262d8b 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -344,6 +344,7 @@ class BoostDependency(ExternalDependency): self.multithreading = kwargs.get('threading', 
'multi') == 'multi' self.boost_root = None + self.explicit_static = 'static' in kwargs # Extract and validate modules self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str] @@ -522,7 +523,7 @@ class BoostDependency(ExternalDependency): except (KeyError, IndexError, AttributeError): pass - libs = [x for x in libs if x.static == self.static] + libs = [x for x in libs if x.static == self.static or not self.explicit_static] libs = [x for x in libs if x.mt == self.multithreading] libs = [x for x in libs if x.version_matches(lib_vers)] libs = [x for x in libs if x.arch_matches(self.arch)] -- cgit v1.1 From c4960cefb00d62fe1ebd89e42a9b82ecfbc2443a Mon Sep 17 00:00:00 2001 From: Andrei Alexeyev Date: Thu, 7 May 2020 05:10:33 +0300 Subject: Improve Emscripten linker version detection --- mesonbuild/environment.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 64efda6..4ced8e0 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -985,12 +985,15 @@ class Environment: if 'Emscripten' in out: cls = EmscriptenCCompiler if lang == 'c' else EmscriptenCPPCompiler self.coredata.add_lang_args(cls.language, cls, for_machine, self) - # emcc cannot be queried to get the version out of it (it - # ignores -Wl,--version and doesn't have an alternative). - # Further, wasm-id *is* lld and will return `LLD X.Y.Z` if you - # call `wasm-ld --version`, but a special version of lld that - # takes different options. - p, o, _ = Popen_safe(['wasm-ld', '--version']) + + # emcc requires a file input in order to pass arguments to the + # linker. It'll exit with an error code, but still print the + # linker version. Old emcc versions ignore -Wl,--version completely, + # however. We'll report "unknown version" in that case. + with tempfile.NamedTemporaryFile(suffix='.c') as f: + cmd = compiler + [cls.LINKER_PREFIX + "--version", f.name] + _, o, _ = Popen_safe(cmd) + linker = WASMDynamicLinker( compiler, for_machine, cls.LINKER_PREFIX, [], version=search_version(o)) -- cgit v1.1 From efb86088bcf8960db440eadcd11c0e073c80ab52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Andr=C3=A9=20Lureau?= Date: Thu, 26 Mar 2020 19:22:41 +0100 Subject: python: install_sources() should default to pure, following the doc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit As stated by the doc, default to install python sources to purelib location, as they should not depend on platform. This also fixes discrepancy between get_install_dir() and install_sources() locations. 
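To make the behavioural change concrete, a minimal sketch of the keyword handling, simplified from the one-line diff below; the surrounding installation logic is omitted.

```python
# With no 'pure' keyword supplied by the meson.build author, sources now
# default to the platform-independent (purelib) install location.
def resolve_pure(kwargs):
    pure = kwargs.pop('pure', True)   # previously the default was False
    if not isinstance(pure, bool):
        raise ValueError('"pure" argument must be a boolean.')
    return pure

assert resolve_pure({}) is True                 # new default: purelib
assert resolve_pure({'pure': False}) is False   # explicit platlib still works
```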
Signed-off-by: Marc-André Lureau --- mesonbuild/modules/python.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index 79e1824..ceabd76 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -361,7 +361,7 @@ class PythonInstallation(ExternalProgramHolder): @permittedKwargs(['pure', 'subdir']) def install_sources_method(self, args, kwargs): - pure = kwargs.pop('pure', False) + pure = kwargs.pop('pure', True) if not isinstance(pure, bool): raise InvalidArguments('"pure" argument must be a boolean.') -- cgit v1.1 From d7e20b1543499b516f424ac3a831f402a884714d Mon Sep 17 00:00:00 2001 From: Christoph Reiter Date: Sun, 10 May 2020 23:36:15 +0200 Subject: Fix builtin check in has_function() with GCC 10 on Windows The builtin check had a special case that if a header was provided and the function wasn't defined, it would ignore the builtin to avoid non-functional builtins (for example __builtin_posix_memalign in MSYS2). GCC 10 gained support for __has_builtin() which now skipps this check and because __has_builtin(__builtin_posix_memalign) returns true the non functional builtin is now reported as available. To get the old behaviour back move the special case in front of the actual availability check. Fixes #7113 --- mesonbuild/compilers/mixins/clike.py | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 124c49c..df97598 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -727,24 +727,29 @@ class CLikeCompiler: # need to look for them differently. On nice compilers like clang, we # can just directly use the __has_builtin() macro. fargs['no_includes'] = '#include' not in prefix - fargs['__builtin_'] = '' if funcname.startswith('__builtin_') else '__builtin_' + is_builtin = funcname.startswith('__builtin_') + fargs['is_builtin'] = is_builtin + fargs['__builtin_'] = '' if is_builtin else '__builtin_' t = '''{prefix} int main(void) {{ + + /* With some toolchains (MSYS2/mingw for example) the compiler + * provides various builtins which are not really implemented and + * fall back to the stdlib where they aren't provided and fail at + * build/link time. In case the user provides a header, including + * the header didn't lead to the function being defined, and the + * function we are checking isn't a builtin itself we assume the + * builtin is not functional and we just error out. */ + #if !{no_includes:d} && !defined({func}) && !{is_builtin:d} + #error "No definition for {__builtin_}{func} found in the prefix" + #endif + #ifdef __has_builtin #if !__has_builtin({__builtin_}{func}) #error "{__builtin_}{func} not found" #endif #elif ! defined({func}) - /* Check for {__builtin_}{func} only if no includes were added to the - * prefix above, which means no definition of {func} can be found. - * We would always check for this, but we get false positives on - * MSYS2 if we do. Their toolchain is broken, but we can at least - * give them a workaround. */ - #if {no_includes:d} - {__builtin_}{func}; - #else - #error "No definition for {__builtin_}{func} found in the prefix" - #endif + {__builtin_}{func}; #endif return 0; }}''' -- cgit v1.1 From 630a00374600bffedbabb8ef623e16bed52f21c4 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 10 May 2020 18:16:23 +0300 Subject: Add AVR to cpu families. Closes #7085. 
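As a rough illustration of where the new entry is consumed: cross files declare a `cpu_family`, and Meson checks that value against this table, typically warning on names it does not recognise. The sketch below is illustrative only; the table is abridged and the warning text is not the exact message.

```python
# Abridged copy of the table with the new entry, plus the kind of
# membership check applied to a cross file's cpu_family value.
known_cpu_families = ('aarch64', 'alpha', 'arc', 'arm', 'avr', 'c2000',
                      'e2k', 'ia64', 'x86', 'x86_64')   # abridged

def validate_cpu_family(name):
    if name not in known_cpu_families:
        print('Warning: unknown CPU family {!r}'.format(name))

validate_cpu_family('avr')   # accepted without a warning after this change
```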
--- docs/markdown/Reference-tables.md | 1 + mesonbuild/envconfig.py | 1 + 2 files changed, 2 insertions(+) diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index dfae339..c42d608 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -81,6 +81,7 @@ set in the cross file. | alpha | DEC Alpha processor | | arc | 32 bit ARC processor | | arm | 32 bit ARM processor | +| avr | Atmel AVR processor | | e2k | MCST Elbrus processor | | c2000 | 32 bit C2000 processor | | ia64 | Itanium processor | diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index a4af39a..b74be35 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -40,6 +40,7 @@ known_cpu_families = ( 'alpha', 'arc', 'arm', + 'avr', 'c2000', 'e2k', 'ia64', -- cgit v1.1 From 245d659522fd73857bf7f4e83bd572d9cdcd7469 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Thu, 7 May 2020 16:58:22 -0400 Subject: ConfigToolDependency: Don't fallback to system tool when cross compiling The system tool is always the wrong thing to use and cause hard to debug issues when trying to link system libraries with cross built binaries. The ExternalDependency base class already had a method to deal with this, used by PkgConfigDependency and QtBaseDependency, so it should make things more consistent. --- docs/markdown/Dependencies.md | 3 +++ .../markdown/snippets/config_tool_no_cross_path.md | 7 ++++++ mesonbuild/dependencies/base.py | 25 ++++------------------ test cases/frameworks/21 libwmf/meson.build | 2 +- 4 files changed, 15 insertions(+), 22 deletions(-) create mode 100644 docs/markdown/snippets/config_tool_no_cross_path.md diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md index 17c9991..572a3d1 100644 --- a/docs/markdown/Dependencies.md +++ b/docs/markdown/Dependencies.md @@ -242,6 +242,9 @@ libgcrypt_dep = dependency('libgcrypt', version: '>= 1.8') gpgme_dep = dependency('gpgme', version: '>= 1.0') ``` +*Since 0.55.0* Meson won't search $PATH any more for a config tool binary when +cross compiling if the config tool did not have an entry in the cross file. + ## AppleFrameworks Use the `modules` keyword to list frameworks required, e.g. diff --git a/docs/markdown/snippets/config_tool_no_cross_path.md b/docs/markdown/snippets/config_tool_no_cross_path.md new file mode 100644 index 0000000..cec22e4 --- /dev/null +++ b/docs/markdown/snippets/config_tool_no_cross_path.md @@ -0,0 +1,7 @@ +## Config tool based dependencies no longer search PATH for cross compiling + +Before 0.55.0 config tool based dependencies (llvm-config, cups-config, etc), +would search system $PATH if they weren't defined in the cross file. This has +been a source of bugs and has been deprecated. It is now removed, config tool +binaries must be specified in the cross file now or the dependency will not +be found. diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 95a3956..bcb1531 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -437,28 +437,11 @@ class ConfigToolDependency(ExternalDependency): """ if not isinstance(versions, list) and versions is not None: versions = listify(versions) - - tool = self.env.lookup_binary_entry(self.for_machine, self.tool_name) - if tool is not None: - tools = [tool] - else: - if not self.env.machines.matches_build_machine(self.for_machine): - mlog.deprecation('No entry for {0} specified in your cross file. ' - 'Falling back to searching PATH. 
This may find a ' - 'native version of {0}! This will become a hard ' - 'error in a future version of meson'.format(self.tool_name)) - tools = [[t] for t in self.tools] - best_match = (None, None) - for tool in tools: - if len(tool) == 1: - # In some situations the command can't be directly executed. - # For example Shell scripts need to be called through sh on - # Windows (see issue #1423). - potential_bin = ExternalProgram(tool[0], silent=True) - if not potential_bin.found(): - continue - tool = potential_bin.get_command() + for potential_bin in self.search_tool(self.tool_name, self.tool_name, self.tools): + if not potential_bin.found(): + continue + tool = potential_bin.get_command() try: p, out = Popen_safe(tool + [self.version_arg])[:2] except (FileNotFoundError, PermissionError): diff --git a/test cases/frameworks/21 libwmf/meson.build b/test cases/frameworks/21 libwmf/meson.build index 6952bf7..9dbab6a 100644 --- a/test cases/frameworks/21 libwmf/meson.build +++ b/test cases/frameworks/21 libwmf/meson.build @@ -1,7 +1,7 @@ project('libwmf test', 'c') wm = find_program('libwmf-config', required : false) -if not wm.found() +if not wm.found() or meson.is_cross_build() error('MESON_SKIP_TEST: libwmf-config not installed') endif -- cgit v1.1 From ee790eec2ae7bf335d0b0229474ceabf0cebfcc4 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 5 May 2020 10:10:23 -0700 Subject: interpreter: Don't assign duplication and new feature warning to the same variable Currently The Deprecated and New features checkers share an attribute through a base class that should be per class. We need to duplicate this and move it into each of the sublcasses Fixes #7080 --- mesonbuild/interpreterbase.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 6246a06..fc666a6 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -215,9 +215,8 @@ class permittedKwargs: class FeatureCheckBase: "Base class for feature version checks" - # Class variable, shared across all instances - # - # Format: {subproject: {feature_version: set(feature_names)}} + # In python 3.6 we can just forward declare this, but in 3.5 we can't + # This will be overwritten by the subclasses by necessity feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]] def __init__(self, feature_name: str, version: str): @@ -283,6 +282,11 @@ class FeatureCheckBase: class FeatureNew(FeatureCheckBase): """Checks for new features""" + # Class variable, shared across all instances + # + # Format: {subproject: {feature_version: set(feature_names)}} + feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]] + @staticmethod def get_warning_str_prefix(tv: str) -> str: return 'Project specifies a minimum meson_version \'{}\' but uses features which were added in newer versions:'.format(tv) @@ -294,6 +298,11 @@ class FeatureNew(FeatureCheckBase): class FeatureDeprecated(FeatureCheckBase): """Checks for deprecated features""" + # Class variable, shared across all instances + # + # Format: {subproject: {feature_version: set(feature_names)}} + feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]] + @staticmethod def get_warning_str_prefix(tv: str) -> str: return 'Deprecated features used:' -- cgit v1.1 From f29f3f9f28fbe143e9785ed54f088f004be704cd Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 6 May 2020 13:12:39 -0700 Subject: interpreterbase: Fix version checking for 
deprecation Currently deprecation features use the same logic as new features, but that doesn't work correctly. FeatureNew wants to warn about cases where you claim to support >= 0.40, but use a feature from 0.42; deprecation wants to warn when you claim to support >= 0.50, but use a feature that was replaced in 0.45. To make this work we need to invert the version check in the deprecation function, so that if the deprecation is 0.45, and the supported version is >= 0.50, we get a true not a false. --- mesonbuild/interpreterbase.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index fc666a6..82d16f1 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -18,6 +18,7 @@ from . import mparser, mesonlib, mlog from . import environment, dependencies +import abc import os, copy, re import collections.abc from functools import wraps @@ -212,7 +213,7 @@ class permittedKwargs: return f(*wrapped_args, **wrapped_kwargs) return wrapped -class FeatureCheckBase: +class FeatureCheckBase(metaclass=abc.ABCMeta): "Base class for feature version checks" # In python 3.6 we can just forward declare this, but in 3.5 we can't @@ -230,13 +231,18 @@ class FeatureCheckBase: return '' return mesonlib.project_meson_versions[subproject] + @staticmethod + @abc.abstractmethod + def check_version(target_version: str, feature_Version: str) -> bool: + pass + def use(self, subproject: str) -> None: tv = self.get_target_version(subproject) # No target version if tv == '': return # Target version is new enough - if mesonlib.version_compare_condition_with_min(tv, self.feature_version): + if self.check_version(tv, self.feature_version): return # Feature is too new for target version, register it if subproject not in self.feature_registry: @@ -288,6 +294,10 @@ class FeatureNew(FeatureCheckBase): feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]] @staticmethod + def check_version(target_version: str, feature_version: str) -> bool: + return mesonlib.version_compare_condition_with_min(target_version, feature_version) + + @staticmethod def get_warning_str_prefix(tv: str) -> str: return 'Project specifies a minimum meson_version \'{}\' but uses features which were added in newer versions:'.format(tv) @@ -304,6 +314,11 @@ class FeatureDeprecated(FeatureCheckBase): feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]] @staticmethod + def check_version(target_version: str, feature_version: str) -> bool: + # For deprecatoin checks we need to return the inverse of FeatureNew checks + return not mesonlib.version_compare_condition_with_min(target_version, feature_version) + + @staticmethod def get_warning_str_prefix(tv: str) -> str: return 'Deprecated features used:' -- cgit v1.1 From 956cba02224d5e38642c5fd298a290927a2cb358 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Mon, 11 May 2020 17:41:20 +0200 Subject: cmake: Ignore unknown c(pp)? 
stds (fixes #7104) --- mesonbuild/cmake/interpreter.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 125f18b..0a452d1 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -289,7 +289,15 @@ class ConverterTarget: for j in self.compile_opts[i]: m = ConverterTarget.std_regex.match(j) if m: - self.override_options += ['{}_std={}'.format(i, m.group(2))] + std = m.group(2) + if std not in self._all_lang_stds(i): + mlog.warning( + 'Unknown {}_std "{}" -> Ingoring. Try setting the project' + 'level {}_std if build errors occur.'.format(i, std), + once=True + ) + continue + self.override_options += ['{}_std={}'.format(i, std)] elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']: self.pie = True elif j in blacklist_compiler_flags: @@ -539,6 +547,13 @@ class ConverterTarget: suffixes += [x for x in exts] return suffixes + @lru_cache(maxsize=None) + def _all_lang_stds(self, lang: str) -> T.List[str]: + lang_opts = self.env.coredata.compiler_options.build.get(lang, None) + if not lang_opts or 'std' not in lang_opts: + return [] + return lang_opts['std'].choices + def process_inter_target_dependencies(self): # Move the dependencies from all transfer_dependencies_from to the target to_process = list(self.depends) -- cgit v1.1 From c4fa0fac3dd4f4451f73a50c3c523c8f83a3a6e1 Mon Sep 17 00:00:00 2001 From: Christoph Reiter Date: Tue, 12 May 2020 18:55:15 +0200 Subject: Fix has_function() for clang on 64bit Windows has_function() tries to link an example program using the function to see if it is available, but with clang on 64bit Windows this example always already failed at the compile step: error: cast from pointer to smaller type 'long' loses information long b = (long) a; This is due to long!=pointer with LLP64 Change from "long" to "long long" which is min 64bit and should always fit a pointer. While "long long" is strictly a C99 feature every non super ancient compiler should still support it. --- mesonbuild/compilers/mixins/clike.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index df97598..0333ffa 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -658,7 +658,7 @@ class CLikeCompiler: # is not run so we don't care what the return value is. main = '''\nint main(void) {{ void *a = (void*) &{func}; - long b = (long) a; + long long b = (long long) a; return (int) b; }}''' return head, main -- cgit v1.1 From 859dc4255aa40611d323d1b22f70bb20c09f317d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ole=20Andr=C3=A9=20Vadla=20Ravn=C3=A5s?= Date: Thu, 9 Apr 2020 21:09:05 +0000 Subject: Fix outdated cross-compilation checks --- mesonbuild/backend/backends.py | 2 +- mesonbuild/compilers/mixins/clike.py | 5 +++-- mesonbuild/coredata.py | 4 +++- mesonbuild/environment.py | 20 ++++++++++---------- 4 files changed, 17 insertions(+), 14 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 2727abe..7f7c434 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -768,7 +768,7 @@ class Backend: # E.g. an external verifier or simulator program run on a generated executable. # Can always be run without a wrapper. 
test_for_machine = MachineChoice.BUILD - is_cross = not self.environment.machines.matches_build_machine(test_for_machine) + is_cross = self.environment.is_cross_build(test_for_machine) if is_cross and self.environment.need_exe_wrapper(): exe_wrapper = self.environment.get_exe_wrapper() else: diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 0333ffa..e7b0cd2 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -369,7 +369,8 @@ class CLikeCompiler: dependencies=dependencies, mode='link', disable_cache=disable_cache) def run(self, code: str, env, *, extra_args=None, dependencies=None): - if self.is_cross and self.exe_wrapper is None: + need_exe_wrapper = env.need_exe_wrapper(self.for_machine) + if need_exe_wrapper and self.exe_wrapper is None: raise compilers.CrossNoRunException('Can not run test applications in this cross environment.') with self._build_wrapper(code, env, extra_args, dependencies, mode='link', want_output=True) as p: if p.returncode != 0: @@ -377,7 +378,7 @@ class CLikeCompiler: p.input_name, p.returncode)) return compilers.RunResult(False) - if self.is_cross: + if need_exe_wrapper: cmdlist = self.exe_wrapper + [p.output_name] else: cmdlist = p.output_name diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index fcb13f6..c337dc3 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -682,7 +682,9 @@ class CoreData: if type(oldval) != type(value): self.user_options[name] = value - def is_cross_build(self) -> bool: + def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool: + if when_building_for == MachineChoice.BUILD: + return False return len(self.cross_files) > 0 def strip_build_option_names(self, options): diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 4ced8e0..8fad628 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -635,8 +635,8 @@ class Environment: self.coredata.meson_command = mesonlib.meson_command self.first_invocation = True - def is_cross_build(self) -> bool: - return self.coredata.is_cross_build() + def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool: + return self.coredata.is_cross_build(when_building_for) def dump_coredata(self): return coredata.save(self.coredata, self.get_build_dir()) @@ -899,7 +899,7 @@ class Environment: def _detect_c_or_cpp_compiler(self, lang: str, for_machine: MachineChoice) -> Compiler: popen_exceptions = {} compilers, ccache, exe_wrap = self._get_compilers(lang, for_machine) - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) info = self.machines[for_machine] for compiler in compilers: @@ -1152,7 +1152,7 @@ class Environment: def detect_cuda_compiler(self, for_machine): popen_exceptions = {} - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) compilers, ccache, exe_wrap = self._get_compilers('cuda', for_machine) info = self.machines[for_machine] for compiler in compilers: @@ -1192,7 +1192,7 @@ class Environment: def detect_fortran_compiler(self, for_machine: MachineChoice): popen_exceptions = {} compilers, ccache, exe_wrap = self._get_compilers('fortran', for_machine) - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) info = self.machines[for_machine] for compiler in compilers: if isinstance(compiler, str): @@ -1311,7 +1311,7 @@ 
class Environment: def _detect_objc_or_objcpp_compiler(self, for_machine: MachineInfo, objc: bool) -> 'Compiler': popen_exceptions = {} compilers, ccache, exe_wrap = self._get_compilers('objc' if objc else 'objcpp', for_machine) - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) info = self.machines[for_machine] for compiler in compilers: @@ -1402,7 +1402,7 @@ class Environment: def detect_vala_compiler(self, for_machine): exelist = self.lookup_binary_entry(for_machine, 'vala') - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) info = self.machines[for_machine] if exelist is None: # TODO support fallback @@ -1422,7 +1422,7 @@ class Environment: def detect_rust_compiler(self, for_machine): popen_exceptions = {} compilers, ccache, exe_wrap = self._get_compilers('rust', for_machine) - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) info = self.machines[for_machine] cc = self.detect_c_compiler(for_machine) @@ -1513,7 +1513,7 @@ class Environment: arch = 'x86_mscoff' popen_exceptions = {} - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) results, ccache, exe_wrap = self._get_compilers('d', for_machine) for exelist in results: # Search for a D compiler. @@ -1604,7 +1604,7 @@ class Environment: def detect_swift_compiler(self, for_machine): exelist = self.lookup_binary_entry(for_machine, 'swift') - is_cross = not self.machines.matches_build_machine(for_machine) + is_cross = self.is_cross_build(for_machine) info = self.machines[for_machine] if exelist is None: # TODO support fallback -- cgit v1.1 From ab6a410426d614f95b779797f3dfb289eb3672d9 Mon Sep 17 00:00:00 2001 From: Eric Lemanissier Date: Wed, 13 May 2020 11:24:52 +0200 Subject: macos: Remove framwork linkerlike args fixes-up 33fbc548ab74e79280d2f57b2cd499d14c1f1e91 --- mesonbuild/compilers/compilers.py | 2 +- run_unittests.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 3d3a503..385ef5e 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -1136,7 +1136,7 @@ class Compiler: def remove_linkerlike_args(self, args): rm_exact = ('-headerpad_max_install_names',) rm_prefixes = ('-Wl,', '-L',) - rm_next = ('-L',) + rm_next = ('-L', '-framework',) ret = [] iargs = iter(args) for arg in iargs: diff --git a/run_unittests.py b/run_unittests.py index 2939b20..d6f7911 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -5325,7 +5325,7 @@ class DarwinTests(BasePlatformTests): def test_removing_unused_linker_args(self): testdir = os.path.join(self.common_test_dir, '108 has arg') - env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic'} + env = {'CFLAGS': '-L/tmp -L /var/tmp -headerpad_max_install_names -Wl,-export_dynamic -framework Foundation'} self.init(testdir, override_envvars=env) -- cgit v1.1 From d526af89ca0e52fa076a805e4f585d16dbd1562a Mon Sep 17 00:00:00 2001 From: Marcel Hollerbach Date: Wed, 13 May 2020 14:02:29 +0200 Subject: interpretor: Do not add dependencies if we already have them in tree like dep structures with a lot of source: declarations, this can result in a lot of presure on the source list. this saves ~3s out of 7s in the interpretor stage in efl build. 
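The shape of the optimisation is easy to show in isolation: remember which dependency objects have already been processed and return early for repeats, so deep dependency trees no longer re-append the same sources and arguments. The following is a simplified sketch of that pattern, not the full add_deps() logic.

```python
# Simplified model of the de-duplication the patch adds to BuildTarget.
class Target:
    def __init__(self):
        self.sources = []
        self.added_deps = set()   # new: remembers processed dependencies

    def add_deps(self, deps):
        for dep in deps:
            if dep in self.added_deps:
                continue          # already merged: skip the expensive work
            self.sources.extend(dep.sources)
            self.added_deps.add(dep)


class Dep:
    def __init__(self, sources):
        self.sources = sources


t = Target()
d = Dep(['a.c', 'b.c'])
t.add_deps([d, d])       # the duplicate in the same call is ignored
t.add_deps([d])          # and so are later repeats
assert t.sources == ['a.c', 'b.c']
```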
--- mesonbuild/build.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 2b4b1b9..2ba7c59 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -497,6 +497,7 @@ class BuildTarget(Target): self.link_targets = [] self.link_whole_targets = [] self.link_depends = [] + self.added_deps = set() self.name_prefix_set = False self.name_suffix_set = False self.filename = 'no_name' @@ -1053,6 +1054,8 @@ This will become a hard error in a future Meson release.''') def add_deps(self, deps): deps = listify(deps) for dep in unholder(deps): + if dep in self.added_deps: + continue if isinstance(dep, dependencies.InternalDependency): # Those parts that are internal. self.process_sourcelist(dep.sources) @@ -1091,6 +1094,7 @@ You probably should put it in link_with instead.''') 'either an external dependency (returned by find_library() or ' 'dependency()) or an internal dependency (returned by ' 'declare_dependency()).'.format(type(dep).__name__)) + self.added_deps.add(dep) def get_external_deps(self): return self.external_deps -- cgit v1.1 From 7ad8b5f221ed23ff9ca60349f76e91f79ecd211d Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 12 May 2020 10:06:41 -0700 Subject: docs: Add a Howto about the null dependency [skip ci] --- docs/markdown/howtox.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index 8231d3d..abf7519 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -260,3 +260,26 @@ The `cmake_module_path` property is only needed for custom CMake scripts. System wide CMake scripts are found automatically. More information can be found [here](Dependencies.md#cmake) + +## Get a default not-found dependency? + +```meson +null_dep = dependency('', required : false) +``` + +This can be used in cases where you want a default value, but might override it +later. + +```meson +my_dep = dependency('', required : false) +if host_machine.system() in ['freebsd', 'netbsd', 'openbsd', 'dragonfly'] + my_dep = dependency('some dep', required : false) +elif host_machine.system() == 'linux' + my_dep = dependency('some other dep', required : false) +endif + +# Last ditch effort! +if no my_dep.found() + my_dep = meson.get_compiler('c').find_library('dep') +endif +``` -- cgit v1.1 From 57e55b1a9ddce282f0b5d0508139f0f06ce952e5 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Mon, 11 May 2020 14:41:22 +0200 Subject: boost: Try finding libraries with the matching arch (fixes #7110) --- mesonbuild/dependencies/boost.py | 47 +++++++++++++++++++++++++++++++++++----- mesonbuild/mesonlib.py | 10 +++++++++ 2 files changed, 52 insertions(+), 5 deletions(-) diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 3262d8b..2e84820 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -412,10 +412,19 @@ class BoostDependency(ExternalDependency): break def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool: + mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs])) + mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs])) + # 2. 
Find all boost libraries libs = [] # type: T.List[BoostLibraryFile] for i in lib_dirs: - libs += self.detect_libraries(i) + libs = self.detect_libraries(i) + if libs: + mlog.debug(' - found boost library dir: {}'.format(i)) + # mlog.debug(' - raw library list:') + # for j in libs: + # mlog.debug(' - {}'.format(j)) + break libs = sorted(set(libs)) modules = ['boost_' + x for x in self.modules] @@ -423,9 +432,6 @@ class BoostDependency(ExternalDependency): mlog.debug(' - found boost {} include dir: {}'.format(inc.version, inc.path)) f_libs = self.filter_libraries(libs, inc.version_lib) - # mlog.debug(' - raw library list:') - # for j in libs: - # mlog.debug(' - {}'.format(j)) mlog.debug(' - filtered library list:') for j in f_libs: mlog.debug(' - {}'.format(j)) @@ -500,6 +506,19 @@ class BoostDependency(ExternalDependency): return [self._include_dir_from_version_header(x) for x in candidates] def detect_lib_dirs(self, root: Path) -> T.List[Path]: + # First check the system include paths. Only consider those within the + # given root path + system_dirs_t = self.clib_compiler.get_library_dirs(self.env) + system_dirs = [Path(x) for x in system_dirs_t] + system_dirs = [x.resolve() for x in system_dirs if x.exists()] + system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)] + system_dirs = list(mesonlib.OrderedSet(system_dirs)) + + if system_dirs: + return system_dirs + + # No system include paths were found --> fall back to manually looking + # for library dirs in root dirs = [] # type: T.List[Path] subdirs = [] # type: T.List[Path] for i in root.iterdir(): @@ -511,7 +530,25 @@ class BoostDependency(ExternalDependency): for j in i.iterdir(): if j.is_dir() and j.name.endswith('-linux-gnu'): subdirs += [j] - return dirs + subdirs + + # Filter out paths that don't match the target arch to avoid finding + # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110 + if not self.arch: + return dirs + subdirs + + arch_list_32 = ['32', 'i386'] + arch_list_64 = ['64'] + + raw_list = dirs + subdirs + no_arch = [x for x in raw_list if not any([y in x.name for y in arch_list_32 + arch_list_64])] + + matching_arch = [] # type: T.List[Path] + if '32' in self.arch: + matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_32])] + elif '64' in self.arch: + matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_64])] + + return sorted(matching_arch) + sorted(no_arch) def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]: # MSVC is very picky with the library tags diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index 6c1e466..b901ec9 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -1533,6 +1533,16 @@ def relpath(path: str, start: str) -> str: except (TypeError, ValueError): return path +def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool: + # Check wheter a path is within the root directory root + try: + if resolve: + path.resolve().relative_to(root.resolve()) + else: + path.relative_to(root) + except ValueError: + return False + return True class LibType(Enum): -- cgit v1.1 From 37bade6f8760a4e443a8daddbcf6acd4e84b5eab Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 14 May 2020 09:41:12 +0530 Subject: gfortran: Fix has_header implementation with GCC 10 __has_include is not accepted as a pre-processor directive in Fortran code since GCC 10. 
Closes https://github.com/mesonbuild/meson/issues/7017 --- mesonbuild/compilers/fortran.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index 01283a1..c155b5b 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -214,6 +214,18 @@ class GnuFortranCompiler(GnuCompiler, FortranCompiler): def language_stdlib_only_link_flags(self) -> T.List[str]: return ['-lgfortran', '-lm'] + def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None, disable_cache=False): + ''' + Derived from mixins/clike.py:has_header, but without C-style usage of + __has_include which breaks with GCC-Fortran 10: + https://github.com/mesonbuild/meson/issues/7017 + ''' + fargs = {'prefix': prefix, 'header': hname} + code = '{prefix}\n#include <{header}>' + return self.compiles(code.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies, mode='preprocess', disable_cache=disable_cache) + + class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler): def __init__(self, exelist, version, for_machine: MachineChoice, is_cross, info: 'MachineInfo', exe_wrapper=None, -- cgit v1.1 From bcf29ede06c95f2ee23f3a9994da8a61e1ebaebe Mon Sep 17 00:00:00 2001 From: Matthew Waters Date: Thu, 14 May 2020 13:07:26 +1000 Subject: ui/qt: use new directory layout for qt on android Now follows ios and other platform directory layouts. Moves from separate android_$arch directories to every library containing a _$arch suffix. e.g. libQt5Core_x86.a in a single directory. --- mesonbuild/dependencies/ui.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 4cec814..6e8cae7 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -406,6 +406,9 @@ class QtBaseDependency(ExternalDependency): if libfile: libfile = libfile[0] else: + mlog.log("Could not find:", module, + self.qtpkgname + module + modules_lib_suffix, + 'in', libdir) self.is_found = False break self.link_args.append(libfile) @@ -426,6 +429,17 @@ class QtBaseDependency(ExternalDependency): if self.env.machines[self.for_machine].is_darwin(): if is_debug: suffix += '_debug' + if mesonlib.version_compare(self.version, '>= 5.14.0'): + if self.env.machines[self.for_machine].is_android(): + cpu_family = self.env.machines[self.for_machine].cpu_family + if cpu_family == 'x86': + suffix += '_x86' + elif cpu_family == 'x86_64': + suffix += '_x86_64' + elif cpu_family == 'arm': + suffix += '_armeabi-v7a' + elif cpu_family == 'aarch64': + suffix += '_arm64-v8a' return suffix def _link_with_qtmain(self, is_debug, libdir): -- cgit v1.1 From 84cfa2bf5127cff73159e52f8494b9b836ce67e0 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 14 May 2020 15:12:58 +0530 Subject: ui/qt: Warn if Android cpu_family is unknown --- mesonbuild/dependencies/ui.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 6e8cae7..741f0b8 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -440,6 +440,9 @@ class QtBaseDependency(ExternalDependency): suffix += '_armeabi-v7a' elif cpu_family == 'aarch64': suffix += '_arm64-v8a' + else: + mlog.warning('Android target arch {!r} for Qt5 is unknown, ' + 'module detection may not work'.format(cpu_family)) return suffix def _link_with_qtmain(self, is_debug, libdir): -- cgit v1.1 From 76c636daac9bd2c7d7fb32b6930af2b0d8a6e020 Mon 
Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 14 May 2020 17:43:43 +0530 Subject: cmake: Fix string substitution index error ``` File "mesonbuild/cmake/interpreter.py", line 293, in postprocess 'Unknown {}_std "{}" -> Ingoring. Try setting the project' IndexError: Replacement index 2 out of range for positional args tuple ``` --- mesonbuild/cmake/interpreter.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 0a452d1..1e033c7 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -292,8 +292,8 @@ class ConverterTarget: std = m.group(2) if std not in self._all_lang_stds(i): mlog.warning( - 'Unknown {}_std "{}" -> Ingoring. Try setting the project' - 'level {}_std if build errors occur.'.format(i, std), + 'Unknown {0}_std "{1}" -> Ignoring. Try setting the project-' + 'level {0}_std if build errors occur.'.format(i, std), once=True ) continue -- cgit v1.1 From cb97c3baf8fdb106574657f5b6a77016aba542d1 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 14 May 2020 10:32:38 +0530 Subject: unit tests: Fix broken test_cross_libdir test test_cross_libdir() was broken because we were passing `--libdir=lib` when invoking meson. We didn't notice that https://github.com/mesonbuild/meson/issues/6115 broke because of this. --- run_unittests.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/run_unittests.py b/run_unittests.py index d6f7911..8dde4e5 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -1468,14 +1468,14 @@ class DataTests(unittest.TestCase): class BasePlatformTests(unittest.TestCase): + prefix = '/usr' + libdir = 'lib' def setUp(self): super().setUp() self.maxDiff = None src_root = os.path.dirname(__file__) src_root = os.path.join(os.getcwd(), src_root) self.src_root = src_root - self.prefix = '/usr' - self.libdir = 'lib' # Get the backend # FIXME: Extract this from argv? self.backend = getattr(Backend, os.environ.get('MESON_UNIT_TEST_BACKEND', 'ninja')) @@ -1588,8 +1588,9 @@ class BasePlatformTests(unittest.TestCase): extra_args = [extra_args] args = [srcdir, self.builddir] if default_args: - args += ['--prefix', self.prefix, - '--libdir', self.libdir] + args += ['--prefix', self.prefix] + if self.libdir: + args += ['--libdir', self.libdir] if self.meson_native_file: args += ['--native-file', self.meson_native_file] if self.meson_cross_file: @@ -6634,11 +6635,17 @@ c = ['{0}'] os.unlink(wrap_filename) +class BaseLinuxCrossTests(BasePlatformTests): + # Don't pass --libdir when cross-compiling. We have tests that + # check whether meson auto-detects it correctly. 
+ libdir = None + + def should_run_cross_arm_tests(): return shutil.which('arm-linux-gnueabihf-gcc') and not platform.machine().lower().startswith('arm') @unittest.skipUnless(not is_windows() and should_run_cross_arm_tests(), "requires ability to cross compile to ARM") -class LinuxCrossArmTests(BasePlatformTests): +class LinuxCrossArmTests(BaseLinuxCrossTests): ''' Tests that cross-compilation to Linux/ARM works ''' @@ -6719,7 +6726,7 @@ def should_run_cross_mingw_tests(): return shutil.which('x86_64-w64-mingw32-gcc') and not (is_windows() or is_cygwin()) @unittest.skipUnless(not is_windows() and should_run_cross_mingw_tests(), "requires ability to cross compile with MinGW") -class LinuxCrossMingwTests(BasePlatformTests): +class LinuxCrossMingwTests(BaseLinuxCrossTests): ''' Tests that cross-compilation to Windows/MinGW works ''' -- cgit v1.1 From 3e134975749b67b8c799a8b8fd065721de1cb48a Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 14 May 2020 10:57:04 +0530 Subject: coredata: Fixup the default libdir value, not the set value We shouldn't change the value of libdir after builtins have been initialized because we want to change the *default* value. --- mesonbuild/coredata.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 754be1d..8774b80 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -384,8 +384,8 @@ class CoreData: self.compiler_check_cache = OrderedDict() # Only to print a warning if it changes between Meson invocations. self.config_files = self.__load_config_files(options, scratch_dir, 'native') + self.builtin_options_libdir_cross_fixup() self.init_builtins('') - self.libdir_cross_fixup() @staticmethod def __load_config_files(options: argparse.Namespace, scratch_dir: str, ftype: str) -> T.List[str]: @@ -445,12 +445,12 @@ class CoreData: raise MesonException('Cannot find specified {} file: {}'.format(ftype, f)) return real - def libdir_cross_fixup(self): + def builtin_options_libdir_cross_fixup(self): # By default set libdir to "lib" when cross compiling since # getting the "system default" is always wrong on multiarch # platforms as it gets a value like lib/x86_64-linux-gnu. if self.cross_files: - self.builtins['libdir'].value = 'lib' + builtin_options['libdir'].default = 'lib' def sanitize_prefix(self, prefix): prefix = os.path.expanduser(prefix) -- cgit v1.1 From 93dc9cfcc3ef946680ebe4724977c4b93ffa4a0f Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 6 May 2020 14:10:42 -0400 Subject: gnome.generate_gir: Fix missing include directories This revert a part of #7020 because it was using gir_inc_dirs before it is set. Properly fix typelib_includes instead that was working only when g-i is a pkgconfig dependency. 
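For illustration only, a minimal sketch of the lookup this change moves to, assuming a list of arbitrary Dependency objects rather than only PkgConfigDependency instances (the helper name and the `deps` list below are placeholders, not the module's real state):

```python
# Hedged sketch: gather gir include dirs from any Dependency kind.
# get_variable() resolves the value from pkg-config *or* from an
# internal (subproject) dependency, so a girdir exported by a
# subproject is picked up as well.
def collect_typelib_includes(deps):
    typelib_includes = []
    for dep in deps:
        girdir = dep.get_variable(pkgconfig='girdir', internal='girdir',
                                  default_value='')
        if girdir and girdir not in typelib_includes:
            typelib_includes.append(girdir)
    return typelib_includes
```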
--- mesonbuild/modules/gnome.py | 10 +- test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c | 124 +++++++++++++++++++++ test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h | 21 ++++ .../frameworks/7 gnome/gir/dep1/dep3/meson.build | 22 ++++ test cases/frameworks/7 gnome/gir/dep1/meson.build | 5 +- test cases/frameworks/7 gnome/gir/meson.build | 2 +- test cases/frameworks/7 gnome/test.json | 4 + 7 files changed, 179 insertions(+), 9 deletions(-) create mode 100644 test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c create mode 100644 test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h create mode 100644 test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index a97fffa..01acb37 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -695,11 +695,10 @@ class GnomeModule(ExtensionModule): source.get_subdir()) if subdir not in typelib_includes: typelib_includes.append(subdir) - elif isinstance(dep, PkgConfigDependency): - girdir = dep.get_pkgconfig_variable("girdir", {'default': ''}) + if isinstance(dep, Dependency): + girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='') if girdir and girdir not in typelib_includes: typelib_includes.append(girdir) - return typelib_includes def _get_external_args_for_langs(self, state, langs): @@ -769,7 +768,6 @@ class GnomeModule(ExtensionModule): external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags)) girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets) inc_dirs = self._scan_inc_dirs(kwargs) - gi_includes.update(gir_inc_dirs + inc_dirs) scan_command = [giscanner] scan_command += ['--no-libtool'] @@ -790,7 +788,7 @@ class GnomeModule(ExtensionModule): scan_command += cflags scan_command += ['--cflags-end'] scan_command += get_include_args(inc_dirs) - scan_command += get_include_args(list(gi_includes), prefix='--add-include-path=') + scan_command += get_include_args(list(gi_includes) + gir_inc_dirs + inc_dirs, prefix='--add-include-path=') scan_command += list(internal_ldflags) scan_command += self._scan_gir_targets(state, girtargets) scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers]) @@ -804,7 +802,7 @@ class GnomeModule(ExtensionModule): typelib_output = '%s-%s.typelib' % (ns, nsversion) typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@'] - typelib_cmd += get_include_args(list(gi_includes), prefix='--includedir=') + typelib_cmd += get_include_args(gir_inc_dirs, prefix='--includedir=') for incdir in typelib_includes: typelib_cmd += ["--includedir=" + incdir] diff --git a/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c new file mode 100644 index 0000000..ee5c5e1 --- /dev/null +++ b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.c @@ -0,0 +1,124 @@ +#include "dep3.h" + +struct _MesonDep3 +{ + GObject parent_instance; + + gchar *msg; +}; + +G_DEFINE_TYPE (MesonDep3, meson_dep3, G_TYPE_OBJECT) + +enum { + PROP_0, + PROP_MSG, + LAST_PROP +}; + +static GParamSpec *gParamSpecs [LAST_PROP]; + +/** + * meson_dep3_new: + * @msg: The message to set. + * + * Allocates a new #MesonDep3. + * + * Returns: (transfer full): a #MesonDep3. 
+ */ +MesonDep3 * +meson_dep3_new (const gchar *msg) +{ + g_return_val_if_fail (msg != NULL, NULL); + + return g_object_new (MESON_TYPE_DEP3, + "message", msg, + NULL); +} + +static void +meson_dep3_finalize (GObject *object) +{ + MesonDep3 *self = (MesonDep3 *)object; + + g_clear_pointer (&self->msg, g_free); + + G_OBJECT_CLASS (meson_dep3_parent_class)->finalize (object); +} + +static void +meson_dep3_get_property (GObject *object, + guint prop_id, + GValue *value, + GParamSpec *pspec) +{ + MesonDep3 *self = MESON_DEP3 (object); + + switch (prop_id) + { + case PROP_MSG: + g_value_set_string (value, self->msg); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_dep3_set_property (GObject *object, + guint prop_id, + const GValue *value, + GParamSpec *pspec) +{ + MesonDep3 *self = MESON_DEP3 (object); + + switch (prop_id) + { + case PROP_MSG: + self->msg = g_value_dup_string (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + } +} + +static void +meson_dep3_class_init (MesonDep3Class *klass) +{ + GObjectClass *object_class = G_OBJECT_CLASS (klass); + + object_class->finalize = meson_dep3_finalize; + object_class->get_property = meson_dep3_get_property; + object_class->set_property = meson_dep3_set_property; + + gParamSpecs [PROP_MSG] = + g_param_spec_string ("message", + "Message", + "The message to print.", + NULL, + (G_PARAM_READWRITE | + G_PARAM_CONSTRUCT_ONLY | + G_PARAM_STATIC_STRINGS)); + + g_object_class_install_properties (object_class, LAST_PROP, gParamSpecs); +} + +static void +meson_dep3_init (MesonDep3 *self) +{ +} + +/** + * meson_dep3_return_message: + * @self: a #MesonDep3. + * + * Returns the message. + * + * Returns: (transfer none): a const gchar* + */ +const gchar* +meson_dep3_return_message (MesonDep3 *self) +{ + g_return_val_if_fail (MESON_IS_DEP3 (self), NULL); + + return (const gchar*) self->msg; +} diff --git a/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h new file mode 100644 index 0000000..9883d76 --- /dev/null +++ b/test cases/frameworks/7 gnome/gir/dep1/dep3/dep3.h @@ -0,0 +1,21 @@ +#ifndef MESON_DEP3_H +#define MESON_DEP3_H + +#if !defined (MESON_TEST) +#error "MESON_TEST not defined." 
+#endif + +#include + +G_BEGIN_DECLS + +#define MESON_TYPE_DEP3 (meson_dep3_get_type()) + +G_DECLARE_FINAL_TYPE (MesonDep3, meson_dep3, MESON, DEP3, GObject) + +MesonDep3 *meson_dep3_new (const gchar *msg); +const gchar *meson_dep3_return_message (MesonDep3 *self); + +G_END_DECLS + +#endif /* MESON_DEP3_H */ diff --git a/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build b/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build new file mode 100644 index 0000000..1ef7765 --- /dev/null +++ b/test cases/frameworks/7 gnome/gir/dep1/dep3/meson.build @@ -0,0 +1,22 @@ +dep3sources = ['dep3.c', 'dep3.h'] + +dep3lib = shared_library( + 'dep3lib', + sources : dep3sources, + dependencies : gobj, + install : true +) + +dep3gir = gnome.generate_gir( + dep3lib, + sources : dep3sources, + nsversion : '1.0', + namespace : 'MesonDep3', + symbol_prefix : 'meson', + identifier_prefix : 'Meson', + includes : ['GObject-2.0'], + install : true +) + +dep3_dep = declare_dependency(link_with : dep3lib, + sources : [dep3gir]) diff --git a/test cases/frameworks/7 gnome/gir/dep1/meson.build b/test cases/frameworks/7 gnome/gir/dep1/meson.build index baa0b1d..2f03ede 100644 --- a/test cases/frameworks/7 gnome/gir/dep1/meson.build +++ b/test cases/frameworks/7 gnome/gir/dep1/meson.build @@ -1,4 +1,5 @@ subdir('dep2') +subdir('dep3') dep1sources = ['dep1.c', 'dep1.h'] @@ -20,11 +21,11 @@ dep1gir = gnome.generate_gir( symbol_prefix : 'meson', identifier_prefix : 'Meson', header: 'dep1.h', - includes : ['GObject-2.0', 'MesonDep2-1.0'], + includes : ['GObject-2.0', 'MesonDep2-1.0', dep3gir[0]], dependencies : [dep2_dep], install : true ) dep1_dep = declare_dependency(link_with : dep1lib, - dependencies : [dep2_dep], + dependencies : [dep2_dep, dep3_dep], sources : [dep1gir]) diff --git a/test cases/frameworks/7 gnome/gir/meson.build b/test cases/frameworks/7 gnome/gir/meson.build index 36bd09c..b1e0fa1 100644 --- a/test cases/frameworks/7 gnome/gir/meson.build +++ b/test cases/frameworks/7 gnome/gir/meson.build @@ -45,7 +45,7 @@ gnome.generate_gir( ) test('gobject introspection/c', girexe) -gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir()]) +gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir(), dep3lib.outdir()]) envdata = environment() envdata.append('GI_TYPELIB_PATH', gir_paths, separator : ':') envdata.append('LD_LIBRARY_PATH', gir_paths) diff --git a/test cases/frameworks/7 gnome/test.json b/test cases/frameworks/7 gnome/test.json index e69c9f0..badf410 100644 --- a/test cases/frameworks/7 gnome/test.json +++ b/test cases/frameworks/7 gnome/test.json @@ -13,12 +13,16 @@ {"type": "file", "platform": "cygwin", "file": "usr/lib/libdep1lib.dll.a"}, {"type": "expr", "file": "usr/lib/?libdep2lib.so"}, {"type": "file", "platform": "cygwin", "file": "usr/lib/libdep2lib.dll.a"}, + {"type": "expr", "file": "usr/lib/?libdep3lib.so"}, + {"type": "file", "platform": "cygwin", "file": "usr/lib/libdep3lib.dll.a"}, {"type": "file", "file": "usr/lib/girepository-1.0/Meson-1.0.typelib"}, {"type": "file", "file": "usr/lib/girepository-1.0/MesonDep1-1.0.typelib"}, {"type": "file", "file": "usr/lib/girepository-1.0/MesonDep2-1.0.typelib"}, + {"type": "file", "file": "usr/lib/girepository-1.0/MesonDep3-1.0.typelib"}, {"type": "file", "file": "usr/share/gir-1.0/Meson-1.0.gir"}, {"type": "file", "file": "usr/share/gir-1.0/MesonDep1-1.0.gir"}, {"type": "file", "file": "usr/share/gir-1.0/MesonDep2-1.0.gir"}, + {"type": "file", "file": "usr/share/gir-1.0/MesonDep3-1.0.gir"}, {"type": "file", 
"file": "usr/share/glib-2.0/schemas/com.github.meson.gschema.xml"}, {"type": "file", "file": "usr/share/simple-resources.gresource"}, {"type": "file", "file": "usr/include/enums6.h"}, -- cgit v1.1 From 4e9e35f3bd17a8f110ae1d3b40c8fbe04700120a Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 13 May 2020 12:10:25 -0700 Subject: interpreterbase: Allow passing an extra message in feature/deprecation warnings The intended use it to tell people the new thing to do. --- mesonbuild/interpreterbase.py | 26 +++++++++++++++++----- run_project_tests.py | 1 + .../warning/1 version for string div/test.json | 2 +- 3 files changed, 22 insertions(+), 7 deletions(-) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 82d16f1..af9018b 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -220,9 +220,10 @@ class FeatureCheckBase(metaclass=abc.ABCMeta): # This will be overwritten by the subclasses by necessity feature_registry = {} # type: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[str]]]] - def __init__(self, feature_name: str, version: str): + def __init__(self, feature_name: str, version: str, extra_message: T.Optional[str] = None): self.feature_name = feature_name # type: str self.feature_version = version # type: str + self.extra_message = extra_message or '' # type: str @staticmethod def get_target_version(subproject: str) -> str: @@ -302,8 +303,15 @@ class FeatureNew(FeatureCheckBase): return 'Project specifies a minimum meson_version \'{}\' but uses features which were added in newer versions:'.format(tv) def log_usage_warning(self, tv: str) -> None: - mlog.warning('Project targeting \'{}\' but tried to use feature introduced ' - 'in \'{}\': {}'.format(tv, self.feature_version, self.feature_name)) + args = [ + 'Project targeting', "'{}'".format(tv), + 'but tried to use feature introduced in', + "'{}':".format(self.feature_version), + '{}.'.format(self.feature_name), + ] + if self.extra_message: + args.append(self.extra_message) + mlog.warning(*args) class FeatureDeprecated(FeatureCheckBase): """Checks for deprecated features""" @@ -323,9 +331,15 @@ class FeatureDeprecated(FeatureCheckBase): return 'Deprecated features used:' def log_usage_warning(self, tv: str) -> None: - mlog.deprecation('Project targeting \'{}\' but tried to use feature ' - 'deprecated since \'{}\': {}' - ''.format(tv, self.feature_version, self.feature_name)) + args = [ + 'Project targeting', "'{}'".format(tv), + 'but tried to use feature deprecated since', + "'{}':".format(self.feature_version), + '{}.'.format(self.feature_name), + ] + if self.extra_message: + args.append(self.extra_message) + mlog.warning(*args) class FeatureCheckKwargsBase: diff --git a/run_project_tests.py b/run_project_tests.py index 18731d6..bcfe05c 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -417,6 +417,7 @@ def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) match = bool(re.match(expected, actual)) else: match = (expected == actual) + print(actual) if match: how, expected = next_expected(i) diff --git a/test cases/warning/1 version for string div/test.json b/test cases/warning/1 version for string div/test.json index ce1af59..c37931a 100644 --- a/test cases/warning/1 version for string div/test.json +++ b/test cases/warning/1 version for string div/test.json @@ -2,7 +2,7 @@ "stdout": [ { "comment": "literal '/' appears in output, irrespective of os.path.sep, as that's the operator", - "line": "WARNING: Project targeting '>=0.48.0' but tried to use 
feature introduced in '0.49.0': / with string arguments" + "line": "WARNING: Project targeting '>=0.48.0' but tried to use feature introduced in '0.49.0': / with string arguments." } ] } -- cgit v1.1 From a63e36f7b114d66f455936fa6621b30a3a54675f Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 13 May 2020 12:11:18 -0700 Subject: interpreter: Rename has_exe_wrapper -> can_run_host_binaries The implementation of this function has changed enough that the name doesn't really reflect what it actually does. It basically returns true unless you're cross compiling, need and exe_wrapper, and don't have one. The original function remains but is marked as deprecated. This makes one small change the meson source language, which is that it defines that can_run_host_binaries will return true in build == host compilation, which was the behavior that already existed. Previously this was undefined in build == host compilation. --- docs/markdown/Cross-compilation.md | 7 ++----- docs/markdown/Reference-manual.md | 12 ++++++++---- docs/markdown/snippets/can_run_host_binaries.md | 5 +++++ mesonbuild/interpreter.py | 14 +++++++++++--- test cases/common/36 tryrun/meson.build | 2 +- test cases/common/93 selfbuilt custom/meson.build | 2 +- test cases/unit/36 exe_wrapper behaviour/meson.build | 2 +- 7 files changed, 29 insertions(+), 15 deletions(-) create mode 100644 docs/markdown/snippets/can_run_host_binaries.md diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md index 4c4b7bf..1c53dcf 100644 --- a/docs/markdown/Cross-compilation.md +++ b/docs/markdown/Cross-compilation.md @@ -231,13 +231,10 @@ The main *meson* object provides two functions to determine cross compilation status. ```meson -meson.is_cross_build() # returns true when cross compiling -meson.has_exe_wrapper() # returns true if an exe wrapper has been defined +meson.is_cross_build() # returns true when cross compiling +meson.can_run_host_binaries() # returns true if the host binaries can be run, either with a wrapper or natively ``` -Note that the latter gives undefined return value when doing a native -build. - You can run system checks on both the system compiler or the cross compiler. You just have to specify which one to use. diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 97d3e83..1bd5ff0 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -1836,10 +1836,14 @@ the following methods. If `native: false` or not specified, variable is retrieved from the cross-file if cross-compiling, and from the native-file when not cross-compiling. -- `has_exe_wrapper()` returns true when doing a cross build if there - is a wrapper command that can be used to execute cross built - binaries (for example when cross compiling from Linux to Windows, - one can use `wine` as the wrapper). +- `can_run_host_binaries()` returns true if the build machine can run + binaries compiled for the host. This returns true unless you are + cross compiling, need a helper to run host binaries, and don't have one. + For example when cross compiling from Linux to Windows, one can use `wine` + as the helper. 
*New in 0.55.0* + +- `has_exe_wrapper()` alias of `can_run_host_binaries` + *Deprecated since 0.55.0* - `install_dependency_manifest(output_name)` installs a manifest file containing a list of all subprojects, their versions and license diff --git a/docs/markdown/snippets/can_run_host_binaries.md b/docs/markdown/snippets/can_run_host_binaries.md new file mode 100644 index 0000000..0108184 --- /dev/null +++ b/docs/markdown/snippets/can_run_host_binaries.md @@ -0,0 +1,5 @@ +## Rename has_exe_wrapper -> can_run_host_binaries + +The old name was confusing as it didn't really match the behavior of the +function. The old name remains as an alias (the behavior hasn't changed), but +is now deprecated. diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 7901e5a..8c7a82c 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1873,6 +1873,7 @@ class MesonMain(InterpreterObject): self.methods.update({'get_compiler': self.get_compiler_method, 'is_cross_build': self.is_cross_build_method, 'has_exe_wrapper': self.has_exe_wrapper_method, + 'can_run_host_binaries': self.can_run_host_binaries_method, 'is_unity': self.is_unity_method, 'is_subproject': self.is_subproject_method, 'current_source_dir': self.current_source_dir_method, @@ -2023,9 +2024,16 @@ class MesonMain(InterpreterObject): @noPosargs @permittedKwargs({}) - def has_exe_wrapper_method(self, args, kwargs): - if self.is_cross_build_method(None, None) and \ - self.build.environment.need_exe_wrapper(): + @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.') + def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool: + return self.can_run_host_binaries_method(args, kwargs) + + @noPosargs + @permittedKwargs({}) + @FeatureNew('meson.can_run_host_binaries', '0.55.0') + def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool: + if (self.is_cross_build_method(None, None) and + self.build.environment.need_exe_wrapper()): if self.build.environment.exe_wrapper is None: return False # We return True when exe_wrap is defined, when it's not needed, and diff --git a/test cases/common/36 tryrun/meson.build b/test cases/common/36 tryrun/meson.build index 261adf2..5580974 100644 --- a/test cases/common/36 tryrun/meson.build +++ b/test cases/common/36 tryrun/meson.build @@ -2,7 +2,7 @@ project('tryrun', 'c', 'cpp') # Complex to exercise all code paths. 
if meson.is_cross_build() - if meson.has_exe_wrapper() + if meson.can_run_host_binaries() compilers = [meson.get_compiler('c', native : false), meson.get_compiler('cpp', native : false)] else compilers = [meson.get_compiler('c', native : true), meson.get_compiler('cpp', native : true)] diff --git a/test cases/common/93 selfbuilt custom/meson.build b/test cases/common/93 selfbuilt custom/meson.build index 3cc3906..b536352 100644 --- a/test cases/common/93 selfbuilt custom/meson.build +++ b/test cases/common/93 selfbuilt custom/meson.build @@ -26,7 +26,7 @@ ctlib = custom_target('ctlib', build_by_default : true, ) -if meson.is_cross_build() and meson.has_exe_wrapper() +if meson.is_cross_build() and meson.can_run_host_binaries() checkarg_host = executable('checkarg_host', 'checkarg.cpp') ctlib_host = custom_target( diff --git a/test cases/unit/36 exe_wrapper behaviour/meson.build b/test cases/unit/36 exe_wrapper behaviour/meson.build index 16a44d5..d0817ba 100644 --- a/test cases/unit/36 exe_wrapper behaviour/meson.build +++ b/test cases/unit/36 exe_wrapper behaviour/meson.build @@ -1,7 +1,7 @@ project('exe wrapper behaviour', 'c') assert(meson.is_cross_build(), 'not setup as cross build') -assert(meson.has_exe_wrapper(), 'exe wrapper not defined?') +assert(meson.has_exe_wrapper(), 'exe wrapper not defined?') # intentionally not changed to can_run_host_binaries, exe = executable('prog', 'prog.c') -- cgit v1.1 From 06481666f4e74ecef01e59351fc345ab0962d998 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 12 May 2020 10:36:55 -0700 Subject: interpreter: Replace some uses of mlog.deprecation with FeatureDeprecated This gives the version that the feature was deprecated in, and doesn't print the warning if the project supports versions of meson in which the project wasn't deprecated. --- mesonbuild/build.py | 1 - mesonbuild/interpreter.py | 7 +++++-- mesonbuild/modules/gnome.py | 9 +++------ 3 files changed, 8 insertions(+), 9 deletions(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index fdfca73..7c065c8 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -2172,7 +2172,6 @@ class CustomTarget(Target): if 'build_always' in kwargs and 'build_always_stale' in kwargs: raise InvalidArguments('build_always and build_always_stale are mutually exclusive. Combine build_by_default and build_always_stale.') elif 'build_always' in kwargs: - mlog.deprecation('build_always is deprecated. 
Combine build_by_default and build_always_stale instead.') if 'build_by_default' not in kwargs: self.build_by_default = kwargs['build_always'] self.build_always_stale = kwargs['build_always'] diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 8c7a82c..f80faa8 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -29,7 +29,7 @@ from .interpreterbase import InterpreterBase from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound -from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs +from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs, FeatureDeprecatedKwargs from .interpreterbase import ObjectHolder from .modules import ModuleReturnValue from .cmake import CMakeInterpreter @@ -527,8 +527,9 @@ class ExternalProgramHolder(InterpreterObject, ObjectHolder): @noPosargs @permittedKwargs({}) + @FeatureDeprecated('ExternalProgram.path', '0.55.0', + 'use ExternalProgram.full_path() instead') def path_method(self, args, kwargs): - mlog.deprecation('path() method is deprecated and replaced by full_path()') return self._full_path() @noPosargs @@ -3686,6 +3687,8 @@ external dependencies (including libraries) must go to "dependencies".''') raise InterpreterException('Unknown target_type.') @permittedKwargs(permitted_kwargs['vcs_tag']) + @FeatureDeprecatedKwargs('custom_target', '0.47.0', ['build_always'], + 'combine build_by_default and build_always_stale instead.') def func_vcs_tag(self, node, args, kwargs): if 'input' not in kwargs or 'output' not in kwargs: raise InterpreterException('Keyword arguments input and output must exist') diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 01acb37..ea1b325 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -33,7 +33,7 @@ from ..mesonlib import ( join_args, unholder, ) from ..dependencies import Dependency, PkgConfigDependency, InternalDependency, ExternalProgram -from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs +from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs, FeatureDeprecatedKwargs # gresource compilation is broken due to the way # the resource compiler and Ninja clash about it @@ -834,6 +834,8 @@ class GnomeModule(ExtensionModule): return ModuleReturnValue(target_g, [target_g]) @permittedKwargs({'sources', 'media', 'symlink_media', 'languages'}) + @FeatureDeprecatedKwargs('gnome.yelp', '0.43.0', ['languages'], + 'Use a LINGUAS file in the source directory instead') def yelp(self, state, args, kwargs): if len(args) < 1: raise MesonException('Yelp requires a project id') @@ -848,11 +850,6 @@ class GnomeModule(ExtensionModule): source_str = '@@'.join(sources) langs = mesonlib.stringlistify(kwargs.pop('languages', [])) - if langs: - mlog.deprecation('''The "languages" argument of gnome.yelp() is deprecated. -Use a LINGUAS file in the sources directory instead. 
-This will become a hard error in the future.''') - media = mesonlib.stringlistify(kwargs.pop('media', [])) symlinks = kwargs.pop('symlink_media', True) -- cgit v1.1 From 8f1db99cec2e8c4eec0bb04698b3e0ce72f921e9 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 14 May 2020 17:48:27 +0200 Subject: boost: always use compiler include paths --- mesonbuild/dependencies/boost.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 2e84820..6e85c53 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -605,6 +605,12 @@ class BoostDependency(ExternalDependency): roots += paths return roots # Do not add system paths if BOOST_ROOT is present + # Add roots from system paths + inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()] + inc_paths = [x.parent for x in inc_paths if x.exists()] + inc_paths = [x.resolve() for x in inc_paths] + roots += inc_paths + # Add system paths if self.env.machines[self.for_machine].is_windows(): # Where boost built from source actually installs it @@ -626,8 +632,6 @@ class BoostDependency(ExternalDependency): roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()] else: tmp = [] # type: T.List[Path] - # Add unix paths - tmp += [Path(x).parent for x in self.clib_compiler.get_default_include_dirs()] # Homebrew brew_boost = Path('/usr/local/Cellar/boost') -- cgit v1.1 From f1d00e86f1a83e2fed301c59d22f1415222b29e2 Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Fri, 8 May 2020 19:06:05 -0400 Subject: backend/vs: Fix b_vscrt=from_buildtype for debugoptimized The ninja backend only uses the debug C runtime for 'debug', not for 'debugoptimized'. The ninja backend always uses the DLL C runtime for all configurations. The documentation matches the ninja backend. Make the visual studio backend follow the documentation (and the precedent set by the ninja backend). --- mesonbuild/backend/vs2010backend.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index b5803bf..614d357 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -821,12 +821,12 @@ class Vs2010Backend(backends.Backend): clconf = ET.SubElement(compiles, 'ClCompile') # CRT type; debug or release if vscrt_type.value == 'from_buildtype': - if self.buildtype == 'debug' or self.buildtype == 'debugoptimized': + if self.buildtype == 'debug': ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' else: ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' - ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' elif vscrt_type.value == 'mdd': ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' -- cgit v1.1 From e3b2f1b82f2987a233b8a386457958b533acce04 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 12 May 2020 10:39:50 -0700 Subject: interpreterbase: Add a oneline helper method for Feature(New|Deprecated) This allows us to replace FeatureNew(..).use() with just FeatureNew.single_use(...). It's a lttle cleaner and hides some of the smell. 
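Roughly, the call-site change looks like this (a hedged sketch; it assumes the meson source tree is importable and borrows one of the feature checks touched later in this series as the example):

```python
from mesonbuild.interpreterbase import FeatureNew

subproject = ''  # '' means the top-level project

# Old spelling: build a throwaway checker object just to call use() on it.
FeatureNew('assert function without message argument', '0.53.0').use(subproject)

# New spelling: the classmethod instantiates and calls use() internally.
FeatureNew.single_use('assert function without message argument', '0.53.0', subproject)
```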
--- mesonbuild/interpreterbase.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index af9018b..6c2e73f 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -286,6 +286,13 @@ class FeatureCheckBase(metaclass=abc.ABCMeta): return f(*wrapped_args, **wrapped_kwargs) return wrapped + @classmethod + def single_use(cls, feature_name: str, version: str, subproject: str, + extra_message: T.Optional[str] = None) -> None: + """Oneline version that instantiates and calls use().""" + cls(feature_name, version, extra_message).use(subproject) + + class FeatureNew(FeatureCheckBase): """Checks for new features""" @@ -342,7 +349,13 @@ class FeatureDeprecated(FeatureCheckBase): mlog.warning(*args) -class FeatureCheckKwargsBase: +class FeatureCheckKwargsBase(metaclass=abc.ABCMeta): + + @property + @abc.abstractmethod + def feature_check_class(self) -> T.Type[FeatureCheckBase]: + pass + def __init__(self, feature_name: str, feature_version: str, kwargs: T.List[str]): self.feature_name = feature_name self.feature_version = feature_version @@ -351,8 +364,6 @@ class FeatureCheckKwargsBase: def __call__(self, f): @wraps(f) def wrapped(*wrapped_args, **wrapped_kwargs): - # Which FeatureCheck class to invoke - FeatureCheckClass = self.feature_check_class kwargs, subproject = _get_callee_args(wrapped_args, want_subproject=True)[3:5] if subproject is None: raise AssertionError('{!r}'.format(wrapped_args)) @@ -360,7 +371,7 @@ class FeatureCheckKwargsBase: if arg not in kwargs: continue name = arg + ' arg in ' + self.feature_name - FeatureCheckClass(name, self.feature_version).use(subproject) + self.feature_check_class.single_use(name, self.feature_version, subproject) return f(*wrapped_args, **wrapped_kwargs) return wrapped -- cgit v1.1 From 21c8582d150942f299bbd66ae8f93e7479cae909 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 14 May 2020 11:18:17 -0700 Subject: interpreterbase: Proxy extra_message through to feature_check_class --- mesonbuild/interpreterbase.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 6c2e73f..bb88e2c 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -356,10 +356,12 @@ class FeatureCheckKwargsBase(metaclass=abc.ABCMeta): def feature_check_class(self) -> T.Type[FeatureCheckBase]: pass - def __init__(self, feature_name: str, feature_version: str, kwargs: T.List[str]): + def __init__(self, feature_name: str, feature_version: str, + kwargs: T.List[str], extra_message: T.Optional[str] = None): self.feature_name = feature_name self.feature_version = feature_version self.kwargs = kwargs + self.extra_message = extra_message def __call__(self, f): @wraps(f) @@ -371,7 +373,8 @@ class FeatureCheckKwargsBase(metaclass=abc.ABCMeta): if arg not in kwargs: continue name = arg + ' arg in ' + self.feature_name - self.feature_check_class.single_use(name, self.feature_version, subproject) + self.feature_check_class.single_use( + name, self.feature_version, subproject, self.extra_message) return f(*wrapped_args, **wrapped_kwargs) return wrapped -- cgit v1.1 From a313f46be7f146314e10760db016862f9516939b Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 12 May 2020 10:53:24 -0700 Subject: modules/pkgconfig: Remove duplicate FeatureNew --- mesonbuild/modules/pkgconfig.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/mesonbuild/modules/pkgconfig.py 
b/mesonbuild/modules/pkgconfig.py index 666a93d..ac6c9b1 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -394,8 +394,6 @@ class PkgConfigModule(ExtensionModule): 'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions', 'dataonly', 'conflicts'}) def generate(self, state, args, kwargs): - if 'variables' in kwargs: - FeatureNew('custom pkgconfig variables', '0.41.0').use(state.subproject) default_version = state.project_version['version'] default_install_dir = None default_description = None -- cgit v1.1 From d51551231ffa19c48ec5a5c36da11e7f03921262 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 12 May 2020 10:53:37 -0700 Subject: use FeatureNew.single_use This is just slightly cleaner looking --- mesonbuild/build.py | 4 ++-- mesonbuild/interpreter.py | 43 ++++++++++++++++++++++------------------- mesonbuild/interpreterbase.py | 2 +- mesonbuild/modules/pkgconfig.py | 6 +++--- 4 files changed, 29 insertions(+), 26 deletions(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 7c065c8..67b92a5 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -776,7 +776,7 @@ class BuildTarget(Target): if isinstance(src, str): src = File(False, self.subdir, src) elif isinstance(src, File): - FeatureNew('File argument for extract_objects', '0.50.0').use(self.subproject) + FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject) else: raise MesonException('Object extraction arguments must be strings or Files.') # FIXME: It could be a generated source @@ -2160,7 +2160,7 @@ class CustomTarget(Target): 'when installing a target') if isinstance(kwargs['install_dir'], list): - FeatureNew('multiple install_dir for custom_target', '0.40.0').use(self.subproject) + FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject) # If an item in this list is False, the output corresponding to # the list index of that item will not be installed self.install_dir = typeslistify(kwargs['install_dir'], (str, bool)) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index f80faa8..f199774 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1958,8 +1958,10 @@ class MesonMain(InterpreterObject): 'Arguments to {} must be strings, Files, CustomTargets, ' 'Indexes of CustomTargets, or ConfigureFiles'.format(name)) if new: - FeatureNew('Calling "{}" with File, CustomTaget, Index of CustomTarget, ConfigureFile, Executable, or ExternalProgram'.format(name), '0.55.0').use( - self.interpreter.subproject) + FeatureNew.single_use( + 'Calling "{}" with File, CustomTaget, Index of CustomTarget, ' + 'ConfigureFile, Executable, or ExternalProgram'.format(name), + '0.55.0', self.interpreter.subproject) return script_args @permittedKwargs(set()) @@ -1983,7 +1985,8 @@ class MesonMain(InterpreterObject): if len(args) < 1: raise InterpreterException('add_dist_script takes one or more arguments') if len(args) > 1: - FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject) + FeatureNew.single_use('Calling "add_dist_script" with multiple arguments', + '0.49.0', self.interpreter.subproject) if self.interpreter.subproject != '': raise InterpreterException('add_dist_script may not be used in a subproject.') script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True) @@ -2606,7 +2609,7 @@ external dependencies (including libraries) must go to "dependencies".''') @noKwargs def func_assert(self, node, args, 
kwargs): if len(args) == 1: - FeatureNew('assert function without message argument', '0.53.0').use(self.subproject) + FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject) value = args[0] message = None elif len(args) == 2: @@ -2942,7 +2945,7 @@ external dependencies (including libraries) must go to "dependencies".''') if len(args) > 1: raise InterpreterException('configuration_data takes only one optional positional arguments') elif len(args) == 1: - FeatureNew('configuration_data dictionary', '0.49.0').use(self.subproject) + FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject) initial_values = args[0] if not isinstance(initial_values, dict): raise InterpreterException('configuration_data first argument must be a dictionary') @@ -3083,7 +3086,7 @@ external dependencies (including libraries) must go to "dependencies".''') @noKwargs def func_message(self, node, args, kwargs): if len(args) > 1: - FeatureNew('message with more than one argument', '0.54.0').use(self.subproject) + FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject) args_str = [self.get_message_string_arg(i) for i in args] self.message_impl(args_str) @@ -3145,7 +3148,7 @@ external dependencies (including libraries) must go to "dependencies".''') @noKwargs def func_warning(self, node, args, kwargs): if len(args) > 1: - FeatureNew('warning with more than one argument', '0.54.0').use(self.subproject) + FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject) args_str = [self.get_message_string_arg(i) for i in args] mlog.warning(*args_str, location=node) @@ -3469,15 +3472,15 @@ external dependencies (including libraries) must go to "dependencies".''') def _handle_featurenew_dependencies(self, name): 'Do a feature check on dependencies used by this subproject' if name == 'mpi': - FeatureNew('MPI Dependency', '0.42.0').use(self.subproject) + FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject) elif name == 'pcap': - FeatureNew('Pcap Dependency', '0.42.0').use(self.subproject) + FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject) elif name == 'vulkan': - FeatureNew('Vulkan Dependency', '0.42.0').use(self.subproject) + FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject) elif name == 'libwmf': - FeatureNew('LibWMF Dependency', '0.44.0').use(self.subproject) + FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject) elif name == 'openmp': - FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject) + FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject) @FeatureNewKwargs('dependency', '0.54.0', ['components']) @FeatureNewKwargs('dependency', '0.52.0', ['include_type']) @@ -3599,7 +3602,7 @@ external dependencies (including libraries) must go to "dependencies".''') def get_subproject_infos(self, kwargs): fbinfo = mesonlib.stringlistify(kwargs['fallback']) if len(fbinfo) == 1: - FeatureNew('Fallback without variable name', '0.53.0').use(self.subproject) + FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject) return fbinfo[0], None elif len(fbinfo) != 2: raise InterpreterException('Fallback info must have one or two items.') @@ -3693,7 +3696,7 @@ external dependencies (including libraries) must go to "dependencies".''') if 'input' not in kwargs or 'output' not in kwargs: raise InterpreterException('Keyword arguments input and output must exist') if 'fallback' not in kwargs: - FeatureNew('Optional 
fallback in vcs_tag', '0.41.0').use(self.subproject) + FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject) fallback = kwargs.pop('fallback', self.project_version) if not isinstance(fallback, str): raise InterpreterException('Keyword argument fallback must be a string.') @@ -3746,7 +3749,7 @@ external dependencies (including libraries) must go to "dependencies".''') if len(args) != 1: raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name') if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']): - FeatureNew('substitutions in custom_target depfile', '0.47.0').use(self.subproject) + FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject) return self._func_custom_target_impl(node, args, kwargs) def _func_custom_target_impl(self, node, args, kwargs): @@ -3835,7 +3838,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self @permittedKwargs(permitted_kwargs['test']) def func_test(self, node, args, kwargs): if kwargs.get('protocol') == 'gtest': - FeatureNew('"gtest" protocol for tests', '0.55.0').use(self.subproject) + FeatureNew.single_use('"gtest" protocol for tests', '0.55.0', self.subproject) self.add_test(node, args, kwargs, True) def unpack_env_kwarg(self, kwargs) -> build.EnvironmentVariables: @@ -3843,7 +3846,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self if isinstance(envlist, EnvironmentVariablesHolder): env = envlist.held_object elif isinstance(envlist, dict): - FeatureNew('environment dictionary', '0.52.0').use(self.subproject) + FeatureNew.single_use('environment dictionary', '0.52.0', self.subproject) env = EnvironmentVariablesHolder(envlist) env = env.held_object else: @@ -4159,7 +4162,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self if 'configuration' in kwargs: conf = kwargs['configuration'] if isinstance(conf, dict): - FeatureNew('configure_file.configuration dictionary', '0.49.0').use(self.subproject) + FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject) conf = ConfigurationDataHolder(self.subproject, conf) elif not isinstance(conf, ConfigurationDataHolder): raise InterpreterException('Argument "configuration" is not of type configuration_data') @@ -4189,7 +4192,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self conf.mark_used() elif 'command' in kwargs: if len(inputs) > 1: - FeatureNew('multiple inputs in configure_file()', '0.52.0').use(self.subproject) + FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject) # We use absolute paths for input and output here because the cwd # that the command is run from is 'unspecified', so it could change. # Currently it's builddir/subdir for in_builddir else srcdir/subdir. @@ -4437,7 +4440,7 @@ different subdirectory. 
if len(args) > 1: raise InterpreterException('environment takes only one optional positional arguments') elif len(args) == 1: - FeatureNew('environment positional arguments', '0.52.0').use(self.subproject) + FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject) initial_values = args[0] if not isinstance(initial_values, dict) and not isinstance(initial_values, list): raise InterpreterException('environment first argument must be a dictionary or a list') diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index bb88e2c..634f4f2 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -584,7 +584,7 @@ class InterpreterBase: self.argument_depth += 1 for key, value in kwargs.items(): if not isinstance(key, mparser.StringNode): - FeatureNew('Dictionary entry using non literal key', '0.53.0').use(self.subproject) + FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject) assert isinstance(key, mparser.BaseNode) # All keys must be nodes due to resolve_key_nodes=False str_key = self.evaluate_statement(key) if not isinstance(str_key, str): diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index ac6c9b1..7597eeb 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -76,7 +76,7 @@ class DependenciesHelper: processed_reqs = [] for obj in mesonlib.unholder(mesonlib.listify(reqs)): if not isinstance(obj, str): - FeatureNew('pkgconfig.generate requirement from non-string object', '0.46.0').use(self.state.subproject) + FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject) if hasattr(obj, 'generated_pc'): self._check_generated_pc_deprecation(obj) processed_reqs.append(obj.generated_pc) @@ -401,9 +401,9 @@ class PkgConfigModule(ExtensionModule): mainlib = None default_subdirs = ['.'] if not args and 'version' not in kwargs: - FeatureNew('pkgconfig.generate implicit version keyword', '0.46.0').use(state.subproject) + FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject) elif len(args) == 1: - FeatureNew('pkgconfig.generate optional positional argument', '0.46.0').use(state.subproject) + FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject) mainlib = getattr(args[0], 'held_object', args[0]) if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)): raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object') -- cgit v1.1 From e35584e9ff33a2cba8128487b14b0a3dcfe9fbc5 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 12 May 2020 10:55:31 -0700 Subject: interpreter: Add always set default value for version and set it ASAP Ideally we wouldn't need to have the default dict here and could just rely on it being set as soon as project is called. There is a corner case exercised by test case common/35 run program, which is that if a FeatureNew or FeatureDeprecated is called to generate the meson version it will be unset, to work around this I've changed the type from a dict to a default dict with '' as the default value. A better fix would probably be to store all of the FeatureNew/FeatureDeprecated checks until the end, then evaluate them, but for now this results in no loss of functionality, only more functionality, even if it isn't prefect. 
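A tiny sketch of the behaviour the defaultdict gives us (standard library only; the key below is just an example):

```python
import collections

# With a plain dict, a feature check that fires before project() has
# registered its meson_version would raise KeyError.
# defaultdict(str) instead yields '' (i.e. no version constraint).
project_meson_versions = collections.defaultdict(str)

print(repr(project_meson_versions['not-yet-configured-subproject']))  # -> ''
```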
--- mesonbuild/interpreter.py | 7 +++---- mesonbuild/mesonlib.py | 4 +++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index f199774..07ab4f0 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2985,11 +2985,14 @@ external dependencies (including libraries) must go to "dependencies".''') if ':' in proj_name: raise InvalidArguments("Project name {!r} must not contain ':'".format(proj_name)) + # This needs to be evaluated as early as possible, as meson uses this + # for things like deprecation testing. if 'meson_version' in kwargs: cv = coredata.version pv = kwargs['meson_version'] if not mesonlib.version_compare(cv, pv): raise InterpreterException('Meson version is %s but project requires %s' % (cv, pv)) + mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version'] if os.path.exists(self.option_file): oi = optinterpreter.OptionInterpreter(self.subproject) @@ -3036,10 +3039,6 @@ external dependencies (including libraries) must go to "dependencies".''') self.build.subproject_dir = self.subproject_dir - mesonlib.project_meson_versions[self.subproject] = '' - if 'meson_version' in kwargs: - mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version'] - self.build.projects[self.subproject] = proj_name mlog.log('Project name:', mlog.bold(proj_name)) mlog.log('Project version:', mlog.bold(self.project_version)) diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index b901ec9..26fe6eb 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -39,8 +39,10 @@ _U = T.TypeVar('_U') have_fcntl = False have_msvcrt = False +# TODO: this is such a hack, this really should be either in coredata or in the +# interpreter # {subproject: project_meson_version} -project_meson_versions = {} # type: T.Dict[str, str] +project_meson_versions = collections.defaultdict(str) # type: T.DefaultDict[str, str] try: import fcntl -- cgit v1.1 From c64715b99a310e031fd145c64b111891861e4ddc Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 12 May 2020 10:57:43 -0700 Subject: optinterpreter: Enable and update FeatureNew to use_single With the version information fixed we can use a FeatureNew inside the optinterpreter, so let's do it. --- mesonbuild/optinterpreter.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index c13cc5d..dfbe6d7 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -16,10 +16,11 @@ import re import functools import typing as T -from . import mparser +from . import compilers from . import coredata from . import mesonlib -from . import compilers +from . import mparser +from .interpreterbase import FeatureNew forbidden_option_names = set(coredata.builtin_options.keys()) forbidden_prefixes = [lang + '_' for lang in compilers.all_languages] + ['b_', 'backend_'] @@ -200,11 +201,8 @@ class OptionInterpreter: raise OptionException('Only calls to option() are allowed in option files.') (posargs, kwargs) = self.reduce_arguments(node.args) - # FIXME: Cannot use FeatureNew while parsing options because we parse - # it before reading options in project(). 
See func_project() in - # interpreter.py - #if 'yield' in kwargs: - # FeatureNew('option yield', '0.45.0').use(self.subproject) + if 'yield' in kwargs: + FeatureNew.single_use('option yield', '0.45.0', self.subproject) if 'type' not in kwargs: raise OptionException('Option call missing mandatory "type" keyword argument') -- cgit v1.1 From cb4e4f625f8f191eb5ed1bd435a263b0c7bd11cf Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 13 May 2020 14:33:40 -0700 Subject: run_unittests: Use unittest.mock instead of a handrolled mock --- run_unittests.py | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/run_unittests.py b/run_unittests.py index 8dde4e5..e088467 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -717,25 +717,22 @@ class InternalTests(unittest.TestCase): self.assertEqual([1, 2, 3], extract(kwargs, 'sources')) def test_pkgconfig_module(self): - - class Mock: - pass - - dummystate = Mock() + dummystate = mock.Mock() dummystate.subproject = 'dummy' - mock = Mock() - mock.pcdep = Mock() - mock.pcdep.name = "some_name" - mock.version_reqs = [] + _mock = mock.Mock(spec=mesonbuild.dependencies.ExternalDependency) + _mock.pcdep = mock.Mock() + _mock.pcdep.name = "some_name" + _mock.version_reqs = [] + _mock = mock.Mock(held_object=_mock) # pkgconfig dependency as lib deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib") - deps.add_pub_libs([mock]) + deps.add_pub_libs([_mock]) self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name") # pkgconfig dependency as requires deps = mesonbuild.modules.pkgconfig.DependenciesHelper(dummystate, "thislib") - deps.add_pub_reqs([mock]) + deps.add_pub_reqs([_mock]) self.assertEqual(deps.format_reqs(deps.pub_reqs), "some_name") def _test_all_naming(self, cc, env, patterns, platform): -- cgit v1.1 From 66f3ba9fd0fd19c4ebf94ce3873723a83046f4b5 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 14 May 2020 09:34:55 -0700 Subject: Docs: Make the suggested use for the not-found dependency better [skip ci] --- docs/markdown/howtox.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index abf7519..84546b7 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -271,6 +271,7 @@ This can be used in cases where you want a default value, but might override it later. ```meson +# Not needed on Windows! my_dep = dependency('', required : false) if host_machine.system() in ['freebsd', 'netbsd', 'openbsd', 'dragonfly'] my_dep = dependency('some dep', required : false) @@ -278,8 +279,9 @@ elif host_machine.system() == 'linux' my_dep = dependency('some other dep', required : false) endif -# Last ditch effort! 
-if no my_dep.found() - my_dep = meson.get_compiler('c').find_library('dep') -endif +executable( + 'myexe', + my_sources, + deps : [my_dep] +) ``` -- cgit v1.1 From f2d0551941d4131b8a5b9d3320b27d1333b24034 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 14 May 2020 23:54:34 +0530 Subject: cmake: Print supported stds when warning This was helpful while debugging CI failure on the 0.54 branch due to a difference in the structure of self.env.coredata.compiler_options: https://github.com/mesonbuild/meson/runs/674391139 https://travis-ci.org/github/mesonbuild/meson/jobs/686982807 --- mesonbuild/cmake/interpreter.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 1e033c7..35eb17c 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -290,10 +290,12 @@ class ConverterTarget: m = ConverterTarget.std_regex.match(j) if m: std = m.group(2) - if std not in self._all_lang_stds(i): + supported = self._all_lang_stds(i) + if std not in supported: mlog.warning( 'Unknown {0}_std "{1}" -> Ignoring. Try setting the project-' - 'level {0}_std if build errors occur.'.format(i, std), + 'level {0}_std if build errors occur. Known ' + '{0}_stds are: {2}'.format(i, std, ' '.join(supported)), once=True ) continue -- cgit v1.1 From cd3e65a790b98e9106b56ad76863abe55b4096fa Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Fri, 15 May 2020 09:17:18 +0530 Subject: travis/macos: Restore the old pkg-config behaviour We need to test both "have pkg-config" and "don't have pkg-config" pathways on macOS, which is why pkg-config was only installed in one branch based on --unity=on/off. --- ci/travis_install.sh | 4 +++- test cases/failing/34 dependency not-required then required/test.json | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ci/travis_install.sh b/ci/travis_install.sh index 5d191f1..d9d308a 100755 --- a/ci/travis_install.sh +++ b/ci/travis_install.sh @@ -7,9 +7,11 @@ msg() { echo -e "\x1b[1;32mINFO: \x1b[37m$*\x1b[0m"; } if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then msg "Running OSX setup" brew update + # Run one macOS build with pkg-config available (pulled in by qt), and the + # other (unity=on) without pkg-config brew install qt ldc llvm ninja if [[ "$MESON_ARGS" =~ .*unity=on.* ]]; then - which pkg-config || brew install pkg-config + which pkg-config && rm -f $(which pkg-config) fi python3 -m pip install jsonschema elif [[ "$TRAVIS_OS_NAME" == "linux" ]]; then diff --git a/test cases/failing/34 dependency not-required then required/test.json b/test cases/failing/34 dependency not-required then required/test.json index bed1a45..3cf35f5 100644 --- a/test cases/failing/34 dependency not-required then required/test.json +++ b/test cases/failing/34 dependency not-required then required/test.json @@ -2,7 +2,7 @@ "stdout": [ { "match": "re", - "line": "test cases/failing/34 dependency not\\-required then required/meson\\.build:4:0: ERROR: Dependency \"foo\\-bar\\-xyz\\-12\\.3\" not found, tried .*" + "line": ".*/meson\\.build:4:0: ERROR: (Pkg-config binary for machine MachineChoice\\.HOST not found\\. 
Giving up\\.|Dependency \"foo\\-bar\\-xyz\\-12\\.3\" not found, tried .*)" } ] } -- cgit v1.1 From a25f0741e8ac663c8b3a6e33df8a0875dceab804 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 13 May 2020 16:19:33 +0530 Subject: ninjabackend: Use order-only gen-header deps for gen-sources We do not need to *always* rebuild generated sources when a generated header changes. We will get that information from the compiler's dependency file, and ninja will track it for us. This is exactly the same as static sources. However, we do need an order-only dependency on all generated headers, because we cannot know what headers will be needed at compile time (which is when the compiler's dependency file is generated). This fixes spurious rebuilds and relinking in many cases. --- mesonbuild/backend/ninjabackend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 9b895c9..b1e6afa 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -585,7 +585,7 @@ int dummy; o = self.generate_llvm_ir_compile(target, src) else: o = self.generate_single_compile(target, src, True, - header_deps=header_deps) + order_deps=header_deps) obj_list.append(o) use_pch = self.environment.coredata.base_options.get('b_pch', False) -- cgit v1.1 From b1e3440e596b632e09c48ef88ada6a5224628720 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 13 May 2020 16:56:09 +0530 Subject: ninjabackend: Treat GNOME gir/typelib as libraries When classifying generated sources, we were treating gir/typelib files generated by gobject-introspection as headers. This is bad because it serializes the build by adding order-only dependencies to every target even though sources will never actually use them for anything. Treat them as libraries, which is somewhat more accurate. --- mesonbuild/backend/ninjabackend.py | 2 +- mesonbuild/modules/__init__.py | 11 +++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index b1e6afa..2aa5c2c 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -570,7 +570,7 @@ int dummy; generated_source_files.append(raw_src) elif self.environment.is_object(rel_src): obj_list.append(rel_src) - elif self.environment.is_library(rel_src): + elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src): pass else: # Assume anything not specifically a source file is a header. This is because diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py index dc86a1b..47be039 100644 --- a/mesonbuild/modules/__init__.py +++ b/mesonbuild/modules/__init__.py @@ -57,6 +57,17 @@ def get_include_args(include_dirs, prefix='-I'): return dirs_str +def is_module_library(fname): + ''' + Check if the file is a library-like file generated by a module-specific + target, such as GirTarget or TypelibTarget + ''' + if hasattr(fname, 'fname'): + fname = fname.fname + suffix = fname.split('.')[-1] + return suffix in ('gir', 'typelib') + + class ModuleReturnValue: def __init__(self, return_value, new_objects): self.return_value = return_value -- cgit v1.1 From 717a2ae128505f94bd0133192c42bec48a6c9f09 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 13 May 2020 15:04:17 +0530 Subject: symbolextractor: Do not store the size of code objects This will almost always change and cause a relink of everything. 
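For illustration only (made-up symbol names, roughly the POSIX name/type/value/size
layout produced by nm --format=posix that the script parses):

  my_function T 0000000000001139 0000000000000025
  my_variable D 0000000000004028 0000000000000008

Only the trailing size field of the data-like entries (types B, G and D) needs to
be kept; the size of the T (code) entry can be ignored.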
Our other symbol extractor implementations do not store this either. We only need to store the size of data objects, since that necessitates a relink due to copy relocations. Drastically reduces the amount of relinking required in gstreamer and gtk on Linux. --- mesonbuild/scripts/symbolextractor.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index d393f93..66161e2 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -113,7 +113,10 @@ def gnu_syms(libfilename: str, outfilename: str): continue line_split = line.split() entry = line_split[0:2] - if len(line_split) >= 4: + # Store the size of symbols pointing to data objects so we relink + # when those change, which is needed because of copy relocations + # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702 + if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4: entry += [line_split[3]] result += [' '.join(entry)] write_if_changed('\n'.join(result) + '\n', outfilename) -- cgit v1.1 From 19b44575d1e9b8e71ae75ba517baa245432a75c1 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 13 May 2020 20:48:04 +0100 Subject: Make expected stdout mandatory for warning-meson and failing-meson tests Unify present or absent test.json file cases in gather_tests Make expected stdout mandatory in test.json for some test categories Use a trivial TestCategory class rather than a tuple, to make it easier to default category attributes --- run_project_tests.py | 85 +++++++++++++++++++++++++++------------------------- 1 file changed, 45 insertions(+), 40 deletions(-) diff --git a/run_project_tests.py b/run_project_tests.py index 18731d6..f372436 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -593,18 +593,16 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, return testresult -def gather_tests(testdir: Path) -> T.List[TestDef]: +def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]: tests = [t.name for t in testdir.iterdir() if t.is_dir()] tests = [t for t in tests if not t.startswith('.')] # Filter non-tests files (dot files, etc) test_defs = [TestDef(testdir / t, None, []) for t in tests] all_tests = [] # type: T.List[TestDef] for t in test_defs: + test_def = {} test_def_file = t.path / 'test.json' - if not test_def_file.is_file(): - all_tests += [t] - continue - - test_def = json.loads(test_def_file.read_text()) + if test_def_file.is_file(): + test_def = json.loads(test_def_file.read_text()) # Handle additional environment variables env = {} # type: T.Dict[str, str] @@ -622,6 +620,8 @@ def gather_tests(testdir: Path) -> T.List[TestDef]: # Handle expected output stdout = test_def.get('stdout', []) + if stdout_mandatory and not stdout: + raise RuntimeError("{} must contain a non-empty stdout key".format(test_def_file)) # Handle the do_not_set_opts list do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str] @@ -896,45 +896,50 @@ def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, shutil.which('pgfortran') or shutil.which('ifort')) - # Name, subdirectory, skip condition. 
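+ # (A small class instead of the old (name, subdir, skip) tuples, so per-category
+ # defaults such as skip and stdout_mandatory can be stated explicitly.)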
+ class TestCategory: + def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandatory: bool = False): + self.category = category # category name + self.subdir = subdir # subdirectory + self.skip = skip # skip condition + self.stdout_mandatory = stdout_mandatory # expected stdout is mandatory for tests in this categroy + all_tests = [ - ('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)), - ('common', 'common', False), - ('warning-meson', 'warning', False), - ('failing-meson', 'failing', False), - ('failing-build', 'failing build', False), - ('failing-test', 'failing test', False), - ('keyval', 'keyval', False), - - ('platform-osx', 'osx', not mesonlib.is_osx()), - ('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), - ('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), - - ('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), - ('C#', 'csharp', skip_csharp(backend)), - ('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), - ('rust', 'rust', should_skip_rust(backend)), - ('d', 'd', backend is not Backend.ninja or not have_d_compiler()), - ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), - ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), - ('fortran', 'fortran', skip_fortran or backend != Backend.ninja), - ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), + TestCategory('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)), + TestCategory('common', 'common'), + TestCategory('warning-meson', 'warning', stdout_mandatory=True), + TestCategory('failing-meson', 'failing', stdout_mandatory=True), + TestCategory('failing-build', 'failing build'), + TestCategory('failing-test', 'failing test'), + TestCategory('keyval', 'keyval'), + TestCategory('platform-osx', 'osx', not mesonlib.is_osx()), + TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), + TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), + TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), + TestCategory('C#', 'csharp', skip_csharp(backend)), + TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), + TestCategory('rust', 'rust', should_skip_rust(backend)), + TestCategory('d', 'd', backend is not Backend.ninja or not have_d_compiler()), + TestCategory('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), + TestCategory('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), + TestCategory('fortran', 'fortran', skip_fortran or backend != Backend.ninja), + TestCategory('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja - ('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), - ('python3', 'python3', backend is not Backend.ninja), - ('python', 'python', backend is not Backend.ninja), - ('fpga', 
'fpga', shutil.which('yosys') is None), - ('frameworks', 'frameworks', False), - ('nasm', 'nasm', False), - ('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), + TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), + TestCategory('python3', 'python3', backend is not Backend.ninja), + TestCategory('python', 'python', backend is not Backend.ninja), + TestCategory('fpga', 'fpga', shutil.which('yosys') is None), + TestCategory('frameworks', 'frameworks'), + TestCategory('nasm', 'nasm'), + TestCategory('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), ] - names = [t[0] for t in all_tests] - assert names == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests names' + categories = [t.category for t in all_tests] + assert categories == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests categories' + if only: - ind = [names.index(o) for o in only] - all_tests = [all_tests[i] for i in ind] - gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests] + all_tests = [t for t in all_tests if t.category in only] + + gathered_tests = [(t.category, gather_tests(Path('test cases', t.subdir), t.stdout_mandatory), t.skip) for t in all_tests] return gathered_tests def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], -- cgit v1.1 From 2a3015b5dd455e76723e4b50fd6d493715d1e3ba Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 13 May 2020 21:10:34 +0100 Subject: Add expected stdout for failing-meson tests which are missing it --- test cases/failing/100 fallback consistency/test.json | 7 +++++++ test cases/failing/101 no native compiler/test.json | 7 +++++++ test cases/failing/102 subdir parse error/test.json | 7 +++++++ test cases/failing/103 invalid option file/test.json | 7 +++++++ test cases/failing/104 no lang/test.json | 7 +++++++ test cases/failing/105 no glib-compile-resources/test.json | 7 +++++++ .../36 pkgconfig dependency impossible conditions/test.json | 7 +++++++ test cases/failing/67 subproj different versions/test.json | 7 +++++++ test cases/failing/84 gtest dependency with version/test.json | 7 +++++++ test cases/failing/98 fallback consistency/test.json | 7 +++++++ 10 files changed, 70 insertions(+) create mode 100644 test cases/failing/100 fallback consistency/test.json create mode 100644 test cases/failing/101 no native compiler/test.json create mode 100644 test cases/failing/102 subdir parse error/test.json create mode 100644 test cases/failing/103 invalid option file/test.json create mode 100644 test cases/failing/104 no lang/test.json create mode 100644 test cases/failing/105 no glib-compile-resources/test.json create mode 100644 test cases/failing/36 pkgconfig dependency impossible conditions/test.json create mode 100644 test cases/failing/67 subproj different versions/test.json create mode 100644 test cases/failing/84 gtest dependency with version/test.json create mode 100644 test cases/failing/98 fallback consistency/test.json diff --git a/test cases/failing/100 fallback consistency/test.json b/test cases/failing/100 fallback consistency/test.json new file mode 100644 index 0000000..a783d8c --- /dev/null +++ b/test cases/failing/100 fallback consistency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/100 fallback consistency/meson.build:7:0: ERROR: Inconsistency: Subproject has overridden the dependency with 
another variable than 'dep2'" + } + ] +} diff --git a/test cases/failing/101 no native compiler/test.json b/test cases/failing/101 no native compiler/test.json new file mode 100644 index 0000000..c7b5d1c --- /dev/null +++ b/test cases/failing/101 no native compiler/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/101 no native compiler/meson.build:12:0: ERROR: No host machine compiler for \"main.c\"" + } + ] +} diff --git a/test cases/failing/102 subdir parse error/test.json b/test cases/failing/102 subdir parse error/test.json new file mode 100644 index 0000000..06fd4d3 --- /dev/null +++ b/test cases/failing/102 subdir parse error/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/102 subdir parse error/subdir/meson.build:1:0: ERROR: Plusassignment target must be an id." + } + ] +} diff --git a/test cases/failing/103 invalid option file/test.json b/test cases/failing/103 invalid option file/test.json new file mode 100644 index 0000000..20dbec3 --- /dev/null +++ b/test cases/failing/103 invalid option file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/103 invalid option file/meson_options.txt:1:0: ERROR: lexer" + } + ] +} diff --git a/test cases/failing/104 no lang/test.json b/test cases/failing/104 no lang/test.json new file mode 100644 index 0000000..62999be --- /dev/null +++ b/test cases/failing/104 no lang/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/104 no lang/meson.build:2:0: ERROR: No host machine compiler for \"main.c\"" + } + ] +} diff --git a/test cases/failing/105 no glib-compile-resources/test.json b/test cases/failing/105 no glib-compile-resources/test.json new file mode 100644 index 0000000..67dc7e4 --- /dev/null +++ b/test cases/failing/105 no glib-compile-resources/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/105 no glib-compile-resources/meson.build:8:0: ERROR: Could not execute glib-compile-resources." 
+ } + ] +} diff --git a/test cases/failing/36 pkgconfig dependency impossible conditions/test.json b/test cases/failing/36 pkgconfig dependency impossible conditions/test.json new file mode 100644 index 0000000..2ce62ac --- /dev/null +++ b/test cases/failing/36 pkgconfig dependency impossible conditions/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/36 pkgconfig dependency impossible conditions/meson.build:7:0: ERROR: Dependency 'zlib' was already checked and was not found" + } + ] +} diff --git a/test cases/failing/67 subproj different versions/test.json b/test cases/failing/67 subproj different versions/test.json new file mode 100644 index 0000000..d16daf9 --- /dev/null +++ b/test cases/failing/67 subproj different versions/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/67 subproj different versions/subprojects/b/meson.build:3:0: ERROR: Dependency 'c' was already checked and was not found" + } + ] +} diff --git a/test cases/failing/84 gtest dependency with version/test.json b/test cases/failing/84 gtest dependency with version/test.json new file mode 100644 index 0000000..e1bbcac --- /dev/null +++ b/test cases/failing/84 gtest dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/84 gtest dependency with version/meson.build:8:0: ERROR: Dependency 'gtest' was already checked and was not found" + } + ] +} diff --git a/test cases/failing/98 fallback consistency/test.json b/test cases/failing/98 fallback consistency/test.json new file mode 100644 index 0000000..fd77bad --- /dev/null +++ b/test cases/failing/98 fallback consistency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/98 fallback consistency/meson.build:4:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'" + } + ] +} -- cgit v1.1 From 697bb2808418cf4588c00d425278508ed982997b Mon Sep 17 00:00:00 2001 From: Drew Reed Date: Mon, 11 May 2020 09:04:53 +0100 Subject: Call to CPPCompilers initilisation function was missing the info parameter during ArmClangCPPCompiler initialisation --- mesonbuild/compilers/cpp.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index d30017f..f4bcfa9 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -238,7 +238,9 @@ class EmscriptenCPPCompiler(EmscriptenMixin, LinkerEnvVarsMixin, ClangCPPCompile class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): def __init__(self, exelist, version, for_machine: MachineChoice, is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): - CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, exe_wrapper, **kwargs) + CPPCompiler.__init__(self, exelist=exelist, version=version, + for_machine=for_machine, is_cross=is_cross, + info=info, exe_wrapper=exe_wrapper, **kwargs) ArmclangCompiler.__init__(self) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'0': [], -- cgit v1.1 From cce5f860b94e6601a99fb207b82c0d181aaf19a9 Mon Sep 17 00:00:00 2001 From: Drew Reed Date: Mon, 11 May 2020 09:08:34 +0100 Subject: Modifed buildtypes and armcc compiler flags to match documented results --- mesonbuild/compilers/mixins/arm.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py index aa5d15d..dc28aef 100644 --- a/mesonbuild/compilers/mixins/arm.py 
+++ b/mesonbuild/compilers/mixins/arm.py @@ -27,10 +27,10 @@ if T.TYPE_CHECKING: arm_buildtype_args = { 'plain': [], - 'debug': ['-O0', '--debug'], - 'debugoptimized': ['-O1', '--debug'], - 'release': ['-O3', '-Otime'], - 'minsize': ['-O3', '-Ospace'], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], 'custom': [], } # type: T.Dict[str, T.List[str]] @@ -38,9 +38,9 @@ arm_optimization_args = { '0': ['-O0'], 'g': ['-g'], '1': ['-O1'], - '2': ['-O2'], - '3': ['-O3'], - 's': [], + '2': [], # Compiler defaults to -O2 + '3': ['-O3', '-Otime'], + 's': ['-O3'], # Compiler defaults to -Ospace } # type: T.Dict[str, T.List[str]] armclang_buildtype_args = { -- cgit v1.1 From 3d41fa9b1ec9675ec36cb3557d8f9488d937dfee Mon Sep 17 00:00:00 2001 From: Drew Reed Date: Mon, 11 May 2020 09:12:55 +0100 Subject: Add flags to support generation of dependency files with armclang --- mesonbuild/compilers/mixins/arm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py index dc28aef..2e2ed94 100644 --- a/mesonbuild/compilers/mixins/arm.py +++ b/mesonbuild/compilers/mixins/arm.py @@ -181,7 +181,7 @@ class ArmclangCompiler: # Override CCompiler.get_dependency_gen_args def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: - return [] + return ['-MD', '-MT', outtarget, '-MF', outfile] def get_optimization_args(self, optimization_level: str) -> T.List[str]: return armclang_optimization_args[optimization_level] -- cgit v1.1 From d7235c5905fa98207d90f3ad34bf590493498d5b Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Thu, 7 May 2020 10:15:06 -0700 Subject: Let .pc files specify rpath. Fixes #4027 --- mesonbuild/backend/backends.py | 15 +++--- mesonbuild/backend/ninjabackend.py | 7 ++- mesonbuild/build.py | 2 + mesonbuild/compilers/compilers.py | 2 +- mesonbuild/compilers/cuda.py | 7 +-- mesonbuild/compilers/d.py | 9 ++-- mesonbuild/compilers/mixins/islinker.py | 4 +- mesonbuild/linkers.py | 49 ++++++++++-------- mesonbuild/minstall.py | 2 +- mesonbuild/scripts/depfixer.py | 32 +++++++++--- run_unittests.py | 59 ++++++++++++++++++++-- .../built library/meson.build | 5 ++ .../external library/meson.build | 6 +-- .../unit/76 pkgconfig prefixes/client/client.c | 8 +++ .../unit/76 pkgconfig prefixes/client/meson.build | 3 ++ .../unit/76 pkgconfig prefixes/val1/meson.build | 5 ++ test cases/unit/76 pkgconfig prefixes/val1/val1.c | 3 ++ test cases/unit/76 pkgconfig prefixes/val1/val1.h | 1 + .../unit/76 pkgconfig prefixes/val2/meson.build | 8 +++ test cases/unit/76 pkgconfig prefixes/val2/val2.c | 4 ++ test cases/unit/76 pkgconfig prefixes/val2/val2.h | 1 + 21 files changed, 177 insertions(+), 55 deletions(-) create mode 100644 test cases/unit/76 pkgconfig prefixes/client/client.c create mode 100644 test cases/unit/76 pkgconfig prefixes/client/meson.build create mode 100644 test cases/unit/76 pkgconfig prefixes/val1/meson.build create mode 100644 test cases/unit/76 pkgconfig prefixes/val1/val1.c create mode 100644 test cases/unit/76 pkgconfig prefixes/val1/val1.h create mode 100644 test cases/unit/76 pkgconfig prefixes/val2/meson.build create mode 100644 test cases/unit/76 pkgconfig prefixes/val2/val2.c create mode 100644 test cases/unit/76 pkgconfig prefixes/val2/val2.h diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 2727abe..6f6d3db 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -90,12 +90,13 @@ class InstallData: 
self.mesonintrospect = mesonintrospect class TargetInstallData: - def __init__(self, fname, outdir, aliases, strip, install_name_mappings, install_rpath, install_mode, optional=False): + def __init__(self, fname, outdir, aliases, strip, install_name_mappings, rpath_dirs_to_remove, install_rpath, install_mode, optional=False): self.fname = fname self.outdir = outdir self.aliases = aliases self.strip = strip self.install_name_mappings = install_name_mappings + self.rpath_dirs_to_remove = rpath_dirs_to_remove self.install_rpath = install_rpath self.install_mode = install_mode self.optional = optional @@ -476,6 +477,7 @@ class Backend: result = OrderedSet() result.add('meson-out') result.update(self.rpaths_for_bundled_shared_libraries(target)) + target.rpath_dirs_to_remove.update([d.encode('utf8') for d in result]) return tuple(result) def object_filename_from_source(self, target, source): @@ -1140,6 +1142,7 @@ class Backend: mappings = t.get_link_deps_mapping(d.prefix, self.environment) i = TargetInstallData(self.get_target_filename(t), outdirs[0], t.get_aliases(), should_strip, mappings, + t.rpath_dirs_to_remove, t.install_rpath, install_mode) d.targets.append(i) @@ -1157,14 +1160,14 @@ class Backend: implib_install_dir = self.environment.get_import_lib_dir() # Install the import library; may not exist for shared modules i = TargetInstallData(self.get_target_filename_for_linking(t), - implib_install_dir, {}, False, {}, '', install_mode, + implib_install_dir, {}, False, {}, set(), '', install_mode, optional=isinstance(t, build.SharedModule)) d.targets.append(i) if not should_strip and t.get_debug_filename(): debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename()) i = TargetInstallData(debug_file, outdirs[0], - {}, False, {}, '', + {}, False, {}, set(), '', install_mode, optional=True) d.targets.append(i) # Install secondary outputs. Only used for Vala right now. 
@@ -1174,7 +1177,7 @@ class Backend: if outdir is False: continue f = os.path.join(self.get_target_dir(t), output) - i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode) + i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode) d.targets.append(i) elif isinstance(t, build.CustomTarget): # If only one install_dir is specified, assume that all @@ -1187,7 +1190,7 @@ class Backend: if num_outdirs == 1 and num_out > 1: for output in t.get_outputs(): f = os.path.join(self.get_target_dir(t), output) - i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode, + i = TargetInstallData(f, outdirs[0], {}, False, {}, set(), None, install_mode, optional=not t.build_by_default) d.targets.append(i) else: @@ -1196,7 +1199,7 @@ class Backend: if outdir is False: continue f = os.path.join(self.get_target_dir(t), output) - i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode, + i = TargetInstallData(f, outdir, {}, False, {}, set(), None, install_mode, optional=not t.build_by_default) d.targets.append(i) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 9b895c9..49025f9 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1348,7 +1348,8 @@ int dummy; self.get_target_dir(target)) else: target_slashname_workaround_dir = self.get_target_dir(target) - rpath_args = rustc.build_rpath_args(self.environment, + (rpath_args, target.rpath_dirs_to_remove) = \ + rustc.build_rpath_args(self.environment, self.environment.get_build_dir(), target_slashname_workaround_dir, self.determine_rpath_dirs(target), @@ -2580,12 +2581,14 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) self.get_target_dir(target)) else: target_slashname_workaround_dir = self.get_target_dir(target) - commands += linker.build_rpath_args(self.environment, + (rpath_args, target.rpath_dirs_to_remove) = \ + linker.build_rpath_args(self.environment, self.environment.get_build_dir(), target_slashname_workaround_dir, self.determine_rpath_dirs(target), target.build_rpath, target.install_rpath) + commands += rpath_args # Add libraries generated by custom targets custom_target_libraries = self.get_custom_target_provided_libraries(target) commands += extra_args diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 2b4b1b9..7c833a6 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -511,6 +511,8 @@ class BuildTarget(Target): self.d_features = {} self.pic = False self.pie = False + # Track build_rpath entries so we can remove them at install time + self.rpath_dirs_to_remove = set() # Sources can be: # 1. Pre-existing source files in the source tree # 2. 
Pre-existing sources generated by configure_file in the build tree diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 3d3a503..b2bea26 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -1077,7 +1077,7 @@ class Compiler: def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: return self.linker.build_rpath_args( env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py index e839f53..4e89f5d 100644 --- a/mesonbuild/compilers/cuda.py +++ b/mesonbuild/compilers/cuda.py @@ -271,9 +271,10 @@ class CudaCompiler(Compiler): def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: - return self._cook_link_args(self.host_compiler.build_rpath_args( - env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)) + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + (rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + return (self._cook_link_args(rpath_args), rpath_dirs_to_remove) def linker_to_compiler_args(self, args): return args diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index a83e221..e7bd280 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -220,7 +220,7 @@ class DmdLikeCompilerMixin: def build_rpath_args(self, env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): if self.info.is_windows(): - return [] + return ([], set()) # GNU ld, solaris ld, and lld acting like GNU ld if self.linker.id.startswith('ld'): @@ -228,15 +228,16 @@ class DmdLikeCompilerMixin: # do directly, each argument -rpath and the value to rpath, need to be # split into two separate arguments both prefaced with the -L=. 
args = [] - for r in super().build_rpath_args( - env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath): + (rpath_args, rpath_dirs_to_remove) = super().build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + for r in rpath_args: if ',' in r: a, b = r.split(',', maxsplit=1) args.append(a) args.append(self.LINKER_PREFIX + b) else: args.append(r) - return args + return (args, rpath_dirs_to_remove) return super().build_rpath_args( env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py index 681c816..bf1d339 100644 --- a/mesonbuild/compilers/mixins/islinker.py +++ b/mesonbuild/compilers/mixins/islinker.py @@ -107,8 +107,8 @@ class BasicLinkerIsCompilerMixin: def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: - return [] + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + return ([], set()) def get_linker_debug_crt_args(self) -> T.List[str]: return [] diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index db735e7..f02c297 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -56,8 +56,8 @@ class StaticLinker: def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: - return [] + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + return ([], set()) def thread_link_flags(self, env: 'Environment') -> T.List[str]: return [] @@ -444,8 +444,8 @@ class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta): def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: - return [] + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + return ([], set()) def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, suffix: str, soversion: str, darwin_versions: T.Tuple[str, str], @@ -551,12 +551,12 @@ class GnuLikeDynamicLinkerMixin: def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: m = env.machines[self.for_machine] if m.is_windows() or m.is_cygwin(): - return [] + return ([], set()) if not rpath_paths and not install_rpath and not build_rpath: - return [] + return ([], set()) args = [] origin_placeholder = '$ORIGIN' processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir) @@ -564,9 +564,14 @@ class GnuLikeDynamicLinkerMixin: # is *very* allergic to duplicate -delete_rpath arguments # when calling depfixer on installation. all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths]) + rpath_dirs_to_remove = set() + for p in all_paths: + rpath_dirs_to_remove.add(p.encode('utf8')) # Build_rpath is used as-is (it is usually absolute). if build_rpath != '': all_paths.add(build_rpath) + for p in build_rpath.split(':'): + rpath_dirs_to_remove.add(p.encode('utf8')) # TODO: should this actually be "for (dragonfly|open)bsd"? if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd(): @@ -590,7 +595,7 @@ class GnuLikeDynamicLinkerMixin: # TODO: should this actually be "for solaris/sunos"? if mesonlib.is_sunos(): - return args + return (args, rpath_dirs_to_remove) # Rpaths to use while linking must be absolute. 
These are not # written to the binary. Needed only with GNU ld: @@ -610,7 +615,7 @@ class GnuLikeDynamicLinkerMixin: for p in rpath_paths: args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p))) - return args + return (args, rpath_dirs_to_remove) class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): @@ -676,9 +681,9 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: if not rpath_paths and not install_rpath and not build_rpath: - return [] + return ([], set()) # Ensure that there is enough space for install_name_tool in-place # editing of large RPATHs args = self._apply_prefix('-headerpad_max_install_names') @@ -692,7 +697,7 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): for rp in all_paths: args.extend(self._apply_prefix('-rpath,' + rp)) - return args + return (args, set()) class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker): @@ -763,8 +768,8 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: - return [] + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + return ([], set()) class CcrxDynamicLinker(DynamicLinker): @@ -839,8 +844,8 @@ class Xc16DynamicLinker(DynamicLinker): def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: - return [] + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + return ([], set()) class C2000DynamicLinker(DynamicLinker): @@ -938,10 +943,10 @@ class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: if not env.machines[self.for_machine].is_windows(): - return ['-R' + os.path.join(build_dir, p) for p in rpath_paths] - return [] + return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set()) + return ([], set()) class PGIStaticLinker(StaticLinker): @@ -1091,9 +1096,9 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, - install_rpath: str) -> T.List[str]: + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: if not rpath_paths and not install_rpath and not build_rpath: - return [] + return ([], set()) processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir) all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths]) if build_rpath != '': @@ -1108,7 +1113,7 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): paths = padding else: paths = paths + ':' + padding - return self._apply_prefix('-rpath,{}'.format(paths)) + return (self._apply_prefix('-rpath,{}'.format(paths)), set()) def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, suffix: str, soversion: str, darwin_versions: T.Tuple[str, str], diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index 9c64429..0be01fe 100644 --- 
a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -512,7 +512,7 @@ class Installer: if file_copied: self.did_install_something = True try: - depfixer.fix_rpath(outname, install_rpath, final_path, + depfixer.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path, install_name_mappings, verbose=False) except SystemExit as e: if isinstance(e.code, int) and e.code == 0: diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index 5ba3a97..a3a3eff 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -290,13 +290,13 @@ class Elf(DataSizes): self.bf.seek(offset) self.bf.write(newname) - def fix_rpath(self, new_rpath): + def fix_rpath(self, rpath_dirs_to_remove, new_rpath): # The path to search for can be either rpath or runpath. # Fix both of them to be sure. - self.fix_rpathtype_entry(new_rpath, DT_RPATH) - self.fix_rpathtype_entry(new_rpath, DT_RUNPATH) + self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH) + self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH) - def fix_rpathtype_entry(self, new_rpath, entrynum): + def fix_rpathtype_entry(self, rpath_dirs_to_remove, new_rpath, entrynum): if isinstance(new_rpath, str): new_rpath = new_rpath.encode('utf8') rp_off = self.get_entry_offset(entrynum) @@ -305,7 +305,23 @@ class Elf(DataSizes): print('File does not have rpath. It should be a fully static executable.') return self.bf.seek(rp_off) + old_rpath = self.read_str() + new_rpaths = [] + if new_rpath: + new_rpaths.append(new_rpath) + if old_rpath: + # Filter out build-only rpath entries + # added by get_link_dep_subdirs() or + # specified by user with build_rpath. + for dir in old_rpath.split(b':'): + if not (dir in rpath_dirs_to_remove or + dir == (b'X' * len(dir))): + new_rpaths.append(dir) + + # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc. + new_rpath = b':'.join(new_rpaths) + if len(old_rpath) < len(new_rpath): sys.exit("New rpath must not be longer than the old one.") # The linker does read-only string deduplication. 
If there is a @@ -343,13 +359,13 @@ class Elf(DataSizes): entry.write(self.bf) return None -def fix_elf(fname, new_rpath, verbose=True): +def fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose=True): with Elf(fname, verbose) as e: if new_rpath is None: e.print_rpath() e.print_runpath() else: - e.fix_rpath(new_rpath) + e.fix_rpath(rpath_dirs_to_remove, new_rpath) def get_darwin_rpaths_to_remove(fname): out = subprocess.check_output(['otool', '-l', fname], @@ -430,7 +446,7 @@ def fix_jar(fname): f.truncate() subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF']) -def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True): +def fix_rpath(fname, rpath_dirs_to_remove, new_rpath, final_path, install_name_mappings, verbose=True): global INSTALL_NAME_TOOL # Static libraries, import libraries, debug information, headers, etc # never have rpaths @@ -441,7 +457,7 @@ def fix_rpath(fname, new_rpath, final_path, install_name_mappings, verbose=True) if fname.endswith('.jar'): fix_jar(fname) return - fix_elf(fname, new_rpath, verbose) + fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose) return except SystemExit as e: if isinstance(e.code, int) and e.code == 0: diff --git a/run_unittests.py b/run_unittests.py index 2939b20..d7fc221 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -56,7 +56,7 @@ from mesonbuild.mesonlib import ( BuildDirLock, LibType, MachineChoice, PerMachine, Version, is_windows, is_osx, is_cygwin, is_dragonflybsd, is_openbsd, is_haiku, is_sunos, windows_proof_rmtree, python_command, version_compare, split_args, - quote_arg, relpath + quote_arg, relpath, is_linux ) from mesonbuild.environment import detect_ninja from mesonbuild.mesonlib import MesonException, EnvironmentException @@ -6390,13 +6390,15 @@ class LinuxlikeTests(BasePlatformTests): self.build(override_envvars=env) # test uninstalled self.run_tests(override_envvars=env) - if not is_osx(): - # Rest of the workflow only works on macOS + if not (is_osx() or is_linux()): return # test running after installation self.install(use_destdir=False) prog = os.path.join(self.installdir, 'bin', 'prog') self._run([prog]) + if not is_osx(): + # Rest of the workflow only works on macOS + return out = self._run(['otool', '-L', prog]) self.assertNotIn('@rpath', out) ## New builddir for testing that DESTDIR is not added to install_name @@ -6413,6 +6415,57 @@ class LinuxlikeTests(BasePlatformTests): # Ensure that the otool output does not contain self.installdir self.assertNotRegex(out, self.installdir + '.*dylib ') + @skipIfNoPkgconfig + def test_usage_pkgconfig_prefixes(self): + ''' + Build and install two external libraries, to different prefixes, + then build and install a client program that finds them via pkgconfig, + and verify the installed client program runs. 
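+ (Layout sketch: val1 and val2 are two tiny libraries installed to separate
+ prefixes, val2 links against val1, and the installed client prints
+ val2() == val1() + 2 == 3.)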
+ ''' + oldinstalldir = self.installdir + + # Build and install both external libraries without DESTDIR + val1dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val1') + val1prefix = os.path.join(oldinstalldir, 'val1') + self.prefix = val1prefix + self.installdir = val1prefix + self.init(val1dir) + self.build() + self.install(use_destdir=False) + self.new_builddir() + + env1 = {} + env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig') + val2dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val2') + val2prefix = os.path.join(oldinstalldir, 'val2') + self.prefix = val2prefix + self.installdir = val2prefix + self.init(val2dir, override_envvars=env1) + self.build() + self.install(use_destdir=False) + self.new_builddir() + + # Build, install, and run the client program + env2 = {} + env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig') + testdir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'client') + testprefix = os.path.join(oldinstalldir, 'client') + self.prefix = testprefix + self.installdir = testprefix + self.init(testdir, override_envvars=env2) + self.build() + self.install(use_destdir=False) + prog = os.path.join(self.installdir, 'bin', 'client') + env3 = {} + if is_cygwin(): + env3['PATH'] = os.path.join(val1prefix, 'bin') + \ + os.pathsep + \ + os.path.join(val2prefix, 'bin') + \ + os.pathsep + os.environ['PATH'] + out = self._run([prog], override_envvars=env3).strip() + # Expected output is val1 + val2 = 3 + self.assertEqual(out, '3') + def install_subdir_invalid_symlinks(self, testdir, subdir_path): ''' Test that installation of broken symlinks works fine. diff --git a/test cases/unit/40 external, internal library rpath/built library/meson.build b/test cases/unit/40 external, internal library rpath/built library/meson.build index f633996..07fe7bb 100644 --- a/test cases/unit/40 external, internal library rpath/built library/meson.build +++ b/test cases/unit/40 external, internal library rpath/built library/meson.build @@ -18,4 +18,9 @@ l = shared_library('bar_built', 'bar.c', if host_machine.system() == 'darwin' e = executable('prog', 'prog.c', link_with: l, install: true) test('testprog', e) +elif host_machine.system() == 'linux' + e = executable('prog', 'prog.c', link_with: l, install: true, + install_rpath: '$ORIGIN/..' 
/ get_option('libdir'), + ) + test('testprog', e) endif diff --git a/test cases/unit/40 external, internal library rpath/external library/meson.build b/test cases/unit/40 external, internal library rpath/external library/meson.build index 3c311f5..06ffa0f 100644 --- a/test cases/unit/40 external, internal library rpath/external library/meson.build +++ b/test cases/unit/40 external, internal library rpath/external library/meson.build @@ -4,16 +4,16 @@ shared_library('foo_in_system', 'foo.c', install : true) l = shared_library('faa_pkg', 'faa.c', install: true) if host_machine.system() == 'darwin' - frameworks = ['-framework', 'CoreFoundation', '-framework', 'CoreMedia'] + ldflags = ['-framework', 'CoreFoundation', '-framework', 'CoreMedia'] allow_undef_args = ['-Wl,-undefined,dynamic_lookup'] else - frameworks = [] + ldflags = ['-Wl,-rpath,${libdir}'] allow_undef_args = [] endif pkg = import('pkgconfig') pkg.generate(name: 'faa_pkg', - libraries: [l] + frameworks, + libraries: [l] + ldflags, description: 'FAA, a pkg-config test library') # cygwin DLLs can't have undefined symbols diff --git a/test cases/unit/76 pkgconfig prefixes/client/client.c b/test cases/unit/76 pkgconfig prefixes/client/client.c new file mode 100644 index 0000000..be9bead --- /dev/null +++ b/test cases/unit/76 pkgconfig prefixes/client/client.c @@ -0,0 +1,8 @@ +#include +#include + +int main(int argc, char **argv) +{ + printf("%d\n", val2()); + return 0; +} diff --git a/test cases/unit/76 pkgconfig prefixes/client/meson.build b/test cases/unit/76 pkgconfig prefixes/client/meson.build new file mode 100644 index 0000000..491937b --- /dev/null +++ b/test cases/unit/76 pkgconfig prefixes/client/meson.build @@ -0,0 +1,3 @@ +project('client', 'c') +val2_dep = dependency('val2') +executable('client', 'client.c', dependencies : [val2_dep], install: true) diff --git a/test cases/unit/76 pkgconfig prefixes/val1/meson.build b/test cases/unit/76 pkgconfig prefixes/val1/meson.build new file mode 100644 index 0000000..cc63e31 --- /dev/null +++ b/test cases/unit/76 pkgconfig prefixes/val1/meson.build @@ -0,0 +1,5 @@ +project('val1', 'c') +val1 = shared_library('val1', 'val1.c', install: true) +install_headers('val1.h') +pkgconfig = import('pkgconfig') +pkgconfig.generate(val1, libraries : ['-Wl,-rpath,${libdir}']) diff --git a/test cases/unit/76 pkgconfig prefixes/val1/val1.c b/test cases/unit/76 pkgconfig prefixes/val1/val1.c new file mode 100644 index 0000000..591e521 --- /dev/null +++ b/test cases/unit/76 pkgconfig prefixes/val1/val1.c @@ -0,0 +1,3 @@ +#include "val1.h" + +int val1(void) { return 1; } diff --git a/test cases/unit/76 pkgconfig prefixes/val1/val1.h b/test cases/unit/76 pkgconfig prefixes/val1/val1.h new file mode 100644 index 0000000..6bd435e --- /dev/null +++ b/test cases/unit/76 pkgconfig prefixes/val1/val1.h @@ -0,0 +1 @@ +int val1(void); diff --git a/test cases/unit/76 pkgconfig prefixes/val2/meson.build b/test cases/unit/76 pkgconfig prefixes/val2/meson.build new file mode 100644 index 0000000..ce69481 --- /dev/null +++ b/test cases/unit/76 pkgconfig prefixes/val2/meson.build @@ -0,0 +1,8 @@ +project('val2', 'c') +val1_dep = dependency('val1') +val2 = shared_library('val2', 'val2.c', + dependencies : [val1_dep], + install: true) +install_headers('val2.h') +pkgconfig = import('pkgconfig') +pkgconfig.generate(val2, libraries : ['-Wl,-rpath,${libdir}']) diff --git a/test cases/unit/76 pkgconfig prefixes/val2/val2.c b/test cases/unit/76 pkgconfig prefixes/val2/val2.c new file mode 100644 index 0000000..d7d4857 --- 
/dev/null +++ b/test cases/unit/76 pkgconfig prefixes/val2/val2.c @@ -0,0 +1,4 @@ +#include "val1.h" +#include "val2.h" + +int val2(void) { return val1() + 2; } diff --git a/test cases/unit/76 pkgconfig prefixes/val2/val2.h b/test cases/unit/76 pkgconfig prefixes/val2/val2.h new file mode 100644 index 0000000..995023d --- /dev/null +++ b/test cases/unit/76 pkgconfig prefixes/val2/val2.h @@ -0,0 +1 @@ +int val2(void); -- cgit v1.1 From f8cfb74e9b084afb748e03aaed532679e9cf3948 Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Tue, 12 May 2020 09:28:01 -0700 Subject: Let LDFLAGS specify rpath. Fixes #2567 --- mesonbuild/backend/backends.py | 18 ++++++++++++ run_unittests.py | 33 ++++++++++++++++++++++ test cases/unit/77 global-rpath/meson.build | 3 ++ test cases/unit/77 global-rpath/rpathified.cpp | 6 ++++ test cases/unit/77 global-rpath/yonder/meson.build | 5 ++++ test cases/unit/77 global-rpath/yonder/yonder.cpp | 3 ++ test cases/unit/77 global-rpath/yonder/yonder.h | 1 + 7 files changed, 69 insertions(+) create mode 100644 test cases/unit/77 global-rpath/meson.build create mode 100644 test cases/unit/77 global-rpath/rpathified.cpp create mode 100644 test cases/unit/77 global-rpath/yonder/meson.build create mode 100644 test cases/unit/77 global-rpath/yonder/yonder.cpp create mode 100644 test cases/unit/77 global-rpath/yonder/yonder.h diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 6f6d3db..5649909 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -444,6 +444,21 @@ class Backend: return True return False + def get_external_rpath_dirs(self, target): + dirs = set() + args = [] + # FIXME: is there a better way? + for lang in ['c', 'cpp']: + try: + args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang)) + except Exception: + pass + for arg in args: + if arg.startswith('-Wl,-rpath='): + for dir in arg.replace('-Wl,-rpath=','').split(':'): + dirs.add(dir) + return dirs + def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True): paths = [] for dep in target.external_deps: @@ -458,6 +473,9 @@ class Backend: if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment): # No point in adding system paths. continue + # Don't remove rpaths specified in LDFLAGS. + if libdir in self.get_external_rpath_dirs(target): + continue # Windows doesn't support rpaths, but we use this function to # emulate rpaths by setting PATH, so also accept DLLs here if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']: diff --git a/run_unittests.py b/run_unittests.py index d7fc221..8ad64be 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -6079,6 +6079,39 @@ class LinuxlikeTests(BasePlatformTests): install_rpath = get_rpath(os.path.join(self.installdir, 'usr/bin/progcxx')) self.assertEqual(install_rpath, 'baz') + def test_global_rpath(self): + if is_cygwin(): + raise unittest.SkipTest('Windows PE/COFF binaries do not use RPATH') + if is_osx(): + raise unittest.SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)') + + testdir = os.path.join(self.unit_test_dir, '77 global-rpath') + oldinstalldir = self.installdir + + # Build and install an external library without DESTDIR. + # The external library generates a .pc file without an rpath. 
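+ # (The rpath to it will instead be supplied via LDFLAGS when building the app below.)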
+ yonder_dir = os.path.join(testdir, 'yonder') + yonder_prefix = os.path.join(oldinstalldir, 'yonder') + yonder_libdir = os.path.join(yonder_prefix, self.libdir) + self.prefix = yonder_prefix + self.installdir = yonder_prefix + self.init(yonder_dir) + self.build() + self.install(use_destdir=False) + self.new_builddir() + + # Build an app that uses that installed library. + # Supply the rpath to the installed library via LDFLAGS + # (as systems like buildroot and guix are wont to do) + # and verify install preserves that rpath. + env = {'LDFLAGS': '-Wl,-rpath=' + yonder_libdir, + 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')} + self.init(testdir, override_envvars=env) + self.build() + self.install(use_destdir=False) + got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified')) + self.assertEqual(got_rpath, yonder_libdir) + @skip_if_not_base_option('b_sanitize') def test_pch_with_address_sanitizer(self): if is_cygwin(): diff --git a/test cases/unit/77 global-rpath/meson.build b/test cases/unit/77 global-rpath/meson.build new file mode 100644 index 0000000..c67d9e0 --- /dev/null +++ b/test cases/unit/77 global-rpath/meson.build @@ -0,0 +1,3 @@ +project('global-rpath', 'cpp') +yonder_dep = dependency('yonder') +executable('rpathified', 'rpathified.cpp', dependencies: [yonder_dep], install: true) diff --git a/test cases/unit/77 global-rpath/rpathified.cpp b/test cases/unit/77 global-rpath/rpathified.cpp new file mode 100644 index 0000000..3788906 --- /dev/null +++ b/test cases/unit/77 global-rpath/rpathified.cpp @@ -0,0 +1,6 @@ +#include +#include +int main(int argc, char **argv) +{ + return strcmp(yonder(), "AB54 6BR"); +} diff --git a/test cases/unit/77 global-rpath/yonder/meson.build b/test cases/unit/77 global-rpath/yonder/meson.build new file mode 100644 index 0000000..e32f383 --- /dev/null +++ b/test cases/unit/77 global-rpath/yonder/meson.build @@ -0,0 +1,5 @@ +project('yonder', 'cpp') +yonder = shared_library('yonder', 'yonder.cpp', install: true) +install_headers('yonder.h') +pkgconfig = import('pkgconfig') +pkgconfig.generate(yonder) diff --git a/test cases/unit/77 global-rpath/yonder/yonder.cpp b/test cases/unit/77 global-rpath/yonder/yonder.cpp new file mode 100644 index 0000000..b182d34 --- /dev/null +++ b/test cases/unit/77 global-rpath/yonder/yonder.cpp @@ -0,0 +1,3 @@ +#include "yonder.h" + +char *yonder(void) { return "AB54 6BR"; } diff --git a/test cases/unit/77 global-rpath/yonder/yonder.h b/test cases/unit/77 global-rpath/yonder/yonder.h new file mode 100644 index 0000000..9d9ad16 --- /dev/null +++ b/test cases/unit/77 global-rpath/yonder/yonder.h @@ -0,0 +1 @@ +char *yonder(void); -- cgit v1.1 From ac8319add14c80369f9d3e8e8c034a2fcb47ef9d Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Sat, 16 May 2020 20:37:34 +0000 Subject: docs: add snippet documenting rpath behavior change --- docs/markdown/snippets/rpath_behavior.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 docs/markdown/snippets/rpath_behavior.md diff --git a/docs/markdown/snippets/rpath_behavior.md b/docs/markdown/snippets/rpath_behavior.md new file mode 100644 index 0000000..c46f0c2 --- /dev/null +++ b/docs/markdown/snippets/rpath_behavior.md @@ -0,0 +1,7 @@ +## rpath removal now more careful + +On Linux-like systems, meson adds rpath entries to allow running apps +in the build tree, and then removes those build-time-only +rpath entries when installing. Rpath entries may also come +in via LDFLAGS and via .pc files. 
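+(For example, an rpath passed on the command line, such as
+LDFLAGS="-Wl,-rpath=/some/extra/libdir", is one such entry.)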
Meson used to remove those +latter rpath entries by accident, but is now more careful. -- cgit v1.1 From ea04e9b0c1e9c7f073b0f8f1fcbea1413b4b4172 Mon Sep 17 00:00:00 2001 From: georgev93 Date: Sun, 17 May 2020 15:08:31 -0400 Subject: Add cmake files to msi package. --- msi/createmsi.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/msi/createmsi.py b/msi/createmsi.py index f80d1dc..4d03593 100644 --- a/msi/createmsi.py +++ b/msi/createmsi.py @@ -153,6 +153,7 @@ class PackageGenerator: if os.path.exists(sdir): shutil.rmtree(sdir) main_stage, ninja_stage = self.staging_dirs + dep_data_dir = 'mesonbuild/dependencies/data' modules = self.get_all_modules_from_dir('mesonbuild/modules') modules += self.get_all_modules_from_dir('mesonbuild/scripts') modules += self.get_more_modules() @@ -174,6 +175,7 @@ class PackageGenerator: pyinst_cmd += ['meson.py'] subprocess.check_call(pyinst_cmd) shutil.move(pyinstaller_tmpdir + '/meson', main_stage) + shutil.copytree(dep_data_dir, main_stage + '/mesonbuild/dependencies/data') if not os.path.exists(os.path.join(main_stage, 'meson.exe')): sys.exit('Meson exe missing from staging dir.') os.mkdir(ninja_stage) -- cgit v1.1 From 6fe68edbf889ac824a9f6d54009739577c047501 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 15 May 2020 10:25:34 -0700 Subject: compilers/d: Enable pgo for GDC --- mesonbuild/compilers/d.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index a83e221..b8f29cc 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -645,7 +645,8 @@ class GnuDCompiler(GnuCompiler, DCompiler): '1': default_warn_args, '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} - self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt', 'b_coverage'] + self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', + 'b_vscrt', 'b_coverage', 'b_pgo'] self._has_color_support = version_compare(self.version, '>=4.9') # dependencies were implemented before, but broken - support was fixed in GCC 7.1+ -- cgit v1.1 From bf34b971121d46d54f8870cd1faf420d6c0bafe5 Mon Sep 17 00:00:00 2001 From: Antoine Jacoutot Date: Sat, 16 May 2020 19:01:00 +0200 Subject: symbolextractor: add OpenBSD support --- mesonbuild/scripts/symbolextractor.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index 66161e2..41cca26 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -142,6 +142,23 @@ def osx_syms(libfilename: str, outfilename: str): result += [' '.join(x.split()[0:2]) for x in output.split('\n')] write_if_changed('\n'.join(result) + '\n', outfilename) +def openbsd_syms(libfilename: str, outfilename: str): + # Get the name of the library + output = call_tool('readelf', ['-d', libfilename]) + if not output: + dummy_syms(outfilename) + return + result = [x for x in output.split('\n') if 'SONAME' in x] + assert(len(result) <= 1) + # Get a list of all symbols exported + output = call_tool('nm', ['-D', '-P', '-g', libfilename]) + if not output: + dummy_syms(outfilename) + return + # U = undefined (cope with the lack of --defined-only option) + result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')] + write_if_changed('\n'.join(result) + '\n', outfilename) + def cygwin_syms(impfilename: str, outfilename: str): # Get the name of the library output = 
call_tool('dlltool', ['-I', impfilename]) @@ -237,6 +254,8 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host gnu_syms(libfilename, outfilename) elif mesonlib.is_osx(): osx_syms(libfilename, outfilename) + elif mesonlib.is_openbsd(): + openbsd_syms(libfilename, outfilename) elif mesonlib.is_windows(): if os.path.isfile(impfilename): windows_syms(impfilename, outfilename) -- cgit v1.1 From 754080843b5c84c44f99c2e0133f50a38629f259 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 1 Oct 2019 08:56:22 -0700 Subject: mtest: don't use len() to test container emptiness It's not idiomatic python and is significantly slower than not using the bool protocol. --- mesonbuild/mtest.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 4592c90..e04d365 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -311,7 +311,7 @@ class TAPParser: yield self.Version(version=version) continue - if len(line) == 0: + if not line: continue yield self.Error('unexpected input at line {}'.format((lineno,))) @@ -638,7 +638,7 @@ class SingleTestRunner: def _run_cmd(self, cmd: T.List[str]) -> TestRun: starttime = time.time() - if len(self.test.extra_paths) > 0: + if self.test.extra_paths: self.env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + self.env['PATH'] winecmd = [] for c in cmd: @@ -941,7 +941,7 @@ class TestHarness: self.junit.write() def print_collected_logs(self) -> None: - if len(self.collected_logs) > 0: + if self.collected_logs: if len(self.collected_logs) > 10: print('\nThe output from 10 first failed tests:\n') else: @@ -1023,7 +1023,7 @@ class TestHarness: print('No tests defined.') return [] - if len(self.options.include_suites) or len(self.options.exclude_suites): + if self.options.include_suites or self.options.exclude_suites: tests = [] for tst in self.tests: if self.test_suitable(tst): @@ -1085,7 +1085,7 @@ class TestHarness: if len(self.suites) > 1 and test.suite: rv = TestHarness.split_suite_string(test.suite[0])[0] s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite) - if len(s): + if s: rv += ":" return rv + s + " / " + test.name else: -- cgit v1.1 From 88f4bc6b55f5fb77e69796b5bee92f109a0ab160 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 1 Oct 2019 09:05:12 -0700 Subject: run_unittests: move cross_file_system_paths to the cross tests --- run_unittests.py | 124 ++++++++++++++++++++++++++++--------------------------- 1 file changed, 63 insertions(+), 61 deletions(-) diff --git a/run_unittests.py b/run_unittests.py index b21f785..6712343 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -3420,67 +3420,6 @@ int main(int argc, char **argv) { f.write('public class Foo { public static void main() {} }') self._run(self.meson_command + ['init', '-b'], workdir=tmpdir) - # The test uses mocking and thus requires that - # the current process is the one to run the Meson steps. - # If we are using an external test executable (most commonly - # in Debian autopkgtests) then the mocking won't work. 
- @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.') - def test_cross_file_system_paths(self): - if is_windows(): - raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)') - if is_sunos(): - cc = 'gcc' - else: - cc = 'cc' - - testdir = os.path.join(self.common_test_dir, '1 trivial') - cross_content = textwrap.dedent("""\ - [binaries] - c = '/usr/bin/{}' - ar = '/usr/bin/ar' - strip = '/usr/bin/ar' - - [properties] - - [host_machine] - system = 'linux' - cpu_family = 'x86' - cpu = 'i686' - endian = 'little' - """.format(cc)) - - with tempfile.TemporaryDirectory() as d: - dir_ = os.path.join(d, 'meson', 'cross') - os.makedirs(dir_) - with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: - f.write(cross_content) - name = os.path.basename(f.name) - - with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}): - self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) - self.wipe() - - with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}): - os.environ.pop('XDG_DATA_HOME', None) - self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) - self.wipe() - - with tempfile.TemporaryDirectory() as d: - dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross') - os.makedirs(dir_) - with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: - f.write(cross_content) - name = os.path.basename(f.name) - - # If XDG_DATA_HOME is set in the environment running the - # tests this test will fail, os mock the environment, pop - # it, then test - with mock.patch.dict(os.environ): - os.environ.pop('XDG_DATA_HOME', None) - with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)): - self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) - self.wipe() - def test_compiler_run_command(self): ''' The test checks that the compiler object can be passed to @@ -7606,6 +7545,69 @@ class CrossFileTests(BasePlatformTests): This is mainly aimed to testing overrides from cross files. """ + def _cross_file_generator(self) -> str: + if is_sunos(): + cc = 'gcc' + else: + cc = 'cc' + + return textwrap.dedent("""\ + [binaries] + c = '/usr/bin/{}' + ar = '/usr/bin/ar' + strip = '/usr/bin/ar' + + [properties] + + [host_machine] + system = 'linux' + cpu_family = 'x86' + cpu = 'i686' + endian = 'little' + """.format(cc)) + + # The test uses mocking and thus requires that the current process is the + # one to run the Meson steps. If we are using an external test executable + # (most commonly in Debian autopkgtests) then the mocking won't work. 
+ @unittest.skipIf('MESON_EXE' in os.environ, 'MESON_EXE is defined, can not use mocking.') + def test_cross_file_system_paths(self): + if is_windows(): + raise unittest.SkipTest('system crossfile paths not defined for Windows (yet)') + + testdir = os.path.join(self.common_test_dir, '1 trivial') + cross_content = self._cross_file_generator() + with tempfile.TemporaryDirectory() as d: + dir_ = os.path.join(d, 'meson', 'cross') + os.makedirs(dir_) + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + f.write(cross_content) + name = os.path.basename(f.name) + + with mock.patch.dict(os.environ, {'XDG_DATA_HOME': d}): + self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) + self.wipe() + + with mock.patch.dict(os.environ, {'XDG_DATA_DIRS': d}): + os.environ.pop('XDG_DATA_HOME', None) + self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) + self.wipe() + + with tempfile.TemporaryDirectory() as d: + dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross') + os.makedirs(dir_) + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + f.write(cross_content) + name = os.path.basename(f.name) + + # If XDG_DATA_HOME is set in the environment running the + # tests this test will fail, os mock the environment, pop + # it, then test + with mock.patch.dict(os.environ): + os.environ.pop('XDG_DATA_HOME', None) + with mock.patch('mesonbuild.coredata.os.path.expanduser', lambda x: x.replace('~', d)): + self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) + self.wipe() + def test_cross_file_dirs(self): testcase = os.path.join(self.unit_test_dir, '60 native file override') self.init(testcase, default_args=False, -- cgit v1.1 From e822889754a3f9ba1a1c9d9179dd24d102db3969 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 1 Oct 2019 09:41:03 -0700 Subject: tests: Add tests for cross file exe_wrapper tests needs_exe wrapper but doesn't have one, !needs_exe_wrapper, and needs_exe_wrapper and has one. --- run_unittests.py | 54 ++++++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 52 insertions(+), 2 deletions(-) diff --git a/run_unittests.py b/run_unittests.py index 6712343..7467107 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -40,6 +40,7 @@ from contextlib import contextmanager from glob import glob from pathlib import (PurePath, Path) from distutils.dir_util import copy_tree +import typing import mesonbuild.mlog import mesonbuild.depfile @@ -7545,7 +7546,10 @@ class CrossFileTests(BasePlatformTests): This is mainly aimed to testing overrides from cross files. 
""" - def _cross_file_generator(self) -> str: + def _cross_file_generator(self, *, needs_exe_wrapper: bool = False, + exe_wrapper: typing.Optional[typing.List[str]] = None) -> str: + if is_windows(): + raise unittest.SkipTest('Cannot run this test on non-mingw/non-cygwin windows') if is_sunos(): cc = 'gcc' else: @@ -7558,13 +7562,59 @@ class CrossFileTests(BasePlatformTests): strip = '/usr/bin/ar' [properties] + needs_exe_wrapper = {} + {} [host_machine] system = 'linux' cpu_family = 'x86' cpu = 'i686' endian = 'little' - """.format(cc)) + """.format(cc, needs_exe_wrapper, + 'exe_wrapper = {}'.format(str(exe_wrapper)) + if exe_wrapper is not None else '')) + + def test_needs_exe_wrapper_true(self): + testdir = os.path.join(self.common_test_dir, '1 trivial') + with tempfile.TemporaryDirectory() as d: + p = Path(d) / 'crossfile' + with p.open('wt') as f: + f.write(self._cross_file_generator(needs_exe_wrapper=True)) + self.init(testdir, extra_args=['--cross-file=' + str(p)]) + out = self.run_target('test') + self.assertRegex(out, r'Skipped:\s*1\n') + + def test_needs_exe_wrapper_false(self): + testdir = os.path.join(self.common_test_dir, '1 trivial') + with tempfile.TemporaryDirectory() as d: + p = Path(d) / 'crossfile' + with p.open('wt') as f: + f.write(self._cross_file_generator(needs_exe_wrapper=False)) + self.init(testdir, extra_args=['--cross-file=' + str(p)]) + out = self.run_target('test') + self.assertNotRegex(out, r'Skipped:\s*1\n') + + def test_needs_exe_wrapper_true_wrapper(self): + testdir = os.path.join(self.common_test_dir, '1 trivial') + with tempfile.TemporaryDirectory() as d: + s = Path(d) / 'wrapper.py' + with s.open('wt') as f: + f.write(textwrap.dedent(''' + #!/usr/bin/env python3 + import subprocess + import sys + + return subprocess.run(sys.argv[1:]).returnncode + ''')) + p = Path(d) / 'crossfile' + with p.open('wt') as f: + f.write(self._cross_file_generator( + needs_exe_wrapper=True, + exe_wrapper=[str(s)])) + + self.init(testdir, extra_args=['--cross-file=' + str(p)]) + out = self.run_target('test') + self.assertNotRegex(out, r'Skipped:\s*1\n') # The test uses mocking and thus requires that the current process is the # one to run the Meson steps. If we are using an external test executable -- cgit v1.1 From cb6662b57299c3644719593115b2ffb828679c36 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 1 Oct 2019 11:02:50 -0700 Subject: backends: ensure that test executables can be run when passed as arguments If an executable is passed as an argument to a script in the build directory that it resides in then it will not execute (on *nix) due to a lack of ./. Ie, `foo` must be called as `./foo`. If it is called from a different directory it will work. Ie `../foo` or `bar/foo`. 
Fixes #5984 --- mesonbuild/backend/backends.py | 5 +++++ run_unittests.py | 25 +++++++++++++----------- test cases/unit/71 cross test passed/meson.build | 12 ++++++++++++ test cases/unit/71 cross test passed/script.py | 7 +++++++ test cases/unit/71 cross test passed/src/main.c | 6 ++++++ 5 files changed, 44 insertions(+), 11 deletions(-) create mode 100644 test cases/unit/71 cross test passed/meson.build create mode 100644 test cases/unit/71 cross test passed/script.py create mode 100644 test cases/unit/71 cross test passed/src/main.c diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 9d527cb..5ef7f44 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -810,6 +810,11 @@ class Backend: cmd_args.append(a) elif isinstance(a, str): cmd_args.append(a) + elif isinstance(a, build.Executable): + p = self.construct_target_rel_path(a, t.workdir) + if p == a.get_filename(): + p = './' + p + cmd_args.append(p) elif isinstance(a, build.Target): cmd_args.append(self.construct_target_rel_path(a, t.workdir)) else: diff --git a/run_unittests.py b/run_unittests.py index 7467107..f8ca253 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -7560,19 +7560,28 @@ class CrossFileTests(BasePlatformTests): c = '/usr/bin/{}' ar = '/usr/bin/ar' strip = '/usr/bin/ar' + {} [properties] needs_exe_wrapper = {} - {} [host_machine] system = 'linux' cpu_family = 'x86' cpu = 'i686' endian = 'little' - """.format(cc, needs_exe_wrapper, - 'exe_wrapper = {}'.format(str(exe_wrapper)) - if exe_wrapper is not None else '')) + """.format(cc, + 'exe_wrapper = {}'.format(str(exe_wrapper)) if exe_wrapper is not None else '', + needs_exe_wrapper)) + + def _stub_exe_wrapper(self) -> str: + return textwrap.dedent('''\ + #!/usr/bin/env python3 + import subprocess + import sys + + sys.exit(subprocess.run(sys.argv[1:]).returncode) + ''') def test_needs_exe_wrapper_true(self): testdir = os.path.join(self.common_test_dir, '1 trivial') @@ -7599,13 +7608,7 @@ class CrossFileTests(BasePlatformTests): with tempfile.TemporaryDirectory() as d: s = Path(d) / 'wrapper.py' with s.open('wt') as f: - f.write(textwrap.dedent(''' - #!/usr/bin/env python3 - import subprocess - import sys - - return subprocess.run(sys.argv[1:]).returnncode - ''')) + f.write(self._stub_exe_wrapper()) p = Path(d) / 'crossfile' with p.open('wt') as f: f.write(self._cross_file_generator( diff --git a/test cases/unit/71 cross test passed/meson.build b/test cases/unit/71 cross test passed/meson.build new file mode 100644 index 0000000..cb3bb6d --- /dev/null +++ b/test cases/unit/71 cross test passed/meson.build @@ -0,0 +1,12 @@ +project( + 'cross test passed', + 'c', + version : '>= 0.51' +) + +e = executable('exec', 'src/main.c') + +py = import('python').find_installation() + +test('root', e) +test('main', py, args : [meson.current_source_dir() / 'script.py', e]) diff --git a/test cases/unit/71 cross test passed/script.py b/test cases/unit/71 cross test passed/script.py new file mode 100644 index 0000000..257cd30 --- /dev/null +++ b/test cases/unit/71 cross test passed/script.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +import subprocess +import sys + +if __name__ == "__main__": + sys.exit(subprocess.run(sys.argv[1:]).returncode) diff --git a/test cases/unit/71 cross test passed/src/main.c b/test cases/unit/71 cross test passed/src/main.c new file mode 100644 index 0000000..490b4a6 --- /dev/null +++ b/test cases/unit/71 cross test passed/src/main.c @@ -0,0 +1,6 @@ +#include + +int main(int argc, char const 
*argv[]) +{ + return 0; +} -- cgit v1.1 From 0ec94ca0629415d4b555e8ef38a5093a65e0539e Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 1 Oct 2019 11:19:08 -0700 Subject: backends: Consider arguments passed to a test when cross compiling Otherwise a wrapper script which takes an executable as an argument will mistakenly run when that executable is cross compiled. This does not wrap said executable in an exe_wrapper, just skip it. Fixes #5982 --- mesonbuild/backend/backends.py | 18 +++++++++++++-- mesonbuild/mtest.py | 7 +++--- run_unittests.py | 25 ++++++++++++++++----- test cases/unit/71 cross test passed/meson.build | 12 ---------- test cases/unit/71 cross test passed/script.py | 7 ------ test cases/unit/71 cross test passed/src/main.c | 6 ----- test cases/unit/72 cross test passed/meson.build | 12 ++++++++++ test cases/unit/72 cross test passed/script.py | 7 ++++++ test cases/unit/72 cross test passed/src/main.c | 6 +++++ test cases/unit/72 summary/meson.build | 15 ------------- .../unit/72 summary/subprojects/sub/meson.build | 4 ---- .../unit/72 summary/subprojects/sub2/meson.build | 5 ----- test cases/unit/73 summary/meson.build | 15 +++++++++++++ .../unit/73 summary/subprojects/sub/meson.build | 4 ++++ .../unit/73 summary/subprojects/sub2/meson.build | 5 +++++ test cases/unit/73 wrap file url/meson.build | 4 ---- .../73 wrap file url/subprojects/foo-patch.tar.xz | Bin 228 -> 0 bytes .../unit/73 wrap file url/subprojects/foo.tar.xz | Bin 216 -> 0 bytes test cases/unit/74 dep files/foo.c | 0 test cases/unit/74 dep files/meson.build | 16 ------------- test cases/unit/74 wrap file url/meson.build | 4 ++++ .../74 wrap file url/subprojects/foo-patch.tar.xz | Bin 0 -> 228 bytes .../unit/74 wrap file url/subprojects/foo.tar.xz | Bin 0 -> 216 bytes test cases/unit/75 dep files/foo.c | 0 test cases/unit/75 dep files/meson.build | 16 +++++++++++++ test cases/unit/75 subdir libdir/meson.build | 2 -- .../75 subdir libdir/subprojects/flub/meson.build | 1 - test cases/unit/76 subdir libdir/meson.build | 2 ++ .../76 subdir libdir/subprojects/flub/meson.build | 1 + 29 files changed, 111 insertions(+), 83 deletions(-) delete mode 100644 test cases/unit/71 cross test passed/meson.build delete mode 100644 test cases/unit/71 cross test passed/script.py delete mode 100644 test cases/unit/71 cross test passed/src/main.c create mode 100644 test cases/unit/72 cross test passed/meson.build create mode 100644 test cases/unit/72 cross test passed/script.py create mode 100644 test cases/unit/72 cross test passed/src/main.c delete mode 100644 test cases/unit/72 summary/meson.build delete mode 100644 test cases/unit/72 summary/subprojects/sub/meson.build delete mode 100644 test cases/unit/72 summary/subprojects/sub2/meson.build create mode 100644 test cases/unit/73 summary/meson.build create mode 100644 test cases/unit/73 summary/subprojects/sub/meson.build create mode 100644 test cases/unit/73 summary/subprojects/sub2/meson.build delete mode 100644 test cases/unit/73 wrap file url/meson.build delete mode 100644 test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz delete mode 100644 test cases/unit/73 wrap file url/subprojects/foo.tar.xz delete mode 100644 test cases/unit/74 dep files/foo.c delete mode 100644 test cases/unit/74 dep files/meson.build create mode 100644 test cases/unit/74 wrap file url/meson.build create mode 100644 test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz create mode 100644 test cases/unit/74 wrap file url/subprojects/foo.tar.xz create mode 100644 test 
cases/unit/75 dep files/foo.c create mode 100644 test cases/unit/75 dep files/meson.build delete mode 100644 test cases/unit/75 subdir libdir/meson.build delete mode 100644 test cases/unit/75 subdir libdir/subprojects/flub/meson.build create mode 100644 test cases/unit/76 subdir libdir/meson.build create mode 100644 test cases/unit/76 subdir libdir/subprojects/flub/meson.build diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 5ef7f44..ceea94a 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -119,7 +119,8 @@ class TestSerialisation: needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables, should_fail: bool, timeout: T.Optional[int], workdir: T.Optional[str], - extra_paths: T.List[str], protocol: TestProtocol, priority: int): + extra_paths: T.List[str], protocol: TestProtocol, priority: int, + cmd_is_built: bool): self.name = name self.project_name = project self.suite = suite @@ -138,6 +139,8 @@ class TestSerialisation: self.protocol = protocol self.priority = priority self.needs_exe_wrapper = needs_exe_wrapper + self.cmd_is_built = cmd_is_built + def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']: if backend == 'ninja': @@ -788,6 +791,15 @@ class Backend: # E.g. an external verifier or simulator program run on a generated executable. # Can always be run without a wrapper. test_for_machine = MachineChoice.BUILD + + # we allow passing compiled executables to tests, which may be cross built. + # We need to consider these as well when considering whether the target is cross or not. + for a in t.cmd_args: + if isinstance(a, build.BuildTarget): + if a.for_machine is MachineChoice.HOST: + test_for_machine = MachineChoice.HOST + break + is_cross = self.environment.is_cross_build(test_for_machine) if is_cross and self.environment.need_exe_wrapper(): exe_wrapper = self.environment.get_exe_wrapper() @@ -801,6 +813,7 @@ class Backend: extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps) else: extra_paths = [] + cmd_args = [] for a in unholder(t.cmd_args): if isinstance(a, build.BuildTarget): @@ -823,7 +836,8 @@ class Backend: exe_wrapper, self.environment.need_exe_wrapper(), t.is_parallel, cmd_args, t.env, t.should_fail, t.timeout, t.workdir, - extra_paths, t.protocol, t.priority) + extra_paths, t.protocol, t.priority, + isinstance(exe, build.Executable)) arr.append(ts) return arr diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index e04d365..8806932 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -615,14 +615,15 @@ class SingleTestRunner: # Can not run test on cross compiled executable # because there is no execute wrapper. return None - else: + elif self.test.cmd_is_built: + # If the command is not built (ie, its a python script), + # then we don't check for the exe-wrapper if not self.test.exe_runner.found(): msg = 'The exe_wrapper defined in the cross file {!r} was not ' \ 'found. Please check the command and/or add it to PATH.' 
raise TestException(msg.format(self.test.exe_runner.name)) return self.test.exe_runner.get_command() + self.test.fname - else: - return self.test.fname + return self.test.fname def run(self) -> TestRun: cmd = self._get_cmd() diff --git a/run_unittests.py b/run_unittests.py index f8ca253..e326aa4 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4476,7 +4476,7 @@ recommended as it is not supported on some platforms''') self._run(self.mconf_command + [self.builddir]) def test_summary(self): - testdir = os.path.join(self.unit_test_dir, '72 summary') + testdir = os.path.join(self.unit_test_dir, '73 summary') out = self.init(testdir) expected = textwrap.dedent(r''' Some Subproject 2.0 @@ -4530,7 +4530,7 @@ recommended as it is not supported on some platforms''') self.assertPathDoesNotExist(os.path.join(self.builddir, prog)) def test_spurious_reconfigure_built_dep_file(self): - testdir = os.path.join(self.unit_test_dir, '74 dep files') + testdir = os.path.join(self.unit_test_dir, '75 dep files') # Regression test: Spurious reconfigure was happening when build # directory is inside source directory. @@ -6630,7 +6630,7 @@ c = ['{0}'] return hashlib.sha256(f.read()).hexdigest() def test_wrap_with_file_url(self): - testdir = os.path.join(self.unit_test_dir, '73 wrap file url') + testdir = os.path.join(self.unit_test_dir, '74 wrap file url') source_filename = os.path.join(testdir, 'subprojects', 'foo.tar.xz') patch_filename = os.path.join(testdir, 'subprojects', 'foo-patch.tar.xz') wrap_filename = os.path.join(testdir, 'subprojects', 'foo.wrap') @@ -6721,7 +6721,7 @@ class LinuxCrossArmTests(BaseLinuxCrossTests): def test_cross_libdir_subproject(self): # Guard against a regression where calling "subproject" # would reset the value of libdir to its default value. - testdir = os.path.join(self.unit_test_dir, '75 subdir libdir') + testdir = os.path.join(self.unit_test_dir, '76 subdir libdir') self.init(testdir, extra_args=['--libdir=fuf']) for i in self.introspect('--buildoptions'): if i['name'] == 'libdir': @@ -7591,7 +7591,7 @@ class CrossFileTests(BasePlatformTests): f.write(self._cross_file_generator(needs_exe_wrapper=True)) self.init(testdir, extra_args=['--cross-file=' + str(p)]) out = self.run_target('test') - self.assertRegex(out, r'Skipped:\s*1\n') + self.assertRegex(out, r'Skipped:\s*1\s*\n') def test_needs_exe_wrapper_false(self): testdir = os.path.join(self.common_test_dir, '1 trivial') @@ -7609,6 +7609,7 @@ class CrossFileTests(BasePlatformTests): s = Path(d) / 'wrapper.py' with s.open('wt') as f: f.write(self._stub_exe_wrapper()) + s.chmod(0o774) p = Path(d) / 'crossfile' with p.open('wt') as f: f.write(self._cross_file_generator( @@ -7617,7 +7618,19 @@ class CrossFileTests(BasePlatformTests): self.init(testdir, extra_args=['--cross-file=' + str(p)]) out = self.run_target('test') - self.assertNotRegex(out, r'Skipped:\s*1\n') + self.assertNotRegex(out, r'Skipped:\s*1\s*\n') + + def test_cross_exe_passed_no_wrapper(self): + testdir = os.path.join(self.unit_test_dir, '72 cross test passed') + with tempfile.TemporaryDirectory() as d: + p = Path(d) / 'crossfile' + with p.open('wt') as f: + f.write(self._cross_file_generator(needs_exe_wrapper=True)) + + self.init(testdir, extra_args=['--cross-file=' + str(p)]) + self.build() + out = self.run_target('test') + self.assertRegex(out, r'Skipped:\s*2\s*\n') # The test uses mocking and thus requires that the current process is the # one to run the Meson steps. 
If we are using an external test executable diff --git a/test cases/unit/71 cross test passed/meson.build b/test cases/unit/71 cross test passed/meson.build deleted file mode 100644 index cb3bb6d..0000000 --- a/test cases/unit/71 cross test passed/meson.build +++ /dev/null @@ -1,12 +0,0 @@ -project( - 'cross test passed', - 'c', - version : '>= 0.51' -) - -e = executable('exec', 'src/main.c') - -py = import('python').find_installation() - -test('root', e) -test('main', py, args : [meson.current_source_dir() / 'script.py', e]) diff --git a/test cases/unit/71 cross test passed/script.py b/test cases/unit/71 cross test passed/script.py deleted file mode 100644 index 257cd30..0000000 --- a/test cases/unit/71 cross test passed/script.py +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env python3 - -import subprocess -import sys - -if __name__ == "__main__": - sys.exit(subprocess.run(sys.argv[1:]).returncode) diff --git a/test cases/unit/71 cross test passed/src/main.c b/test cases/unit/71 cross test passed/src/main.c deleted file mode 100644 index 490b4a6..0000000 --- a/test cases/unit/71 cross test passed/src/main.c +++ /dev/null @@ -1,6 +0,0 @@ -#include - -int main(int argc, char const *argv[]) -{ - return 0; -} diff --git a/test cases/unit/72 cross test passed/meson.build b/test cases/unit/72 cross test passed/meson.build new file mode 100644 index 0000000..cb3bb6d --- /dev/null +++ b/test cases/unit/72 cross test passed/meson.build @@ -0,0 +1,12 @@ +project( + 'cross test passed', + 'c', + version : '>= 0.51' +) + +e = executable('exec', 'src/main.c') + +py = import('python').find_installation() + +test('root', e) +test('main', py, args : [meson.current_source_dir() / 'script.py', e]) diff --git a/test cases/unit/72 cross test passed/script.py b/test cases/unit/72 cross test passed/script.py new file mode 100644 index 0000000..257cd30 --- /dev/null +++ b/test cases/unit/72 cross test passed/script.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +import subprocess +import sys + +if __name__ == "__main__": + sys.exit(subprocess.run(sys.argv[1:]).returncode) diff --git a/test cases/unit/72 cross test passed/src/main.c b/test cases/unit/72 cross test passed/src/main.c new file mode 100644 index 0000000..490b4a6 --- /dev/null +++ b/test cases/unit/72 cross test passed/src/main.c @@ -0,0 +1,6 @@ +#include + +int main(int argc, char const *argv[]) +{ + return 0; +} diff --git a/test cases/unit/72 summary/meson.build b/test cases/unit/72 summary/meson.build deleted file mode 100644 index df4540d..0000000 --- a/test cases/unit/72 summary/meson.build +++ /dev/null @@ -1,15 +0,0 @@ -project('My Project', version : '1.0') - -subproject('sub') -subproject('sub2', required : false) - -summary({'Some boolean': false, - 'Another boolean': true, - 'Some string': 'Hello World', - 'A list': ['string', 1, true], - 'empty list': [], - }, section: 'Configuration') -summary('A number', 1, section: 'Configuration') -summary('yes', true, bool_yn : true, section: 'Configuration') -summary('no', false, bool_yn : true, section: 'Configuration') -summary('coma list', ['a', 'b', 'c'], list_sep: ', ', section: 'Configuration') diff --git a/test cases/unit/72 summary/subprojects/sub/meson.build b/test cases/unit/72 summary/subprojects/sub/meson.build deleted file mode 100644 index e7d7833..0000000 --- a/test cases/unit/72 summary/subprojects/sub/meson.build +++ /dev/null @@ -1,4 +0,0 @@ -project('Some Subproject', version : '2.0') - -summary('string', 'bar') -summary({'integer': 1, 'boolean': true}) diff --git a/test 
cases/unit/72 summary/subprojects/sub2/meson.build b/test cases/unit/72 summary/subprojects/sub2/meson.build deleted file mode 100644 index 86b9cfd..0000000 --- a/test cases/unit/72 summary/subprojects/sub2/meson.build +++ /dev/null @@ -1,5 +0,0 @@ -project('sub2') - -error('This subproject failed') - -summary('Section', 'Should not be seen') diff --git a/test cases/unit/73 summary/meson.build b/test cases/unit/73 summary/meson.build new file mode 100644 index 0000000..df4540d --- /dev/null +++ b/test cases/unit/73 summary/meson.build @@ -0,0 +1,15 @@ +project('My Project', version : '1.0') + +subproject('sub') +subproject('sub2', required : false) + +summary({'Some boolean': false, + 'Another boolean': true, + 'Some string': 'Hello World', + 'A list': ['string', 1, true], + 'empty list': [], + }, section: 'Configuration') +summary('A number', 1, section: 'Configuration') +summary('yes', true, bool_yn : true, section: 'Configuration') +summary('no', false, bool_yn : true, section: 'Configuration') +summary('coma list', ['a', 'b', 'c'], list_sep: ', ', section: 'Configuration') diff --git a/test cases/unit/73 summary/subprojects/sub/meson.build b/test cases/unit/73 summary/subprojects/sub/meson.build new file mode 100644 index 0000000..e7d7833 --- /dev/null +++ b/test cases/unit/73 summary/subprojects/sub/meson.build @@ -0,0 +1,4 @@ +project('Some Subproject', version : '2.0') + +summary('string', 'bar') +summary({'integer': 1, 'boolean': true}) diff --git a/test cases/unit/73 summary/subprojects/sub2/meson.build b/test cases/unit/73 summary/subprojects/sub2/meson.build new file mode 100644 index 0000000..86b9cfd --- /dev/null +++ b/test cases/unit/73 summary/subprojects/sub2/meson.build @@ -0,0 +1,5 @@ +project('sub2') + +error('This subproject failed') + +summary('Section', 'Should not be seen') diff --git a/test cases/unit/73 wrap file url/meson.build b/test cases/unit/73 wrap file url/meson.build deleted file mode 100644 index 3bd3b25..0000000 --- a/test cases/unit/73 wrap file url/meson.build +++ /dev/null @@ -1,4 +0,0 @@ -project('test wrap with file url') - -exe = subproject('foo').get_variable('foo_exe') -test('test1', exe) diff --git a/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz b/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz deleted file mode 100644 index fdb026c..0000000 Binary files a/test cases/unit/73 wrap file url/subprojects/foo-patch.tar.xz and /dev/null differ diff --git a/test cases/unit/73 wrap file url/subprojects/foo.tar.xz b/test cases/unit/73 wrap file url/subprojects/foo.tar.xz deleted file mode 100644 index 2ed6ab4..0000000 Binary files a/test cases/unit/73 wrap file url/subprojects/foo.tar.xz and /dev/null differ diff --git a/test cases/unit/74 dep files/foo.c b/test cases/unit/74 dep files/foo.c deleted file mode 100644 index e69de29..0000000 diff --git a/test cases/unit/74 dep files/meson.build b/test cases/unit/74 dep files/meson.build deleted file mode 100644 index 4829f56..0000000 --- a/test cases/unit/74 dep files/meson.build +++ /dev/null @@ -1,16 +0,0 @@ -project('test', 'c') - -python = import('python').find_installation() - -lib = library('foo', 'foo.c') - -# The library does not yet exist but we can already use its path during -# configuration. This should not trigger a reconfigure when the library is -# rebuilt. 
-configure_file( - output: 'out.txt', - capture: true, - command: [python, '-c', 'import sys; print(sys.argv[1])', lib.full_path()], -) - -message('Project configured') diff --git a/test cases/unit/74 wrap file url/meson.build b/test cases/unit/74 wrap file url/meson.build new file mode 100644 index 0000000..3bd3b25 --- /dev/null +++ b/test cases/unit/74 wrap file url/meson.build @@ -0,0 +1,4 @@ +project('test wrap with file url') + +exe = subproject('foo').get_variable('foo_exe') +test('test1', exe) diff --git a/test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz b/test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz new file mode 100644 index 0000000..fdb026c Binary files /dev/null and b/test cases/unit/74 wrap file url/subprojects/foo-patch.tar.xz differ diff --git a/test cases/unit/74 wrap file url/subprojects/foo.tar.xz b/test cases/unit/74 wrap file url/subprojects/foo.tar.xz new file mode 100644 index 0000000..2ed6ab4 Binary files /dev/null and b/test cases/unit/74 wrap file url/subprojects/foo.tar.xz differ diff --git a/test cases/unit/75 dep files/foo.c b/test cases/unit/75 dep files/foo.c new file mode 100644 index 0000000..e69de29 diff --git a/test cases/unit/75 dep files/meson.build b/test cases/unit/75 dep files/meson.build new file mode 100644 index 0000000..4829f56 --- /dev/null +++ b/test cases/unit/75 dep files/meson.build @@ -0,0 +1,16 @@ +project('test', 'c') + +python = import('python').find_installation() + +lib = library('foo', 'foo.c') + +# The library does not yet exist but we can already use its path during +# configuration. This should not trigger a reconfigure when the library is +# rebuilt. +configure_file( + output: 'out.txt', + capture: true, + command: [python, '-c', 'import sys; print(sys.argv[1])', lib.full_path()], +) + +message('Project configured') diff --git a/test cases/unit/75 subdir libdir/meson.build b/test cases/unit/75 subdir libdir/meson.build deleted file mode 100644 index 5099c91..0000000 --- a/test cases/unit/75 subdir libdir/meson.build +++ /dev/null @@ -1,2 +0,0 @@ -project('toplevel', 'c') -subproject('flub') diff --git a/test cases/unit/75 subdir libdir/subprojects/flub/meson.build b/test cases/unit/75 subdir libdir/subprojects/flub/meson.build deleted file mode 100644 index 7bfd2c5..0000000 --- a/test cases/unit/75 subdir libdir/subprojects/flub/meson.build +++ /dev/null @@ -1 +0,0 @@ -project('subflub', 'c') diff --git a/test cases/unit/76 subdir libdir/meson.build b/test cases/unit/76 subdir libdir/meson.build new file mode 100644 index 0000000..5099c91 --- /dev/null +++ b/test cases/unit/76 subdir libdir/meson.build @@ -0,0 +1,2 @@ +project('toplevel', 'c') +subproject('flub') diff --git a/test cases/unit/76 subdir libdir/subprojects/flub/meson.build b/test cases/unit/76 subdir libdir/subprojects/flub/meson.build new file mode 100644 index 0000000..7bfd2c5 --- /dev/null +++ b/test cases/unit/76 subdir libdir/subprojects/flub/meson.build @@ -0,0 +1 @@ +project('subflub', 'c') -- cgit v1.1 From af787874a8e4eab8222382128ccfb5549b31c801 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 7 Oct 2019 13:13:54 -0700 Subject: pass exe_wrapper to test scripts through the environment This adds a new MESON_EXE_WRAPPER environment variable containing the string form of the exe_wrapper, if there is an exe_wrapper defined. 
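A sketch of how a test script might consume the new variable (the binary path is taken from argv and is hypothetical): the joined wrapper string is split back into an argument list with shlex, and exit code 77 reports a skip when the cross binary cannot be run on this host, following the convention described in the Reference-manual change below.

    #!/usr/bin/env python3
    # Sketch: run a cross-built binary through MESON_EXE_WRAPPER when one is provided.
    import os
    import shlex
    import subprocess
    import sys

    binary = sys.argv[1]
    wrapper = shlex.split(os.environ.get('MESON_EXE_WRAPPER', ''))
    try:
        result = subprocess.run(wrapper + [binary])
    except OSError:
        # No usable wrapper and the cross binary is not executable here.
        sys.exit(77)
    sys.exit(result.returncode)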
Fixes #4427 --- docs/markdown/Reference-manual.md | 7 +++++ .../snippets/exe_wrapper_for_cross_built_tests.md | 9 ++++++ mesonbuild/mtest.py | 32 ++++++++++++---------- run_unittests.py | 12 ++++---- test cases/unit/72 cross test passed/exewrapper.py | 24 ++++++++++++++++ test cases/unit/72 cross test passed/meson.build | 7 +++++ .../unit/72 cross test passed/meson_options.txt | 5 ++++ 7 files changed, 75 insertions(+), 21 deletions(-) create mode 100644 docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md create mode 100755 test cases/unit/72 cross test passed/exewrapper.py create mode 100644 test cases/unit/72 cross test passed/meson_options.txt diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 1bd5ff0..9b5d657 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -1613,6 +1613,13 @@ object](#build-target-object) returned by object](#external-program-object) returned by [`find_program()`](#find_program). +*Since 0.55.0* When cross compiling, if an exe_wrapper is needed and defined +the environment variable `MESON_EXE_WRAPPER` will be set to the string value +of that wrapper (implementation detail: using `mesonlib.join_args`). Test +scripts may use this to run cross built binaries. If your test needs +`MESON_EXE_WRAPPER` in cross build situations it is your responsibility to +return code 77 to tell the harness to report "skip" + By default, environment variable [`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html) is automatically set by `meson test` to a random value between 1..255. diff --git a/docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md b/docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md new file mode 100644 index 0000000..ebdd8a7 --- /dev/null +++ b/docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md @@ -0,0 +1,9 @@ +## Test scripts are given the exe wrapper if needed + +Meson will now set the `MESON_EXE_WRAPPER` as the properly wrapped and joined +representation. For Unix-like OSes this means python's shelx.join, on Windows +an implementation that attempts to properly quote windows argument is used. +This allow wrapper scripts to run test binaries, instead of just skipping. + +for example, if the wrapper is `['emulator', '--script']`, it will be passed +as `MESON_EXE_WRAPPER="emulator --script"`. diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 8806932..4aafe62 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -42,7 +42,7 @@ from . import build from . import environment from . import mlog from .dependencies import ExternalProgram -from .mesonlib import MesonException, get_wine_shortpath, split_args +from .mesonlib import MesonException, get_wine_shortpath, split_args, join_args from .backend.backends import TestProtocol if T.TYPE_CHECKING: @@ -609,20 +609,19 @@ class SingleTestRunner: return ['java', '-jar'] + self.test.fname elif not self.test.is_cross_built and run_with_mono(self.test.fname[0]): return ['mono'] + self.test.fname - else: - if self.test.is_cross_built and self.test.needs_exe_wrapper: - if self.test.exe_runner is None: - # Can not run test on cross compiled executable - # because there is no execute wrapper. - return None - elif self.test.cmd_is_built: - # If the command is not built (ie, its a python script), - # then we don't check for the exe-wrapper - if not self.test.exe_runner.found(): - msg = 'The exe_wrapper defined in the cross file {!r} was not ' \ - 'found. 
Please check the command and/or add it to PATH.' - raise TestException(msg.format(self.test.exe_runner.name)) - return self.test.exe_runner.get_command() + self.test.fname + elif self.test.cmd_is_built and self.test.needs_exe_wrapper: + if self.test.exe_runner is None: + # Can not run test on cross compiled executable + # because there is no execute wrapper. + return None + elif self.test.cmd_is_built: + # If the command is not built (ie, its a python script), + # then we don't check for the exe-wrapper + if not self.test.exe_runner.found(): + msg = ('The exe_wrapper defined in the cross file {!r} was not ' + 'found. Please check the command and/or add it to PATH.') + raise TestException(msg.format(self.test.exe_runner.name)) + return self.test.exe_runner.get_command() + self.test.fname return self.test.fname def run(self) -> TestRun: @@ -868,6 +867,9 @@ class TestHarness: env = os.environ.copy() test_env = test.env.get_env(env) env.update(test_env) + if (test.is_cross_built and test.needs_exe_wrapper and + test.exe_runner and test.exe_runner.found()): + env['MESON_EXE_WRAPPER'] = join_args(test.exe_runner.get_command()) return SingleTestRunner(test, test_env, env, options) def process_test_result(self, result: TestRun) -> None: diff --git a/run_unittests.py b/run_unittests.py index e326aa4..3e25f94 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -7584,7 +7584,7 @@ class CrossFileTests(BasePlatformTests): ''') def test_needs_exe_wrapper_true(self): - testdir = os.path.join(self.common_test_dir, '1 trivial') + testdir = os.path.join(self.unit_test_dir, '72 cross test passed') with tempfile.TemporaryDirectory() as d: p = Path(d) / 'crossfile' with p.open('wt') as f: @@ -7594,7 +7594,7 @@ class CrossFileTests(BasePlatformTests): self.assertRegex(out, r'Skipped:\s*1\s*\n') def test_needs_exe_wrapper_false(self): - testdir = os.path.join(self.common_test_dir, '1 trivial') + testdir = os.path.join(self.unit_test_dir, '72 cross test passed') with tempfile.TemporaryDirectory() as d: p = Path(d) / 'crossfile' with p.open('wt') as f: @@ -7604,7 +7604,7 @@ class CrossFileTests(BasePlatformTests): self.assertNotRegex(out, r'Skipped:\s*1\n') def test_needs_exe_wrapper_true_wrapper(self): - testdir = os.path.join(self.common_test_dir, '1 trivial') + testdir = os.path.join(self.unit_test_dir, '72 cross test passed') with tempfile.TemporaryDirectory() as d: s = Path(d) / 'wrapper.py' with s.open('wt') as f: @@ -7616,9 +7616,9 @@ class CrossFileTests(BasePlatformTests): needs_exe_wrapper=True, exe_wrapper=[str(s)])) - self.init(testdir, extra_args=['--cross-file=' + str(p)]) + self.init(testdir, extra_args=['--cross-file=' + str(p), '-Dexpect=true']) out = self.run_target('test') - self.assertNotRegex(out, r'Skipped:\s*1\s*\n') + self.assertRegex(out, r'Ok:\s*3\s*\n') def test_cross_exe_passed_no_wrapper(self): testdir = os.path.join(self.unit_test_dir, '72 cross test passed') @@ -7630,7 +7630,7 @@ class CrossFileTests(BasePlatformTests): self.init(testdir, extra_args=['--cross-file=' + str(p)]) self.build() out = self.run_target('test') - self.assertRegex(out, r'Skipped:\s*2\s*\n') + self.assertRegex(out, r'Skipped:\s*1\s*\n') # The test uses mocking and thus requires that the current process is the # one to run the Meson steps. 
If we are using an external test executable diff --git a/test cases/unit/72 cross test passed/exewrapper.py b/test cases/unit/72 cross test passed/exewrapper.py new file mode 100755 index 0000000..2c15ed6 --- /dev/null +++ b/test cases/unit/72 cross test passed/exewrapper.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 +# Test that the MESON_EXE_WRAPPER environment variable is set + +import argparse +import os +import sys + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('binary') # unused, but needed for test behavior + parser.add_argument('--expected', action='store_true') + args = parser.parse_args() + + defined = 'MESON_EXE_WRAPPER' in os.environ + + if args.expected != defined: + print(os.environ, file=sys.stderr) + return 1 + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/test cases/unit/72 cross test passed/meson.build b/test cases/unit/72 cross test passed/meson.build index cb3bb6d..4deb74b 100644 --- a/test cases/unit/72 cross test passed/meson.build +++ b/test cases/unit/72 cross test passed/meson.build @@ -10,3 +10,10 @@ py = import('python').find_installation() test('root', e) test('main', py, args : [meson.current_source_dir() / 'script.py', e]) + +wrapper_args = [] +if get_option('expect') + wrapper_args += '--expected' +endif + +test('exe_wrapper in env', py, args : [meson.current_source_dir() / 'exewrapper.py', e, wrapper_args]) diff --git a/test cases/unit/72 cross test passed/meson_options.txt b/test cases/unit/72 cross test passed/meson_options.txt new file mode 100644 index 0000000..084c776 --- /dev/null +++ b/test cases/unit/72 cross test passed/meson_options.txt @@ -0,0 +1,5 @@ +option( + 'expect', + type : 'boolean', + value : false, +) -- cgit v1.1 From b4b1a2c5a145c1459fc4563a289e164e23bd6a02 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sat, 11 Apr 2020 00:08:51 +0300 Subject: Build private directory name from output file name. 
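In other words, the per-target scratch directory is now derived from the target's output file name plus a '.p' suffix instead of the generated target id. A rough sketch of the change, not the real backend code (the 'sub4/someexe' path is just the example used by the unit tests below):

    import os

    def old_private_dir(target_dir, target_id):
        # before: <target output dir>/<generated target id>
        return os.path.join(target_dir, target_id)

    def new_private_dir(target_filename):
        # after: <target output file name> + '.p'
        return target_filename + '.p'

    print(new_private_dir(os.path.join('sub4', 'someexe')))  # sub4/someexe.p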
--- mesonbuild/backend/backends.py | 2 +- mesonbuild/backend/vs2010backend.py | 3 +++ run_unittests.py | 38 ++++++++++++++++++++++++------------- 3 files changed, 29 insertions(+), 14 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 9d527cb..3d12651 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -259,7 +259,7 @@ class Backend: return self.build_to_src def get_target_private_dir(self, target): - return os.path.join(self.get_target_dir(target), target.get_id()) + return os.path.join(self.get_target_filename(target) + '.p') def get_target_private_dir_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 614d357..6965c42 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -98,6 +98,9 @@ class Vs2010Backend(backends.Backend): self.subdirs = {} self.handled_target_deps = {} + def get_target_private_dir(self, target): + return os.path.join(self.get_target_dir(target), target.get_id()) + def generate_custom_generator_commands(self, target, parent_node): generator_output_files = [] custom_target_include_dirs = [] diff --git a/run_unittests.py b/run_unittests.py index b21f785..c04b825 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -2449,9 +2449,12 @@ class AllPlatformTests(BasePlatformTests): # Check include order for 'someexe' incs = [a for a in split_args(execmd) if a.startswith("-I")] self.assertEqual(len(incs), 9) - # target private dir - someexe_id = Target.construct_id_from_path("sub4", "someexe", "@exe") - self.assertPathEqual(incs[0], "-I" + os.path.join("sub4", someexe_id)) + # Need to run the build so the private dir is created. 
+ self.build() + pdirs = glob(os.path.join(self.builddir, 'sub4/someexe*.p')) + self.assertEqual(len(pdirs), 1) + privdir = pdirs[0][len(self.builddir)+1:] + self.assertPathEqual(incs[0], "-I" + privdir) # target build subdir self.assertPathEqual(incs[1], "-Isub4") # target source subdir @@ -2472,7 +2475,10 @@ class AllPlatformTests(BasePlatformTests): incs = [a for a in split_args(fxecmd) if a.startswith('-I')] self.assertEqual(len(incs), 9) # target private dir - self.assertPathEqual(incs[0], '-Isomefxe@exe') + pdirs = glob(os.path.join(self.builddir, 'somefxe*.p')) + self.assertEqual(len(pdirs), 1) + privdir = pdirs[0][len(self.builddir)+1:] + self.assertPathEqual(incs[0], '-I' + privdir) # target build dir self.assertPathEqual(incs[1], '-I.') # target source dir @@ -5577,6 +5583,10 @@ class LinuxlikeTests(BasePlatformTests): self.assertRegex('\n'.join(mesonlog), r'Run-time dependency qt5 \(modules: Core\) found: YES .* \((qmake|qmake-qt5)\)\n') + def glob_sofiles_without_privdir(self, g): + files = glob(g) + return [f for f in files if not f.endswith('.p')] + def _test_soname_impl(self, libpath, install): if is_cygwin() or is_osx(): raise unittest.SkipTest('Test only applicable to ELF and linuxlike sonames') @@ -5592,28 +5602,28 @@ class LinuxlikeTests(BasePlatformTests): self.assertPathExists(nover) self.assertFalse(os.path.islink(nover)) self.assertEqual(get_soname(nover), 'libnover.so') - self.assertEqual(len(glob(nover[:-3] + '*')), 1) + self.assertEqual(len(self.glob_sofiles_without_privdir(nover[:-3] + '*')), 1) # File with version set verset = os.path.join(libpath, 'libverset.so') self.assertPathExists(verset + '.4.5.6') self.assertEqual(os.readlink(verset), 'libverset.so.4') self.assertEqual(get_soname(verset), 'libverset.so.4') - self.assertEqual(len(glob(verset[:-3] + '*')), 3) + self.assertEqual(len(self.glob_sofiles_without_privdir(verset[:-3] + '*')), 3) # File with soversion set soverset = os.path.join(libpath, 'libsoverset.so') self.assertPathExists(soverset + '.1.2.3') self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3') self.assertEqual(get_soname(soverset), 'libsoverset.so.1.2.3') - self.assertEqual(len(glob(soverset[:-3] + '*')), 2) + self.assertEqual(len(self.glob_sofiles_without_privdir(soverset[:-3] + '*')), 2) # File with version and soversion set to same values settosame = os.path.join(libpath, 'libsettosame.so') self.assertPathExists(settosame + '.7.8.9') self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9') self.assertEqual(get_soname(settosame), 'libsettosame.so.7.8.9') - self.assertEqual(len(glob(settosame[:-3] + '*')), 2) + self.assertEqual(len(self.glob_sofiles_without_privdir(settosame[:-3] + '*')), 2) # File with version and soversion set to different values bothset = os.path.join(libpath, 'libbothset.so') @@ -5621,7 +5631,7 @@ class LinuxlikeTests(BasePlatformTests): self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3') self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6') self.assertEqual(get_soname(bothset), 'libbothset.so.1.2.3') - self.assertEqual(len(glob(bothset[:-3] + '*')), 3) + self.assertEqual(len(self.glob_sofiles_without_privdir(bothset[:-3] + '*')), 3) def test_soname(self): self._test_soname_impl(self.builddir, False) @@ -5741,10 +5751,12 @@ class LinuxlikeTests(BasePlatformTests): def test_unity_subproj(self): testdir = os.path.join(self.common_test_dir, '45 subproject') self.init(testdir, extra_args='--unity=subprojects') - simpletest_id = 
Target.construct_id_from_path('subprojects/sublib', 'simpletest', '@exe') - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', simpletest_id, 'simpletest-unity0.c')) - sublib_id = Target.construct_id_from_path('subprojects/sublib', 'sublib', '@sha') - self.assertPathExists(os.path.join(self.builddir, 'subprojects/sublib', sublib_id, 'sublib-unity0.c')) + pdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/simpletest*.p')) + self.assertEqual(len(pdirs), 1) + self.assertPathExists(os.path.join(pdirs[0], 'simpletest-unity0.c')) + sdirs = glob(os.path.join(self.builddir, 'subprojects/sublib/*sublib*.p')) + self.assertEqual(len(sdirs), 1) + self.assertPathExists(os.path.join(sdirs[0], 'sublib-unity0.c')) self.assertPathDoesNotExist(os.path.join(self.builddir, 'user@exe/user-unity.c')) self.build() -- cgit v1.1 From d04e2c2f1fcf35501a1fdc87524b890c5f367995 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 5 May 2020 11:53:42 -0700 Subject: compilers: Move b_ndebug into the compiler classes Right now we hardcode -DNDEBUG as the value to be added for b_ndebug. Which is a not the correct behavior for non C/C++ languages. By pushing this back into the compiler classes we can change this for other languages. --- mesonbuild/compilers/compilers.py | 5 ++++- mesonbuild/compilers/mixins/clike.py | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 9575273..d950e8f 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -320,7 +320,7 @@ def get_base_compile_args(options, compiler): if (options['b_ndebug'].value == 'true' or (options['b_ndebug'].value == 'if-release' and options['buildtype'].value in {'release', 'plain'})): - args += ['-DNDEBUG'] + args += compiler.get_disable_assert_args() except KeyError: pass # This does not need a try...except @@ -1204,6 +1204,9 @@ class Compiler: def get_coverage_link_args(self) -> T.List[str]: return self.linker.get_coverage_args() + def get_disable_assert_args(self) -> T.List[str]: + return [] + def get_largefile_args(compiler): ''' diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index e7b0cd2..01c984d 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -1137,3 +1137,6 @@ class CLikeCompiler: return self.compiles(self.attribute_check_func(name), env, extra_args=self.get_has_func_attribute_extra_args(name)) + + def get_disable_assert_args(self) -> T.List[str]: + return ['-DNDEBUG'] -- cgit v1.1 From 29ef4478df6d3aaca40c7993f125b29409be1de2 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 20 May 2020 10:40:18 -0700 Subject: compilers/d: Add b_ndebug support D lang compilers have an option -release (or similar) which turns off asserts, contracts, and other runtime type checking. This patch wires that up to the b_ndebug flag. 
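The switch that disables assertions differs between the D compilers; a condensed sketch of the mapping wired up in the diff below (compiler ids as used by mesonbuild/compilers/d.py):

    # Sketch of the per-compiler disable-assert arguments added below.
    D_DISABLE_ASSERT_ARGS = {
        'gdc':  ['-frelease'],   # GnuDCompiler
        'llvm': ['--release'],   # LLVMDCompiler (LDC)
        'dmd':  ['-release'],    # DmdDCompiler
    }

    def disable_assert_args(compiler_id):
        # C-like compilers keep returning ['-DNDEBUG']; the generic default stays [].
        return D_DISABLE_ASSERT_ARGS.get(compiler_id, [])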
Fixes #7082 --- docs/markdown/snippets/d-lang_n_debug.md | 4 ++++ mesonbuild/compilers/compilers.py | 6 +++--- mesonbuild/compilers/d.py | 15 ++++++++++++--- 3 files changed, 19 insertions(+), 6 deletions(-) create mode 100644 docs/markdown/snippets/d-lang_n_debug.md diff --git a/docs/markdown/snippets/d-lang_n_debug.md b/docs/markdown/snippets/d-lang_n_debug.md new file mode 100644 index 0000000..59f09e4 --- /dev/null +++ b/docs/markdown/snippets/d-lang_n_debug.md @@ -0,0 +1,4 @@ +## b_ndebug support for D language compilers + +D Language compilers will now set -release/--release/-frelease (depending on +the compiler) when the b_ndebug flag is set. diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index d950e8f..f3c171f 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -186,7 +186,7 @@ rust_buildtype_args = {'plain': [], d_gdc_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': ['-finline-functions'], - 'release': ['-frelease', '-finline-functions'], + 'release': ['-finline-functions'], 'minsize': [], 'custom': [], } @@ -194,7 +194,7 @@ d_gdc_buildtype_args = {'plain': [], d_ldc_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'], - 'release': ['-release', '-enable-inlining', '-Hkeep-all-bodies'], + 'release': ['-enable-inlining', '-Hkeep-all-bodies'], 'minsize': [], 'custom': [], } @@ -202,7 +202,7 @@ d_ldc_buildtype_args = {'plain': [], d_dmd_buildtype_args = {'plain': [], 'debug': [], 'debugoptimized': ['-inline'], - 'release': ['-release', '-inline'], + 'release': ['-inline'], 'minsize': [], 'custom': [], } diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index d2d03a3..777fa19 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -647,7 +647,7 @@ class GnuDCompiler(GnuCompiler, DCompiler): '2': default_warn_args + ['-Wextra'], '3': default_warn_args + ['-Wextra', '-Wpedantic']} self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic', - 'b_vscrt', 'b_coverage', 'b_pgo'] + 'b_vscrt', 'b_coverage', 'b_pgo', 'b_ndebug'] self._has_color_support = version_compare(self.version, '>=4.9') # dependencies were implemented before, but broken - support was fixed in GCC 7.1+ @@ -686,6 +686,9 @@ class GnuDCompiler(GnuCompiler, DCompiler): return args return args + ['-shared-libphobos'] + def get_disable_assert_args(self): + return ['-frelease'] + class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler): @@ -693,7 +696,7 @@ class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler): info: 'MachineInfo', arch, **kwargs): DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs) self.id = 'llvm' - self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt'] + self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug'] def get_colorout_args(self, colortype): if colortype == 'always': @@ -735,6 +738,9 @@ class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler): return args return args + ['-link-defaultlib-shared'] + def get_disable_assert_args(self) -> T.List[str]: + return ['--release'] + class DmdDCompiler(DmdLikeCompilerMixin, DCompiler): @@ -742,7 +748,7 @@ class DmdDCompiler(DmdLikeCompilerMixin, DCompiler): info: 'MachineInfo', arch, **kwargs): DCompiler.__init__(self, exelist, version, for_machine, info, arch, False, None, **kwargs) self.id = 'dmd' - self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt'] + self.base_options = ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug'] def 
get_colorout_args(self, colortype): if colortype == 'always': @@ -805,3 +811,6 @@ class DmdDCompiler(DmdLikeCompilerMixin, DCompiler): if self.info.is_windows(): return args return args + ['-defaultlib=phobos2', '-debuglib=phobos2'] + + def get_disable_assert_args(self) -> T.List[str]: + return ['-release'] -- cgit v1.1 From 9dc3ca2c1c9fbb47e731551c6432df144f725261 Mon Sep 17 00:00:00 2001 From: Yevhenii Kolesnikov Date: Thu, 21 May 2020 18:58:47 +0300 Subject: compilers: add fetching of define list for clang Simmilar to gcc, the list of pre-processor defines can be fetched with `-dM -E` option. The way cpu_family is determined on linux relies on this list. Fixes incorrect value of cpu_family on linux, when crosscompiling: ``` CC="clang -m32" meson ./build ``` Signed-off-by: Yevhenii Kolesnikov Co-authored-by: Dylan Baker --- mesonbuild/compilers/c.py | 5 +++-- mesonbuild/compilers/cpp.py | 5 +++-- mesonbuild/compilers/fortran.py | 2 +- mesonbuild/compilers/mixins/clang.py | 9 ++++++++- mesonbuild/compilers/objc.py | 2 +- mesonbuild/compilers/objcpp.py | 2 +- mesonbuild/environment.py | 26 +++++++++++++++++++++++++- 7 files changed, 42 insertions(+), 9 deletions(-) diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 1bc9e84..aac99b4 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -86,9 +86,10 @@ class ClangCCompiler(ClangCompiler, CCompiler): _C18_VERSION = '>=8.0.0' def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + is_cross, info: 'MachineInfo', exe_wrapper=None, + defines: T.Optional[T.List[str]] = None, **kwargs): CCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + ClangCompiler.__init__(self, defines) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'0': [], '1': default_warn_args, diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index f4bcfa9..478a68c 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -155,10 +155,11 @@ class CPPCompiler(CLikeCompiler, Compiler): class ClangCPPCompiler(ClangCompiler, CPPCompiler): def __init__(self, exelist, version, for_machine: MachineChoice, - is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): + is_cross, info: 'MachineInfo', exe_wrapper=None, + defines : T.Optional[T.List[str]] = None, **kwargs): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + ClangCompiler.__init__(self, defines) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'0': [], '1': default_warn_args, diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py index c155b5b..af83c0e56 100644 --- a/mesonbuild/compilers/fortran.py +++ b/mesonbuild/compilers/fortran.py @@ -424,7 +424,7 @@ class FlangFortranCompiler(ClangCompiler, FortranCompiler): **kwargs): FortranCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + ClangCompiler.__init__(self, []) self.id = 'flang' default_warn_args = ['-Minform=inform'] self.warn_args = {'0': [], diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py index 1c0ee45..0ee10ad 100644 --- a/mesonbuild/compilers/mixins/clang.py +++ b/mesonbuild/compilers/mixins/clang.py @@ -42,9 +42,10 @@ clang_optimization_args = { } # type: T.Dict[str, 
T.List[str]] class ClangCompiler(GnuLikeCompiler): - def __init__(self): + def __init__(self, defines: T.Optional[T.Dict[str, str]]): super().__init__() self.id = 'clang' + self.defines = defines or {} self.base_options.append('b_colorout') # TODO: this really should be part of the linker base_options, but # linkers don't have base_options. @@ -56,6 +57,12 @@ class ClangCompiler(GnuLikeCompiler): def get_colorout_args(self, colortype: str) -> T.List[str]: return clang_color_args[colortype][:] + def has_builtin_define(self, define: str) -> bool: + return define in self.defines + + def get_builtin_define(self, define: str) -> T.Optional[str]: + return self.defines.get(define) + def get_optimization_args(self, optimization_level: str) -> T.List[str]: return clang_optimization_args[optimization_level] diff --git a/mesonbuild/compilers/objc.py b/mesonbuild/compilers/objc.py index 52d258d..d351c88 100644 --- a/mesonbuild/compilers/objc.py +++ b/mesonbuild/compilers/objc.py @@ -86,7 +86,7 @@ class ClangObjCCompiler(ClangCompiler, ObjCCompiler): **kwargs): ObjCCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + ClangCompiler.__init__(self, []) default_warn_args = ['-Wall', '-Winvalid-pch'] self.warn_args = {'0': [], '1': default_warn_args, diff --git a/mesonbuild/compilers/objcpp.py b/mesonbuild/compilers/objcpp.py index c8b422b..10555b4 100644 --- a/mesonbuild/compilers/objcpp.py +++ b/mesonbuild/compilers/objcpp.py @@ -84,7 +84,7 @@ class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler): is_cross, info: 'MachineInfo', exe_wrapper=None, **kwargs): ObjCPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrapper, **kwargs) - ClangCompiler.__init__(self) + ClangCompiler.__init__(self, []) default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] self.warn_args = {'0': [], '1': default_warn_args, diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 8fad628..cb6ae7d 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -726,6 +726,28 @@ class Environment: minor = defines.get('__LCC_MINOR__', '0') return dot.join((generation, major, minor)) + @staticmethod + def get_clang_compiler_defines(compiler): + """ + Get the list of Clang pre-processor defines + """ + args = compiler + ['-E', '-dM', '-'] + p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE) + if p.returncode != 0: + raise EnvironmentException('Unable to get clang pre-processor defines:\n' + output + error) + defines = {} + for line in output.split('\n'): + if not line: + continue + d, *rest = line.split(' ', 2) + if d != '#define': + continue + if len(rest) == 1: + defines[rest] = True + if len(rest) == 2: + defines[rest[0]] = rest[1] + return defines + def _get_compilers(self, lang, for_machine): ''' The list of compilers is detected in the exact same way for @@ -1043,6 +1065,8 @@ class Environment: if 'clang' in out: linker = None + defines = self.get_clang_compiler_defines(compiler) + # Even if the for_machine is darwin, we could be using vanilla # clang. 
if 'Apple' in out: @@ -1063,7 +1087,7 @@ class Environment: return cls( ccache + compiler, version, for_machine, is_cross, info, - exe_wrap, full_version=full_version, linker=linker) + exe_wrap, defines, full_version=full_version, linker=linker) if 'Intel(R) C++ Intel(R)' in err: version = search_version(err) -- cgit v1.1 From 5862ad6965c60caa861dfdcd29e499c34c4d00da Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 21 May 2020 13:35:27 +0200 Subject: boost: Always sort shared before static (fixes #7171) --- mesonbuild/dependencies/boost.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 6e85c53..3849704 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -189,13 +189,13 @@ class BoostLibraryFile(): def __lt__(self, other: T.Any) -> bool: if isinstance(other, BoostLibraryFile): return ( - self.mod_name, self.version_lib, self.arch, self.static, + self.mod_name, self.static, self.version_lib, self.arch, not self.mt, not self.runtime_static, not self.debug, self.runtime_debug, self.python_debug, self.stlport, self.deprecated_iostreams, self.name, ) < ( - other.mod_name, other.version_lib, other.arch, other.static, + other.mod_name, other.static, other.version_lib, other.arch, not other.mt, not other.runtime_static, not other.debug, other.runtime_debug, other.python_debug, other.stlport, other.deprecated_iostreams, -- cgit v1.1 From 2fd838d62dc16af0687b4be7da4ffb28cb6a9725 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 22 May 2020 14:59:00 +0200 Subject: boost: Try extracting BOOST_ROOT from boost.pc This is especially useful for Conan, where only the boost.pc file is provided and manually setting BOOST_ROOT is not a good solution since it is in a private cache directory. See #5438 --- mesonbuild/dependencies/boost.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 3849704..907c0c2 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -22,7 +22,7 @@ from .. import mlog from .. import mesonlib from ..environment import Environment -from .base import (DependencyException, ExternalDependency) +from .base import DependencyException, ExternalDependency, PkgConfigDependency from .misc import threads_factory # On windows 3 directory layouts are supported: @@ -605,6 +605,17 @@ class BoostDependency(ExternalDependency): roots += paths return roots # Do not add system paths if BOOST_ROOT is present + # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarely + # allows BoostDependency to find boost from Conan. See #5438 + try: + boost_pc = PkgConfigDependency('boost', self.env, {'required': False}) + if boost_pc.found(): + boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None}) + if boost_root: + roots += [Path(boost_root)] + except DependencyException: + pass + # Add roots from system paths inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()] inc_paths = [x.parent for x in inc_paths if x.exists()] -- cgit v1.1 From eee117aa24b46d93169a92c15eb2120f2d97eea4 Mon Sep 17 00:00:00 2001 From: georgev93 Date: Wed, 29 Apr 2020 18:24:41 -0400 Subject: Allow building with b_coverage set to true when clang is being used regardless of linker selection. 
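For illustration, a minimal sketch of what this enables (the class name below is made up for the example; the actual change simply adds get_coverage_link_args() to ClangCompiler, as the diff shows):

```python
import typing as T

class ClangLikeSketch:
    def get_coverage_link_args(self) -> T.List[str]:
        # clang accepts the GCC-style instrumentation flag, so the coverage
        # link arguments no longer depend on which linker class was selected
        return ['--coverage']
```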
--- mesonbuild/compilers/mixins/clang.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py index 0ee10ad..ecfbc64 100644 --- a/mesonbuild/compilers/mixins/clang.py +++ b/mesonbuild/compilers/mixins/clang.py @@ -124,3 +124,6 @@ class ClangCompiler(GnuLikeCompiler): # Clang only warns about unknown or ignored attributes, so force an # error. return ['-Werror=attributes'] + + def get_coverage_link_args(self) -> T.List[str]: + return ['--coverage'] -- cgit v1.1 From 9a94ffe0610c18559292a2334dc7bc27eecf3827 Mon Sep 17 00:00:00 2001 From: Drew Reed Date: Mon, 11 May 2020 09:11:14 +0100 Subject: Modified buildtypes and armclang compiler flags to match documented results --- mesonbuild/compilers/mixins/arm.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py index 2e2ed94..b331d8f 100644 --- a/mesonbuild/compilers/mixins/arm.py +++ b/mesonbuild/compilers/mixins/arm.py @@ -45,20 +45,20 @@ arm_optimization_args = { armclang_buildtype_args = { 'plain': [], - 'debug': ['-O0', '-g'], - 'debugoptimized': ['-O1', '-g'], - 'release': ['-Os'], - 'minsize': ['-Oz'], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], 'custom': [], } # type: T.Dict[str, T.List[str]] armclang_optimization_args = { - '0': ['-O0'], + '0': [], # Compiler defaults to -O0 'g': ['-g'], '1': ['-O1'], '2': ['-O2'], '3': ['-O3'], - 's': ['-Os'] + 's': ['-Oz'] } # type: T.Dict[str, T.List[str]] -- cgit v1.1 From 22bc0d46a541caa8528b254252bdb1e5c13599f7 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sat, 23 May 2020 02:36:58 +0300 Subject: Revert "Merge pull request #7172 from jon-turney/test-output-check-mandatory" This reverts commit 0871b1032c53287a1ed3ce5108799fb0daccaec5, reversing changes made to 9dc3ca2c1c9fbb47e731551c6432df144f725261. 
--- run_project_tests.py | 85 ++++++++++------------ .../failing/100 fallback consistency/test.json | 7 -- .../failing/101 no native compiler/test.json | 7 -- .../failing/102 subdir parse error/test.json | 7 -- .../failing/103 invalid option file/test.json | 7 -- test cases/failing/104 no lang/test.json | 7 -- .../105 no glib-compile-resources/test.json | 7 -- .../test.json | 7 -- .../67 subproj different versions/test.json | 7 -- .../84 gtest dependency with version/test.json | 7 -- .../failing/98 fallback consistency/test.json | 7 -- 11 files changed, 40 insertions(+), 115 deletions(-) delete mode 100644 test cases/failing/100 fallback consistency/test.json delete mode 100644 test cases/failing/101 no native compiler/test.json delete mode 100644 test cases/failing/102 subdir parse error/test.json delete mode 100644 test cases/failing/103 invalid option file/test.json delete mode 100644 test cases/failing/104 no lang/test.json delete mode 100644 test cases/failing/105 no glib-compile-resources/test.json delete mode 100644 test cases/failing/36 pkgconfig dependency impossible conditions/test.json delete mode 100644 test cases/failing/67 subproj different versions/test.json delete mode 100644 test cases/failing/84 gtest dependency with version/test.json delete mode 100644 test cases/failing/98 fallback consistency/test.json diff --git a/run_project_tests.py b/run_project_tests.py index 22c0205..bcfe05c 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -594,16 +594,18 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, return testresult -def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]: +def gather_tests(testdir: Path) -> T.List[TestDef]: tests = [t.name for t in testdir.iterdir() if t.is_dir()] tests = [t for t in tests if not t.startswith('.')] # Filter non-tests files (dot files, etc) test_defs = [TestDef(testdir / t, None, []) for t in tests] all_tests = [] # type: T.List[TestDef] for t in test_defs: - test_def = {} test_def_file = t.path / 'test.json' - if test_def_file.is_file(): - test_def = json.loads(test_def_file.read_text()) + if not test_def_file.is_file(): + all_tests += [t] + continue + + test_def = json.loads(test_def_file.read_text()) # Handle additional environment variables env = {} # type: T.Dict[str, str] @@ -621,8 +623,6 @@ def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]: # Handle expected output stdout = test_def.get('stdout', []) - if stdout_mandatory and not stdout: - raise RuntimeError("{} must contain a non-empty stdout key".format(test_def_file)) # Handle the do_not_set_opts list do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str] @@ -897,50 +897,45 @@ def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, shutil.which('pgfortran') or shutil.which('ifort')) - class TestCategory: - def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandatory: bool = False): - self.category = category # category name - self.subdir = subdir # subdirectory - self.skip = skip # skip condition - self.stdout_mandatory = stdout_mandatory # expected stdout is mandatory for tests in this categroy - + # Name, subdirectory, skip condition. 
all_tests = [ - TestCategory('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)), - TestCategory('common', 'common'), - TestCategory('warning-meson', 'warning', stdout_mandatory=True), - TestCategory('failing-meson', 'failing', stdout_mandatory=True), - TestCategory('failing-build', 'failing build'), - TestCategory('failing-test', 'failing test'), - TestCategory('keyval', 'keyval'), - TestCategory('platform-osx', 'osx', not mesonlib.is_osx()), - TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), - TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), - TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), - TestCategory('C#', 'csharp', skip_csharp(backend)), - TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), - TestCategory('rust', 'rust', should_skip_rust(backend)), - TestCategory('d', 'd', backend is not Backend.ninja or not have_d_compiler()), - TestCategory('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), - TestCategory('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), - TestCategory('fortran', 'fortran', skip_fortran or backend != Backend.ninja), - TestCategory('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), + ('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)), + ('common', 'common', False), + ('warning-meson', 'warning', False), + ('failing-meson', 'failing', False), + ('failing-build', 'failing build', False), + ('failing-test', 'failing test', False), + ('keyval', 'keyval', False), + + ('platform-osx', 'osx', not mesonlib.is_osx()), + ('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), + ('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), + + ('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), + ('C#', 'csharp', skip_csharp(backend)), + ('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), + ('rust', 'rust', should_skip_rust(backend)), + ('d', 'd', backend is not Backend.ninja or not have_d_compiler()), + ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), + ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), + ('fortran', 'fortran', skip_fortran or backend != Backend.ninja), + ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja - TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), - TestCategory('python3', 'python3', backend is not Backend.ninja), - TestCategory('python', 'python', backend is not Backend.ninja), - TestCategory('fpga', 'fpga', shutil.which('yosys') is None), - TestCategory('frameworks', 'frameworks'), - TestCategory('nasm', 'nasm'), - TestCategory('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), + ('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not 
shutil.which('nvcc')), + ('python3', 'python3', backend is not Backend.ninja), + ('python', 'python', backend is not Backend.ninja), + ('fpga', 'fpga', shutil.which('yosys') is None), + ('frameworks', 'frameworks', False), + ('nasm', 'nasm', False), + ('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), ] - categories = [t.category for t in all_tests] - assert categories == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests categories' - + names = [t[0] for t in all_tests] + assert names == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests names' if only: - all_tests = [t for t in all_tests if t.category in only] - - gathered_tests = [(t.category, gather_tests(Path('test cases', t.subdir), t.stdout_mandatory), t.skip) for t in all_tests] + ind = [names.index(o) for o in only] + all_tests = [all_tests[i] for i in ind] + gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests] return gathered_tests def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], diff --git a/test cases/failing/100 fallback consistency/test.json b/test cases/failing/100 fallback consistency/test.json deleted file mode 100644 index a783d8c..0000000 --- a/test cases/failing/100 fallback consistency/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/100 fallback consistency/meson.build:7:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'" - } - ] -} diff --git a/test cases/failing/101 no native compiler/test.json b/test cases/failing/101 no native compiler/test.json deleted file mode 100644 index c7b5d1c..0000000 --- a/test cases/failing/101 no native compiler/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/101 no native compiler/meson.build:12:0: ERROR: No host machine compiler for \"main.c\"" - } - ] -} diff --git a/test cases/failing/102 subdir parse error/test.json b/test cases/failing/102 subdir parse error/test.json deleted file mode 100644 index 06fd4d3..0000000 --- a/test cases/failing/102 subdir parse error/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/102 subdir parse error/subdir/meson.build:1:0: ERROR: Plusassignment target must be an id." 
- } - ] -} diff --git a/test cases/failing/103 invalid option file/test.json b/test cases/failing/103 invalid option file/test.json deleted file mode 100644 index 20dbec3..0000000 --- a/test cases/failing/103 invalid option file/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/103 invalid option file/meson_options.txt:1:0: ERROR: lexer" - } - ] -} diff --git a/test cases/failing/104 no lang/test.json b/test cases/failing/104 no lang/test.json deleted file mode 100644 index 62999be..0000000 --- a/test cases/failing/104 no lang/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/104 no lang/meson.build:2:0: ERROR: No host machine compiler for \"main.c\"" - } - ] -} diff --git a/test cases/failing/105 no glib-compile-resources/test.json b/test cases/failing/105 no glib-compile-resources/test.json deleted file mode 100644 index 67dc7e4..0000000 --- a/test cases/failing/105 no glib-compile-resources/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/105 no glib-compile-resources/meson.build:8:0: ERROR: Could not execute glib-compile-resources." - } - ] -} diff --git a/test cases/failing/36 pkgconfig dependency impossible conditions/test.json b/test cases/failing/36 pkgconfig dependency impossible conditions/test.json deleted file mode 100644 index 2ce62ac..0000000 --- a/test cases/failing/36 pkgconfig dependency impossible conditions/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/36 pkgconfig dependency impossible conditions/meson.build:7:0: ERROR: Dependency 'zlib' was already checked and was not found" - } - ] -} diff --git a/test cases/failing/67 subproj different versions/test.json b/test cases/failing/67 subproj different versions/test.json deleted file mode 100644 index d16daf9..0000000 --- a/test cases/failing/67 subproj different versions/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/67 subproj different versions/subprojects/b/meson.build:3:0: ERROR: Dependency 'c' was already checked and was not found" - } - ] -} diff --git a/test cases/failing/84 gtest dependency with version/test.json b/test cases/failing/84 gtest dependency with version/test.json deleted file mode 100644 index e1bbcac..0000000 --- a/test cases/failing/84 gtest dependency with version/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/84 gtest dependency with version/meson.build:8:0: ERROR: Dependency 'gtest' was already checked and was not found" - } - ] -} diff --git a/test cases/failing/98 fallback consistency/test.json b/test cases/failing/98 fallback consistency/test.json deleted file mode 100644 index fd77bad..0000000 --- a/test cases/failing/98 fallback consistency/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "stdout": [ - { - "line": "test cases/failing/98 fallback consistency/meson.build:4:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'" - } - ] -} -- cgit v1.1 From f2e2e910d9c646849e55f97215217b87de491805 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sat, 23 May 2020 16:07:35 +0300 Subject: Remove stray print call. 
--- run_project_tests.py | 1 - 1 file changed, 1 deletion(-) diff --git a/run_project_tests.py b/run_project_tests.py index bcfe05c..18731d6 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -417,7 +417,6 @@ def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) match = bool(re.match(expected, actual)) else: match = (expected == actual) - print(actual) if match: how, expected = next_expected(i) -- cgit v1.1 From 96eeef62ea791b19fbf9bd57c8494743b2de80bf Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 12 Dec 2019 17:03:53 +0100 Subject: ast: Add AST JSON printer --- mesonbuild/ast/__init__.py | 3 +- mesonbuild/ast/printer.py | 160 ++++++++++++++++++++++++++++++++++++++++++++- mesonbuild/ast/visitor.py | 3 +- mesonbuild/mintro.py | 8 ++- mesonbuild/mparser.py | 12 ++-- 5 files changed, 174 insertions(+), 12 deletions(-) diff --git a/mesonbuild/ast/__init__.py b/mesonbuild/ast/__init__.py index 48de523..4fb56cb 100644 --- a/mesonbuild/ast/__init__.py +++ b/mesonbuild/ast/__init__.py @@ -20,6 +20,7 @@ __all__ = [ 'AstInterpreter', 'AstIDGenerator', 'AstIndentationGenerator', + 'AstJSONPrinter', 'AstVisitor', 'AstPrinter', 'IntrospectionInterpreter', @@ -30,4 +31,4 @@ from .interpreter import AstInterpreter from .introspection import IntrospectionInterpreter, build_target_functions from .visitor import AstVisitor from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator -from .printer import AstPrinter +from .printer import AstPrinter, AstJSONPrinter diff --git a/mesonbuild/ast/printer.py b/mesonbuild/ast/printer.py index 39e2cca..a57ba20 100644 --- a/mesonbuild/ast/printer.py +++ b/mesonbuild/ast/printer.py @@ -18,6 +18,7 @@ from .. import mparser from . import AstVisitor import re +import typing as T arithmic_map = { 'add': '+', @@ -155,7 +156,7 @@ class AstPrinter(AstVisitor): self.append_padded(prefix + 'if', node) prefix = 'el' i.accept(self) - if node.elseblock: + if not isinstance(node.elseblock, mparser.EmptyNode): self.append('else', node) node.elseblock.accept(self) self.append('endif', node) @@ -199,3 +200,160 @@ class AstPrinter(AstVisitor): self.result = re.sub(r', \n$', '\n', self.result) else: self.result = re.sub(r', $', '', self.result) + +class AstJSONPrinter(AstVisitor): + def __init__(self) -> None: + self.result = {} # type: T.Dict[str, T.Any] + self.current = self.result + + def _accept(self, key: str, node: mparser.BaseNode) -> None: + old = self.current + data = {} # type: T.Dict[str, T.Any] + self.current = data + node.accept(self) + self.current = old + self.current[key] = data + + def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None: + old = self.current + datalist = [] # type: T.List[T.Dict[str, T.Any]] + for i in nodes: + self.current = {} + i.accept(self) + datalist += [self.current] + self.current = old + self.current[key] = datalist + + def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None: + old = self.current + self.current = data + node.accept(self) + self.current = old + + def setbase(self, node: mparser.BaseNode) -> None: + self.current['node'] = type(node).__name__ + self.current['lineno'] = node.lineno + self.current['colno'] = node.colno + self.current['end_lineno'] = node.end_lineno + self.current['end_colno'] = node.end_colno + + def visit_default_func(self, node: mparser.BaseNode) -> None: + self.setbase(node) + + def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None: + self.current['value'] = node.value + 
self.setbase(node) + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.gen_ElementaryNode(node) + + def visit_IdNode(self, node: mparser.IdNode) -> None: + self.gen_ElementaryNode(node) + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.gen_ElementaryNode(node) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + self.gen_ElementaryNode(node) + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['ctype'] = node.ctype + self.setbase(node) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['op'] = arithmic_map[node.operation] + self.setbase(node) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + self._accept_list('lines', node.lines) + self.setbase(node) + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + self._accept('object', node.iobject) + self._accept('index', node.index) + self.setbase(node) + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + self._accept('object', node.source_object) + self._accept('args', node.args) + self.current['name'] = node.name + self.setbase(node) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self._accept('args', node.args) + self.current['name'] = node.func_name + self.setbase(node) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self._accept('items', node.items) + self._accept('block', node.block) + self.current['varnames'] = node.varnames + self.setbase(node) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self._accept_list('ifs', node.ifs) + self._accept('else', node.elseblock) + self.setbase(node) + + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self._accept('condition', node.condition) + self._accept('block', node.block) + self.setbase(node) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + self._accept('condition', node.condition) + self._accept('true', node.trueblock) + self._accept('false', node.falseblock) + self.setbase(node) + + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + self._accept_list('positional', node.arguments) + kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, 
T.Any]]] + for key, val in node.kwargs.items(): + key_res = {} # type: T.Dict[str, T.Any] + val_res = {} # type: T.Dict[str, T.Any] + self._raw_accept(key, key_res) + self._raw_accept(val, val_res) + kwargs_list += [{'key': key_res, 'val': val_res}] + self.current['kwargs'] = kwargs_list + self.setbase(node) diff --git a/mesonbuild/ast/visitor.py b/mesonbuild/ast/visitor.py index 37be463..451020d 100644 --- a/mesonbuild/ast/visitor.py +++ b/mesonbuild/ast/visitor.py @@ -113,8 +113,7 @@ class AstVisitor: self.visit_default_func(node) for i in node.ifs: i.accept(self) - if node.elseblock: - node.elseblock.accept(self) + node.elseblock.accept(self) def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: self.visit_default_func(node) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 54e302b..8eb659b 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -22,7 +22,7 @@ project files and don't need this info.""" import json from . import build, coredata as cdata from . import mesonlib -from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator +from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter from . import mlog from .backend import backends from .mparser import BaseNode, FunctionNode, ArrayNode, ArgumentNode, StringNode @@ -62,6 +62,7 @@ def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None, benchmarkdata = testdata = installdata = None return { + 'ast': IntroCommand('Dump the AST of the meson file', no_bd=dump_ast), 'benchmarks': IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata)), 'buildoptions': IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source), 'buildsystem_files': IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter)), @@ -89,6 +90,11 @@ def add_arguments(parser): help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)') parser.add_argument('builddir', nargs='?', default='.', help='The build directory') +def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]: + printer = AstJSONPrinter() + intr.ast.accept(printer) + return printer.result + def list_installed(installdata): res = {} if installdata is not None: diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index 2cffc47..b9e381e 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -426,8 +426,8 @@ class IfNode(BaseNode): class IfClauseNode(BaseNode): def __init__(self, linenode: BaseNode): super().__init__(linenode.lineno, linenode.colno, linenode.filename) - self.ifs = [] # type: T.List[IfNode] - self.elseblock = EmptyNode(linenode.lineno, linenode.colno, linenode.filename) # type: T.Union[EmptyNode, CodeBlockNode] + self.ifs = [] # type: T.List[IfNode] + self.elseblock = None # type: T.Union[EmptyNode, CodeBlockNode] class UMinusNode(BaseNode): def __init__(self, current_location: Token, value: BaseNode): @@ -747,9 +747,7 @@ class Parser: block = self.codeblock() clause.ifs.append(IfNode(clause, condition, block)) self.elseifblock(clause) - elseblock = self.elseblock() - if elseblock: - clause.elseblock = elseblock + clause.elseblock = self.elseblock() return clause def elseifblock(self, clause) -> None: @@ -759,11 +757,11 @@ class Parser: b = self.codeblock() clause.ifs.append(IfNode(s, s, b)) - 
def elseblock(self) -> T.Optional[CodeBlockNode]: + def elseblock(self) -> T.Union[CodeBlockNode, EmptyNode]: if self.accept('else'): self.expect('eol') return self.codeblock() - return None + return EmptyNode(self.current.lineno, self.current.colno, self.current.filename) def line(self) -> BaseNode: block_start = self.current -- cgit v1.1 From 54511b4a0f319514ecc16dc29d964187192f716d Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 12 Dec 2019 18:35:29 +0100 Subject: ast: Handle NotNode --- mesonbuild/ast/interpreter.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py index cc5c94c..6a826ef 100644 --- a/mesonbuild/ast/interpreter.py +++ b/mesonbuild/ast/interpreter.py @@ -297,6 +297,11 @@ class AstInterpreter(interpreterbase.InterpreterBase): elif isinstance(node, ElementaryNode): result = node.value + elif isinstance(node, NotNode): + result = self.resolve_node(node.value, include_unknown_args, id_loop_detect) + if isinstance(result, bool): + result = not result + elif isinstance(node, ArrayNode): result = [x for x in node.args.arguments] -- cgit v1.1 From 210b57a136ebaa1b1cd8a4d08c536b601ba77efb Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 12 Dec 2019 18:35:43 +0100 Subject: ast: add unittest --- run_unittests.py | 77 ++++++++++++++++++++++++++++ test cases/unit/57 introspection/meson.build | 17 +++++- 2 files changed, 92 insertions(+), 2 deletions(-) diff --git a/run_unittests.py b/run_unittests.py index 651e366..7e0c403 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4400,6 +4400,83 @@ recommended as it is not supported on some platforms''') self.maxDiff = None self.assertListEqual(res_nb, res_wb) + def test_introspect_ast_source(self): + testdir = os.path.join(self.unit_test_dir, '57 introspection') + testfile = os.path.join(testdir, 'meson.build') + res_nb = self.introspect_directory(testfile, ['--ast'] + self.meson_args) + + node_counter = {} + + def accept_node(json_node): + self.assertIsInstance(json_node, dict) + for i in ['lineno', 'colno', 'end_lineno', 'end_colno']: + self.assertIn(i, json_node) + self.assertIsInstance(json_node[i], int) + self.assertIn('node', json_node) + n = json_node['node'] + self.assertIsInstance(n, str) + self.assertIn(n, nodes) + if n not in node_counter: + node_counter[n] = 0 + node_counter[n] = node_counter[n] + 1 + for nodeDesc in nodes[n]: + key = nodeDesc[0] + func = nodeDesc[1] + self.assertIn(key, json_node) + if func is None: + tp = nodeDesc[2] + self.assertIsInstance(json_node[key], tp) + continue + func(json_node[key]) + + def accept_node_list(node_list): + self.assertIsInstance(node_list, list) + for i in node_list: + accept_node(i) + + def accept_kwargs(kwargs): + self.assertIsInstance(kwargs, list) + for i in kwargs: + self.assertIn('key', i) + self.assertIn('val', i) + accept_node(i['key']) + accept_node(i['val']) + + nodes = { + 'BooleanNode': [('value', None, bool)], + 'IdNode': [('value', None, str)], + 'NumberNode': [('value', None, int)], + 'StringNode': [('value', None, str)], + 'ContinueNode': [], + 'BreakNode': [], + 'ArgumentNode': [('positional', accept_node_list), ('kwargs', accept_kwargs)], + 'ArrayNode': [('args', accept_node)], + 'DictNode': [('args', accept_node)], + 'EmptyNode': [], + 'OrNode': [('left', accept_node), ('right', accept_node)], + 'AndNode': [('left', accept_node), ('right', accept_node)], + 'ComparisonNode': [('left', accept_node), ('right', accept_node), ('ctype', None, str)], + 'ArithmeticNode': [('left', 
accept_node), ('right', accept_node), ('op', None, str)], + 'NotNode': [('right', accept_node)], + 'CodeBlockNode': [('lines', accept_node_list)], + 'IndexNode': [('object', accept_node), ('index', accept_node)], + 'MethodNode': [('object', accept_node), ('args', accept_node), ('name', None, str)], + 'FunctionNode': [('args', accept_node), ('name', None, str)], + 'AssignmentNode': [('value', accept_node), ('var_name', None, str)], + 'PlusAssignmentNode': [('value', accept_node), ('var_name', None, str)], + 'ForeachClauseNode': [('items', accept_node), ('block', accept_node), ('varnames', None, list)], + 'IfClauseNode': [('ifs', accept_node_list), ('else', accept_node)], + 'IfNode': [('condition', accept_node), ('block', accept_node)], + 'UMinusNode': [('right', accept_node)], + 'TernaryNode': [('condition', accept_node), ('true', accept_node), ('false', accept_node)], + } + + accept_node(res_nb) + + for n, c in [('ContinueNode', 2), ('BreakNode', 1), ('NotNode', 3)]: + self.assertIn(n, node_counter) + self.assertEqual(node_counter[n], c) + def test_introspect_dependencies_from_source(self): testdir = os.path.join(self.unit_test_dir, '57 introspection') testfile = os.path.join(testdir, 'meson.build') diff --git a/test cases/unit/57 introspection/meson.build b/test cases/unit/57 introspection/meson.build index 9716eae..5d4dd02 100644 --- a/test cases/unit/57 introspection/meson.build +++ b/test cases/unit/57 introspection/meson.build @@ -13,7 +13,7 @@ test_bool = not test_bool set_variable('list_test_plusassign', []) list_test_plusassign += ['bugs everywhere'] -if false +if not true vers_str = '<=99.9.9' dependency('somethingthatdoesnotexist', required: true, version: '>=1.2.3') dependency('look_i_have_a_fallback', version: ['>=1.0.0', vers_str], fallback: ['oh_no', 'the_subproject_does_not_exist']) @@ -26,7 +26,7 @@ var1 = '1' var2 = 2.to_string() var3 = 'test3' -t1 = executable('test' + var1, ['t1.cpp'], link_with: [sharedlib], install: true, build_by_default: get_option('test_opt2')) +t1 = executable('test' + var1, ['t1.cpp'], link_with: [sharedlib], install: not false, build_by_default: get_option('test_opt2')) t2 = executable('test@0@'.format('@0@'.format(var2)), sources: ['t2.cpp'], link_with: [staticlib]) t3 = executable(var3, 't3.cpp', link_with: [sharedlib, staticlib], dependencies: [dep1]) @@ -46,3 +46,16 @@ message(osmesa_lib_name) # Infinite recursion gets triggered here when the param test('test case 1', t1) test('test case 2', t2) benchmark('benchmark 1', t3) + +### Stuff to test the AST JSON printer +foreach x : ['a', 'b', 'c'] + if x == 'a' + message('a') + elif x == 'b' + message('a') + else + continue + endif + break + continue +endforeach -- cgit v1.1 From 550a450324c493d6a60a793c617f855cc55381fe Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 12 Dec 2019 20:08:24 +0100 Subject: ast: Add docs for --ast --- docs/markdown/IDE-integration.md | 87 ++++++++++++++++++++++++++++-------- docs/markdown/snippets/introspect.md | 4 ++ 2 files changed, 73 insertions(+), 18 deletions(-) create mode 100644 docs/markdown/snippets/introspect.md diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index 73737e8..f51075e 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -29,16 +29,16 @@ watch for changes in this directory to know when something changed. 
The `meson-info` directory should contain the following files: -| File | Description | -| ---- | ----------- | -| `intro-benchmarks.json` | Lists all benchmarks | -| `intro-buildoptions.json` | Contains a full list of meson configuration options for the project | -| `intro-buildsystem_files.json` | Full list of all meson build files | -| `intro-dependencies.json` | Lists all dependencies used in the project | -| `intro-installed.json` | Contains mapping of files to their installed location | -| `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.) | -| `intro-targets.json` | Full list of all build targets | -| `intro-tests.json` | Lists all tests with instructions how to run them | +| File | Description | +| ------------------------------ | ------------------------------------------------------------------- | +| `intro-benchmarks.json` | Lists all benchmarks | +| `intro-buildoptions.json` | Contains a full list of meson configuration options for the project | +| `intro-buildsystem_files.json` | Full list of all meson build files | +| `intro-dependencies.json` | Lists all dependencies used in the project | +| `intro-installed.json` | Contains mapping of files to their installed location | +| `intro-projectinfo.json` | Stores basic information about the project (name, version, etc.) | +| `intro-targets.json` | Full list of all build targets | +| `intro-tests.json` | Lists all tests with instructions how to run them | The content of the JSON files is further specified in the remainder of this document. @@ -99,15 +99,15 @@ for actual compilation. The following table shows all valid types for a target. -| value of `type` | Description | -| --------------- | ----------- | -| `executable` | This target will generate an executable file | -| `static library` | Target for a static library | -| `shared library` | Target for a shared library | +| value of `type` | Description | +| ---------------- | --------------------------------------------------------------------------------------------- | +| `executable` | This target will generate an executable file | +| `static library` | Target for a static library | +| `shared library` | Target for a shared library | | `shared module` | A shared library that is meant to be used with dlopen rather than linking into something else | -| `custom` | A custom target | -| `run` | A Meson run target | -| `jar` | A Java JAR target | +| `custom` | A custom target | +| `run` | A Meson run target | +| `jar` | A Java JAR target | ### Using `--targets` without a build directory @@ -275,6 +275,57 @@ command line. Use `meson introspect -h` to see all available options. This API can also work without a build directory for the `--projectinfo` command. +# AST of a `meson.build` + +Since meson *0.55.0* it is possible to dump the AST of a `meson.build` as a JSON +object. The interface for this is `meson introspect --ast /path/to/meson.build`. 
+ +Each node of the AST has at least the following entries: + +| Key | Description | +| ------------ | ------------------------------------------------------- | +| `node` | Type of the node (see following table) | +| `lineno` | Line number of the node in the file | +| `colno` | Column number of the node in the file | +| `end_lineno` | Marks the end of the node (may be the same as `lineno`) | +| `end_colno` | Marks the end of the node (may be the same as `colno`) | + +Possible values for `node` with additional keys: + +| Node type | Additional keys | +| -------------------- | ------------------------------------------------ | +| `BooleanNode` | `value`: bool | +| `IdNode` | `value`: str | +| `NumberNode` | `value`: int | +| `StringNode` | `value`: str | +| `ContinueNode` | | +| `BreakNode` | | +| `ArgumentNode` | `positional`: node list; `kwargs`: accept_kwargs | +| `ArrayNode` | `args`: node | +| `DictNode` | `args`: node | +| `EmptyNode` | | +| `OrNode` | `left`: node; `right`: node | +| `AndNode` | `left`: node; `right`: node | +| `ComparisonNode` | `left`: node; `right`: node; `ctype`: str | +| `ArithmeticNode` | `left`: node; `right`: node; `op`: str | +| `NotNode` | `right`: node | +| `CodeBlockNode` | `lines`: node list | +| `IndexNode` | `object`: node; `index`: node | +| `MethodNode` | `object`: node; `args`: node; `name`: str | +| `FunctionNode` | `args`: node; `name`: str | +| `AssignmentNode` | `value`: node; `var_name`: str | +| `PlusAssignmentNode` | `value`: node; `var_name`: str | +| `ForeachClauseNode` | `items`: node; `block`: node; `varnames`: list | +| `IfClauseNode` | `ifs`: node list; `else`: node | +| `IfNode` | `condition`: node; `block`: node | +| `UMinusNode` | `right`: node | +| `TernaryNode` | `condition`: node; `true`: node; `false`: node | + +We do not guarantee the stability of this format since it is heavily linked to +the internal Meson AST. However, breaking changes (removal of a node type or the +removal of a key) are unlikely and will be announced in the release notes. 
+ + # Existing integrations - [Gnome Builder](https://wiki.gnome.org/Apps/Builder) diff --git a/docs/markdown/snippets/introspect.md b/docs/markdown/snippets/introspect.md new file mode 100644 index 0000000..8eab486 --- /dev/null +++ b/docs/markdown/snippets/introspect.md @@ -0,0 +1,4 @@ +## Introspection API changes + +dumping the AST (--ast): **new in 0.55.0** +- prints the AST of a meson.build as JSON -- cgit v1.1 From 0ac4376990c7a6cd55dedd6afd7df4510d00691a Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 13 May 2020 20:48:04 +0100 Subject: Make expected stdout mandatory for warning-meson and failing-meson tests Unify present or absent test.json file cases in gather_tests Make expected stdout mandatory in test.json for some test categories Use a trivial TestCategory class rather than a tuple, to make it easier to default category attributes --- run_project_tests.py | 85 +++++++++++++++++++++++++++------------------------- 1 file changed, 45 insertions(+), 40 deletions(-) diff --git a/run_project_tests.py b/run_project_tests.py index 18731d6..f372436 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -593,18 +593,16 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, return testresult -def gather_tests(testdir: Path) -> T.List[TestDef]: +def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]: tests = [t.name for t in testdir.iterdir() if t.is_dir()] tests = [t for t in tests if not t.startswith('.')] # Filter non-tests files (dot files, etc) test_defs = [TestDef(testdir / t, None, []) for t in tests] all_tests = [] # type: T.List[TestDef] for t in test_defs: + test_def = {} test_def_file = t.path / 'test.json' - if not test_def_file.is_file(): - all_tests += [t] - continue - - test_def = json.loads(test_def_file.read_text()) + if test_def_file.is_file(): + test_def = json.loads(test_def_file.read_text()) # Handle additional environment variables env = {} # type: T.Dict[str, str] @@ -622,6 +620,8 @@ def gather_tests(testdir: Path) -> T.List[TestDef]: # Handle expected output stdout = test_def.get('stdout', []) + if stdout_mandatory and not stdout: + raise RuntimeError("{} must contain a non-empty stdout key".format(test_def_file)) # Handle the do_not_set_opts list do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str] @@ -896,45 +896,50 @@ def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, shutil.which('pgfortran') or shutil.which('ifort')) - # Name, subdirectory, skip condition. 
+ class TestCategory: + def __init__(self, category: str, subdir: str, skip: bool = False, stdout_mandatory: bool = False): + self.category = category # category name + self.subdir = subdir # subdirectory + self.skip = skip # skip condition + self.stdout_mandatory = stdout_mandatory # expected stdout is mandatory for tests in this categroy + all_tests = [ - ('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)), - ('common', 'common', False), - ('warning-meson', 'warning', False), - ('failing-meson', 'failing', False), - ('failing-build', 'failing build', False), - ('failing-test', 'failing test', False), - ('keyval', 'keyval', False), - - ('platform-osx', 'osx', not mesonlib.is_osx()), - ('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), - ('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), - - ('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), - ('C#', 'csharp', skip_csharp(backend)), - ('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), - ('rust', 'rust', should_skip_rust(backend)), - ('d', 'd', backend is not Backend.ninja or not have_d_compiler()), - ('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), - ('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), - ('fortran', 'fortran', skip_fortran or backend != Backend.ninja), - ('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), + TestCategory('cmake', 'cmake', not shutil.which('cmake') or (os.environ.get('compiler') == 'msvc2015' and under_ci)), + TestCategory('common', 'common'), + TestCategory('warning-meson', 'warning', stdout_mandatory=True), + TestCategory('failing-meson', 'failing', stdout_mandatory=True), + TestCategory('failing-build', 'failing build'), + TestCategory('failing-test', 'failing test'), + TestCategory('keyval', 'keyval'), + TestCategory('platform-osx', 'osx', not mesonlib.is_osx()), + TestCategory('platform-windows', 'windows', not mesonlib.is_windows() and not mesonlib.is_cygwin()), + TestCategory('platform-linux', 'linuxlike', mesonlib.is_osx() or mesonlib.is_windows()), + TestCategory('java', 'java', backend is not Backend.ninja or mesonlib.is_osx() or not have_java()), + TestCategory('C#', 'csharp', skip_csharp(backend)), + TestCategory('vala', 'vala', backend is not Backend.ninja or not shutil.which(os.environ.get('VALAC', 'valac'))), + TestCategory('rust', 'rust', should_skip_rust(backend)), + TestCategory('d', 'd', backend is not Backend.ninja or not have_d_compiler()), + TestCategory('objective c', 'objc', backend not in (Backend.ninja, Backend.xcode) or not have_objc_compiler(options.use_tmpdir)), + TestCategory('objective c++', 'objcpp', backend not in (Backend.ninja, Backend.xcode) or not have_objcpp_compiler(options.use_tmpdir)), + TestCategory('fortran', 'fortran', skip_fortran or backend != Backend.ninja), + TestCategory('swift', 'swift', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('swiftc')), # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja - ('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), - ('python3', 'python3', backend is not Backend.ninja), - ('python', 'python', backend is not Backend.ninja), - ('fpga', 
'fpga', shutil.which('yosys') is None), - ('frameworks', 'frameworks', False), - ('nasm', 'nasm', False), - ('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), + TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), + TestCategory('python3', 'python3', backend is not Backend.ninja), + TestCategory('python', 'python', backend is not Backend.ninja), + TestCategory('fpga', 'fpga', shutil.which('yosys') is None), + TestCategory('frameworks', 'frameworks'), + TestCategory('nasm', 'nasm'), + TestCategory('wasm', 'wasm', shutil.which('emcc') is None or backend is not Backend.ninja), ] - names = [t[0] for t in all_tests] - assert names == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests names' + categories = [t.category for t in all_tests] + assert categories == ALL_TESTS, 'argparse("--only", choices=ALL_TESTS) need to be updated to match all_tests categories' + if only: - ind = [names.index(o) for o in only] - all_tests = [all_tests[i] for i in ind] - gathered_tests = [(name, gather_tests(Path('test cases', subdir)), skip) for name, subdir, skip in all_tests] + all_tests = [t for t in all_tests if t.category in only] + + gathered_tests = [(t.category, gather_tests(Path('test cases', t.subdir), t.stdout_mandatory), t.skip) for t in all_tests] return gathered_tests def run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], -- cgit v1.1 From 6b1e1ffa773a7d781429b4a00218f5d49b78b872 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 13 May 2020 21:10:34 +0100 Subject: Add expected stdout for failing-meson tests which are missing it --- test cases/failing/100 fallback consistency/test.json | 7 +++++++ test cases/failing/101 no native compiler/test.json | 7 +++++++ test cases/failing/102 subdir parse error/test.json | 7 +++++++ test cases/failing/103 invalid option file/test.json | 7 +++++++ test cases/failing/104 no lang/test.json | 7 +++++++ test cases/failing/105 no glib-compile-resources/test.json | 7 +++++++ .../36 pkgconfig dependency impossible conditions/test.json | 7 +++++++ test cases/failing/67 subproj different versions/test.json | 7 +++++++ test cases/failing/84 gtest dependency with version/test.json | 7 +++++++ test cases/failing/98 fallback consistency/test.json | 7 +++++++ 10 files changed, 70 insertions(+) create mode 100644 test cases/failing/100 fallback consistency/test.json create mode 100644 test cases/failing/101 no native compiler/test.json create mode 100644 test cases/failing/102 subdir parse error/test.json create mode 100644 test cases/failing/103 invalid option file/test.json create mode 100644 test cases/failing/104 no lang/test.json create mode 100644 test cases/failing/105 no glib-compile-resources/test.json create mode 100644 test cases/failing/36 pkgconfig dependency impossible conditions/test.json create mode 100644 test cases/failing/67 subproj different versions/test.json create mode 100644 test cases/failing/84 gtest dependency with version/test.json create mode 100644 test cases/failing/98 fallback consistency/test.json diff --git a/test cases/failing/100 fallback consistency/test.json b/test cases/failing/100 fallback consistency/test.json new file mode 100644 index 0000000..a783d8c --- /dev/null +++ b/test cases/failing/100 fallback consistency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/100 fallback consistency/meson.build:7:0: ERROR: Inconsistency: Subproject has overridden the dependency with 
another variable than 'dep2'" + } + ] +} diff --git a/test cases/failing/101 no native compiler/test.json b/test cases/failing/101 no native compiler/test.json new file mode 100644 index 0000000..c7b5d1c --- /dev/null +++ b/test cases/failing/101 no native compiler/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/101 no native compiler/meson.build:12:0: ERROR: No host machine compiler for \"main.c\"" + } + ] +} diff --git a/test cases/failing/102 subdir parse error/test.json b/test cases/failing/102 subdir parse error/test.json new file mode 100644 index 0000000..06fd4d3 --- /dev/null +++ b/test cases/failing/102 subdir parse error/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/102 subdir parse error/subdir/meson.build:1:0: ERROR: Plusassignment target must be an id." + } + ] +} diff --git a/test cases/failing/103 invalid option file/test.json b/test cases/failing/103 invalid option file/test.json new file mode 100644 index 0000000..20dbec3 --- /dev/null +++ b/test cases/failing/103 invalid option file/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/103 invalid option file/meson_options.txt:1:0: ERROR: lexer" + } + ] +} diff --git a/test cases/failing/104 no lang/test.json b/test cases/failing/104 no lang/test.json new file mode 100644 index 0000000..62999be --- /dev/null +++ b/test cases/failing/104 no lang/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/104 no lang/meson.build:2:0: ERROR: No host machine compiler for \"main.c\"" + } + ] +} diff --git a/test cases/failing/105 no glib-compile-resources/test.json b/test cases/failing/105 no glib-compile-resources/test.json new file mode 100644 index 0000000..67dc7e4 --- /dev/null +++ b/test cases/failing/105 no glib-compile-resources/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/105 no glib-compile-resources/meson.build:8:0: ERROR: Could not execute glib-compile-resources." 
+ } + ] +} diff --git a/test cases/failing/36 pkgconfig dependency impossible conditions/test.json b/test cases/failing/36 pkgconfig dependency impossible conditions/test.json new file mode 100644 index 0000000..2ce62ac --- /dev/null +++ b/test cases/failing/36 pkgconfig dependency impossible conditions/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/36 pkgconfig dependency impossible conditions/meson.build:7:0: ERROR: Dependency 'zlib' was already checked and was not found" + } + ] +} diff --git a/test cases/failing/67 subproj different versions/test.json b/test cases/failing/67 subproj different versions/test.json new file mode 100644 index 0000000..d16daf9 --- /dev/null +++ b/test cases/failing/67 subproj different versions/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/67 subproj different versions/subprojects/b/meson.build:3:0: ERROR: Dependency 'c' was already checked and was not found" + } + ] +} diff --git a/test cases/failing/84 gtest dependency with version/test.json b/test cases/failing/84 gtest dependency with version/test.json new file mode 100644 index 0000000..e1bbcac --- /dev/null +++ b/test cases/failing/84 gtest dependency with version/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/84 gtest dependency with version/meson.build:8:0: ERROR: Dependency 'gtest' was already checked and was not found" + } + ] +} diff --git a/test cases/failing/98 fallback consistency/test.json b/test cases/failing/98 fallback consistency/test.json new file mode 100644 index 0000000..fd77bad --- /dev/null +++ b/test cases/failing/98 fallback consistency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/98 fallback consistency/meson.build:4:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'dep2'" + } + ] +} -- cgit v1.1 From 527536dd4ae102d2d14e7ee512b6886d57fc0149 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sat, 23 May 2020 21:35:44 +0300 Subject: Clear internal caches before running each test. --- run_project_tests.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/run_project_tests.py b/run_project_tests.py index 18731d6..927d0fe 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -431,6 +431,15 @@ def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) def validate_output(test: TestDef, stdo: str, stde: str) -> str: return _compare_output(test.stdout, stdo, 'stdout') +# There are some class variables and such that cahce +# information. Clear all of these. The better solution +# would be to change the code so that no state is persisted +# but that would be a lot of work given that Meson was originally +# coded to run as a batch process. 
+def clear_internal_caches(): + import mesonbuild.interpreterbase + mesonbuild.interpreterbase.FeatureNew.feature_registry = {} + def run_test_inprocess(testdir): old_stdout = sys.stdout sys.stdout = mystdout = StringIO() @@ -551,6 +560,7 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, force_regenerate() # Test in-process + clear_internal_caches() test_start = time.time() (returncode, tstdo, tstde, test_log) = run_test_inprocess(test_build_dir) testresult.add_step(BuildStep.test, tstdo, tstde, test_log, time.time() - test_start) -- cgit v1.1 From ec7ee8fd9e9d5578b10fcd32b5121215065aaf98 Mon Sep 17 00:00:00 2001 From: Alexander Neumann Date: Sun, 24 May 2020 00:40:19 +0200 Subject: fix cmake target configuration selection. --- mesonbuild/cmake/interpreter.py | 13 ++++++++++--- mesonbuild/dependencies/base.py | 11 +++++++++-- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 35eb17c..09b633e 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -355,9 +355,16 @@ class ConverterTarget: if 'CONFIGURATIONS' in tgt.properties: cfgs += [x for x in tgt.properties['CONFIGURATIONS'] if x] cfg = cfgs[0] - - if 'RELEASE' in cfgs: - cfg = 'RELEASE' + + is_debug = self.env.coredata.get_builtin_option('debug'); + if is_debug: + if 'DEBUG' in cfgs: + cfg = 'DEBUG' + elif 'RELEASE' in cfgs: + cfg = 'RELEASE' + else: + if 'RELEASE' in cfgs: + cfg = 'RELEASE' if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties: libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x] diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index bcb1531..b0401c6 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -1443,8 +1443,15 @@ class CMakeDependency(ExternalDependency): cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x] cfg = cfgs[0] - if 'RELEASE' in cfgs: - cfg = 'RELEASE' + is_debug = self.env.coredata.get_builtin_option('debug'); + if is_debug: + if 'DEBUG' in cfgs: + cfg = 'DEBUG' + elif 'RELEASE' in cfgs: + cfg = 'RELEASE' + else: + if 'RELEASE' in cfgs: + cfg = 'RELEASE' if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties: libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x] -- cgit v1.1 From 91db25ac855deb3a6fdf5fd2cc4203721d2e13f5 Mon Sep 17 00:00:00 2001 From: p01arst0rm Date: Sun, 24 May 2020 01:31:27 +0100 Subject: fixed typo --- docs/markdown/Dependencies.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md index 572a3d1..a8f6d8a 100644 --- a/docs/markdown/Dependencies.md +++ b/docs/markdown/Dependencies.md @@ -76,7 +76,7 @@ and config-tool based variables. ```meson foo_dep = dependency('foo') -var = foo.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', configtool : 'get-var', default_value : 'default') +var = foo_dep.get_variable(cmake : 'CMAKE_VAR', pkgconfig : 'pkg-config-var', configtool : 'get-var', default_value : 'default') ``` It accepts the keywords 'cmake', 'pkgconfig', 'pkgconfig_define', -- cgit v1.1 From 6f199db95b45dca4db7a7f2d478760f7789e21a3 Mon Sep 17 00:00:00 2001 From: georgev93 Date: Sun, 24 May 2020 11:58:11 -0400 Subject: Use --internal script call to call delwithsuffix when cleaning up the gcno and gcda files in a coverage enabled build. Otherwise, meson will crash when running from an MSI installation. 
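For illustration, here is a minimal, self-contained sketch of what a `delwithsuffix`-style cleanup step does when invoked as `meson --internal delwithsuffix . gcno`: walk a directory tree and delete every file whose name ends with the given suffix. This is an approximation written for clarity, not the actual mesonbuild script; the helper name `del_with_suffix` is invented for the example.

```python
# Illustrative sketch only -- not the real mesonbuild/scripts/delwithsuffix.py.
# Walk 'topdir' and delete every file whose name ends with 'suffix'.
import os
import sys

def del_with_suffix(topdir: str, suffix: str) -> int:
    if not suffix.startswith('.'):
        suffix = '.' + suffix
    for root, _dirs, files in os.walk(topdir):
        for name in files:
            if name.endswith(suffix):
                os.unlink(os.path.join(root, name))
    return 0

if __name__ == '__main__':
    # e.g. python del_with_suffix.py . gcno
    sys.exit(del_with_suffix(sys.argv[1], sys.argv[2]))
```

Routing the call through `mesonlib.meson_command + ['--internal', 'delwithsuffix', ...]` makes the generated ninja rule depend only on the installed `meson` entry point rather than on a script path resolved on disk, which is what the commit message above reports breaking under an MSI installation.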
--- mesonbuild/backend/ninjabackend.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 7300aaf..f7b697f 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -2647,18 +2647,14 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) def generate_gcov_clean(self): gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY') - script_root = self.environment.get_script_dir() - clean_script = os.path.join(script_root, 'delwithsuffix.py') - gcno_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcno']) + gcno_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcno']) gcno_elem.add_item('description', 'Deleting gcno files') self.add_build(gcno_elem) # Alias that runs the target defined above self.create_target_alias('meson-clean-gcno') gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY') - script_root = self.environment.get_script_dir() - clean_script = os.path.join(script_root, 'delwithsuffix.py') - gcda_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcda']) + gcda_elem.add_item('COMMAND', mesonlib.meson_command + ['--internal', 'delwithsuffix', '.', 'gcda']) gcda_elem.add_item('description', 'Deleting gcda files') self.add_build(gcda_elem) # Alias that runs the target defined above -- cgit v1.1 From b9c9024e841450766fb68f2afc215229df6a3505 Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Sat, 23 May 2020 16:06:33 +0300 Subject: mcompile: replaced backend divination code + cleanup --- mesonbuild/mcompile.py | 35 ++++++++++++++++++++++++----------- 1 file changed, 24 insertions(+), 11 deletions(-) diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index 7829ffc..a957c84 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -14,11 +14,12 @@ """Entrypoint script for backend agnostic compile.""" +import json import os -import pathlib import shutil import sys import typing as T +from pathlib import Path from . import mlog from . import mesonlib @@ -27,6 +28,20 @@ from .mesonlib import MesonException if T.TYPE_CHECKING: import argparse +def get_backend_from_introspect(builddir: Path) -> str: + """ + Gets `backend` option value from introspection data + """ + path_to_intro = builddir / 'meson-info' / 'intro-buildoptions.json' + if not path_to_intro.exists(): + raise MesonException('`{}` is missing! Directory is not configured yet?'.format(path_to_intro.name)) + with (path_to_intro).open() as f: + schema = json.load(f) + + for option in schema: + if option['name'] == 'backend': + return option['value'] + raise MesonException('`{}` is missing `backend` option!'.format(path_to_intro.name)) def add_arguments(parser: 'argparse.ArgumentParser') -> None: """Add compile specific arguments.""" @@ -53,24 +68,23 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None: '-C', action='store', dest='builddir', - type=pathlib.Path, + type=Path, default='.', help='The directory containing build files to be built.' 
) def run(options: 'argparse.Namespace') -> int: - bdir = options.builddir # type: pathlib.Path + bdir = options.builddir # type: Path if not bdir.exists(): raise MesonException('Path to builddir {} does not exist!'.format(str(bdir.resolve()))) if not bdir.is_dir(): raise MesonException('builddir path should be a directory.') cmd = [] # type: T.List[str] - runner = None # type T.Optional[str] - slns = list(bdir.glob('*.sln')) - if (bdir / 'build.ninja').exists(): + backend = get_backend_from_introspect(bdir) + if backend == 'ninja': runner = os.environ.get('NINJA') if not runner: if shutil.which('ninja'): @@ -80,6 +94,7 @@ def run(options: 'argparse.Namespace') -> int: if runner is None: raise MesonException('Cannot find either ninja or samu.') + mlog.log('Found runner:', runner) cmd = [runner, '-C', bdir.as_posix()] @@ -92,8 +107,8 @@ def run(options: 'argparse.Namespace') -> int: if options.clean: cmd.append('clean') - # TODO: with python 3.8 this could be `elif slns := bdir.glob('*.sln'):` - elif slns: + elif backend.startswith('vs'): + slns = list(bdir.glob('*.sln')) assert len(slns) == 1, 'More than one solution in a project?' sln = slns[0] @@ -113,9 +128,7 @@ def run(options: 'argparse.Namespace') -> int: # TODO: xcode? else: raise MesonException( - 'Could not find any runner or backend for directory {}'.format(bdir.resolve().as_posix())) - - mlog.log('Found runner:', runner) + 'Backend `{}` is not yet supported by `compile`. Use generated project files directly instead.'.format(backend)) p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer) -- cgit v1.1 From 7c2f9e2b57daca67d98dde163100112f47512bfc Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Tue, 26 May 2020 15:00:46 +0300 Subject: mcompile: replaced intro with cdata + extracted code to funcs --- mesonbuild/mcompile.py | 123 +++++++++++++++++++++++++------------------------ 1 file changed, 63 insertions(+), 60 deletions(-) diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index a957c84..cc17871 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -14,7 +14,6 @@ """Entrypoint script for backend agnostic compile.""" -import json import os import shutil import sys @@ -23,26 +22,70 @@ from pathlib import Path from . import mlog from . import mesonlib +from . import coredata from .mesonlib import MesonException if T.TYPE_CHECKING: import argparse - -def get_backend_from_introspect(builddir: Path) -> str: + +def validate_builddir(builddir: Path): + if not (builddir / 'meson-private' / 'coredata.dat' ).is_file(): + raise MesonException('Current directory is not a meson build directory: `{}`.\n' + 'Please specify a valid build dir or change the working directory to it.\n' + 'It is also possible that the build directory was generated with an old\n' + 'meson version. Please regenerate it in this case.'.format(builddir)) + +def get_backend_from_coredata(builddir: Path) -> str: """ - Gets `backend` option value from introspection data + Gets `backend` option value from coredata """ - path_to_intro = builddir / 'meson-info' / 'intro-buildoptions.json' - if not path_to_intro.exists(): - raise MesonException('`{}` is missing! 
Directory is not configured yet?'.format(path_to_intro.name)) - with (path_to_intro).open() as f: - schema = json.load(f) - - for option in schema: - if option['name'] == 'backend': - return option['value'] - raise MesonException('`{}` is missing `backend` option!'.format(path_to_intro.name)) - + return coredata.load(str(builddir)).get_builtin_option('backend') + +def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path): + runner = os.environ.get('NINJA') + if not runner: + if shutil.which('ninja'): + runner = 'ninja' + elif shutil.which('samu'): + runner = 'samu' + + if runner is None: + raise MesonException('Cannot find either ninja or samu.') + mlog.log('Found runner:', runner) + + cmd = [runner, '-C', builddir.as_posix()] + + # If the value is set to < 1 then don't set anything, which let's + # ninja/samu decide what to do. + if options.jobs > 0: + cmd.extend(['-j', str(options.jobs)]) + if options.load_average > 0: + cmd.extend(['-l', str(options.load_average)]) + if options.clean: + cmd.append('clean') + + return cmd + +def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path): + slns = list(builddir.glob('*.sln')) + assert len(slns) == 1, 'More than one solution in a project?' + + sln = slns[0] + cmd = ['msbuild', str(sln.resolve())] + + # In msbuild `-m` with no number means "detect cpus", the default is `-m1` + if options.jobs > 0: + cmd.append('-m{}'.format(options.jobs)) + else: + cmd.append('-m') + + if options.load_average: + mlog.warning('Msbuild does not have a load-average switch, ignoring.') + if options.clean: + cmd.extend(['/t:Clean']) + + return cmd + def add_arguments(parser: 'argparse.ArgumentParser') -> None: """Add compile specific arguments.""" parser.add_argument( @@ -76,57 +119,17 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None: def run(options: 'argparse.Namespace') -> int: bdir = options.builddir # type: Path - if not bdir.exists(): - raise MesonException('Path to builddir {} does not exist!'.format(str(bdir.resolve()))) - if not bdir.is_dir(): - raise MesonException('builddir path should be a directory.') + validate_builddir(bdir.resolve()) cmd = [] # type: T.List[str] - backend = get_backend_from_introspect(bdir) + backend = get_backend_from_coredata(bdir) if backend == 'ninja': - runner = os.environ.get('NINJA') - if not runner: - if shutil.which('ninja'): - runner = 'ninja' - elif shutil.which('samu'): - runner = 'samu' - - if runner is None: - raise MesonException('Cannot find either ninja or samu.') - mlog.log('Found runner:', runner) - - cmd = [runner, '-C', bdir.as_posix()] - - # If the value is set to < 1 then don't set anything, which let's - # ninja/samu decide what to do. - if options.jobs > 0: - cmd.extend(['-j', str(options.jobs)]) - if options.load_average > 0: - cmd.extend(['-l', str(options.load_average)]) - if options.clean: - cmd.append('clean') - + cmd = get_parsed_args_ninja(options, bdir) elif backend.startswith('vs'): - slns = list(bdir.glob('*.sln')) - assert len(slns) == 1, 'More than one solution in a project?' - - sln = slns[0] - cmd = ['msbuild', str(sln.resolve())] - - # In msbuild `-m` with no number means "detect cpus", the default is `-m1` - if options.jobs > 0: - cmd.append('-m{}'.format(options.jobs)) - else: - cmd.append('-m') - - if options.load_average: - mlog.warning('Msbuild does not have a load-average switch, ignoring.') - if options.clean: - cmd.extend(['/t:Clean']) - - # TODO: xcode? + cmd = get_parsed_args_vs(options, bdir) else: + # TODO: xcode? 
raise MesonException( 'Backend `{}` is not yet supported by `compile`. Use generated project files directly instead.'.format(backend)) -- cgit v1.1 From 4b6471f1d599c582754272d0046559d0b11869fa Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Tue, 26 May 2020 16:53:34 +0300 Subject: mcompile: detect_ninja --- mesonbuild/mcompile.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index cc17871..51a5555 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -24,6 +24,7 @@ from . import mlog from . import mesonlib from . import coredata from .mesonlib import MesonException +from mesonbuild.environment import detect_ninja if T.TYPE_CHECKING: import argparse @@ -42,15 +43,9 @@ def get_backend_from_coredata(builddir: Path) -> str: return coredata.load(str(builddir)).get_builtin_option('backend') def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path): - runner = os.environ.get('NINJA') - if not runner: - if shutil.which('ninja'): - runner = 'ninja' - elif shutil.which('samu'): - runner = 'samu' - + runner = detect_ninja() if runner is None: - raise MesonException('Cannot find either ninja or samu.') + raise MesonException('Cannot find ninja.') mlog.log('Found runner:', runner) cmd = [runner, '-C', builddir.as_posix()] -- cgit v1.1 From 1d02fd924bdd407f99b278286efff5aae98535cb Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Tue, 26 May 2020 17:04:46 +0300 Subject: mcompile: removed unneeded imports --- mesonbuild/mcompile.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index 51a5555..e457623 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -14,8 +14,6 @@ """Entrypoint script for backend agnostic compile.""" -import os -import shutil import sys import typing as T from pathlib import Path -- cgit v1.1 From 7e8f1de063eea43bd9ff390e1573bd7ff475db54 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Mon, 25 May 2020 10:06:41 +0200 Subject: opts: Allow string concatenation (fixes #7199) --- mesonbuild/optinterpreter.py | 7 +++++++ test cases/common/43 options/meson_options.txt | 6 +++--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index dfbe6d7..81206ab 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -177,6 +177,13 @@ class OptionInterpreter: if not isinstance(res, bool): raise OptionException('Token after "not" is not a a boolean') return not res + elif isinstance(arg, mparser.ArithmeticNode): + l = self.reduce_single(arg.left) + r = self.reduce_single(arg.right) + if not (arg.operation == 'add' and isinstance(l, str) and isinstance(r, str)): + raise OptionException('Only string concatenation with the "+" operator is allowed') + FeatureNew.single_use('string concatenation in meson_options.txt', '0.55.0', self.subproject) + return l + r else: raise OptionException('Arguments may only be string, int, bool, or array of those.') diff --git a/test cases/common/43 options/meson_options.txt b/test cases/common/43 options/meson_options.txt index c5986ba..db649de 100644 --- a/test cases/common/43 options/meson_options.txt +++ b/test cases/common/43 options/meson_options.txt @@ -1,7 +1,7 @@ -option('testoption', type : 'string', value : 'optval', description : 'An option to do something') +option('testoption', type : 'string', value : 'optval', description : 'An option ' + 'to do something') option('other_one', type : 
'boolean', value : not (not (not (not false)))) -option('combo_opt', type : 'combo', choices : ['one', 'two', 'combo'], value : 'combo') +option('combo_opt', type : 'co' + 'mbo', choices : ['one', 'two', 'combo'], value : 'combo') option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two']) option('free_array_opt', type : 'array') option('integer_opt', type : 'integer', min : 0, max : -(-5), value : 3) -option('neg_int_opt', type : 'integer', min : -5, max : 5, value : -3) +option('neg' + '_' + 'int' + '_' + 'opt', type : 'integer', min : -5, max : 5, value : -3) -- cgit v1.1 From 534b340a56419b9e5b892eef9d8b3fa3bb647afe Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Mon, 25 May 2020 10:07:36 +0200 Subject: opts: Add FeatureNew for '-' and 'not' introduced in 0.54.1 --- mesonbuild/optinterpreter.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index 81206ab..d47a3d2 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -171,11 +171,13 @@ class OptionInterpreter: res = self.reduce_single(arg.value) if not isinstance(res, (int, float)): raise OptionException('Token after "-" is not a number') + FeatureNew.single_use('negative numbers in meson_options.txt', '0.54.1', self.subproject) return -res elif isinstance(arg, mparser.NotNode): res = self.reduce_single(arg.value) if not isinstance(res, bool): raise OptionException('Token after "not" is not a a boolean') + FeatureNew.single_use('negation ("not") in meson_options.txt', '0.54.1', self.subproject) return not res elif isinstance(arg, mparser.ArithmeticNode): l = self.reduce_single(arg.left) -- cgit v1.1 From c9cd235af4b58b169c30fa497f0adae4e69c5f4c Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Mon, 25 May 2020 10:25:50 +0200 Subject: opts: added docs --- docs/markdown/Build-options.md | 3 +++ docs/markdown/snippets/options_string_concat.md | 14 ++++++++++++++ 2 files changed, 17 insertions(+) create mode 100644 docs/markdown/snippets/options_string_concat.md diff --git a/docs/markdown/Build-options.md b/docs/markdown/Build-options.md index 2d53e28..429b9b2 100644 --- a/docs/markdown/Build-options.md +++ b/docs/markdown/Build-options.md @@ -20,6 +20,9 @@ option('integer_opt', type : 'integer', min : 0, max : 5, value : 3) # Since 0.4 option('free_array_opt', type : 'array', value : ['one', 'two']) # Since 0.44.0 option('array_opt', type : 'array', choices : ['one', 'two', 'three'], value : ['one', 'two']) option('some_feature', type : 'feature', value : 'enabled') # Since 0.47.0 +option('long_desc', type : 'string', value : 'optval', + description : 'An option with a very long description' + + 'that does something in a specific context') # Since 0.55.0 ``` For built-in options, see [Built-in options][builtin_opts]. diff --git a/docs/markdown/snippets/options_string_concat.md b/docs/markdown/snippets/options_string_concat.md new file mode 100644 index 0000000..0fbf0f4 --- /dev/null +++ b/docs/markdown/snippets/options_string_concat.md @@ -0,0 +1,14 @@ +## String concatenation in meson_options.txt + +It is now possible to use string concatenation (with the `+` opperator) in the +meson_options.txt file. This allows splitting long option descriptions. 
+ +```meson +option( + 'testoption', + type : 'string', + value : 'optval', + description : 'An option with a very long description' + + 'that does something in a specific context' +) +``` -- cgit v1.1 From 228fd24ca4039fe48d8f30a82cccc83d72b0e786 Mon Sep 17 00:00:00 2001 From: Soapux <35306504+Soapux@users.noreply.github.com> Date: Wed, 27 May 2020 11:51:39 -0500 Subject: docs: Fix typo in Release notes [skip ci] --- docs/markdown/Release-notes-for-0.54.0.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Release-notes-for-0.54.0.md b/docs/markdown/Release-notes-for-0.54.0.md index 2c8880c..3202b57 100644 --- a/docs/markdown/Release-notes-for-0.54.0.md +++ b/docs/markdown/Release-notes-for-0.54.0.md @@ -14,7 +14,7 @@ If it set to 0 then the PTHREAD_POOL_SIZE option will not be passed. ## Introduce dataonly for the pkgconfig module This allows users to disable writing out the inbuilt variables to -the pkg-config file as they might actualy not be required. +the pkg-config file as they might actually not be required. One reason to have this is for architecture-independent pkg-config files in projects which also have architecture-dependent outputs. -- cgit v1.1 From bdfd46e579472d24f8856b79938c8daa1b70d15f Mon Sep 17 00:00:00 2001 From: Phillip Johnston Date: Tue, 26 May 2020 11:19:07 -0700 Subject: Recognize Arduino .ino files as C++ Renaming .ino files is not an option when working with the IDE. Meson should recognize it as C++ however. --- mesonbuild/compilers/compilers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index f3c171f..3def159 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -52,7 +52,7 @@ lib_suffixes = ('a', 'lib', 'dll', 'dll.a', 'dylib', 'so') # This means we can't include .h headers here since they could be C, C++, ObjC, etc. 
lang_suffixes = { 'c': ('c',), - 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'), + 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino'), 'cuda': ('cu',), # f90, f95, f03, f08 are for free-form fortran ('f90' recommended) # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended) -- cgit v1.1 From 4852ee8cebb2e0a6c00d3ddf41fe85f54a43240e Mon Sep 17 00:00:00 2001 From: Soapux <35306504+Soapux@users.noreply.github.com> Date: Tue, 26 May 2020 13:46:24 -0500 Subject: Fix lack of space after 'Cflags:' in pkgconfig files --- mesonbuild/modules/pkgconfig.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 7597eeb..18baf0c 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -379,7 +379,7 @@ class PkgConfigModule(ExtensionModule): return cflags_buf cflags = generate_compiler_flags() - ofile.write('Cflags:') + ofile.write('Cflags: ') if uninstalled: ofile.write(' '.join(generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs))) elif not dataonly and cflags: -- cgit v1.1 From a340b413ef7f25d458806f92d342e52a7356dc01 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Tue, 19 May 2020 14:43:06 +0200 Subject: ninja: Always use to_native on CompilerArgs (fixes #7167) --- mesonbuild/backend/ninjabackend.py | 13 ++++--------- test cases/common/226 include_type dependency/main.cpp | 8 ++++++++ test cases/common/226 include_type dependency/meson.build | 10 ++++++++++ test cases/common/226 include_type dependency/pch/test.hpp | 1 + 4 files changed, 23 insertions(+), 9 deletions(-) create mode 100644 test cases/common/226 include_type dependency/main.cpp create mode 100644 test cases/common/226 include_type dependency/pch/test.hpp diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index f7b697f..69e7618 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -151,6 +151,10 @@ class NinjaBuildElement: self.orderdeps.add(dep) def add_item(self, name, elems): + # Always convert from GCC-style argument naming to the naming used by the + # current compiler. Also filter system include paths, deduplicate, etc. + if isinstance(elems, CompilerArgs): + elems = elems.to_native() if isinstance(elems, str): elems = [elems] self.elems.append((name, elems)) @@ -1985,9 +1989,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) # Write the Ninja build command compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) - # Convert from GCC-style link argument naming to the naming used by the - # current compiler. - commands = commands.to_native() element.add_item('ARGS', commands) self.add_build(element) return rel_obj @@ -2204,9 +2205,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) d = os.path.join(self.get_target_private_dir(target), d) element.add_orderdep(d) element.add_dep(pch_dep) - # Convert from GCC-style link argument naming to the naming used by the - # current compiler. - commands = commands.to_native() for i in self.get_fortran_orderdeps(target, compiler): element.add_orderdep(i) element.add_item('DEPFILE', dep_file) @@ -2594,9 +2592,6 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) commands += extra_args commands += custom_target_libraries commands += stdlib_args # Standard library arguments go last, because they never depend on anything. 
- # Convert from GCC-style link argument naming to the naming used by the - # current compiler. - commands = commands.to_native() dep_targets.extend([self.get_dependency_filename(t) for t in dependencies]) dep_targets.extend([self.get_dependency_filename(t) for t in target.link_depends]) diff --git a/test cases/common/226 include_type dependency/main.cpp b/test cases/common/226 include_type dependency/main.cpp new file mode 100644 index 0000000..bf8c4a4 --- /dev/null +++ b/test cases/common/226 include_type dependency/main.cpp @@ -0,0 +1,8 @@ +#include +#include + +using namespace std; + +int main(void) { + return 0; +} diff --git a/test cases/common/226 include_type dependency/meson.build b/test cases/common/226 include_type dependency/meson.build index fafceaf..d17e920 100644 --- a/test cases/common/226 include_type dependency/meson.build +++ b/test cases/common/226 include_type dependency/meson.build @@ -4,10 +4,16 @@ project( ) dep = dependency('zlib', method: 'pkg-config', required : false) +boost_dep = dependency('boost', modules: ['graph'], include_type : 'system', required: false) + if not dep.found() error('MESON_SKIP_TEST zlib was not found') endif +if not boost_dep.found() + error('MESON_SKIP_TEST boost was not found') +endif + assert(dep.include_type() == 'preserve', 'include_type must default to "preserve"') dep_sys = dep.as_system() @@ -26,3 +32,7 @@ assert(sp_dep.include_type() == 'preserve', 'default is preserve') sp_dep_sys = sp_dep.as_system('system') assert(sp_dep_sys.include_type() == 'system', 'changing include_type works') assert(sp_dep.include_type() == 'preserve', 'as_system must not mutate the original object') + +# Check that PCH works with `include_type : 'system'` See https://github.com/mesonbuild/meson/issues/7167 +main_exe = executable('main_exe', 'main.cpp', cpp_pch: 'pch/test.hpp', dependencies: boost_dep) +test('main_test', main_exe) diff --git a/test cases/common/226 include_type dependency/pch/test.hpp b/test cases/common/226 include_type dependency/pch/test.hpp new file mode 100644 index 0000000..0d40fe1 --- /dev/null +++ b/test cases/common/226 include_type dependency/pch/test.hpp @@ -0,0 +1 @@ +#include -- cgit v1.1 From e2c475939eca7d49b9039be3c0a565c0e38c32ac Mon Sep 17 00:00:00 2001 From: "Michael Hirsch, Ph.D" Date: Wed, 13 May 2020 01:19:11 -0400 Subject: add type anno: compilers/clike --- mesonbuild/compilers/compilers.py | 6 +++--- mesonbuild/compilers/mixins/clike.py | 30 +++++++++++++++--------------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 3def159..b0fa5f5 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -494,7 +494,7 @@ class CompilerArgs(collections.abc.MutableSequence): value = self.__container[index] del self.__container[index] if value in self.__seen_args and value in self.__container: # this is also honoring that you can have duplicated entries - self.__seen_args.remove(value) + self.__seen_args.remove(value) def __len__(self) -> int: return len(self.__container) @@ -688,7 +688,7 @@ class CompilerArgs(collections.abc.MutableSequence): should_prepend = self._should_prepend(arg) if dedup == 2: # Remove all previous occurrences of the arg and add it anew - if arg in self.__seen_args and arg not in this_round_added: #if __seen_args contains arg as well as this_round_added, then its not yet part in self. 
+ if arg in self.__seen_args and arg not in this_round_added: # if __seen_args contains arg as well as this_round_added, then its not yet part in self. self.remove(arg) if should_prepend: if arg in pre: @@ -954,7 +954,7 @@ class Compiler: return args @contextlib.contextmanager - def compile(self, code, extra_args=None, *, mode='link', want_output=False, temp_dir=None): + def compile(self, code: str, extra_args: list = None, *, mode: str = 'link', want_output: bool = False, temp_dir: str = None): if extra_args is None: extra_args = [] try: diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 01c984d..0ed0baa 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -254,14 +254,14 @@ class CLikeCompiler: code = 'int main(void) { int class=0; return class; }\n' return self.sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) - def check_header(self, hname, prefix, env, *, extra_args=None, dependencies=None): + def check_header(self, hname: str, prefix: str, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #include <{header}>''' return self.compiles(code.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def has_header(self, hname, prefix, env, *, extra_args=None, dependencies=None, disable_cache=False): + def has_header(self, hname: str, prefix: str, env, *, extra_args=None, dependencies=None, disable_cache: bool = False): fargs = {'prefix': prefix, 'header': hname} code = '''{prefix} #ifdef __has_include @@ -274,7 +274,7 @@ class CLikeCompiler: return self.compiles(code.format(**fargs), env, extra_args=extra_args, dependencies=dependencies, mode='preprocess', disable_cache=disable_cache) - def has_header_symbol(self, hname, symbol, prefix, env, *, extra_args=None, dependencies=None): + def has_header_symbol(self, hname: str, symbol: str, prefix: str, env, *, extra_args=None, dependencies=None): fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} t = '''{prefix} #include <{header}> @@ -288,7 +288,7 @@ class CLikeCompiler: return self.compiles(t.format(**fargs), env, extra_args=extra_args, dependencies=dependencies) - def _get_basic_compiler_args(self, env, mode): + def _get_basic_compiler_args(self, env, mode: str): cargs, largs = [], [] # Select a CRT if needed since we're linking if mode == 'link': @@ -354,11 +354,11 @@ class CLikeCompiler: def compiles(self, code: str, env, *, extra_args: T.Sequence[T.Union[T.Sequence[str], str]] = None, - dependencies=None, mode: str = 'compile', disable_cache=False) -> T.Tuple[bool, bool]: + dependencies=None, mode: str = 'compile', disable_cache: bool = False) -> T.Tuple[bool, bool]: with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p: return p.returncode == 0, p.cached - def _build_wrapper(self, code: str, env, extra_args, dependencies=None, mode: str = 'compile', want_output: bool = False, disable_cache: bool = False, temp_dir=None) -> T.Tuple[bool, bool]: + def _build_wrapper(self, code: str, env, extra_args, dependencies=None, mode: str = 'compile', want_output: bool = False, disable_cache: bool = False, temp_dir: str = None) -> T.Tuple[bool, bool]: args = self._get_compiler_check_args(env, extra_args, dependencies, mode) if disable_cache or want_output: return self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir) @@ -916,21 +916,21 @@ class CLikeCompiler: architecture. 
''' # If not building on macOS for Darwin, do a simple file check - files = [Path(f) for f in files] + paths = [Path(f) for f in files] if not env.machines.host.is_darwin() or not env.machines.build.is_darwin(): - for f in files: - if f.is_file(): - return f + for p in paths: + if p.is_file(): + return p # Run `lipo` and check if the library supports the arch we want - for f in files: - if not f.is_file(): + for p in paths: + if not p.is_file(): continue - archs = mesonlib.darwin_get_object_archs(str(f)) + archs = mesonlib.darwin_get_object_archs(str(p)) if archs and env.machines.host.cpu_family in archs: - return f + return p else: mlog.debug('Rejected {}, supports {} but need {}' - .format(f, archs, env.machines.host.cpu_family)) + .format(p, archs, env.machines.host.cpu_family)) return None @functools.lru_cache() -- cgit v1.1 From 6ecb716f9cb2d8bd3aa194fe6b3123ed61f65909 Mon Sep 17 00:00:00 2001 From: Lisa White Date: Mon, 1 Jun 2020 15:32:19 +0100 Subject: [skip ci] mesonwrap docs: limit line length and remove trailing spaces --- docs/markdown/Adding-new-projects-to-wrapdb.md | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md index bbe945d..1d5a596 100644 --- a/docs/markdown/Adding-new-projects-to-wrapdb.md +++ b/docs/markdown/Adding-new-projects-to-wrapdb.md @@ -6,14 +6,17 @@ Each wrap repository has a master branch with only one initial commit and *no* wrap files. And that is the only commit ever made on that branch. -For every release of a project a new branch is created. The new branch is named after the -the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for +For every release of a project a new branch is created. The new branch is named after the +the upstream release number (e.g. `1.0.0`). This branch holds a wrap file for this particular release. There are two types of wraps on WrapDB - regular wraps and wraps with Meson build definition patches. A wrap file in a repository on WrapDB must have a name `upstream.wrap`. -Wraps with Meson build definition patches work in much the same way as Debian: we take the unaltered upstream source package and add a new build system to it as a patch. These build systems are stored as Git repositories on GitHub. They only contain build definition files. You may also think of them as an overlay to upstream source. +Wraps with Meson build definition patches work in much the same way as Debian: +we take the unaltered upstream source package and add a new build system to it as a patch. +These build systems are stored as Git repositories on GitHub. They only contain build definition files. +You may also think of them as an overlay to upstream source. Whenever a new commit is pushed into GitHub's project branch, a new wrap is generated with an incremented version number. All the old releases remain unaltered. @@ -21,13 +24,13 @@ New commits are always done via GitHub merge requests and must be reviewed by someone other than the submitter. Note that your Git repo with wrap must not contain the subdirectory of the source -release. That gets added automatically by the service. You also must not commit +release. That gets added automatically by the service. You also must not commit any source code from the original tarball into the wrap repository. ## Choosing the repository name Wrapped subprojects are used much like external dependencies. 
Thus -they should have the same name as the upstream projects. +they should have the same name as the upstream projects. If the project provides a pkg-config file, then the repository name should be the same as the pkg-config name. Usually this is the name of the @@ -36,13 +39,13 @@ however. As an example the libogg project's chosen pkg-config name is `ogg` instead of `libogg`, which is the reason why the repository is named plain `ogg`. -If there is no a pkg-config file, the name the project uses/promotes should be used, +If there is no a pkg-config file, the name the project uses/promotes should be used, lowercase only (Catch2 -> catch2). ## How to contribute a new wrap If the project already uses Meson build system, then only a wrap file - `upstream.wrap` -should be provided. In other case a Meson build definition patch - a set of `meson.build` +should be provided. In other case a Meson build definition patch - a set of `meson.build` files - should be also provided. ### Request a new repository or branch @@ -80,8 +83,8 @@ git commit -a -m 'Add wrap files for libfoo-1.0.0' git push origin 1.0.0 ``` -Now you should create a pull request on GitHub. Remember to create it against the -correct branch rather than master (`1.0.0` branch in this example). GitHub should do +Now you should create a pull request on GitHub. Remember to create it against the +correct branch rather than master (`1.0.0` branch in this example). GitHub should do this automatically. ## What is done by WrapDB maintainers @@ -99,7 +102,8 @@ git remote add origin git push -u origin master ``` -Note that this is the *only* commit that will ever be made to master branch. All other commits are done to branches. +Note that this is the *only* commit that will ever be made to master branch. +All other commits are done to branches. Repo names must fully match this regexp: `[a-z0-9._]+`. -- cgit v1.1 From fcbff1de7c2c65708fc8d0079074177b3fa7e800 Mon Sep 17 00:00:00 2001 From: Lisa White Date: Mon, 1 Jun 2020 15:36:46 +0100 Subject: [skip ci] mesonwrap docs - Add ambiguous naming documentation. - Update branch request documentation. - Add mesonwrap token documentation. - Update review guidelines. --- docs/markdown/Adding-new-projects-to-wrapdb.md | 33 ++++++++++---------- docs/markdown/Wrap-maintainer-tools.md | 17 +++++++++++ docs/markdown/Wrap-review-guidelines.md | 42 +++++++++++++++++--------- docs/sitemap.txt | 1 + 4 files changed, 62 insertions(+), 31 deletions(-) create mode 100644 docs/markdown/Wrap-maintainer-tools.md diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md index 1d5a596..0e481d0 100644 --- a/docs/markdown/Adding-new-projects-to-wrapdb.md +++ b/docs/markdown/Adding-new-projects-to-wrapdb.md @@ -32,6 +32,8 @@ any source code from the original tarball into the wrap repository. Wrapped subprojects are used much like external dependencies. Thus they should have the same name as the upstream projects. +NOTE: Repo names must fully match this regexp: `[a-z0-9._]+`. + If the project provides a pkg-config file, then the repository name should be the same as the pkg-config name. Usually this is the name of the project, such as `libpng`. Sometimes it is slightly different, @@ -42,13 +44,16 @@ named plain `ogg`. If there is no a pkg-config file, the name the project uses/promotes should be used, lowercase only (Catch2 -> catch2). +If the project name is too generic or ambiguous (e.g. `benchmark`), +consider using `organization-project` naming format (e.g. 
`google-benchmark`). + ## How to contribute a new wrap If the project already uses Meson build system, then only a wrap file - `upstream.wrap` should be provided. In other case a Meson build definition patch - a set of `meson.build` files - should be also provided. -### Request a new repository or branch +### Request a new repository Create an issue on the [wrapdb bug tracker](https://github.com/mesonbuild/wrapdb/issues) using *Title* and *Description* below as a template. @@ -64,6 +69,9 @@ version: Wait until the new repository or branch is created. A link to the new repository or branch will be posted in a comment to this issue. +NOTE: Requesting a branch is not necessary. WrapDB maintainer can create the branch and +modify the PR accordingly if the project repository exists. + ### Add a new wrap First you need to fork the repository to your own page. @@ -87,25 +95,24 @@ Now you should create a pull request on GitHub. Remember to create it against th correct branch rather than master (`1.0.0` branch in this example). GitHub should do this automatically. +If the branch doesn't exist file a pull request against master. +WrapDB maintainers can fix it before merging. + ## What is done by WrapDB maintainers +[mesonwrap tools](Wrap-tools.md) must be used for the tasks below. + ### Adding new project to the Wrap provider service Each project gets its own repo. It is initialized like this: ``` -git init -git add readme.txt -git add LICENSE.build -git commit -a -m 'Create project foobar' -git remote add origin -git push -u origin master +mesonwrap new_repo --homepage=$HOMEPAGE --directory=$NEW_LOCAL_PROJECT_DIR $PROJECT_NAME ``` -Note that this is the *only* commit that will ever be made to master branch. -All other commits are done to branches. +The command creates a new repository and uploads it to Github. -Repo names must fully match this regexp: `[a-z0-9._]+`. +`--version` flag may be used to create a branch immediately. ### Adding a new branch to an existing project @@ -133,12 +140,6 @@ to functionality. All such changes must be submitted to upstream. You may also host your own Git repo with the changes if you wish. The Wrap system has native support for Git subprojects. -## Creator script - -The WrapDB repository has a -[helper script](https://github.com/mesonbuild/mesonwrap/blob/master/mesonwrap.py) -to generate new repositories, verify them and update them. - ## Reviewing wraps See [Wrap review guidelines](Wrap-review-guidelines.md). diff --git a/docs/markdown/Wrap-maintainer-tools.md b/docs/markdown/Wrap-maintainer-tools.md new file mode 100644 index 0000000..717d0d2 --- /dev/null +++ b/docs/markdown/Wrap-maintainer-tools.md @@ -0,0 +1,17 @@ +# Wrap maintainer tools + +The [mesonwrap repository](https://github.com/mesonbuild/mesonwrap) provides tools +to maintain the WrapDB. Read-only features such can be used by anyone without Meson admin rights. + +## Personal access token + +Some tools require access to the Github API. +A [personal access token](https://github.com/settings/tokens) may be required +if the freebie Github API quota is exhausted. `public_repo` scope is required +for write operations. + +``` +$ cat ~/.config/mesonwrap.ini +[mesonwrap] +github_token = +``` diff --git a/docs/markdown/Wrap-review-guidelines.md b/docs/markdown/Wrap-review-guidelines.md index 512353c..3e41a8d 100644 --- a/docs/markdown/Wrap-review-guidelines.md +++ b/docs/markdown/Wrap-review-guidelines.md @@ -7,18 +7,30 @@ package is rejected. What should be done will be determined on a case-by-case basis. 
Similarly meeting all these requirements does not guarantee that the package will get accepted. Use common sense. -## Checklist - -Reviewer: copy-paste this to MR discussion box and tick all boxes that apply. - - - [ ] project() has version string - - [ ] project() has license string - - [ ] if new project, master has tagged commit as only commit - - [ ] if new branch, it is branched from master - - [ ] contains a readme.txt - - [ ] contains an upstream.wrap file - - [ ] download link points to authoritative upstream location - - [ ] wrap repository contains only build system files - - [ ] merge request is pointed to correct target branch (not master) - - [ ] wrap works - - [ ] repo does not have useless top level directory (i.e. libfoobar-1.0.0) +The review process is partially automated by the [mesonwrap](Wrap-maintainer-tools.md) +`review` tool. + +``` +mesonwrap review zlib --pull-request=1 [--approve] +``` + +Since not every check can be automated please pay attention to the following during the review: + +- Download link points to an authoritative upstream location. +- Version branch is created from master. +- Except for the existing code, `LICENSE.build` is mandatory. +- `project()` has a version and it matches the source version. +- `project()` has a license. +- Complex `configure_file()` inputs are documented. + If the file is a copy of a project file make sure it is clear what was changed. +- Unit tests are enabled if the project provides them. +- There are no guidelines if `install()` is a good or a bad thing in wraps. +- If the project can't be tested on the host platform consider using the `--cross-file` flag. + See [the issue](https://github.com/mesonbuild/mesonwrap/issues/125). + +Encourage wrap readability. Use your own judgement. + +## Approval + +If the code looks good use the `--approve` flag to merge it. +The tool automatically creates a release. 
diff --git a/docs/sitemap.txt b/docs/sitemap.txt index 4029a60..aa3f51a 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -72,6 +72,7 @@ index.md Adding-new-projects-to-wrapdb.md Using-the-WrapDB.md Using-wraptool.md + Wrap-maintainer-tools.md Wrap-best-practices-and-tips.md Wrap-review-guidelines.md Shipping-prebuilt-binaries-as-wraps.md -- cgit v1.1 From 647f19b5aa977e63933bb19b77be97f2a9349934 Mon Sep 17 00:00:00 2001 From: Ebrahim Byagowi Date: Tue, 2 Jun 2020 22:09:18 +0430 Subject: docs/unit-tests: Show a test can have multiple suites [skip ci] --- docs/markdown/Unit-tests.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index bd91dbb..06664db6 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -136,7 +136,7 @@ For clarity, consider the meson.build containing: ```meson test('A', ..., suite: 'foo') -test('B', ..., suite: 'foo') +test('B', ..., suite: ['foo', 'bar']) test('C', ..., suite: 'bar') test('D', ..., suite: 'baz') -- cgit v1.1 From 9ada7e18a898118bcf177ec4d39c677acaee1606 Mon Sep 17 00:00:00 2001 From: jonathanmist Date: Sat, 30 May 2020 17:37:58 +0100 Subject: dependencies/cuda: Add support for ARM linux --- mesonbuild/dependencies/cuda.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py index 9c189be..c962cae 100644 --- a/mesonbuild/dependencies/cuda.py +++ b/mesonbuild/dependencies/cuda.py @@ -157,11 +157,15 @@ class CudaDependency(ExternalDependency): mlog.debug('Falling back to extracting version from path') path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex - m = path_version_regex.match(os.path.basename(path)) - if m: - return m[1] + try: + m = path_version_regex.match(os.path.basename(path)) + if m: + return m.group(1) + else: + mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path)) + except Exception as e: + mlog.warning('Could not detect CUDA Toolkit version for {}: {}'.format(path, str(e))) - mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path)) return '0.0' def _read_toolkit_version_txt(self, path): @@ -172,7 +176,7 @@ class CudaDependency(ExternalDependency): version_str = version_file.readline() # e.g. 
'CUDA Version 10.1.168' m = self.toolkit_version_regex.match(version_str) if m: - return self._strip_patch_version(m[1]) + return self._strip_patch_version(m.group(1)) except Exception as e: mlog.debug('Could not read CUDA Toolkit\'s version file {}: {}'.format(version_file_path, str(e))) @@ -192,7 +196,7 @@ class CudaDependency(ExternalDependency): raise DependencyException(msg.format(arch, 'Windows')) return os.path.join('lib', libdirs[arch]) elif machine.is_linux(): - libdirs = {'x86_64': 'lib64', 'ppc64': 'lib'} + libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64'} if arch not in libdirs: raise DependencyException(msg.format(arch, 'Linux')) return libdirs[arch] -- cgit v1.1 From a252a17e6e0314c3bd77ef8cd50ada9f53cee678 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 31 May 2020 23:22:30 +0200 Subject: cmake: always split property lists (fixes #7228) --- mesonbuild/cmake/interpreter.py | 2 +- mesonbuild/cmake/traceparser.py | 5 +++-- test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt | 2 +- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 09b633e..a5bf545 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -355,7 +355,7 @@ class ConverterTarget: if 'CONFIGURATIONS' in tgt.properties: cfgs += [x for x in tgt.properties['CONFIGURATIONS'] if x] cfg = cfgs[0] - + is_debug = self.env.coredata.get_builtin_option('debug'); if is_debug: if 'DEBUG' in cfgs: diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py index f20bcc8..d94e774 100644 --- a/mesonbuild/cmake/traceparser.py +++ b/mesonbuild/cmake/traceparser.py @@ -64,6 +64,7 @@ class CMakeTarget: return for key, val in self.properties.items(): self.properties[key] = [x.strip() for x in val] + assert all([';' not in x for x in self.properties[key]]) class CMakeGeneratorTarget(CMakeTarget): def __init__(self, name): @@ -574,10 +575,10 @@ class CMakeTraceParser: continue if mode in ['INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC']: - interface += [i] + interface += i.split(';') if mode in ['PUBLIC', 'PRIVATE', 'LINK_PRIVATE']: - private += [i] + private += i.split(';') if paths: interface = self._guess_files(interface) diff --git a/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt index 50b1049..c9b2a20 100644 --- a/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt +++ b/test cases/cmake/2 advanced/subprojects/cmMod/CMakeLists.txt @@ -20,7 +20,7 @@ set_target_properties(cmModLib PROPERTIES VERSION 1.0.1) add_executable(testEXE main.cpp) target_link_libraries(cmModLib ZLIB::ZLIB) -target_link_libraries(cmModLibStatic ZLIB::ZLIB) +target_link_libraries(cmModLibStatic ;ZLIB::ZLIB;) target_link_libraries(testEXE cmModLib) target_compile_definitions(cmModLibStatic PUBLIC CMMODLIB_STATIC_DEFINE) -- cgit v1.1 From 5b3bed525d9a0857f57a6e4cc5ad5948fe46d2dd Mon Sep 17 00:00:00 2001 From: Mike Gilbert Date: Sun, 31 May 2020 23:08:40 -0400 Subject: Ignore file access errors when scanning .so files in system libdirs Bug: https://bugs.gentoo.org/726524 --- mesonbuild/compilers/mixins/clike.py | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 0ed0baa..56a9ea6 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ 
-152,15 +152,24 @@ class CLikeCompiler: if not files: retval.append(d) continue - file_to_check = os.path.join(d, files[0]) - with open(file_to_check, 'rb') as fd: - header = fd.read(5) - # if file is not an ELF file, it's weird, but accept dir - # if it is elf, and the class matches, accept dir - if header[1:4] != b'ELF' or int(header[4]) == elf_class: - retval.append(d) - # at this point, it's an ELF file which doesn't match the - # appropriate elf_class, so skip this one + + for f in files: + file_to_check = os.path.join(d, f) + try: + with open(file_to_check, 'rb') as fd: + header = fd.read(5) + # if file is not an ELF file, it's weird, but accept dir + # if it is elf, and the class matches, accept dir + if header[1:4] != b'ELF' or int(header[4]) == elf_class: + retval.append(d) + # at this point, it's an ELF file which doesn't match the + # appropriate elf_class, so skip this one + # stop scanning after the first sucessful read + break + except OSError: + # Skip the file if we can't read it + pass + return tuple(retval) @functools.lru_cache() -- cgit v1.1 From 5e3f9b4b06cc4814160abed74735b0ad55967d9a Mon Sep 17 00:00:00 2001 From: Lisa White Date: Wed, 3 Jun 2020 13:32:22 +0100 Subject: [skip ci] mesonwrap docs: fix a broken link --- docs/markdown/Adding-new-projects-to-wrapdb.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Adding-new-projects-to-wrapdb.md b/docs/markdown/Adding-new-projects-to-wrapdb.md index 0e481d0..25fb61c 100644 --- a/docs/markdown/Adding-new-projects-to-wrapdb.md +++ b/docs/markdown/Adding-new-projects-to-wrapdb.md @@ -100,7 +100,7 @@ WrapDB maintainers can fix it before merging. ## What is done by WrapDB maintainers -[mesonwrap tools](Wrap-tools.md) must be used for the tasks below. +[mesonwrap tools](Wrap-maintainer-tools.md) must be used for the tasks below. ### Adding new project to the Wrap provider service -- cgit v1.1 From f818d961e45a6fcaf502f0174abeb4aeb16729c6 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Tue, 2 Jun 2020 22:08:00 +0300 Subject: Add android to os list informally. Closes #6233. [skip ci] --- docs/markdown/Reference-tables.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index c42d608..60a9720 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -121,6 +121,7 @@ These are provided by the `.system()` method call. 
| Value | Comment | | ----- | ------- | +| android | By convention only, subject to change | | cygwin | The Cygwin environment for Windows | | darwin | Either OSX or iOS | | dragonfly | DragonFly BSD | -- cgit v1.1 From 6d2255ffec94f1ad05b1ff8a0970d77ef902b0b7 Mon Sep 17 00:00:00 2001 From: Joshua Gawley <13jgawley@thelangton.org.uk> Date: Wed, 3 Jun 2020 22:48:01 +0100 Subject: mesonlib.py: refactored detect_vcs() to use pathlib.Path (#7230) --- mesonbuild/mesonlib.py | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index 26fe6eb..2413cb1 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -544,20 +544,24 @@ def darwin_get_object_archs(objpath: str) -> T.List[str]: return stdo.split() -def detect_vcs(source_dir: str) -> T.Optional[T.Dict[str, str]]: +def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]: vcs_systems = [ dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'), dict(name = 'mercurial', cmd = 'hg', repo_dir = '.hg', get_rev = 'hg id -i', rev_regex = '(.*)', dep = '.hg/dirstate'), dict(name = 'subversion', cmd = 'svn', repo_dir = '.svn', get_rev = 'svn info', rev_regex = 'Revision: (.*)', dep = '.svn/wc.db'), dict(name = 'bazaar', cmd = 'bzr', repo_dir = '.bzr', get_rev = 'bzr revno', rev_regex = '(.*)', dep = '.bzr'), ] - # FIXME: this is much cleaner with pathlib.Path - segs = source_dir.replace('\\', '/').split('/') - for i in range(len(segs), -1, -1): - curdir = '/'.join(segs[:i]) + if isinstance(source_dir, str): + source_dir = Path(source_dir) + + parent_paths_and_self = collections.deque(source_dir.parents) + # Prepend the source directory to the front so we can check it; + # source_dir.parents doesn't include source_dir + parent_paths_and_self.appendleft(source_dir) + for curdir in parent_paths_and_self: for vcs in vcs_systems: - if os.path.isdir(os.path.join(curdir, vcs['repo_dir'])) and shutil.which(vcs['cmd']): - vcs['wc_dir'] = curdir + if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']): + vcs['wc_dir'] = str(curdir) return vcs return None -- cgit v1.1 From 2e30afb23b0f34308c747b1e799b66a2f992d398 Mon Sep 17 00:00:00 2001 From: Richard Brown Date: Thu, 4 Jun 2020 16:10:47 +0200 Subject: Add libeconf to users.md libeconf is now using meson as its primary/default build system, so we'd like to see ourselves on the meson users list . --- docs/markdown/Users.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index 41d8dfa..1867407 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -72,6 +72,7 @@ topic](https://github.com/topics/meson). 
- [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network - [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android - [Libdrm](https://gitlab.freedesktop.org/mesa/drm), a library for abstracting DRM kernel interfaces + - [libeconf](https://github.com/openSUSE/libeconf), Enhanced config file parsing library, which merges config files placed in several locations into one - [Libepoxy](https://github.com/anholt/libepoxy/), a library for handling OpenGL function pointer management - [libfuse](https://github.com/libfuse/libfuse), the reference implementation of the Linux FUSE (Filesystem in Userspace) interface - [Libgit2-glib](https://git.gnome.org/browse/libgit2-glib), a GLib wrapper for libgit2 -- cgit v1.1 From cca06e4c336da1684f90da607c6a2cd591747fce Mon Sep 17 00:00:00 2001 From: Marcel Hollerbach Date: Thu, 7 May 2020 13:35:35 +0200 Subject: ninjabackend: cache calls to normpaths calls to normpaths are expansive. We should cache the results. This safes 2s in the configure time of efl. --- mesonbuild/backend/ninjabackend.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 69e7618..6cf8026 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -2004,6 +2004,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) curdir = '.' return compiler.get_include_args(curdir, False) + @lru_cache(maxsize=None) + def get_normpath_target(self, source) -> str: + return os.path.normpath(source) + def get_custom_target_dir_include_args(self, target, compiler): custom_target_include_dirs = [] for i in target.get_generated_sources(): @@ -2012,7 +2016,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) # own target build dir. if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): continue - idir = os.path.normpath(self.get_target_dir(i)) + idir = self.get_normpath_target(self.get_target_dir(i)) if not idir: idir = '.' if idir not in custom_target_include_dirs: -- cgit v1.1 From 256e910dee90bda81286f31081b56a707f267e0d Mon Sep 17 00:00:00 2001 From: Marcel Hollerbach Date: Thu, 7 May 2020 13:37:11 +0200 Subject: cache up regex mathings the names passed in here are often the same. We should ensure that we cache the regex match, as this will speed up our runtime a lot. --- mesonbuild/compilers/compilers.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index b0fa5f5..ce1016d 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -138,11 +138,15 @@ def is_llvm_ir(fname): fname = fname.fname return fname.split('.')[-1] == 'll' +@lru_cache(maxsize=None) +def cached_by_name(fname): + suffix = fname.split('.')[-1] + return suffix in obj_suffixes + def is_object(fname): if hasattr(fname, 'fname'): fname = fname.fname - suffix = fname.split('.')[-1] - return suffix in obj_suffixes + return cached_by_name(fname) def is_library(fname): if hasattr(fname, 'fname'): -- cgit v1.1 From ba8e838dcffa2b7a82afc9a75afae8a2c27d8883 Mon Sep 17 00:00:00 2001 From: Marcel Hollerbach Date: Mon, 11 May 2020 22:00:12 +0200 Subject: Revert "CompilerArgs: make lookup faster" This was a not so nice solution, and should be replaced with something better. 
This reverts commit 4524088d386d2e2315d8fef6ffedc11d8e9a394a. --- mesonbuild/compilers/compilers.py | 33 +++++++-------------------------- 1 file changed, 7 insertions(+), 26 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index ce1016d..07b07d9 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -16,7 +16,6 @@ import contextlib, os.path, re, tempfile import collections.abc import itertools import typing as T -from functools import lru_cache from ..linkers import ( GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker, @@ -466,9 +465,6 @@ class CompilerArgs(collections.abc.MutableSequence): iterable: T.Optional[T.Iterable[str]] = None): self.compiler = compiler self.__container = list(iterable) if iterable is not None else [] # type: T.List[str] - self.__seen_args = set() - for arg in self.__container: - self.__seen_args.add(arg) @T.overload # noqa: F811 def __getitem__(self, index: int) -> str: # noqa: F811 @@ -491,27 +487,20 @@ class CompilerArgs(collections.abc.MutableSequence): def __setitem__(self, index, value) -> None: # noqa: F811 self.__container[index] = value - for v in value: - self.__seen_args.add(v) def __delitem__(self, index: T.Union[int, slice]) -> None: - value = self.__container[index] del self.__container[index] - if value in self.__seen_args and value in self.__container: # this is also honoring that you can have duplicated entries - self.__seen_args.remove(value) def __len__(self) -> int: return len(self.__container) def insert(self, index: int, value: str) -> None: self.__container.insert(index, value) - self.__seen_args.add(value) def copy(self) -> 'CompilerArgs': return CompilerArgs(self.compiler, self.__container.copy()) @classmethod - @lru_cache(maxsize=None) def _can_dedup(cls, arg): ''' Returns whether the argument can be safely de-duped. This is dependent @@ -566,7 +555,6 @@ class CompilerArgs(collections.abc.MutableSequence): return 0 @classmethod - @lru_cache(maxsize=None) def _should_prepend(cls, arg): if arg.startswith(cls.prepend_prefixes): return True @@ -643,7 +631,6 @@ class CompilerArgs(collections.abc.MutableSequence): self.append(arg) else: self.__container.append(arg) - self.__seen_args.add(arg) def extend_direct(self, iterable: T.Iterable[str]) -> None: ''' @@ -675,7 +662,6 @@ class CompilerArgs(collections.abc.MutableSequence): Add two CompilerArgs while taking into account overriding of arguments and while preserving the order of arguments as much as possible ''' - this_round_added = set() # a dict that contains a value, when the value was added this round pre = [] # type: T.List[str] post = [] # type: T.List[str] if not isinstance(args, collections.abc.Iterable): @@ -687,25 +673,20 @@ class CompilerArgs(collections.abc.MutableSequence): dedup = self._can_dedup(arg) if dedup == 1: # Argument already exists and adding a new instance is useless - if arg in self.__seen_args or arg in pre or arg in post: + if arg in self or arg in pre or arg in post: continue - should_prepend = self._should_prepend(arg) if dedup == 2: # Remove all previous occurrences of the arg and add it anew - if arg in self.__seen_args and arg not in this_round_added: # if __seen_args contains arg as well as this_round_added, then its not yet part in self. 
+ if arg in self: self.remove(arg) - if should_prepend: - if arg in pre: - pre.remove(arg) - else: - if arg in post: - post.remove(arg) - if should_prepend: + if arg in pre: + pre.remove(arg) + if arg in post: + post.remove(arg) + if self._should_prepend(arg): pre.append(arg) else: post.append(arg) - self.__seen_args.add(arg) - this_round_added.add(arg) # Insert at the beginning self[:0] = pre # Append to the end -- cgit v1.1 From 032ab3606d7bbb4b40da7871f6193be0dee126a8 Mon Sep 17 00:00:00 2001 From: Marcel Hollerbach Date: Tue, 12 May 2020 10:26:21 +0200 Subject: CompilerArgs: refactor __iadd__ the previous optimizations from 4524088d386d2e2315d8fef6ffedc11d8e9a394a were not relaly good, and not really scaleable, since only the lookup was improved. However, the really heavy calls to remove have not been improved. With this commit we are refactoring CompilerArgs into a data structure which does not use remove at all. This works that we are building a pre and post list, which gets flushed into __container at some point. However, we build pre and post by deduplicating forward. Later on, when we are flushing pre and post into __container, we are deduplicating backwards the list, so we are not changing behaviour here. This overall cuts off 10s of the efl configuration time. Further more this improves configure times on arm devices a lot more, since remove does seem to be a lot slower there. In general this results in the fact that __iadd__ is not within the top 5 of costly functions in generate_single_complie. --- mesonbuild/compilers/compilers.py | 82 ++++++++++++++++++++++++++++++--------- 1 file changed, 64 insertions(+), 18 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 07b07d9..6b6df94 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -14,8 +14,10 @@ import contextlib, os.path, re, tempfile import collections.abc +from collections import deque import itertools import typing as T +from functools import lru_cache from ..linkers import ( GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker, @@ -465,6 +467,47 @@ class CompilerArgs(collections.abc.MutableSequence): iterable: T.Optional[T.Iterable[str]] = None): self.compiler = compiler self.__container = list(iterable) if iterable is not None else [] # type: T.List[str] + self.pre = deque() + self.post = deque() + + # Flush the saved pre and post list into the __container list + # + # This correctly deduplicates the entries after _can_dedup definition + # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot. 
+ def flush_pre_post(self): + pre_flush = deque() + pre_flush_set = set() + post_flush = deque() + post_flush_set = set() + + #The two lists are here walked from the front to the back, in order to not need removals for deduplication + for a in reversed(self.pre): + dedup = self._can_dedup(a) + if a not in pre_flush_set: + pre_flush.appendleft(a) + if dedup == 2: + pre_flush_set.add(a) + for a in reversed(self.post): + dedup = self._can_dedup(a) + if a not in post_flush_set: + post_flush.appendleft(a) + if dedup == 2: + post_flush_set.add(a) + + #pre and post will overwrite every element that is in the container + #only copy over args that are in __container but not in the post flush or pre flush set + + for a in self.__container: + if a not in post_flush_set and a not in pre_flush_set: + pre_flush.append(a) + + self.__container = list(pre_flush) + list(post_flush) + self.pre.clear() + self.post.clear() + + def __iter__(self): + self.flush_pre_post() + return iter(self.__container); @T.overload # noqa: F811 def __getitem__(self, index: int) -> str: # noqa: F811 @@ -475,6 +518,7 @@ class CompilerArgs(collections.abc.MutableSequence): pass def __getitem__(self, index): # noqa: F811 + self.flush_pre_post() return self.__container[index] @T.overload # noqa: F811 @@ -486,21 +530,26 @@ class CompilerArgs(collections.abc.MutableSequence): pass def __setitem__(self, index, value) -> None: # noqa: F811 + self.flush_pre_post() self.__container[index] = value def __delitem__(self, index: T.Union[int, slice]) -> None: + self.flush_pre_post() del self.__container[index] def __len__(self) -> int: - return len(self.__container) + return len(self.__container) + len(self.pre) + len(self.post) def insert(self, index: int, value: str) -> None: + self.flush_pre_post() self.__container.insert(index, value) def copy(self) -> 'CompilerArgs': + self.flush_pre_post() return CompilerArgs(self.compiler, self.__container.copy()) @classmethod + @lru_cache(maxsize=None) def _can_dedup(cls, arg): ''' Returns whether the argument can be safely de-duped. This is dependent @@ -555,6 +604,7 @@ class CompilerArgs(collections.abc.MutableSequence): return 0 @classmethod + @lru_cache(maxsize=None) def _should_prepend(cls, arg): if arg.startswith(cls.prepend_prefixes): return True @@ -568,6 +618,7 @@ class CompilerArgs(collections.abc.MutableSequence): # between static libraries, and for recursively searching for symbols # needed by static libraries that are provided by object files or # shared libraries. + self.flush_pre_post() if copy: new = self.copy() else: @@ -627,6 +678,7 @@ class CompilerArgs(collections.abc.MutableSequence): for absolute paths to libraries, etc, which can always be de-duped safely. 
''' + self.flush_pre_post() if os.path.isabs(arg): self.append(arg) else: @@ -638,6 +690,7 @@ class CompilerArgs(collections.abc.MutableSequence): reordering or de-dup except for absolute paths where the order of include search directories is not relevant ''' + self.flush_pre_post() for elem in iterable: self.append_direct(elem) @@ -653,6 +706,7 @@ class CompilerArgs(collections.abc.MutableSequence): self.extend_direct(lflags) def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() new = self.copy() new += args return new @@ -662,8 +716,7 @@ class CompilerArgs(collections.abc.MutableSequence): Add two CompilerArgs while taking into account overriding of arguments and while preserving the order of arguments as much as possible ''' - pre = [] # type: T.List[str] - post = [] # type: T.List[str] + tmp_pre = deque() if not isinstance(args, collections.abc.Iterable): raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) for arg in args: @@ -673,32 +726,24 @@ class CompilerArgs(collections.abc.MutableSequence): dedup = self._can_dedup(arg) if dedup == 1: # Argument already exists and adding a new instance is useless - if arg in self or arg in pre or arg in post: + if arg in self.__container or arg in self.pre or arg in self.post: continue - if dedup == 2: - # Remove all previous occurrences of the arg and add it anew - if arg in self: - self.remove(arg) - if arg in pre: - pre.remove(arg) - if arg in post: - post.remove(arg) if self._should_prepend(arg): - pre.append(arg) + tmp_pre.appendleft(arg) else: - post.append(arg) - # Insert at the beginning - self[:0] = pre - # Append to the end - self.__container += post + self.post.append(arg) + self.pre.extendleft(tmp_pre) + #pre and post is going to be merged later before a iter call return self def __radd__(self, args: T.Iterable[str]): + self.flush_pre_post() new = CompilerArgs(self.compiler, args) new += self return new def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: + self.flush_pre_post() # Only allow equality checks against other CompilerArgs and lists instances if isinstance(other, CompilerArgs): return self.compiler == other.compiler and self.__container == other.__container @@ -713,6 +758,7 @@ class CompilerArgs(collections.abc.MutableSequence): self.__iadd__(args) def __repr__(self) -> str: + self.flush_pre_post() return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container) class Compiler: -- cgit v1.1 From a2f94ca18b237ccaf072474a1a4405a996e84395 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 31 May 2020 22:27:14 +0200 Subject: cmake: Add more advanced subproject configuration options This is done with the new cmake subprojects options object that is similar to the already exisiting configuration data object. It is consumed by the new `options` kwarg of the cmake.subproject function. 
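To make the merging behaviour concrete, here is a small illustrative Python sketch (not part of the patch) that exercises the TargetOptions container added below in mesonbuild/cmake/common.py. It assumes the series is applied; the target names are borrowed from the test case added later in the series.

    from mesonbuild.cmake import TargetOptions   # export added by this patch

    opts = TargetOptions()
    opts.global_options.set_opt('cpp_std', 'c++11')              # global override
    opts['cmModLib++'].set_opt('cpp_std', 'c++14')                # per-target override
    opts['cmModLib++'].append_args('cpp', ['-DMESON_SPECIAL_FLAG1=1'])

    # Targets without their own entry fall back to the global settings ...
    assert opts.get_override_options('cmTestLib', []) == ['cpp_std=c++11']
    # ... while per-target values win for the named target.
    assert opts.get_override_options('cmModLib++', []) == ['cpp_std=c++14']
    assert opts.get_compile_args('cmModLib++', 'cpp', []) == ['-DMESON_SPECIAL_FLAG1=1']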
--- mesonbuild/cmake/__init__.py | 5 +- mesonbuild/cmake/common.py | 95 ++++++++++++++++++++++++++++++++++ mesonbuild/cmake/interpreter.py | 21 +++++--- mesonbuild/interpreter.py | 12 ++++- mesonbuild/modules/cmake.py | 110 ++++++++++++++++++++++++++++++++++++---- 5 files changed, 223 insertions(+), 20 deletions(-) diff --git a/mesonbuild/cmake/__init__.py b/mesonbuild/cmake/__init__.py index 01cc3f9..db7aefd 100644 --- a/mesonbuild/cmake/__init__.py +++ b/mesonbuild/cmake/__init__.py @@ -24,11 +24,14 @@ __all__ = [ 'CMakeTarget', 'CMakeTraceLine', 'CMakeTraceParser', + 'SingleTargetOptions', + 'TargetOptions', 'parse_generator_expressions', 'language_map', + 'cmake_defines_to_args', ] -from .common import CMakeException +from .common import CMakeException, SingleTargetOptions, TargetOptions, cmake_defines_to_args from .client import CMakeClient from .executor import CMakeExecutor from .fileapi import CMakeFileAPI diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py index e7da0d7..4510b5d 100644 --- a/mesonbuild/cmake/common.py +++ b/mesonbuild/cmake/common.py @@ -60,6 +60,26 @@ def _flags_to_list(raw: str) -> T.List[str]: res = list(filter(lambda x: len(x) > 0, res)) return res +def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]: + res = [] # type: T.List[str] + if not isinstance(raw, list): + raw = [raw] + + for i in raw: + if not isinstance(i, dict): + raise MesonException('Invalid CMake defines. Expected a dict, but got a {}'.format(type(i).__name__)) + for key, val in i.items(): + assert isinstance(key, str) + if isinstance(val, (str, int, float)): + res += ['-D{}={}'.format(key, val)] + elif isinstance(val, bool): + val_str = 'ON' if val else 'OFF' + res += ['-D{}={}'.format(key, val_str)] + else: + raise MesonException('Type "{}" of "{}" is not supported as for a CMake define value'.format(type(val).__name__, key)) + + return res + class CMakeFileGroup: def __init__(self, data: dict): self.defines = data.get('defines', '') @@ -163,3 +183,78 @@ class CMakeConfiguration: mlog.log('Project {}:'.format(idx)) with mlog.nested(): i.log() + +class SingleTargetOptions: + def __init__(self) -> None: + self.opts = {} # type: T.Dict[str, str] + self.lang_args = {} # type: T.Dict[str, T.List[str]] + self.link_args = [] # type: T.List[str] + self.install = 'preserve' + + def set_opt(self, opt: str, val: str) -> None: + self.opts[opt] = val + + def append_args(self, lang: str, args: T.List[str]) -> None: + if lang not in self.lang_args: + self.lang_args[lang] = [] + self.lang_args[lang] += args + + def append_link_args(self, args: T.List[str]) -> None: + self.link_args += args + + def set_install(self, install: bool) -> None: + self.install = 'true' if install else 'false' + + def get_override_options(self, initial: T.List[str]) -> T.List[str]: + res = [] # type: T.List[str] + for i in initial: + opt = i[:i.find('=')] + if opt not in self.opts: + res += [i] + res += ['{}={}'.format(k, v) for k, v in self.opts.items()] + return res + + def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]: + if lang in self.lang_args: + return initial + self.lang_args[lang] + return initial + + def get_link_args(self, initial: T.List[str]) -> T.List[str]: + return initial + self.link_args + + def get_install(self, initial: bool) -> bool: + return {'preserve': initial, 'true': True, 'false': False}[self.install] + +class TargetOptions: + def __init__(self) -> None: + self.global_options = SingleTargetOptions() + self.target_options = {} # type: 
T.Dict[str, SingleTargetOptions] + + def __getitem__(self, tgt: str) -> SingleTargetOptions: + if tgt not in self.target_options: + self.target_options[tgt] = SingleTargetOptions() + return self.target_options[tgt] + + def get_override_options(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_override_options(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_override_options(initial) + return initial + + def get_compile_args(self, tgt: str, lang: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_compile_args(lang, initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_compile_args(lang, initial) + return initial + + def get_link_args(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_link_args(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_link_args(initial) + return initial + + def get_install(self, tgt: str, initial: bool) -> bool: + initial = self.global_options.get_install(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_install(initial) + return initial diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index a5bf545..57e6e1d 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -17,7 +17,7 @@ import pkg_resources -from .common import CMakeException, CMakeTarget +from .common import CMakeException, CMakeTarget, TargetOptions from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel from .fileapi import CMakeFileAPI from .executor import CMakeExecutor @@ -994,7 +994,7 @@ class CMakeInterpreter: mlog.log('CMake project', mlog.bold(self.project_name), 'has', mlog.bold(str(len(self.targets) + len(self.custom_targets))), 'build targets.') - def pretend_to_be_meson(self) -> CodeBlockNode: + def pretend_to_be_meson(self, options: TargetOptions) -> CodeBlockNode: if not self.project_name: raise CMakeException('CMakeInterpreter was not analysed') @@ -1158,21 +1158,26 @@ class CMakeInterpreter: dep_var = '{}_dep'.format(tgt.name) tgt_var = tgt.name + install_tgt = options.get_install(tgt.cmake_name, tgt.install) + # Generate target kwargs tgt_kwargs = { - 'build_by_default': tgt.install, - 'link_args': tgt.link_flags + tgt.link_libraries, + 'build_by_default': install_tgt, + 'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries), 'link_with': link_with, 'include_directories': id_node(inc_var), - 'install': tgt.install, - 'install_dir': tgt.install_dir, - 'override_options': tgt.override_options, + 'install': install_tgt, + 'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options), 'objects': [method(x, 'extract_all_objects') for x in objec_libs], } + # Only set if installed and only override if it is set + if install_tgt and tgt.install_dir: + tgt_kwargs['install_dir'] = tgt.install_dir + # Handle compiler args for key, val in tgt.compile_opts.items(): - tgt_kwargs['{}_args'.format(key)] = val + tgt_kwargs['{}_args'.format(key)] = options.get_compile_args(tgt.cmake_name, key, val) # Handle -fPCI, etc if tgt_func == 'executable': diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index b8d4fec..740b0bc 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2440,7 +2440,7 @@ class Interpreter(InterpreterBase): if isinstance(item, build.CustomTarget): return 
CustomTargetHolder(item, self) - elif isinstance(item, (int, str, bool, Disabler)) or item is None: + elif isinstance(item, (int, str, bool, Disabler, InterpreterObject)) or item is None: return item elif isinstance(item, build.Executable): return ExecutableHolder(item, self) @@ -2851,13 +2851,21 @@ external dependencies (including libraries) must go to "dependencies".''') with mlog.nested(): new_build = self.build.copy() prefix = self.coredata.builtins['prefix'].value + + from .modules.cmake import CMakeSubprojectOptions + options = kwargs.get('options', CMakeSubprojectOptions()) + if not isinstance(options, CMakeSubprojectOptions): + raise InterpreterException('"options" kwarg must be CMakeSubprojectOptions' + ' object (created by cmake.subproject_options())') + cmake_options = mesonlib.stringlistify(kwargs.get('cmake_options', [])) + cmake_options += options.cmake_options cm_int = CMakeInterpreter(new_build, subdir, subdir_abs, prefix, new_build.environment, self.backend) cm_int.initialise(cmake_options) cm_int.analyse() # Generate a meson ast and execute it with the normal do_subproject_meson - ast = cm_int.pretend_to_be_meson() + ast = cm_int.pretend_to_be_meson(options.target_options) mlog.log() with mlog.nested(): diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py index 6c4098b..dcbeda8 100644 --- a/mesonbuild/modules/cmake.py +++ b/mesonbuild/modules/cmake.py @@ -14,12 +14,28 @@ import re import os, os.path, pathlib import shutil +import typing as T from . import ExtensionModule, ModuleReturnValue from .. import build, dependencies, mesonlib, mlog -from ..interpreterbase import permittedKwargs, FeatureNew, stringArgs, InterpreterObject, ObjectHolder, noPosargs +from ..cmake import SingleTargetOptions, TargetOptions, cmake_defines_to_args from ..interpreter import ConfigurationDataHolder, InterpreterException, SubprojectHolder +from ..interpreterbase import ( + InterpreterObject, + ObjectHolder, + + FeatureNew, + FeatureNewKwargs, + FeatureDeprecatedKwargs, + + stringArgs, + permittedKwargs, + noPosargs, + noKwargs, + + InvalidArguments, +) COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion'] @@ -82,42 +98,107 @@ class CMakeSubprojectHolder(InterpreterObject, ObjectHolder): assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']])) return res - @permittedKwargs({}) + @noKwargs + @stringArgs def get_variable(self, args, kwargs): return self.held_object.get_variable_method(args, kwargs) - @permittedKwargs({}) + @noKwargs + @stringArgs def dependency(self, args, kwargs): info = self._args_to_info(args) return self.get_variable([info['dep']], kwargs) - @permittedKwargs({}) + @noKwargs + @stringArgs def include_directories(self, args, kwargs): info = self._args_to_info(args) return self.get_variable([info['inc']], kwargs) - @permittedKwargs({}) + @noKwargs + @stringArgs def target(self, args, kwargs): info = self._args_to_info(args) return self.get_variable([info['tgt']], kwargs) - @permittedKwargs({}) + @noKwargs + @stringArgs def target_type(self, args, kwargs): info = self._args_to_info(args) return info['func'] @noPosargs - @permittedKwargs({}) + @noKwargs def target_list(self, args, kwargs): return self.held_object.cm_interpreter.target_list() @noPosargs - @permittedKwargs({}) + @noKwargs @FeatureNew('CMakeSubproject.found()', '0.53.2') def found_method(self, args, kwargs): return self.held_object is not None +class CMakeSubprojectOptions(InterpreterObject): + def __init__(self) -> None: + super().__init__() + 
self.cmake_options = [] # type: T.List[str] + self.target_options = TargetOptions() + + self.methods.update( + { + 'add_cmake_defines': self.add_cmake_defines, + 'set_override_option': self.set_override_option, + 'set_install': self.set_install, + 'append_compile_args': self.append_compile_args, + 'append_link_args': self.append_link_args, + 'clear': self.clear, + } + ) + + def _get_opts(self, kwargs: dict) -> SingleTargetOptions: + if 'target' in kwargs: + return self.target_options[kwargs['target']] + return self.target_options.global_options + + @noKwargs + def add_cmake_defines(self, args, kwargs) -> None: + self.cmake_options += cmake_defines_to_args(args) + + @stringArgs + @permittedKwargs({'target'}) + def set_override_option(self, args, kwargs) -> None: + if len(args) != 2: + raise InvalidArguments('set_override_option takes exactly 2 positional arguments') + self._get_opts(kwargs).set_opt(args[0], args[1]) + + @permittedKwargs({'target'}) + def set_install(self, args, kwargs) -> None: + if len(args) != 1 or not isinstance(args[0], bool): + raise InvalidArguments('set_install takes exactly 1 boolean argument') + self._get_opts(kwargs).set_install(args[0]) + + @stringArgs + @permittedKwargs({'target'}) + def append_compile_args(self, args, kwargs) -> None: + if len(args) < 2: + raise InvalidArguments('append_compile_args takes at least 2 positional arguments') + self._get_opts(kwargs).append_args(args[0], args[1:]) + + @stringArgs + @permittedKwargs({'target'}) + def append_link_args(self, args, kwargs) -> None: + if not args: + raise InvalidArguments('append_link_args takes at least 1 positional argument') + self._get_opts(kwargs).append_link_args(args) + + @noPosargs + @noKwargs + def clear(self, args, kwargs) -> None: + self.cmake_options.clear() + self.target_options = TargetOptions() + + class CmakeModule(ExtensionModule): cmake_detected = False cmake_root = None @@ -287,16 +368,27 @@ class CmakeModule(ExtensionModule): return res @FeatureNew('subproject', '0.51.0') - @permittedKwargs({'cmake_options', 'required'}) + @FeatureNewKwargs('subproject', '0.55.0', ['options']) + @FeatureDeprecatedKwargs('subproject', '0.55.0', ['cmake_options']) + @permittedKwargs({'cmake_options', 'required', 'options'}) @stringArgs def subproject(self, interpreter, state, args, kwargs): if len(args) != 1: raise InterpreterException('Subproject takes exactly one argument') + if 'cmake_options' in kwargs and 'options' in kwargs: + raise InterpreterException('"options" cannot be used together with "cmake_options"') dirname = args[0] subp = interpreter.do_subproject(dirname, 'cmake', kwargs) if not subp.held_object: return subp return CMakeSubprojectHolder(subp, dirname) + @FeatureNew('subproject_options', '0.55.0') + @noKwargs + @noPosargs + def subproject_options(self, state, args, kwargs) -> ModuleReturnValue: + opts = CMakeSubprojectOptions() + return ModuleReturnValue(opts, []) + def initialize(*args, **kwargs): return CmakeModule(*args, **kwargs) -- cgit v1.1 From 08e838a235f5b1207694463dbd619dcebdc4eb61 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 31 May 2020 22:30:13 +0200 Subject: cmake: added test case --- test cases/cmake/19 advanced options/main.cpp | 18 +++++++++++++ test cases/cmake/19 advanced options/meson.build | 24 +++++++++++++++++ .../subprojects/cmOpts/CMakeLists.txt | 18 +++++++++++++ .../subprojects/cmOpts/cmMod.cpp | 31 ++++++++++++++++++++++ .../subprojects/cmOpts/cmMod.hpp | 14 ++++++++++ .../subprojects/cmOpts/cmTest.cpp | 25 +++++++++++++++++ 
.../subprojects/cmOpts/cmTest.hpp | 3 +++ .../subprojects/cmOpts/main.cpp | 10 +++++++ test cases/cmake/19 advanced options/test.json | 8 ++++++ 9 files changed, 151 insertions(+) create mode 100644 test cases/cmake/19 advanced options/main.cpp create mode 100644 test cases/cmake/19 advanced options/meson.build create mode 100644 test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt create mode 100644 test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp create mode 100644 test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp create mode 100644 test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp create mode 100644 test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp create mode 100644 test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp create mode 100644 test cases/cmake/19 advanced options/test.json
diff --git a/test cases/cmake/19 advanced options/main.cpp b/test cases/cmake/19 advanced options/main.cpp new file mode 100644 index 0000000..6a071cc --- /dev/null +++ b/test cases/cmake/19 advanced options/main.cpp @@ -0,0 +1,18 @@ +#include <iostream> +#include <cmMod.hpp> +#include <cmTest.hpp> + +using namespace std; + +int main(void) { + cmModClass obj("Hello"); + cout << obj.getStr() << endl; + + int v1 = obj.getInt(); + int v2 = getTestInt(); + if (v1 != ((1 + v2) * 2)) { + cerr << "Number test failed" << endl; + return 1; + } + return 0; +}
diff --git a/test cases/cmake/19 advanced options/meson.build b/test cases/cmake/19 advanced options/meson.build new file mode 100644 index 0000000..7e2d4e8 --- /dev/null +++ b/test cases/cmake/19 advanced options/meson.build @@ -0,0 +1,24 @@ +project('cmake_set_opt', ['c', 'cpp']) + +cm = import('cmake') +opts = cm.subproject_options() + +opts.add_cmake_defines({'SOME_CMAKE_VAR': 'something', 'SOME_OTHER_VAR': true}) + +opts.set_override_option('cpp_std', 'c++11') # Global is C++11 +opts.set_override_option('cpp_std', 'c++14', target: 'cmModLib++') # Override it with C++14 for cmModLib++ + +opts.append_compile_args('cpp', '-DMESON_GLOBAL_FLAG=1') +opts.append_compile_args('cpp', ['-DMESON_SPECIAL_FLAG1=1', ['-DMESON_SPECIAL_FLAG2=1']], target: 'cmModLib++') +opts.append_compile_args('cpp', '-DMESON_MAGIC_INT=42', target: 'cmModLib++') +opts.append_compile_args('cpp', [[[['-DMESON_MAGIC_INT=20']]]], target: 'cmTestLib') + +opts.set_install(false) +opts.set_install(true, target: 'testEXE') + +sp = cm.subproject('cmOpts', options: opts) +dep1 = sp.dependency('cmModLib++') +dep2 = sp.dependency('cmTestLib') + +exe1 = executable('main', ['main.cpp'], dependencies: [dep1, dep2]) +test('test1', exe1)
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt b/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt new file mode 100644 index 0000000..584841e --- /dev/null +++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/CMakeLists.txt @@ -0,0 +1,18 @@ +cmake_minimum_required(VERSION 3.7) + +project(CmOpts) + +set(CMAKE_CXX_STANDARD 98) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + +if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something") + message(FATAL_ERROR "Setting the CMake var failed") +endif() + +add_library(cmModLib++ STATIC cmMod.cpp) +add_library(cmTestLib STATIC cmTest.cpp) +add_executable(testEXE main.cpp) + +target_link_libraries(testEXE cmModLib++) + +install(TARGETS cmTestLib ARCHIVE DESTINATION lib RUNTIME DESTINATION bin)
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp new file mode 100644 index 0000000..7651b60 --- /dev/null +++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.cpp @@ -0,0 +1,31 @@ +#include "cmMod.hpp" + +using namespace std; + +#if __cplusplus < 201402L +#error "At least C++14 is required" +#endif + +#ifndef MESON_GLOBAL_FLAG +#error "MESON_GLOBAL_FLAG was not set" +#endif + +#ifndef MESON_SPECIAL_FLAG1 +#error "MESON_SPECIAL_FLAG1 was not set" +#endif + +#ifndef MESON_SPECIAL_FLAG2 +#error "MESON_SPECIAL_FLAG2 was not set" +#endif + +cmModClass::cmModClass(string foo) { + str = foo + " World"; +} + +string cmModClass::getStr() const { + return str; +} + +int cmModClass::getInt() const { + return MESON_MAGIC_INT; +}
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp new file mode 100644 index 0000000..0748936 --- /dev/null +++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmMod.hpp @@ -0,0 +1,14 @@ +#pragma once + +#include <string> + +class cmModClass { +private: + std::string str; + +public: + cmModClass(std::string foo); + + std::string getStr() const; + int getInt() const; +};
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp new file mode 100644 index 0000000..a00cdcd --- /dev/null +++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.cpp @@ -0,0 +1,25 @@ +#include "cmTest.hpp" + +#if __cplusplus < 201103L +#error "At least C++11 is required" +#endif + +#if __cplusplus >= 201402L +#error "At most C++11 is required" +#endif + +#ifndef MESON_GLOBAL_FLAG +#error "MESON_GLOBAL_FLAG was not set" +#endif + +#ifdef MESON_SPECIAL_FLAG1 +#error "MESON_SPECIAL_FLAG1 *was* set" +#endif + +#ifdef MESON_SPECIAL_FLAG2 +#error "MESON_SPECIAL_FLAG2 *was* set" +#endif + +int getTestInt() { + return MESON_MAGIC_INT; +}
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp new file mode 100644 index 0000000..5a3bf7b --- /dev/null +++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/cmTest.hpp @@ -0,0 +1,3 @@ +#pragma once + +int getTestInt();
diff --git a/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp b/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp new file mode 100644 index 0000000..497d1ce --- /dev/null +++ b/test cases/cmake/19 advanced options/subprojects/cmOpts/main.cpp @@ -0,0 +1,10 @@ +#include <iostream> +#include "cmMod.hpp" + +using namespace std; + +int main(void) { + cmModClass obj("Hello (LIB TEST)"); + cout << obj.getStr() << endl; + return 0; +}
diff --git a/test cases/cmake/19 advanced options/test.json b/test cases/cmake/19 advanced options/test.json new file mode 100644 index 0000000..e2d9c05 --- /dev/null +++ b/test cases/cmake/19 advanced options/test.json @@ -0,0 +1,8 @@ +{ + "installed": [ + {"type": "exe", "file": "usr/bin/cm_testEXE"} + ], + "tools": { + "cmake": ">=3.11" + } +} -- cgit v1.1
From ede2cd556c0568dabcb8826fe87bd00c376b797e Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 31 May 2020 22:30:24 +0200 Subject: cmake: added docs
--- docs/markdown/CMake-module.md | 77 +++++++++++++++++++++++++++++++++++++++-- docs/markdown/snippets/cmake.md | 17 +++++++++ 2 files changed, 91 insertions(+), 3 deletions(-) create mode 100644 docs/markdown/snippets/cmake.md diff --git a/docs/markdown/CMake-module.md
b/docs/markdown/CMake-module.md index 7103608..fc6157e 100644 --- a/docs/markdown/CMake-module.md +++ b/docs/markdown/CMake-module.md @@ -48,8 +48,6 @@ The `subproject` method is almost identical to the normal meson `subproject` function. The only difference is that a CMake project instead of a meson project is configured. -Also, project specific CMake options can be added with the `cmake_options` key. - The returned `sub_proj` supports the same options as a "normal" subproject. Meson automatically detects CMake build targets, which can be accessed with the methods listed [below](#subproject-object). @@ -87,6 +85,49 @@ It should be noted that not all projects are guaranteed to work. The safest approach would still be to create a `meson.build` for the subprojects in question. +### Configuration options + +*New in meson 0.55.0* + +Meson also supports passing configuration options to CMake and overriding +certain build details extracted from the CMake subproject. + +```meson +cmake = import('cmake') +opt_var = cmake.subproject_options() + +# Call CMake with `-DSOME_OTHER_VAR=ON` +opt_var.add_cmake_defines({'SOME_OTHER_VAR': true}) + +# Globally override the C++ standard to c++11 +opt_var.set_override_option('cpp_std', 'c++11') + +# Override the previous global C++ standard +# with c++14 only for the CMake target someLib +opt_var.set_override_option('cpp_std', 'c++14', target: 'someLib') + +sub_pro = cmake.subproject('someLibProject', options: opt_var) + +# Further changes to opt_var have no effect +``` + +See [the CMake options object](#cmake-options-object) for a complete reference +of all supported functions. + +The CMake configuration options object is very similar to the +[configuration data object](Reference-manual.md#configuration-data-object) object +returned by [`configuration_data`](Reference-manual.md#configuration_data). It +is generated by the `subproject_options` function + +All configuration options have to be set *before* the subproject is configured +and must be passed to the `subproject` method via the `options` key. Altering +the configuration object won't have any effect on previous `cmake.subproject` +calls. + +In earlier meson versions CMake command-line parameters could be set with the +`cmake_options` kwarg. However, this feature is deprecated since 0.55.0 and only +kept for compatibility. It will not work together with the `options` kwarg. + ### `subproject` object This object is returned by the `subproject` function described above @@ -103,7 +144,37 @@ and supports the following methods: the subproject. Usually `dependency()` or `target()` should be preferred to extract build targets. - `found` returns true if the subproject is available, otherwise false - *new in in 0.53.2* + *new in meson 0.53.2* + +### `cmake options` object + +This object is returned by the `subproject_options()` function and consumed by +the `options` kwarg of the `subproject` function. The following methods are +supported: + + - `add_cmake_defines({'opt1': val1, ...})` add additional CMake commandline defines + - `set_override_option(opt, val)` set specific [build options](Build-options.md) + for targets. 
This will effectively add `opt=val` to the `override_options` + array of the [build target](Reference-manual.md#executable) + - `set_install(bool)` override whether targets should be installed or not + - `append_compile_args(lang, arg1, ...)` append compile flags for a specific + language to the targets + - `append_link_args(arg1, ...)` append linker args to the targets + - `clear()` reset all data in the `cmake options` object + +The methods `set_override_option`, `set_install`, `append_compile_args` and +`append_link_args` support the optional `target` kwarg. If specified, the set +options affect the specific target. The effect of the option is global for the +subproject otherwise. + +If, for instance, `opt_var.set_install(false)` is called, no target will be +installed regardless of what is set by CMake. However, it is still possible to +install specific targets (here `foo`) by setting the `target` kwarg: +`opt_var.set_install(true, target: 'foo')` + +Options that are not set won't affect the generated subproject. So, if for +instance, `set_install` was not called then the values extracted from CMake will +be used. ## CMake configuration files
diff --git a/docs/markdown/snippets/cmake.md b/docs/markdown/snippets/cmake.md new file mode 100644 index 0000000..16da78e --- /dev/null +++ b/docs/markdown/snippets/cmake.md @@ -0,0 +1,17 @@ +## Configure CMake subprojects with meson.subproject_options + +Meson now supports passing configuration options to CMake and overriding +certain build details extracted from the CMake subproject. + +The new CMake configuration options object is very similar to the +[configuration data object](Reference-manual.md#configuration-data-object) +returned by [`configuration_data`](Reference-manual.md#configuration_data). It +is generated by the `subproject_options` function. + +All configuration options have to be set *before* the subproject is configured +and must be passed to the `subproject` method via the `options` key. Altering +the configuration object won't have any effect on previous `cmake.subproject` +calls. + +**Note:** The `cmake_options` kwarg for the `subproject` function is now +deprecated since it is replaced by the new `options` system. -- cgit v1.1
From 0e98a7679a658b2a53a5de578f202a022c69f0c1 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Mon, 1 Jun 2020 12:25:05 +0200 Subject: cmake: Skip MSVC like compilers, since C++11 is not supported
--- test cases/cmake/19 advanced options/meson.build | 5 +++++ 1 file changed, 5 insertions(+)
diff --git a/test cases/cmake/19 advanced options/meson.build b/test cases/cmake/19 advanced options/meson.build index 7e2d4e8..6332ca4 100644 --- a/test cases/cmake/19 advanced options/meson.build +++ b/test cases/cmake/19 advanced options/meson.build @@ -1,5 +1,10 @@ project('cmake_set_opt', ['c', 'cpp']) +comp = meson.get_compiler('cpp') +if comp.get_argument_syntax() == 'msvc' + error('MESON_SKIP_TEST: MSVC is not supported because it does not support C++11') +endif + cm = import('cmake') opts = cm.subproject_options() -- cgit v1.1
From 702d03e426b76c5a8cb86389f4507aea43a9a35b Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Mon, 15 Oct 2018 15:52:05 +0100 Subject: Add a test case for very long command lines
This exercises commands of about 20K in length Also test short command lines to make sure they don't regress.
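For orientation, a standalone Python sketch of what the two helper scripts added below generate (illustrative only; the temporary directory is just for the sketch):

    import os, tempfile

    outdir = tempfile.mkdtemp()
    # seq.py 1 256 prints the numbers 1..255, one per line
    for i in range(1, 256):
        # codegen.py <n> <output> writes one trivial function per generated file
        with open(os.path.join(outdir, 'test{}.c'.format(i)), 'w') as f:
            f.write('int func{n}(void) {{ return {n}; }}\n'.format(n=i))

Linking all of the resulting objects into a single target is what pushes the command line to roughly 20K characters; the code in each file is deliberately trivial.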
--- .../common/234 very long commmand line/codegen.py | 6 ++++++ .../common/234 very long commmand line/main.c | 5 +++++ .../common/234 very long commmand line/meson.build | 21 +++++++++++++++++++++ .../common/234 very long commmand line/seq.py | 6 ++++++ 4 files changed, 38 insertions(+) create mode 100755 test cases/common/234 very long commmand line/codegen.py create mode 100644 test cases/common/234 very long commmand line/main.c create mode 100644 test cases/common/234 very long commmand line/meson.build create mode 100755 test cases/common/234 very long commmand line/seq.py diff --git a/test cases/common/234 very long commmand line/codegen.py b/test cases/common/234 very long commmand line/codegen.py new file mode 100755 index 0000000..4de78ce --- /dev/null +++ b/test cases/common/234 very long commmand line/codegen.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys + +with open(sys.argv[2], 'w') as f: + print('int func{n}(void) {{ return {n}; }}'.format(n=sys.argv[1]), file=f) diff --git a/test cases/common/234 very long commmand line/main.c b/test cases/common/234 very long commmand line/main.c new file mode 100644 index 0000000..dbb64a8 --- /dev/null +++ b/test cases/common/234 very long commmand line/main.c @@ -0,0 +1,5 @@ +int main(int argc, char **argv) { + (void) argc; + (void) argv; + return 0; +} diff --git a/test cases/common/234 very long commmand line/meson.build b/test cases/common/234 very long commmand line/meson.build new file mode 100644 index 0000000..7a316e1 --- /dev/null +++ b/test cases/common/234 very long commmand line/meson.build @@ -0,0 +1,21 @@ +project('very long command lines', 'c') + +seq = run_command('seq.py', '1', '256').stdout().strip().split('\n') + +sources = [] +codegen = find_program('codegen.py') + +foreach i : seq + sources += custom_target('codegen' + i, + command: [codegen, i, '@OUTPUT@'], + output: 'test' + i + '.c') +endforeach + +shared_library('sharedlib', sources) +static_library('staticlib', sources) +executable('app', 'main.c', sources) + +# Also test short commandlines to make sure that doesn't regress +shared_library('sharedlib0', sources[0]) +static_library('staticlib0', sources[0]) +executable('app0', 'main.c', sources[0]) diff --git a/test cases/common/234 very long commmand line/seq.py b/test cases/common/234 very long commmand line/seq.py new file mode 100755 index 0000000..637bf57 --- /dev/null +++ b/test cases/common/234 very long commmand line/seq.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 + +import sys + +for i in range(int(sys.argv[1]), int(sys.argv[2])): + print(i) -- cgit v1.1 From 50f98f3726a920a858bab5a7e7d6334813fc6048 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Tue, 30 Apr 2019 12:44:45 +0100 Subject: ninja: Rename 'rule' -> 'rulename' in NinjaBuildElement Rename 'rule' to 'rulename' in the NinjaBuildElement class, we're going to want a reference to the NinjaRule object as well. 
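The intent of the rename, sketched in isolation (a simplified illustration; the real class takes several more parameters): 'rulename' stays the plain string emitted into build.ninja, while a later patch in this series attaches the corresponding NinjaRule object as 'rule' inside add_build().

    class NinjaBuildElement:
        def __init__(self, rulename):
            assert isinstance(rulename, str)
            self.rulename = rulename   # string written into the build line
            self.rule = None           # NinjaRule object, attached later by add_build()

    e = NinjaBuildElement('c_COMPILER')
    print('build foo.o: {} foo.c'.format(e.rulename))   # build foo.o: c_COMPILER foo.c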
--- mesonbuild/backend/ninjabackend.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 69e7618..e6eb0ec 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -121,14 +121,14 @@ class NinjaRule: outfile.write('\n') class NinjaBuildElement: - def __init__(self, all_outputs, outfilenames, rule, infilenames, implicit_outs=None): + def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None): self.implicit_outfilenames = implicit_outs or [] if isinstance(outfilenames, str): self.outfilenames = [outfilenames] else: self.outfilenames = outfilenames - assert(isinstance(rule, str)) - self.rule = rule + assert(isinstance(rulename, str)) + self.rulename = rulename if isinstance(infilenames, str): self.infilenames = [infilenames] else: @@ -166,7 +166,7 @@ class NinjaBuildElement: implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames]) if implicit_outs: implicit_outs = ' | ' + implicit_outs - line = 'build {}{}: {} {}'.format(outs, implicit_outs, self.rule, ins) + line = 'build {}{}: {} {}'.format(outs, implicit_outs, self.rulename, ins) if len(self.deps) > 0: line += ' | ' + ' '.join([ninja_quote(x, True) for x in self.deps]) if len(self.orderdeps) > 0: @@ -901,8 +901,8 @@ int dummy; self.build_elements.append(build) # increment rule refcount - if build.rule != 'phony': - self.ruledict[build.rule].refcount += 1 + if build.rulename != 'phony': + self.ruledict[build.rulename].refcount += 1 def write_rules(self, outfile): for r in self.rules: -- cgit v1.1
From f9c03dfd292913d4d6d1560911ce8b58ab29f22e Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Mon, 15 Oct 2018 15:36:50 +0100 Subject: ninja: Only use response files when needed
Writing rsp files on Windows is moderately expensive, so only use them when the command line is long enough to need them. This also makes the output of 'ninja -v' useful more often (something like 'cl @exec@exe/main.c.obj.rsp' is not very useful if you don't have the response file to look at) For a rule where using a rspfile is possible, write rspfile and non-rspfile versions of that rule. Choose which one to use for each build statement, depending on the anticipated length of the command line.
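A condensed sketch of the selection logic this patch introduces (the 4096-byte threshold and the '_RSP' rule-name suffix come from the diff below; the rule names and numbers are made up for illustration):

    rsp_threshold = 4096  # conservative Windows command-line limit, as in the patch

    def choose_rule(rulename, supports_rsp, estimated_len):
        # 'phony' is built into ninja and never uses a response file
        if rulename == 'phony' or not supports_rsp:
            return rulename
        # long command lines go through the rspfile variant of the rule
        return rulename + '_RSP' if estimated_len >= rsp_threshold else rulename

    print(choose_rule('c_LINKER', True, 20000))  # c_LINKER_RSP
    print(choose_rule('c_LINKER', True, 200))    # c_LINKER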
--- mesonbuild/backend/ninjabackend.py | 92 +++++++++++++++++++++++++++++--------- 1 file changed, 72 insertions(+), 20 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index e6eb0ec..b9378b8 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -57,6 +57,9 @@ else: execute_wrapper = [] rmfile_prefix = ['rm', '-f', '{}', '&&'] +# a conservative estimate of the command-line length limit on windows +rsp_threshold = 4096 + def ninja_quote(text, is_build_line=False): if is_build_line: qcs = ('$', ' ', ':') @@ -101,24 +104,54 @@ class NinjaRule: if not self.refcount: return - outfile.write('rule {}\n'.format(self.name)) - if self.rspable: - outfile.write(' command = {} @$out.rsp\n'.format(' '.join(self.command))) - outfile.write(' rspfile = $out.rsp\n') - outfile.write(' rspfile_content = {}\n'.format(' '.join(self.args))) - else: - outfile.write(' command = {}\n'.format(' '.join(self.command + self.args))) - if self.deps: - outfile.write(' deps = {}\n'.format(self.deps)) - if self.depfile: - outfile.write(' depfile = {}\n'.format(self.depfile)) - outfile.write(' description = {}\n'.format(self.description)) - if self.extra: - for l in self.extra.split('\n'): - outfile.write(' ') - outfile.write(l) - outfile.write('\n') - outfile.write('\n') + for rsp in [''] + (['_RSP'] if self.rspable else []): + outfile.write('rule {}{}\n'.format(self.name, rsp)) + if self.rspable: + outfile.write(' command = {} @$out.rsp\n'.format(' '.join(self.command))) + outfile.write(' rspfile = $out.rsp\n') + outfile.write(' rspfile_content = {}\n'.format(' '.join(self.args))) + else: + outfile.write(' command = {}\n'.format(' '.join(self.command + self.args))) + if self.deps: + outfile.write(' deps = {}\n'.format(self.deps)) + if self.depfile: + outfile.write(' depfile = {}\n'.format(self.depfile)) + outfile.write(' description = {}\n'.format(self.description)) + if self.extra: + for l in self.extra.split('\n'): + outfile.write(' ') + outfile.write(l) + outfile.write('\n') + outfile.write('\n') + + def length_estimate(self, infiles, outfiles, elems): + # determine variables + # this order of actions only approximates ninja's scoping rules, as + # documented at: https://ninja-build.org/manual.html#ref_scope + ninja_vars = {} + for e in elems: + (name, value) = e + ninja_vars[name] = value + ninja_vars['deps'] = self.deps + ninja_vars['depfile'] = self.depfile + ninja_vars['in'] = infiles + ninja_vars['out'] = outfiles + + # expand variables in command (XXX: this ignores any escaping/quoting + # that NinjaBuildElement.write() might do) + command = ' '.join(self.command + self.args) + expanded_command = '' + for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command): + chunk = m.group() + if chunk.startswith('$'): + chunk = chunk[1:] + chunk = re.sub(r'{(.*)}', r'\1', chunk) + chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty + chunk = ' '.join(chunk) + expanded_command += chunk + + # determine command length + return len(expanded_command) class NinjaBuildElement: def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None): @@ -159,6 +192,17 @@ class NinjaBuildElement: elems = [elems] self.elems.append((name, elems)) + def _should_use_rspfile(self, infiles, outfiles): + # 'phony' is a rule built-in to ninja + if self.rulename == 'phony': + return False + + if not self.rule.rspable: + return False + + return self.rule.length_estimate(infiles, outfiles, + self.elems) >= rsp_threshold + 
def write(self, outfile): self.check_outputs() ins = ' '.join([ninja_quote(i, True) for i in self.infilenames]) @@ -166,7 +210,12 @@ class NinjaBuildElement: implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames]) if implicit_outs: implicit_outs = ' | ' + implicit_outs - line = 'build {}{}: {} {}'.format(outs, implicit_outs, self.rulename, ins) + if self._should_use_rspfile(ins, outs): + rulename = self.rulename + '_RSP' + mlog.log("Command line for building %s is long, using a response file" % self.outfilenames) + else: + rulename = self.rulename + line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins) if len(self.deps) > 0: line += ' | ' + ' '.join([ninja_quote(x, True) for x in self.deps]) if len(self.orderdeps) > 0: @@ -900,10 +949,13 @@ int dummy; def add_build(self, build): self.build_elements.append(build) - # increment rule refcount if build.rulename != 'phony': + # increment rule refcount self.ruledict[build.rulename].refcount += 1 + # reference rule + build.rule = self.ruledict[build.rulename] + def write_rules(self, outfile): for r in self.rules: r.write(outfile) -- cgit v1.1 From 9967a2276db4a126963554e259c210e16b4c2934 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 10 Apr 2019 12:57:05 +0100 Subject: ninja: Expose response file rules in compdb Possibly this should now be done by marking rules as being wanted in compdb, rather than listing the rule names... --- mesonbuild/backend/ninjabackend.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index b9378b8..6a8a2ec 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -399,10 +399,14 @@ int dummy; # http://clang.llvm.org/docs/JSONCompilationDatabase.html def generate_compdb(self): rules = [] + # TODO: Rather than an explicit list here, rules could be marked in the + # rule store as being wanted in compdb for for_machine in MachineChoice: for lang in self.environment.coredata.compilers[for_machine]: - rules += [self.get_compiler_rule_name(lang, for_machine)] - rules += [self.get_pch_rule_name(lang, for_machine)] + rules += [ "%s%s" % (rule, ext) for rule in [self.get_compiler_rule_name(lang, for_machine)] + for ext in ['', '_RSP']] + rules += [ "%s%s" % (rule, ext) for rule in [self.get_pch_rule_name(lang, for_machine)] + for ext in ['', '_RSP']] compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else [] ninja_compdb = [self.ninja_command, '-t', 'compdb'] + compdb_options + rules builddir = self.environment.get_build_dir() -- cgit v1.1 From 92ee8932cd0b7754d491d2a6cedbba19ee55ed9d Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 10 Apr 2019 14:49:29 +0100 Subject: ninja: Refcount rsp and non-rsp rule usage separately We need to count rsp and non-rsp references separately, which we need to do after build statement variables have been set so we can tell the difference, which introduces a bit of complexity. 
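A minimal sketch of why the two counters are kept apart (it mirrors the rule_iter() generator in NinjaRule.write() below; the rule name is made up): each variant of a rule is only written out if at least one build statement actually referenced it.

    class Rule:
        def __init__(self, name):
            self.name = name
            self.refcount = 0       # plain uses
            self.rsprefcount = 0    # response-file uses

        def variants_to_write(self):
            if self.refcount:
                yield self.name
            if self.rsprefcount:
                yield self.name + '_RSP'

    r = Rule('c_LINKER')
    r.rsprefcount += 1                      # only long command lines were seen
    print(list(r.variants_to_write()))      # ['c_LINKER_RSP']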
--- mesonbuild/backend/ninjabackend.py | 36 ++++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 6a8a2ec..ef3fc45 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -99,14 +99,18 @@ class NinjaRule: self.extra = extra self.rspable = rspable # if a rspfile can be used self.refcount = 0 + self.rsprefcount = 0 def write(self, outfile): - if not self.refcount: - return + def rule_iter(): + if self.refcount: + yield '' + if self.rsprefcount: + yield '_RSP' - for rsp in [''] + (['_RSP'] if self.rspable else []): + for rsp in rule_iter(): outfile.write('rule {}{}\n'.format(self.name, rsp)) - if self.rspable: + if rsp == '_RSP': outfile.write(' command = {} @$out.rsp\n'.format(' '.join(self.command))) outfile.write(' rspfile = $out.rsp\n') outfile.write(' rspfile_content = {}\n'.format(' '.join(self.args))) @@ -192,7 +196,7 @@ class NinjaBuildElement: elems = [elems] self.elems.append((name, elems)) - def _should_use_rspfile(self, infiles, outfiles): + def _should_use_rspfile(self): # 'phony' is a rule built-in to ninja if self.rulename == 'phony': return False @@ -200,9 +204,20 @@ class NinjaBuildElement: if not self.rule.rspable: return False - return self.rule.length_estimate(infiles, outfiles, + infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames]) + outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames]) + + return self.rule.length_estimate(infilenames, + outfilenames, self.elems) >= rsp_threshold + def count_rule_references(self): + if self.rulename != 'phony': + if self._should_use_rspfile(): + self.rule.rsprefcount += 1 + else: + self.rule.refcount += 1 + def write(self, outfile): self.check_outputs() ins = ' '.join([ninja_quote(i, True) for i in self.infilenames]) @@ -210,7 +225,7 @@ class NinjaBuildElement: implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames]) if implicit_outs: implicit_outs = ' | ' + implicit_outs - if self._should_use_rspfile(ins, outs): + if self._should_use_rspfile(): rulename = self.rulename + '_RSP' mlog.log("Command line for building %s is long, using a response file" % self.outfilenames) else: @@ -954,13 +969,14 @@ int dummy; self.build_elements.append(build) if build.rulename != 'phony': - # increment rule refcount - self.ruledict[build.rulename].refcount += 1 - # reference rule build.rule = self.ruledict[build.rulename] def write_rules(self, outfile): + for b in self.build_elements: + if isinstance(b, NinjaBuildElement): + b.count_rule_references() + for r in self.rules: r.write(outfile) -- cgit v1.1 From 2f070c54bd415bbe2207fbd313c346d26f003215 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Sun, 14 Apr 2019 14:16:17 +0100 Subject: Extended test case for special characters to compiler arguments --- .../common/145 special characters/arg-char-test.c | 10 ++++++ .../145 special characters/arg-string-test.c | 12 +++++++ .../145 special characters/arg-unquoted-test.c | 17 ++++++++++ .../common/145 special characters/meson.build | 38 ++++++++++++++++++++++ 4 files changed, 77 insertions(+) create mode 100644 test cases/common/145 special characters/arg-char-test.c create mode 100644 test cases/common/145 special characters/arg-string-test.c create mode 100644 test cases/common/145 special characters/arg-unquoted-test.c diff --git a/test cases/common/145 special characters/arg-char-test.c b/test cases/common/145 special 
characters/arg-char-test.c new file mode 100644 index 0000000..04e02f8 --- /dev/null +++ b/test cases/common/145 special characters/arg-char-test.c @@ -0,0 +1,10 @@ +#include <assert.h> +#include <stdio.h> + +int main(int argc, char **argv) { + char c = CHAR; + assert(argc == 2); + if (c != argv[1][0]) + fprintf(stderr, "Expected %x, got %x\n", (unsigned int) c, (unsigned int) argv[1][0]); + assert(c == argv[1][0]); +}
diff --git a/test cases/common/145 special characters/arg-string-test.c b/test cases/common/145 special characters/arg-string-test.c new file mode 100644 index 0000000..199fd79 --- /dev/null +++ b/test cases/common/145 special characters/arg-string-test.c @@ -0,0 +1,12 @@ +#include <assert.h> +#include <stdio.h> +#include <string.h> + +int main(int argc, char **argv) { + const char *s = CHAR; + assert(argc == 2); + assert(strlen(s) == 1); + if (s[0] != argv[1][0]) + fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]); + assert(s[0] == argv[1][0]); +}
diff --git a/test cases/common/145 special characters/arg-unquoted-test.c b/test cases/common/145 special characters/arg-unquoted-test.c new file mode 100644 index 0000000..7f679ca --- /dev/null +++ b/test cases/common/145 special characters/arg-unquoted-test.c @@ -0,0 +1,17 @@ +#include <assert.h> +#include <stdio.h> +#include <string.h> + +#define Q(x) #x +#define QUOTE(x) Q(x) + +int main(int argc, char **argv) { + const char *s = QUOTE(CHAR); + assert(argc == 2); + assert(strlen(s) == 1); + if (s[0] != argv[1][0]) + fprintf(stderr, "Expected %x, got %x\n", (unsigned int) s[0], (unsigned int) argv[1][0]); + assert(s[0] == argv[1][0]); + // There is no way to convert a macro argument into a character constant. + // Otherwise we'd test that as well +}
diff --git a/test cases/common/145 special characters/meson.build b/test cases/common/145 special characters/meson.build index ecba650..579601e 100644 --- a/test cases/common/145 special characters/meson.build +++ b/test cases/common/145 special characters/meson.build @@ -35,3 +35,41 @@ gen2 = custom_target('gen2', output : 'result2', install : true, install_dir : get_option('datadir')) + +# Test that we can pass these special characters in compiler arguments +# +# (this part of the test is crafted so we don't try to use these special +# characters in filenames or target names) +# +# TODO: similar tests needed for languages other than C +# TODO: add similar test for quote, doublequote, and hash, carefully +# Re hash, see +# https://docs.microsoft.com/en-us/cpp/build/reference/d-preprocessor-definitions + +special = [ + ['amp', '&'], + ['at', '@'], + ['backslash', '\\'], + ['dollar', '$'], + ['gt', '>'], + ['lt', '<'], + ['slash', '/'], +] + +cc = meson.get_compiler('c') + +foreach s : special + args = '-DCHAR="@0@"'.format(s[1]) + e = executable('arg-string-' + s[0], 'arg-string-test.c', c_args: args) + test('arg-string-' + s[0], e, args: s[1]) + + args = '-DCHAR=@0@'.format(s[1]) + e = executable('arg-unquoted-' + s[0], 'arg-unquoted-test.c', c_args: args) + test('arg-unquoted-' + s[0], e, args: s[1]) +endforeach + +foreach s : special + args = '-DCHAR=\'@0@\''.format(s[1]) + e = executable('arg-char-' + s[0], 'arg-char-test.c', c_args: args) + test('arg-char-' + s[0], e, args: s[1]) +endforeach -- cgit v1.1
From 9cec5f3521407d50307eeef3c34cbd608949b0a1 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Fri, 12 Apr 2019 17:26:25 +0100 Subject: ninja: Push ninja and shell quoting down into NinjaRule
Rather than ad-hoc avoiding quoting where harmful, identify arguments which contain shell constructs and ninja variables, and don't
apply quoting to those arguments. This is made more complex by some arguments which might contain ninja variables anywhere, not just at start, e.g. '/Fo$out' (This implementation would fall down if there was an argument which contained both a literal $ or shell metacharacter and a ninja variable, but there are no instances of such a thing and it seems unlikely) $DEPFILE needs special treatment. It's used in the special variable depfile, so it's value can't be shell quoted (as it used as a filename to read by ninja). So instead that variable needs to be shell quoted when it appears in a command. (Test common/129, which uses a depfile with a space in it's name, exercises that) If 'targetdep' is not in raw_names, test cases/rust all fail. --- mesonbuild/backend/ninjabackend.py | 141 ++++++++++++++++++++++++------------- 1 file changed, 91 insertions(+), 50 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index ef3fc45..9af079e 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -17,6 +17,7 @@ import re import pickle import subprocess from collections import OrderedDict +from enum import Enum, unique import itertools from pathlib import PurePath, Path from functools import lru_cache @@ -60,6 +61,12 @@ else: # a conservative estimate of the command-line length limit on windows rsp_threshold = 4096 +# ninja variables whose value should remain unquoted. The value of these ninja +# variables (or variables we use them in) is interpreted directly by ninja +# (e.g. the value of the depfile variable is a pathname that ninja will read +# from, etc.), so it must not be shell quoted. +raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'} + def ninja_quote(text, is_build_line=False): if is_build_line: qcs = ('$', ' ', ':') @@ -76,6 +83,25 @@ Please report this error with a test case to the Meson bug tracker.'''.format(te raise MesonException(errmsg) return text +@unique +class Quoting(Enum): + both = 0 + notShell = 1 + notNinja = 2 + none = 3 + +class NinjaCommandArg: + def __init__(self, s, quoting = Quoting.both): + self.s = s + self.quoting = quoting + + def __str__(self): + return self.s + + @staticmethod + def list(l, q): + return [NinjaCommandArg(i, q) for i in l] + class NinjaComment: def __init__(self, comment): self.comment = comment @@ -90,9 +116,32 @@ class NinjaComment: class NinjaRule: def __init__(self, rule, command, args, description, rspable = False, deps = None, depfile = None, extra = None): + + def strToCommandArg(c): + if isinstance(c, NinjaCommandArg): + return c + + # deal with common cases here, so we don't have to explicitly + # annotate the required quoting everywhere + if c == '&&': + # shell constructs shouldn't be shell quoted + return NinjaCommandArg(c, Quoting.notShell) + if c.startswith('$'): + var = re.search(r'\$\{?(\w*)\}?', c).group(1) + if var not in raw_names: + # ninja variables shouldn't be ninja quoted, and their value + # is already shell quoted + return NinjaCommandArg(c, Quoting.none) + else: + # shell quote the use of ninja variables whose value must + # not be shell quoted (as it also used by ninja) + return NinjaCommandArg(c, Quoting.notNinja) + + return NinjaCommandArg(c) + self.name = rule - self.command = command # includes args which never go into a rspfile - self.args = args # args which will go into a rspfile, if used + self.command = list(map(strToCommandArg, command)) # includes args which never go into a rspfile + self.args = 
list(map(strToCommandArg, args)) # args which will go into a rspfile, if used self.description = description self.deps = deps # depstyle 'gcc' or 'msvc' self.depfile = depfile @@ -101,6 +150,18 @@ class NinjaRule: self.refcount = 0 self.rsprefcount = 0 + @staticmethod + def _quoter(x): + if isinstance(x, NinjaCommandArg): + if x.quoting == Quoting.none: + return x.s + elif x.quoting == Quoting.notNinja: + return quote_func(x.s) + elif x.quoting == Quoting.notShell: + return ninja_quote(x.s) + # fallthrough + return ninja_quote(quote_func(str(x))) + def write(self, outfile): def rule_iter(): if self.refcount: @@ -111,11 +172,11 @@ class NinjaRule: for rsp in rule_iter(): outfile.write('rule {}{}\n'.format(self.name, rsp)) if rsp == '_RSP': - outfile.write(' command = {} @$out.rsp\n'.format(' '.join(self.command))) + outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command]))) outfile.write(' rspfile = $out.rsp\n') - outfile.write(' rspfile_content = {}\n'.format(' '.join(self.args))) + outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x) for x in self.args]))) else: - outfile.write(' command = {}\n'.format(' '.join(self.command + self.args))) + outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)]))) if self.deps: outfile.write(' deps = {}\n'.format(self.deps)) if self.depfile: @@ -141,9 +202,8 @@ class NinjaRule: ninja_vars['in'] = infiles ninja_vars['out'] = outfiles - # expand variables in command (XXX: this ignores any escaping/quoting - # that NinjaBuildElement.write() might do) - command = ' '.join(self.command + self.args) + # expand variables in command + command = ' '.join([self._quoter(x) for x in self.command + self.args]) expanded_command = '' for m in re.finditer(r'(\${\w*})|(\$\w*)|([^$]*)', command): chunk = m.group() @@ -244,12 +304,6 @@ class NinjaBuildElement: line = line.replace('\\', '/') outfile.write(line) - # ninja variables whose value should remain unquoted. The value of these - # ninja variables (or variables we use them in) is interpreted directly - # by ninja (e.g. the value of the depfile variable is a pathname that - # ninja will read from, etc.), so it must not be shell quoted. 
- raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'} - for e in self.elems: (name, elems) = e should_quote = name not in raw_names @@ -945,13 +999,15 @@ int dummy; deps='gcc', depfile='$DEPFILE', extra='restat = 1')) - c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \ + c = self.environment.get_build_command() + \ ['--internal', 'regenerate', - ninja_quote(quote_func(self.environment.get_source_dir())), - ninja_quote(quote_func(self.environment.get_build_dir()))] + self.environment.get_source_dir(), + self.environment.get_build_dir(), + '--backend', + 'ninja'] self.add_rule(NinjaRule('REGENERATE_BUILD', - c + ['--backend', 'ninja'], [], + c, [], 'Regenerating build files.', extra='generator = 1')) @@ -1630,7 +1686,7 @@ int dummy; cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix] cmdlist += static_linker.get_exelist() cmdlist += ['$LINK_ARGS'] - cmdlist += static_linker.get_output_args('$out') + cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none) description = 'Linking static target $out' if num_pools > 0: pool = 'pool = link_pool' @@ -1652,7 +1708,7 @@ int dummy; continue rule = '{}_LINKER{}'.format(langname, self.get_rule_suffix(for_machine)) command = compiler.get_linker_exelist() - args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS'] + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS'] description = 'Linking target $out' if num_pools > 0: pool = 'pool = link_pool' @@ -1662,10 +1718,10 @@ int dummy; rspable=compiler.can_linker_accept_rsp(), extra=pool)) - args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \ + args = self.environment.get_build_command() + \ ['--internal', 'symbolextractor', - ninja_quote(quote_func(self.environment.get_build_dir())), + self.environment.get_build_dir(), '$in', '$IMPLIB', '$out'] @@ -1677,15 +1733,13 @@ int dummy; def generate_java_compile_rule(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + ['$ARGS', '$in'] + command = compiler.get_exelist() + ['$ARGS', '$in'] description = 'Compiling Java object $in' self.add_rule(NinjaRule(rule, command, [], description)) def generate_cs_compile_rule(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + command = compiler.get_exelist() args = ['$ARGS', '$in'] description = 'Compiling C Sharp target $out' self.add_rule(NinjaRule(rule, command, args, description, @@ -1693,15 +1747,13 @@ int dummy; def generate_vala_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + ['$ARGS', '$in'] + command = compiler.get_exelist() + ['$ARGS', '$in'] description = 'Compiling Vala source $in' self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1')) def generate_rust_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) - invoc = [ninja_quote(i) for i in compiler.get_exelist()] - command = invoc + ['$ARGS', '$in'] + command = compiler.get_exelist() + ['$ARGS', '$in'] description = 'Compiling Rust source $in' depfile = '$targetdep' depstyle = 'gcc' @@ -1710,12 +1762,12 @@ int dummy; def generate_swift_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) - full_exe = 
[ninja_quote(x) for x in self.environment.get_build_command()] + [ + full_exe = self.environment.get_build_command() + [ '--internal', 'dirchanger', '$RUNDIR', ] - invoc = full_exe + [ninja_quote(i) for i in compiler.get_exelist()] + invoc = full_exe + compiler.get_exelist() command = invoc + ['$ARGS', '$in'] description = 'Compiling Swift source $in' self.add_rule(NinjaRule(rule, command, [], description)) @@ -1735,8 +1787,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) if self.created_llvm_ir_rule[compiler.for_machine]: return rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) - command = [ninja_quote(i) for i in compiler.get_exelist()] - args = ['$ARGS'] + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in'] + command = compiler.get_exelist() + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] description = 'Compiling LLVM IR object $in' self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp())) @@ -1765,15 +1817,9 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) if langname == 'fortran': self.generate_fortran_dep_hack(crstr) rule = self.get_compiler_rule_name(langname, compiler.for_machine) - depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') - quoted_depargs = [] - for d in depargs: - if d != '$out' and d != '$in': - d = quote_func(d) - quoted_depargs.append(d) - - command = [ninja_quote(i) for i in compiler.get_exelist()] - args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in'] + depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none) + command = compiler.get_exelist() + args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] description = 'Compiling {} object $out'.format(compiler.get_display_language()) if isinstance(compiler, VisualStudioLikeCompiler): deps = 'msvc' @@ -1791,16 +1837,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) rule = self.compiler_to_pch_rule_name(compiler) depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') - quoted_depargs = [] - for d in depargs: - if d != '$out' and d != '$in': - d = quote_func(d) - quoted_depargs.append(d) if isinstance(compiler, VisualStudioLikeCompiler): output = [] else: - output = compiler.get_output_args('$out') - command = compiler.get_exelist() + ['$ARGS'] + quoted_depargs + output + compiler.get_compile_only_args() + ['$in'] + output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in'] description = 'Precompiling header $in' if isinstance(compiler, VisualStudioLikeCompiler): deps = 'msvc' -- cgit v1.1 From fbacf87af525f7b69e4f1a310f6ef38c5853406c Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Sat, 13 Apr 2019 17:07:19 +0100 Subject: ninja: Quoting in rspfile depends on the compiler, not the shell In certain exotic configurations, the style of quoting expected in the response file may not match that expected by the shell. e.g. under MSYS2, ninja invokes commands via CreateProcess (which results in cmd-style quoting processed by parse_cmdline or CommandLineToArgvW), but gcc will use sh-style quoting in any response file it reads. 
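As a rough standalone illustration of that mismatch (not part of the patch; cl_quote below is a deliberately naive stand-in), the same argument comes out differently under sh-style and CommandLineToArgvW-style quoting:

import shlex

def cl_quote(s):
    # naive CommandLineToArgvW-style quoting: just wrap in double quotes;
    # assumes s has no embedded double quotes or trailing backslashes
    return '"{}"'.format(s)

arg = r'C:\Program Files\foo'
print(shlex.quote(arg))   # 'C:\Program Files\foo'   (sh-style, what gcc expects in a response file)
print(cl_quote(arg))      # "C:\Program Files\foo"   (cmd-style, what CreateProcess parsing expects)

This is why the response-file quoting style has to follow the tool that reads the file rather than the shell ninja happens to run commands through.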
Future work: The rspfile quoting style should be a method of the compiler or linker object, rather than hardcoded in ninjabackend. (In fact, can_linker_accept_rsp() should be extended to do this, since if we can accept rsp, we should know the quoting style) --- mesonbuild/backend/ninjabackend.py | 65 ++++++++++++++++++++++++++++---------- 1 file changed, 48 insertions(+), 17 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 9af079e..5743d48 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -15,6 +15,7 @@ import typing as T import os import re import pickle +import shlex import subprocess from collections import OrderedDict from enum import Enum, unique @@ -29,9 +30,14 @@ from .. import build from .. import mlog from .. import dependencies from .. import compilers -from ..compilers import (Compiler, CompilerArgs, CCompiler, FortranCompiler, - PGICCompiler, VisualStudioLikeCompiler) -from ..linkers import ArLinker +from ..compilers import ( + Compiler, CompilerArgs, CCompiler, + DmdDCompiler, + FortranCompiler, PGICCompiler, + VisualStudioCsCompiler, + VisualStudioLikeCompiler, +) +from ..linkers import ArLinker, VisualStudioLinker from ..mesonlib import ( File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine, ProgressBar, quote_arg, unholder, @@ -46,12 +52,15 @@ FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$" FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)" FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)" +def cmd_quote(s): + # XXX: this needs to understand how to escape any existing double quotes(") + return '"{}"'.format(s) + +# How ninja executes command lines differs between Unix and Windows +# (see https://ninja-build.org/manual.html#ref_rule_command) if mesonlib.is_windows(): - # FIXME: can't use quote_arg on Windows just yet; there are a number of existing workarounds - # throughout the codebase that cumulatively make the current code work (see, e.g. 
Backend.escape_extra_args - # and NinjaBuildElement.write below) and need to be properly untangled before attempting this - quote_func = lambda s: '"{}"'.format(s) - execute_wrapper = ['cmd', '/c'] + quote_func = cmd_quote + execute_wrapper = ['cmd', '/c'] # unused rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&'] else: quote_func = quote_arg @@ -115,7 +124,8 @@ class NinjaComment: class NinjaRule: def __init__(self, rule, command, args, description, - rspable = False, deps = None, depfile = None, extra = None): + rspable = False, deps = None, depfile = None, extra = None, + rspfile_quote_style = 'sh'): def strToCommandArg(c): if isinstance(c, NinjaCommandArg): @@ -149,20 +159,26 @@ class NinjaRule: self.rspable = rspable # if a rspfile can be used self.refcount = 0 self.rsprefcount = 0 + self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'sh' or 'cl' @staticmethod - def _quoter(x): + def _quoter(x, qf = quote_func): if isinstance(x, NinjaCommandArg): if x.quoting == Quoting.none: return x.s elif x.quoting == Quoting.notNinja: - return quote_func(x.s) + return qf(x.s) elif x.quoting == Quoting.notShell: return ninja_quote(x.s) # fallthrough - return ninja_quote(quote_func(str(x))) + return ninja_quote(qf(str(x))) def write(self, outfile): + if self.rspfile_quote_style == 'cl': + rspfile_quote_func = cmd_quote + else: + rspfile_quote_func = shlex.quote + def rule_iter(): if self.refcount: yield '' @@ -174,7 +190,7 @@ class NinjaRule: if rsp == '_RSP': outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command]))) outfile.write(' rspfile = $out.rsp\n') - outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x) for x in self.args]))) + outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args]))) else: outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)]))) if self.deps: @@ -285,7 +301,8 @@ class NinjaBuildElement: implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames]) if implicit_outs: implicit_outs = ' | ' + implicit_outs - if self._should_use_rspfile(): + use_rspfile = self._should_use_rspfile() + if use_rspfile: rulename = self.rulename + '_RSP' mlog.log("Command line for building %s is long, using a response file" % self.outfilenames) else: @@ -304,6 +321,14 @@ class NinjaBuildElement: line = line.replace('\\', '/') outfile.write(line) + if use_rspfile: + if self.rule.rspfile_quote_style == 'cl': + qf = cmd_quote + else: + qf = shlex.quote + else: + qf = quote_func + for e in self.elems: (name, elems) = e should_quote = name not in raw_names @@ -313,9 +338,9 @@ class NinjaBuildElement: if not should_quote or i == '&&': # Hackety hack hack quoter = ninja_quote else: - quoter = lambda x: ninja_quote(quote_func(x)) + quoter = lambda x: ninja_quote(qf(x)) i = i.replace('\\', '\\\\') - if quote_func('') == '""': + if qf('') == '""': i = i.replace('"', '\\"') newelems.append(quoter(i)) line += ' '.join(newelems) @@ -1694,6 +1719,7 @@ int dummy; pool = None self.add_rule(NinjaRule(rule, cmdlist, args, description, rspable=static_linker.can_linker_accept_rsp(), + rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'sh', extra=pool)) def generate_dynamic_link_rules(self): @@ -1716,6 +1742,8 @@ int dummy; pool = None self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp(), + rspfile_quote_style='cl' if 
(compiler.get_argument_syntax() == 'msvc' or + isinstance(compiler, DmdDCompiler)) else 'sh', extra=pool)) args = self.environment.get_build_command() + \ @@ -1743,7 +1771,8 @@ int dummy; args = ['$ARGS', '$in'] description = 'Compiling C Sharp target $out' self.add_rule(NinjaRule(rule, command, args, description, - rspable=mesonlib.is_windows())) + rspable=mesonlib.is_windows(), + rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'sh')) def generate_vala_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) @@ -1829,6 +1858,8 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) depfile = '$DEPFILE' self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp(), + rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or + isinstance(compiler, DmdDCompiler)) else 'sh', deps=deps, depfile=depfile)) def generate_pch_rule_for(self, langname, compiler): -- cgit v1.1 From eb60c041f9d2ae8554045b50080bbfc7b439af01 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Tue, 30 Apr 2019 18:52:44 +0100 Subject: ninja: Implement Windows-style command line quoting We avoided having to get this right previously, as we'd always use a response file if possible. But this is so insane, I can't imagine it's right. See also: subprocess.list2cmdline() internal method --- mesonbuild/backend/ninjabackend.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 5743d48..fb150a3 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -53,8 +53,17 @@ FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)" FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)" def cmd_quote(s): - # XXX: this needs to understand how to escape any existing double quotes(") - return '"{}"'.format(s) + # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks + + # backslash escape any existing double quotes + # any existing backslashes preceding a quote are doubled + s = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', s) + # any terminal backslashes likewise need doubling + s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s) + # and double quote + s = '"{}"'.format(s) + + return s # How ninja executes command lines differs between Unix and Windows # (see https://ninja-build.org/manual.html#ref_rule_command) @@ -340,8 +349,6 @@ class NinjaBuildElement: else: quoter = lambda x: ninja_quote(qf(x)) i = i.replace('\\', '\\\\') - if qf('') == '""': - i = i.replace('"', '\\"') newelems.append(quoter(i)) line += ' '.join(newelems) line += '\n' -- cgit v1.1 From aca93df184a32ed7faf3636c0fbe90d05cb67857 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Tue, 30 Apr 2019 20:29:43 +0100 Subject: backend: There is no need for 'compiler-specific escaping' anymore Now that all command-line escaping for ninja is dealt with in the ninja backend, escape_extra_args() shouldn't need to do anything. But tests of existing behaviour rely on all backslashes in defines being C escaped: This means that Windows-style paths including backslashes can be safely used, but makes it impossible to have a define containing a C escape. 
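A minimal standalone sketch of the behaviour that remains (illustration only; escape_define_backslashes is an invented name mirroring the simplified escape_extra_args() in the diff below): backslashes are doubled, but only inside -D or /D define arguments:

def escape_define_backslashes(args):
    out = []
    for arg in args:
        if arg.startswith('-D') or arg.startswith('/D'):
            # double every backslash so it survives C escaping in the define
            arg = arg.replace('\\', '\\\\')
        out.append(arg)
    return out

print(escape_define_backslashes([r'-DDATADIR=c:\data', '-O2']))
# ['-DDATADIR=c:\\\\data', '-O2']

The doubled backslash is what keeps Windows-style paths intact, and also what rules out putting a deliberate C escape sequence in a define.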
--- mesonbuild/backend/backends.py | 30 +++++++----------------------- mesonbuild/backend/ninjabackend.py | 1 - 2 files changed, 7 insertions(+), 24 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 840c9a3..3573d94 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -28,7 +28,7 @@ from .. import build from .. import dependencies from .. import mesonlib from .. import mlog -from ..compilers import CompilerArgs, VisualStudioLikeCompiler +from ..compilers import CompilerArgs from ..mesonlib import ( File, MachineChoice, MesonException, OrderedSet, OptionOverrideProxy, classify_unity_sources, unholder @@ -607,29 +607,13 @@ class Backend: @staticmethod def escape_extra_args(compiler, args): - # No extra escaping/quoting needed when not running on Windows - if not mesonlib.is_windows(): - return args + # all backslashes in defines are doubly-escaped extra_args = [] - # Compiler-specific escaping is needed for -D args but not for any others - if isinstance(compiler, VisualStudioLikeCompiler): - # MSVC needs escaping when a -D argument ends in \ or \" - for arg in args: - if arg.startswith('-D') or arg.startswith('/D'): - # Without extra escaping for these two, the next character - # gets eaten - if arg.endswith('\\'): - arg += '\\' - elif arg.endswith('\\"'): - arg = arg[:-2] + '\\\\"' - extra_args.append(arg) - else: - # MinGW GCC needs all backslashes in defines to be doubly-escaped - # FIXME: Not sure about Cygwin or Clang - for arg in args: - if arg.startswith('-D') or arg.startswith('/D'): - arg = arg.replace('\\', '\\\\') - extra_args.append(arg) + for arg in args: + if arg.startswith('-D') or arg.startswith('/D'): + arg = arg.replace('\\', '\\\\') + extra_args.append(arg) + return extra_args def generate_basic_compiler_args(self, target, compiler, no_warn_args=False): diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index fb150a3..719028d 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -348,7 +348,6 @@ class NinjaBuildElement: quoter = ninja_quote else: quoter = lambda x: ninja_quote(qf(x)) - i = i.replace('\\', '\\\\') newelems.append(quoter(i)) line += ' '.join(newelems) line += '\n' -- cgit v1.1 From abf8bf488e820d75352bd21a4d9ecdd39b37a8d9 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Thu, 2 May 2019 14:11:42 +0100 Subject: ninja: Specifically implement gcc rspfile style quoting This differs from sh-quoting in that a backslash *always* escapes the following character, even inside single quotes. Yes, really. https://gcc.gnu.org/git/?p=gcc.git;a=blob;f=libiberty/argv.c#l176 --- mesonbuild/backend/ninjabackend.py | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 719028d..538aa3d 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -65,6 +65,16 @@ def cmd_quote(s): return s +def gcc_rsp_quote(s): + # see: the function buildargv() in libiberty + # + # this differs from sh-quoting in that a backslash *always* escapes the + # following character, even inside single quotes. 
+ + s = s.replace('\\', '\\\\') + + return shlex.quote(s) + # How ninja executes command lines differs between Unix and Windows # (see https://ninja-build.org/manual.html#ref_rule_command) if mesonlib.is_windows(): @@ -134,7 +144,7 @@ class NinjaComment: class NinjaRule: def __init__(self, rule, command, args, description, rspable = False, deps = None, depfile = None, extra = None, - rspfile_quote_style = 'sh'): + rspfile_quote_style = 'gcc'): def strToCommandArg(c): if isinstance(c, NinjaCommandArg): @@ -168,7 +178,7 @@ class NinjaRule: self.rspable = rspable # if a rspfile can be used self.refcount = 0 self.rsprefcount = 0 - self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'sh' or 'cl' + self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'gcc' or 'cl' @staticmethod def _quoter(x, qf = quote_func): @@ -186,7 +196,7 @@ class NinjaRule: if self.rspfile_quote_style == 'cl': rspfile_quote_func = cmd_quote else: - rspfile_quote_func = shlex.quote + rspfile_quote_func = gcc_rsp_quote def rule_iter(): if self.refcount: @@ -334,7 +344,7 @@ class NinjaBuildElement: if self.rule.rspfile_quote_style == 'cl': qf = cmd_quote else: - qf = shlex.quote + qf = gcc_rsp_quote else: qf = quote_func @@ -1725,7 +1735,7 @@ int dummy; pool = None self.add_rule(NinjaRule(rule, cmdlist, args, description, rspable=static_linker.can_linker_accept_rsp(), - rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'sh', + rspfile_quote_style='cl' if isinstance(static_linker, VisualStudioLinker) else 'gcc', extra=pool)) def generate_dynamic_link_rules(self): @@ -1749,7 +1759,7 @@ int dummy; self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp(), rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or - isinstance(compiler, DmdDCompiler)) else 'sh', + isinstance(compiler, DmdDCompiler)) else 'gcc', extra=pool)) args = self.environment.get_build_command() + \ @@ -1778,7 +1788,7 @@ int dummy; description = 'Compiling C Sharp target $out' self.add_rule(NinjaRule(rule, command, args, description, rspable=mesonlib.is_windows(), - rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'sh')) + rspfile_quote_style='cl' if isinstance(compiler, VisualStudioCsCompiler) else 'gcc')) def generate_vala_compile_rules(self, compiler): rule = self.compiler_to_rule_name(compiler) @@ -1865,7 +1875,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) self.add_rule(NinjaRule(rule, command, args, description, rspable=compiler.can_linker_accept_rsp(), rspfile_quote_style='cl' if (compiler.get_argument_syntax() == 'msvc' or - isinstance(compiler, DmdDCompiler)) else 'sh', + isinstance(compiler, DmdDCompiler)) else 'gcc', deps=deps, depfile=depfile)) def generate_pch_rule_for(self, langname, compiler): -- cgit v1.1 From 5ca37e7961ac2068382c36424c3f972ed0a9a105 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Fri, 3 May 2019 18:51:05 +0100 Subject: ninja: Add ninja variable DEPFILE_UNQUOTED with unquoted DEPFILE value It's assumed that where we use DEPFILE in command or rspfile_content, it can be quoted by quoting the ninja variable (e.g. $DEPFILE -> '$DEPFILE') This is nearly always true, but not for gcc response files, where backslash is always an escape, even inside single quotes. So this fails if the value of DEPFILE contains backslashes (e.g. 
a Windows path) Do some special casing, adding DEPFILE_UNQUOTED, so that the value of depfile is not shell quoted (so ninja can use it to locate the depfile to read), but the value of DEPFILE used in command or rspfile_content is shell/response file quoted) (It would seem this also exists as a more general problem with built-in ninja variables: '$out' appearing in command is fine, unless one of the output filenames contains a single quote. Although forbidding shell metacharacters in filenames seems a reasonable way to solve that.) (How does this even work, currently? Backslashes in the value of all ninja variables, including DEPFILE were escaped, which protected them against being treated as escapes in the gcc response file. And fortunately, the empty path elements indicated by a double backslash in the value of depfile are ignored when ninja opens that file to read it.) --- mesonbuild/backend/ninjabackend.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 538aa3d..1686a5d 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -93,7 +93,7 @@ rsp_threshold = 4096 # variables (or variables we use them in) is interpreted directly by ninja # (e.g. the value of the depfile variable is a pathname that ninja will read # from, etc.), so it must not be shell quoted. -raw_names = {'DEPFILE', 'DESC', 'pool', 'description', 'targetdep'} +raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep'} def ninja_quote(text, is_build_line=False): if is_build_line: @@ -180,6 +180,9 @@ class NinjaRule: self.rsprefcount = 0 self.rspfile_quote_style = rspfile_quote_style # rspfile quoting style is 'gcc' or 'cl' + if self.depfile == '$DEPFILE': + self.depfile += '_UNQUOTED' + @staticmethod def _quoter(x, qf = quote_func): if isinstance(x, NinjaCommandArg): @@ -291,6 +294,9 @@ class NinjaBuildElement: elems = [elems] self.elems.append((name, elems)) + if name == 'DEPFILE': + self.elems.append((name + '_UNQUOTED', elems)) + def _should_use_rspfile(self): # 'phony' is a rule built-in to ninja if self.rulename == 'phony': -- cgit v1.1 From 877dcdbccad25ed496b7b2d6221e7daf94cfd3da Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Fri, 29 May 2020 10:19:18 -0700 Subject: gnulike linkers (and ar) accept rsp files. --- mesonbuild/linkers.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index f02c297..25a8c9c 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -149,6 +149,10 @@ class ArLinker(StaticLinker): self.std_args = ['csrD'] else: self.std_args = ['csr'] + self.can_rsp = '@<' in stdo + + def can_linker_accept_rsp(self) -> bool: + return self.can_rsp def get_std_link_args(self) -> T.List[str]: return self.std_args @@ -704,6 +708,9 @@ class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dynam """Representation of GNU ld.bfd and ld.gold.""" + def get_accepts_rsp(self) -> bool: + return True; + class GnuGoldDynamicLinker(GnuDynamicLinker): -- cgit v1.1 From 10e6a989ba337e931ad7abae8d960ee9dc0a0b1d Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Mon, 1 Jun 2020 19:04:16 -0700 Subject: ninja: response file threshold now more accurate, overridable, portable. 
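As a simplified sketch of the selection logic this introduces (illustration only; rsp_threshold is a trimmed-down stand-in for the get_rsp_threshold() added in the diff below): a response file is only used once the estimated command line crosses a platform-dependent limit, and MESON_RSP_THRESHOLD overrides that limit, which is how the CI changes further down force response files everywhere:

import os

def rsp_threshold(windows):
    # cmd.exe tops out around 8 KiB; Linux limits a single argument to
    # MAX_ARG_STRLEN (128 KiB), and ninja passes the whole line as one argument
    limit = 8192 if windows else 131072
    # stay well clear of the hard limit unless explicitly overridden
    return int(os.environ.get('MESON_RSP_THRESHOLD', limit // 2))

print(rsp_threshold(windows=False))   # 65536 by default on Linux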
--- mesonbuild/backend/ninjabackend.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 1686a5d..08fe092 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -86,8 +86,26 @@ else: execute_wrapper = [] rmfile_prefix = ['rm', '-f', '{}', '&&'] -# a conservative estimate of the command-line length limit on windows -rsp_threshold = 4096 +def get_rsp_threshold(): + '''Return a conservative estimate of the commandline size in bytes + above which a response file should be used. May be overridden for + debugging by setting environment variable MESON_RSP_THRESHOLD.''' + + if mesonlib.is_windows(): + # Usually 32k, but some projects might use cmd.exe, + # and that has a limit of 8k. + limit = 8192 + else: + # On Linux, ninja always passes the commandline as a single + # big string to /bin/sh, and the kernel limits the size of a + # single argument; see MAX_ARG_STRLEN + limit = 131072 + # Be conservative + limit = limit / 2 + return int(os.environ.get('MESON_RSP_THRESHOLD', limit)) + +# a conservative estimate of the command-line length limit +rsp_threshold = get_rsp_threshold() # ninja variables whose value should remain unquoted. The value of these ninja # variables (or variables we use them in) is interpreted directly by ninja -- cgit v1.1 From 33ef3786227cc47f06a60a4882765f5468f2ce67 Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Wed, 3 Jun 2020 11:52:55 -0700 Subject: test cases/common/234: get limit right on linux, generate fewer files --- .../common/234 very long commmand line/meson.build | 27 ++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/test cases/common/234 very long commmand line/meson.build b/test cases/common/234 very long commmand line/meson.build index 7a316e1..fe47b5e 100644 --- a/test cases/common/234 very long commmand line/meson.build +++ b/test cases/common/234 very long commmand line/meson.build @@ -1,6 +1,29 @@ project('very long command lines', 'c') -seq = run_command('seq.py', '1', '256').stdout().strip().split('\n') +# Get the current system's commandline length limit. +if build_machine.system() == 'windows' + # Various limits on windows: + # cmd.exe: 8kb + # CreateProcess: 32kb + limit = 32767 +elif build_machine.system() == 'cygwin' + # cygwin-to-win32: see above + # cygwin-to-cygwin: no limit? + # Cygwin is slow, so only test it lightly here. + limit = 8192 +else + # ninja passes whole line as a single argument, for which + # the limit is 128k as of Linux 2.6.23. See MAX_ARG_STRLEN. + # BSD seems similar, see https://www.in-ulm.de/~mascheck/various/argmax + limit = 131072 +endif +# Now exceed that limit, but not so far that the test takes too long. 
+name = 'ALongFilenameMuchLongerThanIsNormallySeenAndReallyHardToReadThroughToTheEndAMooseOnceBitMySisterSheNowWorksAtLLamaFreshFarmsThisHasToBeSoLongThatWeExceed128KBWithoutCompilingTooManyFiles' +namelen = 187 +nfiles = 50 + limit / namelen +message('Expected link commandline length is approximately ' + '@0@'.format((nfiles * (namelen+28)))) + +seq = run_command('seq.py', '1', '@0@'.format(nfiles)).stdout().strip().split('\n') sources = [] codegen = find_program('codegen.py') @@ -8,7 +31,7 @@ codegen = find_program('codegen.py') foreach i : seq sources += custom_target('codegen' + i, command: [codegen, i, '@OUTPUT@'], - output: 'test' + i + '.c') + output: name + i + '.c') endforeach shared_library('sharedlib', sources) -- cgit v1.1 From c82c74cae88a4614a71266f4825de16d167b979f Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Mon, 1 Jun 2020 19:05:41 -0700 Subject: ci: run some builders with response file threshold set to zero --- .travis.yml | 3 ++- azure-pipelines.yml | 2 ++ ci/travis_script.sh | 8 ++++++++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index f5a32a6..22d76e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,9 +31,10 @@ matrix: compiler: gcc include: # Test cross builds separately, they do not use the global compiler + # Also hijack one cross build to test long commandline handling codepath (and avoid overloading Travis) - os: linux compiler: gcc - env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" + env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_RSP_THRESHOLD=0 - os: linux compiler: gcc env: RUN_TESTS_ARGS="--cross ubuntu-armhf.txt --cross linux-mingw-w64-64bit.txt" MESON_ARGS="--unity=on" diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 066f1a5..de956c8 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -22,6 +22,7 @@ jobs: arch: x86 compiler: msvc2017 backend: ninja + MESON_RSP_THRESHOLD: 0 vc2017x64vs: arch: x64 compiler: msvc2017 @@ -138,6 +139,7 @@ jobs: gccx64ninja: MSYSTEM: MINGW64 MSYS2_ARCH: x86_64 + MESON_RSP_THRESHOLD: 0 compiler: gcc clangx64ninja: MSYSTEM: MINGW64 diff --git a/ci/travis_script.sh b/ci/travis_script.sh index a91a5dd..bdfd4c2 100755 --- a/ci/travis_script.sh +++ b/ci/travis_script.sh @@ -23,6 +23,10 @@ export CXX=$CXX export OBJC=$CC export OBJCXX=$CXX export PATH=/root/tools:$PATH +if test "$MESON_RSP_THRESHOLD" != "" +then + export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD +fi source /ci/env_vars.sh cd /root @@ -55,5 +59,9 @@ elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then export OBJC=$CC export OBJCXX=$CXX export PATH=$HOME/tools:/usr/local/opt/qt/bin:$PATH:$(brew --prefix llvm)/bin + if test "$MESON_RSP_THRESHOLD" != "" + then + export MESON_RSP_THRESHOLD=$MESON_RSP_THRESHOLD + fi ./run_tests.py $RUN_TESTS_ARGS --backend=ninja -- $MESON_ARGS fi -- cgit v1.1 From 25085483b90990f7c8a2da9a874719327bb1fb37 Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Wed, 3 Jun 2020 22:02:36 -0700 Subject: run_unittests.py: skip test_internal_include_order for now if forcing response files on msys2 That test looks at the compdb very carefully, but isn't yet set up to handle gcc responsefile quoting on windows, it seems. 
--- run_unittests.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index 7e0c403..a6817c3 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -2432,6 +2432,9 @@ class AllPlatformTests(BasePlatformTests): self.assertPathExists(exe2) def test_internal_include_order(self): + if mesonbuild.environment.detect_msys2_arch() and ('MESON_RSP_THRESHOLD' in os.environ): + raise unittest.SkipTest('Test does not yet support gcc rsp files on msys2') + testdir = os.path.join(self.common_test_dir, '134 include order') self.init(testdir) execmd = fxecmd = None -- cgit v1.1 From 536c64b2414c0f95f04d778ab76f53239560a79c Mon Sep 17 00:00:00 2001 From: Dan Kegel Date: Sun, 31 May 2020 22:34:00 +0000 Subject: docs: add snippet about response file change --- docs/markdown/snippets/response-files.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 docs/markdown/snippets/response-files.md diff --git a/docs/markdown/snippets/response-files.md b/docs/markdown/snippets/response-files.md new file mode 100644 index 0000000..624b664 --- /dev/null +++ b/docs/markdown/snippets/response-files.md @@ -0,0 +1,7 @@ +## Response files enabled on Linux, reined in on Windows + +Meson used to always use response files on Windows, +but never on Linux. + +It now strikes a happier balance, using them on both platforms, +but only when needed to avoid command line length limits. -- cgit v1.1 From 9462f0c7bc96d176561894f4421f3e310a485857 Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Fri, 5 Jun 2020 14:14:30 -0400 Subject: msvc: Avoid spurious openmp link warnings The linker that comes with MSVC does not understand the /openmp flag. This results in a string of LINK : warning LNK4044: unrecognized option '/openmp'; ignored warnings, one for each static_library linked with an executable. Avoid this by only setting the linker openmp flag when the compiler is not MSVC. --- mesonbuild/compilers/compilers.py | 3 +++ mesonbuild/compilers/mixins/visualstudio.py | 3 +++ mesonbuild/dependencies/misc.py | 3 ++- 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index b0fa5f5..3d75811 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -1087,6 +1087,9 @@ class Compiler: def openmp_flags(self): raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language()) + def openmp_link_flags(self): + return self.openmp_flags() + def language_stdlib_only_link_flags(self): return [] diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py index d0004ce..4dfd8b4 100644 --- a/mesonbuild/compilers/mixins/visualstudio.py +++ b/mesonbuild/compilers/mixins/visualstudio.py @@ -208,6 +208,9 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta): def openmp_flags(self) -> T.List[str]: return ['/openmp'] + def openmp_link_flags(self) -> T.List[str]: + return [] + # FIXME, no idea what these should be. 
def thread_flags(self, env: 'Environment') -> T.List[str]: return [] diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index 04dee06..5160fba 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -97,7 +97,8 @@ class OpenMPDependency(ExternalDependency): for name in header_names: if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]: self.is_found = True - self.compile_args = self.link_args = self.clib_compiler.openmp_flags() + self.compile_args = self.clib_compiler.openmp_flags() + self.link_args = self.clib_compiler.openmp_link_flags() break if not self.is_found: mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.') -- cgit v1.1 From 6db9630cf11d036222b39bdde47c3868e529ab3f Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Fri, 5 Jun 2020 14:32:34 -0400 Subject: backend/vs: Fix OpenMP support Use the IDE's OpenMP flag instead of adding /openmp to additional arguments. The IDE appears to override /openmp in additional arguments with the IDE setting, which defaults to false, leading to binaries built without OpenMP. --- mesonbuild/backend/vs2010backend.py | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 6965c42..2f02213 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -992,23 +992,23 @@ class Vs2010Backend(backends.Backend): # Cflags required by external deps might have UNIX-specific flags, # so filter them out if needed if isinstance(d, dependencies.OpenMPDependency): - d_compile_args = compiler.openmp_flags() + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' else: d_compile_args = compiler.unix_args_to_native(d.get_compile_args()) - for arg in d_compile_args: - if arg.startswith(('-D', '/D')): - define = arg[2:] - # De-dup - if define in target_defines: - target_defines.remove(define) - target_defines.append(define) - elif arg.startswith(('-I', '/I')): - inc_dir = arg[2:] - # De-dup - if inc_dir not in target_inc_dirs: - target_inc_dirs.append(inc_dir) - else: - target_args.append(arg) + for arg in d_compile_args: + if arg.startswith(('-D', '/D')): + define = arg[2:] + # De-dup + if define in target_defines: + target_defines.remove(define) + target_defines.append(define) + elif arg.startswith(('-I', '/I')): + inc_dir = arg[2:] + # De-dup + if inc_dir not in target_inc_dirs: + target_inc_dirs.append(inc_dir) + else: + target_args.append(arg) languages += gen_langs if len(target_args) > 0: @@ -1100,14 +1100,14 @@ class Vs2010Backend(backends.Backend): # Extend without reordering or de-dup to preserve `-L -l` sets # https://github.com/mesonbuild/meson/issues/1718 if isinstance(dep, dependencies.OpenMPDependency): - extra_link_args.extend_direct(compiler.openmp_flags()) + ET.SubElement(clconf, 'OpenMPSuppport').text = 'true' else: extra_link_args.extend_direct(dep.get_link_args()) for d in target.get_dependencies(): if isinstance(d, build.StaticLibrary): for dep in d.get_external_deps(): if isinstance(dep, dependencies.OpenMPDependency): - extra_link_args.extend_direct(compiler.openmp_flags()) + ET.SubElement(clconf, 'OpenMPSuppport').text = 'true' else: extra_link_args.extend_direct(dep.get_link_args()) # Add link args for c_* or cpp_* build options. 
Currently this only -- cgit v1.1 From fd0ad977a6983a01415f103a2e3dfe53b22cb5a3 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 7 Jun 2020 20:12:17 +0300 Subject: End test code with a newline. Closes #7247. --- mesonbuild/compilers/mixins/clike.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 56a9ea6..1bbe698 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -1008,7 +1008,7 @@ class CLikeCompiler: return value[:] def find_library(self, libname, env, extra_dirs, libtype: LibType = LibType.PREFER_SHARED): - code = 'int main(void) { return 0; }' + code = 'int main(void) { return 0; }\n' return self.find_library_impl(libname, env, extra_dirs, code, libtype) def find_framework_paths(self, env): @@ -1117,7 +1117,7 @@ class CLikeCompiler: # false positive. args = self.linker.fatal_warnings() + args args = self.linker_to_compiler_args(args) - code = 'int main(void) { return 0; }' + code = 'int main(void) { return 0; }\n' return self.has_arguments(args, env, code, mode='link') @staticmethod -- cgit v1.1 From 86df85d511c8a6c92da248372b47522d0a7e1aec Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 7 Jun 2020 20:27:18 +0300 Subject: Remove warnings from sample code. Closes #7248. --- mesonbuild/compilers/mixins/clike.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 1bbe698..b088fde 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -1108,7 +1108,7 @@ class CLikeCompiler: 'the compiler you are using. has_link_argument or ' 'other similar method can be used instead.' .format(arg)) - code = 'int i;\n' + code = 'extern int i;\nint i;\n' return self.has_arguments(args, env, code, mode='compile') def has_multi_link_arguments(self, args, env): -- cgit v1.1 From 44aa64b6a0bd285640aaa656db9577479428a132 Mon Sep 17 00:00:00 2001 From: Kyrylo Polezhaiev Date: Mon, 8 Jun 2020 22:00:27 +0300 Subject: docs/users: Add Le Machine Learning library [skip ci] --- docs/markdown/Users.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index 1867407..34e8e71 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -70,6 +70,7 @@ topic](https://github.com/topics/meson). 
- [Knot Resolver](https://gitlab.labs.nic.cz/knot/knot-resolver), Full caching DNS resolver implementation - [Ksh](https://github.com/att/ast), a Korn Shell - [Lc0](https://github.com/LeelaChessZero/lc0), LeelaChessZero is a UCI-compliant chess engine designed to play chess via neural network + - [Le](https://github.com/kirushyk/le), machine learning framework - [libcamera](https://git.linuxtv.org/libcamera.git/), a library to handle complex cameras on Linux, ChromeOS and Android - [Libdrm](https://gitlab.freedesktop.org/mesa/drm), a library for abstracting DRM kernel interfaces - [libeconf](https://github.com/openSUSE/libeconf), Enhanced config file parsing library, which merges config files placed in several locations into one -- cgit v1.1 From 8edbb2859c4b12b1d5a87e0816f630ab4e81d7ad Mon Sep 17 00:00:00 2001 From: Seungha Yang Date: Mon, 8 Jun 2020 21:24:54 +0900 Subject: compilers: Add UNIX large file support for MinGW MinGW gcc supports _FILE_OFFSET_BITS=64 and we need to set it for MinGW as well --- mesonbuild/compilers/compilers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 4b286fe..50e2188 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -1246,10 +1246,10 @@ def get_largefile_args(compiler): ''' Enable transparent large-file-support for 32-bit UNIX systems ''' - if not (compiler.info.is_windows() or compiler.info.is_darwin()): + if not (compiler.get_argument_syntax() == 'msvc' or compiler.info.is_darwin()): # Enable large-file support unconditionally on all platforms other - # than macOS and Windows. macOS is now 64-bit-only so it doesn't - # need anything special, and Windows doesn't have automatic LFS. + # than macOS and MSVC. macOS is now 64-bit-only so it doesn't + # need anything special, and MSVC doesn't have automatic LFS. # You must use the 64-bit counterparts explicitly. # glibc, musl, and uclibc, and all BSD libcs support this. On Android, # support for transparent LFS is available depending on the version of -- cgit v1.1 From 477cacf1551f6c92488eeed1a44e59fe6c47b0d2 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 7 Jun 2020 23:16:26 -0400 Subject: cmake_traceparser: ignore parse error --- mesonbuild/cmake/traceparser.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py index d94e774..a241360 100644 --- a/mesonbuild/cmake/traceparser.py +++ b/mesonbuild/cmake/traceparser.py @@ -139,7 +139,7 @@ class CMakeTraceParser: if not self.requires_stderr(): if not self.trace_file_path.exists and not self.trace_file_path.is_file(): raise CMakeException('CMake: Trace file "{}" not found'.format(str(self.trace_file_path))) - trace = self.trace_file_path.read_text() + trace = self.trace_file_path.read_text(errors='ignore') if not trace: raise CMakeException('CMake: The CMake trace was not provided or is empty') -- cgit v1.1 From 2fe4c60ae3f26c75ae45ab092321d07fb98bd52c Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Tue, 9 Jun 2020 20:14:57 +0300 Subject: Made --help options sorted --- mesonbuild/minit.py | 2 +- mesonbuild/mintro.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py index 4238ecd..bdbe69b 100644 --- a/mesonbuild/minit.py +++ b/mesonbuild/minit.py @@ -139,7 +139,7 @@ def add_arguments(parser): parser.add_argument("-n", "--name", help="project name. 
default: name of current directory") parser.add_argument("-e", "--executable", help="executable name. default: project name") parser.add_argument("-d", "--deps", help="dependencies, comma-separated") - parser.add_argument("-l", "--language", choices=LANG_SUPPORTED, help="project language. default: autodetected based on source files") + parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files") parser.add_argument("-b", "--build", action='store_true', help="build after generation") parser.add_argument("--builddir", default='build', help="directory for build") parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.") diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 8eb659b..de8fc5a 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -80,7 +80,7 @@ def add_arguments(parser): flag = '--' + key.replace('_', '-') parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc) - parser.add_argument('--backend', choices=cdata.backendlist, dest='backend', default='ninja', + parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja', help='The backend to use for the --buildoptions introspection.') parser.add_argument('-a', '--all', action='store_true', dest='all', default=False, help='Print all available information.') -- cgit v1.1 From 58e1534773b4c402656abd0f1c760682b8af5860 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Tue, 9 Jun 2020 08:43:43 -0400 Subject: tell variable name when erroring on bad combo option fixes #7269 --- mesonbuild/coredata.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 8774b80..c70ca2d 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -161,7 +161,9 @@ class UserComboOption(UserOption[str]): def validate_value(self, value): if value not in self.choices: optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices]) - raise MesonException('Value "%s" for combo option is not one of the choices. Possible choices are: %s.' % (value, optionsstring)) + raise MesonException('Value "{}" for combo option "{}" is not one of the choices.' + ' Possible choices are: {}.'.format( + value, self.description, optionsstring)) return value class UserArrayOption(UserOption[T.List[str]]): -- cgit v1.1 From 5663b4a3e8507199318965132cdc9cc53a59da80 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 8 Jun 2020 14:01:54 -0700 Subject: dependencies: Remove finish_init method This is a holdover from before we had the DependencyFactory. It should have already been refactored into the initializer, but wasn't for some reason. 
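The shape of the change is easier to see in a self-contained sketch (all names below are invented stand-ins; the real classes live in mesonbuild/dependencies): instead of the base class calling back into an optional finish_init() hook, each subclass finishes its own setup in __init__ and bails out early when the tool was not found:

class ConfigToolBase:
    def __init__(self, name):
        self.name = name
        self.is_found = self._detect()

    def _detect(self):
        return True              # stand-in for the real config-tool lookup

    def get_config_value(self, args):
        return list(args)        # stand-in for running the tool and splitting its output

class FooConfigTool(ConfigToolBase):
    def __init__(self, name):
        super().__init__(name)
        if not self.is_found:
            return
        self.compile_args = self.get_config_value(['--cflags'])
        self.link_args = self.get_config_value(['--libs'])

print(FooConfigTool('foo').link_args)   # ['--libs']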
--- mesonbuild/dependencies/base.py | 2 -- mesonbuild/dependencies/misc.py | 51 +++++++++++++++++++++++++---------------- mesonbuild/dependencies/ui.py | 10 ++++---- 3 files changed, 37 insertions(+), 26 deletions(-) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index b0401c6..20dc593 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -419,8 +419,6 @@ class ConfigToolDependency(ExternalDependency): self.config = None return self.version = version - if getattr(self, 'finish_init', None): - self.finish_init(self) def _sanitize_version(self, version): """Remove any non-numeric, non-point version suffixes.""" diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index 5160fba..47694af 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -272,8 +272,10 @@ class PcapDependencyConfigTool(ConfigToolDependency): tools = ['pcap-config'] tool_name = 'pcap-config' - @staticmethod - def finish_init(self) -> None: + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + if not self.is_found: + return self.compile_args = self.get_config_value(['--cflags'], 'compile_args') self.link_args = self.get_config_value(['--libs'], 'link_args') self.version = self.get_pcap_lib_version() @@ -285,6 +287,7 @@ class PcapDependencyConfigTool(ConfigToolDependency): def get_pcap_lib_version(self): # Since we seem to need to run a program to discover the pcap version, # we can't do that when cross-compiling + # FIXME: this should be handled if we have an exe_wrapper if not self.env.machines.matches_build_machine(self.for_machine): return None @@ -300,10 +303,12 @@ class CupsDependencyConfigTool(ConfigToolDependency): tools = ['cups-config'] tool_name = 'cups-config' - @staticmethod - def finish_init(ctdep): - ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - ctdep.link_args = ctdep.get_config_value(['--ldflags', '--libs'], 'link_args') + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + if not self.is_found: + return + self.compile_args = self.get_config_value(['--cflags'], 'compile_args') + self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args') @staticmethod def get_methods(): @@ -318,10 +323,12 @@ class LibWmfDependencyConfigTool(ConfigToolDependency): tools = ['libwmf-config'] tool_name = 'libwmf-config' - @staticmethod - def finish_init(ctdep): - ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + if not self.is_found: + return + self.compile_args = self.get_config_value(['--cflags'], 'compile_args') + self.link_args = self.get_config_value(['--libs'], 'link_args') @staticmethod def get_methods(): @@ -333,11 +340,13 @@ class LibGCryptDependencyConfigTool(ConfigToolDependency): tools = ['libgcrypt-config'] tool_name = 'libgcrypt-config' - @staticmethod - def finish_init(ctdep): - ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') - ctdep.version = ctdep.get_config_value(['--version'], 'version')[0] + def __init__(self, name: str, environment: 'Environment', kwargs: 
T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + if not self.is_found: + return + self.compile_args = self.get_config_value(['--cflags'], 'compile_args') + self.link_args = self.get_config_value(['--libs'], 'link_args') + self.version = self.get_config_value(['--version'], 'version')[0] @staticmethod def get_methods(): @@ -349,11 +358,13 @@ class GpgmeDependencyConfigTool(ConfigToolDependency): tools = ['gpgme-config'] tool_name = 'gpg-config' - @staticmethod - def finish_init(ctdep): - ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') - ctdep.version = ctdep.get_config_value(['--version'], 'version')[0] + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + if not self.is_found: + return + self.compile_args = self.get_config_value(['--cflags'], 'compile_args') + self.link_args = self.get_config_value(['--libs'], 'link_args') + self.version = self.get_config_value(['--version'], 'version')[0] @staticmethod def get_methods(): diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 741f0b8..4b3d634 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -545,10 +545,12 @@ class SDL2DependencyConfigTool(ConfigToolDependency): tools = ['sdl2-config'] tool_name = 'sdl2-config' - @staticmethod - def finish_init(ctdep): - ctdep.compile_args = ctdep.get_config_value(['--cflags'], 'compile_args') - ctdep.link_args = ctdep.get_config_value(['--libs'], 'link_args') + def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]): + super().__init__(name, environment, kwargs) + if not self.is_found: + return + self.compile_args = self.get_config_value(['--cflags'], 'compile_args') + self.link_args = self.get_config_value(['--libs'], 'link_args') @staticmethod def get_methods(): -- cgit v1.1 From f6a842821b64653de042633e714af3c5a5326c29 Mon Sep 17 00:00:00 2001 From: D Scott Phillips Date: Tue, 2 Jun 2020 21:20:52 -0700 Subject: Fix python3 installed from the Windows Store When meson is currently being run with a python that seems to have been installed from the Windows Store, replace the general WindowsApps directory in search paths with dirname(sys.executable), and also handle failures with pathlib.resolve on WindowsApps exe files. --- mesonbuild/dependencies/base.py | 15 ++++++++++++--- mesonbuild/interpreter.py | 15 ++++++++++++++- 2 files changed, 26 insertions(+), 4 deletions(-) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 20dc593..2e5a5ae 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -22,6 +22,7 @@ import json import shlex import shutil import stat +import sys import textwrap import platform import typing as T @@ -1845,14 +1846,22 @@ class ExternalProgram: # Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc. if 'USERPROFILE' not in os.environ: return path - # Ignore executables in the WindowsApps directory which are - # zero-sized wrappers that magically open the Windows Store to - # install the application. + # The WindowsApps directory is a bit of a problem. It contains + # some zero-sized .exe files which have "reparse points", that + # might either launch an installed application, or might open + # a page in the Windows Store to download the application. 
+ # + # To handle the case where the python interpreter we're + # running on came from the Windows Store, if we see the + # WindowsApps path in the search path, replace it with + # dirname(sys.executable). appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps' paths = [] for each in path.split(os.pathsep): if Path(each) != appstore_dir: paths.append(each) + elif 'WindowsApps' in sys.executable: + paths.append(os.path.dirname(sys.executable)) return os.pathsep.join(paths) @staticmethod diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index b8d4fec..76dbebd 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -41,6 +41,7 @@ import shutil import uuid import re import shlex +import stat import subprocess import collections import functools @@ -2512,7 +2513,19 @@ class Interpreter(InterpreterBase): elif os.path.isfile(f) and not f.startswith('/dev'): srcdir = Path(self.environment.get_source_dir()) builddir = Path(self.environment.get_build_dir()) - f = Path(f).resolve() + try: + f = Path(f).resolve() + except OSError: + f = Path(f) + s = f.stat() + if (hasattr(s, 'st_file_attributes') and + s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and + s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK): + # This is a Windows Store link which we can't + # resolve, so just do our best otherwise. + f = f.parent.resolve() / f.name + else: + raise if builddir in f.parents: return if srcdir in f.parents: -- cgit v1.1 From 71d68a940bdb31f0d66448fa9bde9abf403b54f2 Mon Sep 17 00:00:00 2001 From: Eric Dodd Date: Thu, 21 May 2020 08:45:44 -0400 Subject: Updated to resolve issue identifying SGI CPUs on IRIX systems --- mesonbuild/envconfig.py | 6 +++++- mesonbuild/environment.py | 3 +++ mesonbuild/mesonlib.py | 4 +++- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index b74be35..b0dde65 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -64,7 +64,7 @@ known_cpu_families = ( 'wasm32', 'wasm64', 'x86', - 'x86_64' + 'x86_64', ) # It would feel more natural to call this "64_BIT_CPU_FAMILES", but @@ -299,6 +299,10 @@ class MachineInfo: """ return self.system == 'gnu' + def is_irix(self) -> bool: + """Machine is IRIX?""" + return self.system.startswith('irix') + # Various prefixes and suffixes for import libraries, shared libraries, # static libraries, and executables. # Versioning is added to these names in the backends as-needed. diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index cb6ae7d..4feb44c 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -344,6 +344,9 @@ def detect_cpu_family(compilers: CompilersDict) -> str: trial = 'sparc64' elif trial in {'mipsel', 'mips64el'}: trial = trial.rstrip('el') + elif trial in {'ip30', 'ip35'}: + trial = 'mips64' + # On Linux (and maybe others) there can be any mixture of 32/64 bit code in # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. 
The only diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index 2413cb1..a43d4c4 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -509,6 +509,8 @@ def is_netbsd() -> bool: def is_freebsd() -> bool: return platform.system().lower() == 'freebsd' +def is_irix() -> bool: + return platform.system().startswith('irix') def is_hurd() -> bool: return platform.system().lower() == 'gnu' @@ -733,7 +735,7 @@ def default_libdir() -> str: return 'lib/' + archpath except Exception: pass - if is_freebsd(): + if is_freebsd() or is_irix(): return 'lib' if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'): return 'lib64' -- cgit v1.1 From 18b99b3bc319f84db78ff489d6ca5e0cc0273bcc Mon Sep 17 00:00:00 2001 From: Marcel Hollerbach Date: Tue, 9 Jun 2020 22:40:23 +0200 Subject: compilers: corretify deduplication direction so: when building compile args, meson is deduplicating flags. When a compiler argument is appended, a later appearance of a dedup'ed is going to remove a earlier one. If the argument is prepended, the element *before* the new one is going to be removed. And that is where the problem reported in https://github.com/mesonbuild/meson/pull/7119 is coming in. In the revision linked there, the order of replacement in the prepend case was revesered. With this patch, we restore this behaviour again. --- mesonbuild/compilers/compilers.py | 4 ++-- run_unittests.py | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 50e2188..f427262 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -481,10 +481,10 @@ class CompilerArgs(collections.abc.MutableSequence): post_flush_set = set() #The two lists are here walked from the front to the back, in order to not need removals for deduplication - for a in reversed(self.pre): + for a in self.pre: dedup = self._can_dedup(a) if a not in pre_flush_set: - pre_flush.appendleft(a) + pre_flush.append(a) if dedup == 2: pre_flush_set.add(a) for a in reversed(self.post): diff --git a/run_unittests.py b/run_unittests.py index a6817c3..9572b27 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -350,6 +350,25 @@ class InternalTests(unittest.TestCase): stat.S_IRWXU | stat.S_ISUID | stat.S_IRGRP | stat.S_IXGRP) + def test_compiler_args_class_none_flush(self): + cargsfunc = mesonbuild.compilers.CompilerArgs + cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock()) + a = cargsfunc(cc, ['-I.']) + #first we are checking if the tree construction deduplicates the correct -I argument + a += ['-I..'] + a += ['-I./tests/'] + a += ['-I./tests2/'] + #think this here as assertion, we cannot apply it, otherwise the CompilerArgs would already flush the changes: + # assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..', '-I.']) + a += ['-I.'] + a += ['-I.', '-I./tests/'] + self.assertEqual(a, ['-I.', '-I./tests/', '-I./tests2/', '-I..']) + + #then we are checking that when CompilerArgs already have a build container list, that the deduplication is taking the correct one + a += ['-I.', '-I./tests2/'] + self.assertEqual(a, ['-I.', '-I./tests2/', '-I./tests/', '-I..']) + + def test_compiler_args_class(self): cargsfunc = mesonbuild.compilers.CompilerArgs cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock()) -- cgit v1.1 From 5b8a63650478a96799e2c8929f66849d1e68a38a Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 10 Jun 2020 
08:22:29 +0530 Subject: Fix typo in bitcode message --- mesonbuild/coredata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index c70ca2d..fdba84e 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -870,7 +870,7 @@ class CoreData: def emit_base_options_warnings(self, enabled_opts: list): if 'b_bitcode' in enabled_opts: - mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as such as \'b_asneeded\' have been disabled.') + mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.') mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.') class CmdLineFileParser(configparser.ConfigParser): -- cgit v1.1 From 47c477711b61f7b60551129f039fba67f7d3483e Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 10 Jun 2020 08:22:46 +0530 Subject: apple: -headerpad args are ignored when bitcode is enabled Causes spammy warnings from the linker: ld: warning: -headerpad_max_install_names is ignored when used with -bitcode_bundle (Xcode setting ENABLE_BITCODE=YES) --- mesonbuild/compilers/compilers.py | 8 ++++++-- mesonbuild/linkers.py | 12 +++++++----- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index f427262..c80ffb0 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -377,9 +377,10 @@ def get_base_link_args(options, linker, is_shared_module): # -Wl,-dead_strip_dylibs is incompatible with bitcode args.extend(linker.get_asneeded_args()) - # Apple's ld (the only one that supports bitcode) does not like any - # -undefined arguments at all, so don't pass these when using bitcode + # Apple's ld (the only one that supports bitcode) does not like -undefined + # arguments or -headerpad_max_install_names when bitcode is enabled if not bitcode: + args.extend(linker.headerpad_args()) if (not is_shared_module and option_enabled(linker.base_options, options, 'b_lundef')): args.extend(linker.no_undefined_link_args()) @@ -1203,6 +1204,9 @@ class Compiler: def get_asneeded_args(self) -> T.List[str]: return self.linker.get_asneeded_args() + def headerpad_args(self) -> T.List[str]: + return self.linker.headerpad_args() + def bitcode_args(self) -> T.List[str]: return self.linker.bitcode_args() diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 25a8c9c..bb3229d 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -440,6 +440,10 @@ class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta): """Arguments to make all warnings errors.""" return [] + def headerpad_args(self) -> T.List[str]: + # Only used by the Apple linker + return [] + def bitcode_args(self) -> T.List[str]: raise mesonlib.MesonException('This linker does not support bitcode bundles') @@ -659,8 +663,8 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): def no_undefined_args(self) -> T.List[str]: return self._apply_prefix('-undefined,error') - def get_always_args(self) -> T.List[str]: - return self._apply_prefix('-headerpad_max_install_names') + super().get_always_args() + def headerpad_args(self) -> T.List[str]: + return self._apply_prefix('-headerpad_max_install_names') def bitcode_args(self) -> T.List[str]: return self._apply_prefix('-bitcode_bundle') @@ 
-688,9 +692,7 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: if not rpath_paths and not install_rpath and not build_rpath: return ([], set()) - # Ensure that there is enough space for install_name_tool in-place - # editing of large RPATHs - args = self._apply_prefix('-headerpad_max_install_names') + args = [] # @loader_path is the equivalent of $ORIGIN on macOS # https://stackoverflow.com/q/26280738 origin_placeholder = '@loader_path' -- cgit v1.1 From 63f1b5bb7043d992e374da7f69bdec4f57494324 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 11 Jun 2020 10:35:28 +0530 Subject: unit tests: Pass args to pytest Gets --help working and --failfast too. --- run_unittests.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index 9572b27..669853e 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -8150,6 +8150,9 @@ def convert_args(argv): test_list = [] for arg in argv: if arg.startswith('-'): + if arg in ('-f', '--failfast'): + arg = '--exitfirst' + pytest_args.append(arg) continue # ClassName.test_name => 'ClassName and test_name' if '.' in arg: -- cgit v1.1 From 7fd22c969664ae6cfd60a955abc3ffcc72649ecf Mon Sep 17 00:00:00 2001 From: Phillip Johnston Date: Thu, 11 Jun 2020 10:06:57 -0700 Subject: Correct argument typo in partial_dependnecy (#7300) [skip ci] Should be "sources" not "source" ``` ../meson.build:162: WARNING: Passed invalid keyword argument "source". WARNING: This will become a hard error in the future. Traceback (most recent call last): File "/usr/local/lib/python3.7/site-packages/mesonbuild/mesonmain.py", line 131, in run return options.run_func(options) File "/usr/local/lib/python3.7/site-packages/mesonbuild/msetup.py", line 245, in run app.generate() File "/usr/local/lib/python3.7/site-packages/mesonbuild/msetup.py", line 159, in generate self._generate(env) File "/usr/local/lib/python3.7/site-packages/mesonbuild/msetup.py", line 192, in _generate intr.run() File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreter.py", line 4359, in run super().run() File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 465, in run self.evaluate_codeblock(self.ast, start=1) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 490, in evaluate_codeblock raise e File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 483, in evaluate_codeblock self.evaluate_statement(cur) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 498, in evaluate_statement self.assignment(cur) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 1151, in assignment value = self.evaluate_statement(node.value) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 500, in evaluate_statement return self.method_call(cur) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 895, in method_call return obj.method_call(method_name, args, self.kwargs_string_keys(kwargs)) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 39, in method_call return method(args, kwargs) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 285, in wrapped return f(*wrapped_args, **wrapped_kwargs) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 151, in wrapped return f(*wrapped_args, **wrapped_kwargs) File 
"/usr/local/lib/python3.7/site-packages/mesonbuild/interpreterbase.py", line 213, in wrapped return f(*wrapped_args, **wrapped_kwargs) File "/usr/local/lib/python3.7/site-packages/mesonbuild/interpreter.py", line 484, in partial_dependency_method pdep = self.held_object.get_partial_dependency(**kwargs) TypeError: get_partial_dependency() got an unexpected keyword argument 'source' FAILED: build.ninja ``` --- docs/markdown/Reference-manual.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 9b5d657..ae49f9e 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -2425,7 +2425,7 @@ an external dependency with the following methods: defaults to `'preserve'`. - `partial_dependency(compile_args : false, link_args : false, links - : false, includes : false, source : false)` *(Added 0.46.0)* returns + : false, includes : false, sources : false)` *(Added 0.46.0)* returns a new dependency object with the same name, version, found status, type name, and methods as the object that called it. This new object will only inherit other attributes from its parent as -- cgit v1.1 From 5f8d89b7077a2141c8251c4162c41f6e01847ebd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=9D=82=E6=9C=AC=20=E8=B2=B4=E5=8F=B2?= Date: Fri, 12 Jun 2020 06:33:47 +0900 Subject: docs: remove duplicated entry for gnome.generate_gir() documentation [skip ci] The documentation of gnome.generate_gir() has duplicated entry for dependencies parameter. As a fix, this patch removes the entry added recently. Fixes: 893d101fff01 ("gnome: Add header kwarg to generate_gir()") Signed-off-by: Takashi Sakamoto --- docs/markdown/Gnome-module.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index a9c4531..54d89aa 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -88,7 +88,6 @@ There are several keyword arguments. Many of these map directly to the e.g. `Gtk` * `includes`: list of gir names to be included, can also be a GirTarget * `header`: *(Added 0.43.0)* name of main c header to include for the library, e.g. `glib.h` -* `dependencies`: deps to use during introspection scanning * `include_directories`: extra include paths to look for gir files * `install`: if true, install the generated files * `install_dir_gir`: (*Added 0.35.0*) which directory to install the -- cgit v1.1 From 0332d7e35006afa78afb5e4b4f4d2ecf9a7ded27 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 5 Jun 2020 13:42:39 +0200 Subject: cmake: Subprojects support CMAKE_PREFIX_PATH (fixes #7249) --- mesonbuild/cmake/executor.py | 22 ++++++++++++++++++++++ mesonbuild/dependencies/base.py | 16 +--------------- .../subprojects/cmOpts/CMakeLists.txt | 5 +++++ test cases/cmake/7 cmake options/test.json | 9 +++++++++ 4 files changed, 37 insertions(+), 15 deletions(-) create mode 100644 test cases/cmake/7 cmake options/test.json diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index adc028c..5ca8196 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -28,6 +28,7 @@ import textwrap from .. 
import mlog, mesonlib from ..mesonlib import PerMachine, Popen_safe, version_compare, MachineChoice from ..environment import Environment +from ..envconfig import get_env_var if T.TYPE_CHECKING: from ..dependencies.base import ExternalProgram @@ -48,6 +49,8 @@ class CMakeExecutor: self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent) self.always_capture_stderr = True self.print_cmout = False + self.prefix_paths = [] # type: T.List[str] + self.extra_cmake_args = [] # type: T.List[str] if self.cmakebin is False: self.cmakebin = None return @@ -60,6 +63,21 @@ class CMakeExecutor: self.cmakebin = None return + self.prefix_paths = self.environment.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value + env_pref_path = get_env_var( + self.for_machine, + self.environment.is_cross_build(), + 'CMAKE_PREFIX_PATH') + if env_pref_path is not None: + env_pref_path = env_pref_path.split(os.pathsep) + env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings + if not self.prefix_paths: + self.prefix_paths = [] + self.prefix_paths += env_pref_path + + if self.prefix_paths: + self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))] + def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple['ExternalProgram', str]: from ..dependencies.base import ExternalProgram @@ -226,6 +244,7 @@ class CMakeExecutor: if env is None: env = os.environ + args = args + self.extra_cmake_args if disable_cache: return self._call_impl(args, build_dir, env) @@ -362,5 +381,8 @@ class CMakeExecutor: def get_command(self) -> T.List[str]: return self.cmakebin.get_command() + def get_cmake_prefix_paths(self) -> T.List[str]: + return self.prefix_paths + def machine_choice(self) -> MachineChoice: return self.for_machine diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 2e5a5ae..956ca39 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -1085,21 +1085,7 @@ class CMakeDependency(ExternalDependency): if cm_path: cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path)) - pref_path = self.env.coredata.builtins_per_machine[self.for_machine]['cmake_prefix_path'].value - env_pref_path = get_env_var( - self.for_machine, - self.env.is_cross_build(), - 'CMAKE_PREFIX_PATH') - if env_pref_path is not None: - env_pref_path = env_pref_path.split(os.pathsep) - env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings - if not pref_path: - pref_path = [] - pref_path += env_pref_path - if pref_path: - cm_args.append('-DCMAKE_PREFIX_PATH={}'.format(';'.join(pref_path))) - - if not self._preliminary_find_check(name, cm_path, pref_path, environment.machines[self.for_machine]): + if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]): mlog.debug('Preliminary CMake check failed. 
Aborting.') return self._detect_dep(name, modules, components, cm_args) diff --git a/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt b/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt index 62b5990..873b9b3 100644 --- a/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt +++ b/test cases/cmake/7 cmake options/subprojects/cmOpts/CMakeLists.txt @@ -1,5 +1,10 @@ cmake_minimum_required(VERSION 3.7) +project(testPro) if(NOT "${SOME_CMAKE_VAR}" STREQUAL "something") message(FATAL_ERROR "Setting the CMake var failed") endif() + +if(NOT "${CMAKE_PREFIX_PATH}" STREQUAL "val1;val2") + message(FATAL_ERROR "Setting the CMAKE_PREFIX_PATH failed '${CMAKE_PREFIX_PATH}'") +endif() diff --git a/test cases/cmake/7 cmake options/test.json b/test cases/cmake/7 cmake options/test.json new file mode 100644 index 0000000..046e2ee --- /dev/null +++ b/test cases/cmake/7 cmake options/test.json @@ -0,0 +1,9 @@ +{ + "matrix": { + "options": { + "cmake_prefix_path": [ + { "val": ["val1", "val2"] } + ] + } + } +} -- cgit v1.1 From 49a974213c998937bcefe43622bb24d84666c4f2 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 11 Jun 2020 10:30:53 -0700 Subject: modules/cmake: Fix setting install_dir Fixes: #7301 --- mesonbuild/modules/cmake.py | 3 +-- test cases/cmake/19 cmake file/foolib.cmake.in | 1 + test cases/cmake/19 cmake file/meson.build | 14 ++++++++++++++ test cases/cmake/19 cmake file/test.json | 5 +++++ 4 files changed, 21 insertions(+), 2 deletions(-) create mode 100644 test cases/cmake/19 cmake file/foolib.cmake.in create mode 100644 test cases/cmake/19 cmake file/meson.build create mode 100644 test cases/cmake/19 cmake file/test.json diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py index 6c4098b..6d91b99 100644 --- a/mesonbuild/modules/cmake.py +++ b/mesonbuild/modules/cmake.py @@ -252,8 +252,7 @@ class CmakeModule(ExtensionModule): (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, '{}Config.cmake'.format(name))) ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname) - if 'install_dir' not in kwargs: - install_dir = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name) + install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'cmake', name)) if not isinstance(install_dir, str): raise mesonlib.MesonException('"install_dir" must be a string.') diff --git a/test cases/cmake/19 cmake file/foolib.cmake.in b/test cases/cmake/19 cmake file/foolib.cmake.in new file mode 100644 index 0000000..16e992b --- /dev/null +++ b/test cases/cmake/19 cmake file/foolib.cmake.in @@ -0,0 +1 @@ +@foo@ diff --git a/test cases/cmake/19 cmake file/meson.build b/test cases/cmake/19 cmake file/meson.build new file mode 100644 index 0000000..758bbee --- /dev/null +++ b/test cases/cmake/19 cmake file/meson.build @@ -0,0 +1,14 @@ +project( + 'cmake config file', +) + +cmake = import('cmake') + +cmake_conf = configuration_data() +cmake_conf.set_quoted('foo', 'bar') +cmake.configure_package_config_file( + name : 'foolib', + input : 'foolib.cmake.in', + install_dir : get_option('libdir') / 'cmake', + configuration : cmake_conf, +) diff --git a/test cases/cmake/19 cmake file/test.json b/test cases/cmake/19 cmake file/test.json new file mode 100644 index 0000000..a8c4ba3 --- /dev/null +++ b/test cases/cmake/19 cmake file/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"file": "usr/lib/cmake/foolibConfig.cmake", "type": "file"} + ] +} -- cgit 
v1.1 From 58c2aeb5e433ae7652488d53021c40db1712dea5 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 9 Jun 2020 20:38:46 -0400 Subject: [skip ci] docs: Bring up to date on options per machine I tried to fix the docs when the stuff was added, but it turns out I missed things, as noted in https://github.com/mesonbuild/meson/issues/7284#issuecomment-641641177 --- docs/markdown/Builtin-options.md | 41 ++++++++++++++++++++++++++------------- docs/markdown/Reference-tables.md | 37 ++++++++++++++++++++++++++++++++++- 2 files changed, 64 insertions(+), 14 deletions(-) diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index aa7d500..0fa127a 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -55,16 +55,9 @@ particularly the paths section may be necessary. ### Core options -Options that are labeled "per machine" in the table are set per machine. -Prefixing the option with `build.` just affects the build machine configuration, -while unprefixed just affects the host machine configuration, respectively. -Using the option as-is with no prefix affects all machines. For example: - - - `build.pkg_config_path` controls the paths pkg-config will search for just - `native: true` dependencies (build machine). - - - `pkg_config_path` controls the paths pkg-config will search for just - `native: false` dependencies (host machine). +Options that are labeled "per machine" in the table are set per machine. See +the [specifying options per machine](#Specifying-options-per-machine) section +for details. | Option | Default value | Description | Is per machine | | ------ | ------------- | ----------- | -------------- | @@ -186,9 +179,9 @@ The default values of `c_winlibs` and `cpp_winlibs` are in compiler-specific argument forms, but the libraries are: kernel32, user32, gdi32, winspool, shell32, ole32, oleaut32, uuid, comdlg32, advapi32. -c_args, cpp_args, c_link_args, and cpp_link_args only affect native builds, -when cross compiling they will not be applied to binaries or libraries -targeting the host system, only those being run on the build system. +All these `_*` options are specified per machine. See below in the +[specifying options per machine](#Specifying-options-per-machine) section on +how to do this in cross builds. When using MSVC, `cpp_eh=none` will result in no exception flags being passed, while the `cpp_eh=[value]` will result in `/EH[value]`. @@ -199,3 +192,25 @@ gcc-style compilers, nothing is passed (allowing exceptions to work), while Since *0.54.0* The `_thread_count` option can be used to control the value passed to `-s PTHREAD_POOL_SIZE` when using emcc. No other c/c++ compiler supports this option. + +## Specifying options per machine + +Since *0.51.0*, some options are specified per machine rather than globally for +all machine configurations. Prefixing the option with `build.` just affects the +build machine configuration, while unprefixed just affects the host machine +configuration, respectively. For example: + + - `build.pkg_config_path` controls the paths pkg-config will search for just + `native: true` dependencies (build machine). + + - `pkg_config_path` controls the paths pkg-config will search for just + `native: false` dependencies (host machine). + +This is useful for cross builds. In the native builds, build = host, and the +unprefixed option alone will suffice. + +Prior to *0.51.0*, these options just effected native builds when specified on +the command line, as there was no `build.` prefix. 
Similarly named fields in +the `[properties]` section of the cross file would effect cross compilers, but +the code paths were fairly different allowing differences in behavior to crop +out. diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index 60a9720..81ce921 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -155,6 +155,10 @@ These are the parameter names for passing language specific arguments to your bu | Rust | rust_args | rust_link_args | | Vala | vala_args | vala_link_args | +All these `_*` options are specified per machine. See in [specifying +options per machine](Builtin-options.md#Specifying-options-per-machine) for on +how to do this in cross builds. + ## Compiler and linker flag environment variables These environment variables will be used to modify the compiler and @@ -177,6 +181,10 @@ instead. | RUSTFLAGS | Flags for the Rust compiler | | LDFLAGS | The linker flags, used for all languages | +N.B. these settings are specified per machine, and so the environment varibles +actually come in pairs. See the [environment variables per +machine](#Environment-variables-per-machine) section for details. + ## Function Attributes These are the parameters names that are supported using @@ -267,6 +275,10 @@ These are the values that can be passed to `dependency` function's ## Compiler and Linker selection variables +N.B. these settings are specified per machine, and so the environment varibles +actually come in pairs. See the [environment variables per +machine](#Environment-variables-per-machine) section for details. + | Language | Compiler | Linker | Note | |---------------|----------|-----------|---------------------------------------------| | C | CC | CC_LD | | @@ -280,5 +292,28 @@ These are the values that can be passed to `dependency` function's | C# | CSC | CSC | The linker is the compiler | *The old environment variales are still supported, but are deprecated and will -be removed in a future version of meson. +be removed in a future version of meson.* + +## Environment variables per machine + +Since *0.54.0*, Following Autotool and other legacy build systems, environment +variables that affect machine-specific settings come in pairs: for every bare +environment variable `FOO`, there is a suffixed `FOO_FOR_BUILD`, where `FOO` +just affects the host machine configuration, while `FOO_FOR_BUILD` just affects +the build machine configuration. For example: + + - `PKG_CONFIG_PATH_FOR_BUILD` controls the paths pkg-config will search for + just `native: true` dependencies (build machine). + + - `PKG_CONFIG_PATH` controls the paths pkg-config will search for just + `native: false` dependencies (host machine). + +This mirrors the `build.` prefix used for (built-in) meson options, which has +the same meaning. + +This is useful for cross builds. In the native builds, build = host, and the +unsuffixed environment variables alone will suffice. +Prior to *0.54.0*, there was no `_FOR_BUILD`-suffixed variables, and most +environment variables only effected native machine configurations, though this +wasn't consistent (e.g. `PKG_CONFIG_PATH` still affected cross builds). 
-- cgit v1.1 From f1288ac7ec28363ba71c6db8065c8723e911e28c Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Wed, 10 Jun 2020 18:56:23 +0300 Subject: mintro: enforced order for subcommands --- mesonbuild/mintro.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index de8fc5a..5984e14 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -19,6 +19,7 @@ tests and so on. All output is in JSON for simple parsing. Currently only works for the Ninja backend. Others use generated project files and don't need this info.""" +import collections import json from . import build, coredata as cdata from . import mesonlib @@ -61,18 +62,19 @@ def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None, else: benchmarkdata = testdata = installdata = None - return { - 'ast': IntroCommand('Dump the AST of the meson file', no_bd=dump_ast), - 'benchmarks': IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata)), - 'buildoptions': IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source), - 'buildsystem_files': IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter)), - 'dependencies': IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source), - 'scan_dependencies': IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source), - 'installed': IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata)), - 'projectinfo': IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source), - 'targets': IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source), - 'tests': IntroCommand('List all unit tests', func=lambda: list_tests(testdata)), - } + # Enforce key order for argparse + return collections.OrderedDict([ + ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)), + ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))), + ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)), + ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))), + ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)), + ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)), + ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))), + ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)), + ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)), + ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))), + ]) def add_arguments(parser): intro_types = get_meson_introspection_types() -- cgit v1.1 From a43e770071d643edacd69c3ea2801dc7d979884c Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Thu, 11 Jun 2020 00:50:43 +0300 Subject: mintro: 
typing fix --- mesonbuild/mintro.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 5984e14..f42feec 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -53,7 +53,7 @@ class IntroCommand: def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None, builddata: T.Optional[build.Build] = None, backend: T.Optional[backends.Backend] = None, - sourcedir: T.Optional[str] = None) -> T.Dict[str, IntroCommand]: + sourcedir: T.Optional[str] = None) -> T.OrderedDict[str, IntroCommand]: if backend and builddata: benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) testdata = backend.create_test_serialisation(builddata.get_tests()) -- cgit v1.1 From b71e73fc64c31981caf2df70db0ef4b86d607b0c Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Thu, 11 Jun 2020 01:20:46 +0300 Subject: mintro: typing fix --- mesonbuild/mintro.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index f42feec..353fcc8 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -53,7 +53,7 @@ class IntroCommand: def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None, builddata: T.Optional[build.Build] = None, backend: T.Optional[backends.Backend] = None, - sourcedir: T.Optional[str] = None) -> T.OrderedDict[str, IntroCommand]: + sourcedir: T.Optional[str] = None) -> 'T.OrderedDict[str, IntroCommand]': if backend and builddata: benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) testdata = backend.create_test_serialisation(builddata.get_tests()) -- cgit v1.1 From a8256e490099146aef45cfbd81c3ee282eeff50b Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Thu, 11 Jun 2020 01:31:23 +0300 Subject: mintro: more typing shenanigans --- mesonbuild/mintro.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 353fcc8..c640ec3 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -53,7 +53,7 @@ class IntroCommand: def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None, builddata: T.Optional[build.Build] = None, backend: T.Optional[backends.Backend] = None, - sourcedir: T.Optional[str] = None) -> 'T.OrderedDict[str, IntroCommand]': + sourcedir: T.Optional[str] = None) -> 'collections.OrderedDict[str, IntroCommand]': if backend and builddata: benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) testdata = backend.create_test_serialisation(builddata.get_tests()) -- cgit v1.1 From 365b667dc545923718a7fbf22ff29c15877eb67b Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Thu, 11 Jun 2020 22:17:17 +0300 Subject: mintro: typings again x2 Co-authored-by: Dylan Baker --- mesonbuild/mintro.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index c640ec3..cccedaa 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -53,7 +53,7 @@ class IntroCommand: def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None, builddata: T.Optional[build.Build] = None, backend: T.Optional[backends.Backend] = None, - sourcedir: T.Optional[str] = None) -> 'collections.OrderedDict[str, IntroCommand]': + sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]': if backend and builddata: benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks()) testdata = 
backend.create_test_serialisation(builddata.get_tests()) -- cgit v1.1 From 85be45ea1169c6383a252c5c2235b024c50a9672 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 8 May 2020 14:00:38 -0700 Subject: dependencies: Split search_tool out of ExternalDependency it really doesn't make sense to put this in the ExternalDependency class. It doesn't rely on any of the state of that class, and it's generically useful inside meson. --- mesonbuild/dependencies/base.py | 46 ++++++++++++++++++++++------------------- mesonbuild/dependencies/ui.py | 7 ++++--- 2 files changed, 29 insertions(+), 24 deletions(-) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 956ca39..a7b23f6 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -77,6 +77,27 @@ class DependencyMethods(Enum): DUB = 'dub' +def find_external_program(env: Environment, for_machine: MachineChoice, name: str, + display_name: str, default_names: T.List[str]) -> T.Generator['ExternalProgram', None, None]: + """Find an external program, chcking the cross file plus any default options.""" + # Lookup in cross or machine file. + potential_path = env.lookup_binary_entry(for_machine, name) + if potential_path is not None: + mlog.debug('{} binary for {} specified from cross file, native file, ' + 'or env var as {}'.format(display_name, for_machine, potential_path)) + yield ExternalProgram.from_entry(name, potential_path) + # We never fallback if the user-specified option is no good, so + # stop returning options. + return + mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name)) + # Fallback on hard-coded defaults. + # TODO prefix this for the cross case instead of ignoring thing. + if env.machines.matches_build_machine(for_machine): + for potential_path in default_names: + mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path) + yield ExternalProgram(potential_path, silent=True) + + class Dependency: @classmethod @@ -353,25 +374,6 @@ class ExternalDependency(Dependency, HasNativeKwarg): raise DependencyException(m.format(self.name, not_found, self.version)) return - # Create an iterator of options - def search_tool(self, name, display_name, default_names): - # Lookup in cross or machine file. - potential_path = self.env.lookup_binary_entry(self.for_machine, name) - if potential_path is not None: - mlog.debug('{} binary for {} specified from cross file, native file, ' - 'or env var as {}'.format(display_name, self.for_machine, potential_path)) - yield ExternalProgram.from_entry(name, potential_path) - # We never fallback if the user-specified option is no good, so - # stop returning options. - return - mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name)) - # Fallback on hard-coded defaults. - # TODO prefix this for the cross case instead of ignoring thing. 
- if self.env.machines.matches_build_machine(self.for_machine): - for potential_path in default_names: - mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path) - yield ExternalProgram(potential_path, silent=True) - class NotFoundDependency(Dependency): def __init__(self, environment): @@ -437,7 +439,9 @@ class ConfigToolDependency(ExternalDependency): if not isinstance(versions, list) and versions is not None: versions = listify(versions) best_match = (None, None) - for potential_bin in self.search_tool(self.tool_name, self.tool_name, self.tools): + for potential_bin in find_external_program( + self.env, self.for_machine, self.tool_name, + self.tool_name, self.tools): if not potential_bin.found(): continue tool = potential_bin.get_command() @@ -561,7 +565,7 @@ class PkgConfigDependency(ExternalDependency): else: assert PkgConfigDependency.class_pkgbin[self.for_machine] is None mlog.debug('Pkg-config binary for %s is not cached.' % self.for_machine) - for potential_pkgbin in self.search_tool('pkgconfig', 'Pkg-config', environment.default_pkgconfig): + for potential_pkgbin in find_external_program(self.env, self.for_machine, 'pkgconfig', 'Pkg-config', environment.default_pkgconfig): mlog.debug('Trying pkg-config binary {} for machine {} at {}' .format(potential_pkgbin.name, self.for_machine, potential_pkgbin.command)) version_if_ok = self.check_pkgconfig(potential_pkgbin) diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 4b3d634..3bba3dc 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -31,9 +31,11 @@ from .base import DependencyException, DependencyMethods from .base import ExternalDependency, NonExistingExternalProgram from .base import ExtraFrameworkDependency, PkgConfigDependency from .base import ConfigToolDependency, DependencyFactory +from .base import find_external_program if T.TYPE_CHECKING: from ..environment import Environment + from .base import ExternalProgram class GLDependencySystem(ExternalDependency): @@ -324,10 +326,9 @@ class QtBaseDependency(ExternalDependency): if prefix: self.bindir = os.path.join(prefix, 'bin') - def search_qmake(self): + def search_qmake(self) -> T.Generator['ExternalProgram', None, None]: for qmake in ('qmake-' + self.name, 'qmake'): - for potential_qmake in self.search_tool(qmake, 'QMake', [qmake]): - yield potential_qmake + yield from find_external_program(self.env, self.for_machine, qmake, 'QMake', [qmake]) def _qmake_detect(self, mods, kwargs): for qmake in self.search_qmake(): -- cgit v1.1 From 01e0cc6735f4633cc3f179a0b0f877ab863a33f0 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 8 Jun 2020 12:00:19 -0700 Subject: cmake: Use shared find_external_program instead of open coding --- mesonbuild/cmake/executor.py | 27 +++++---------------------- 1 file changed, 5 insertions(+), 22 deletions(-) diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index 5ca8196..ae48918 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -79,25 +79,7 @@ class CMakeExecutor: self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))] def find_cmake_binary(self, environment: Environment, silent: bool = False) -> T.Tuple['ExternalProgram', str]: - from ..dependencies.base import ExternalProgram - - # Create an iterator of options - def search(): - # Lookup in cross or machine file. 
- potential_cmakepath = environment.lookup_binary_entry(self.for_machine, 'cmake') - if potential_cmakepath is not None: - mlog.debug('CMake binary for %s specified from cross file, native file, or env var as %s.', self.for_machine, potential_cmakepath) - yield ExternalProgram.from_entry('cmake', potential_cmakepath) - # We never fallback if the user-specified option is no good, so - # stop returning options. - return - mlog.debug('CMake binary missing from cross or native file, or env var undefined.') - # Fallback on hard-coded defaults. - # TODO prefix this for the cross case instead of ignoring thing. - if environment.machines.matches_build_machine(self.for_machine): - for potential_cmakepath in environment.default_cmake: - mlog.debug('Trying a default CMake fallback at', potential_cmakepath) - yield ExternalProgram(potential_cmakepath, silent=True) + from ..dependencies.base import find_external_program # Only search for CMake the first time and store the result in the class # definition @@ -107,10 +89,11 @@ class CMakeExecutor: mlog.debug('CMake binary for %s is cached.' % self.for_machine) else: assert CMakeExecutor.class_cmakebin[self.for_machine] is None + mlog.debug('CMake binary for %s is not cached' % self.for_machine) - for potential_cmakebin in search(): - mlog.debug('Trying CMake binary {} for machine {} at {}' - .format(potential_cmakebin.name, self.for_machine, potential_cmakebin.command)) + for potential_cmakebin in find_external_program( + environment, self.for_machine, 'cmake', 'CMake', + environment.default_cmake): version_if_ok = self.check_cmake(potential_cmakebin) if not version_if_ok: continue -- cgit v1.1 From 18d8dbd3bda56a989bb9f6d767c7491fbbd10d45 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 8 Jun 2020 12:05:05 -0700 Subject: dependencies: Don't try to find a binary by "default_path" when cross compiling --- mesonbuild/dependencies/base.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index a7b23f6..7bff082 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -78,7 +78,8 @@ class DependencyMethods(Enum): def find_external_program(env: Environment, for_machine: MachineChoice, name: str, - display_name: str, default_names: T.List[str]) -> T.Generator['ExternalProgram', None, None]: + display_name: str, default_names: T.List[str], + allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]: """Find an external program, chcking the cross file plus any default options.""" # Lookup in cross or machine file. potential_path = env.lookup_binary_entry(for_machine, name) @@ -90,12 +91,14 @@ def find_external_program(env: Environment, for_machine: MachineChoice, name: st # stop returning options. return mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name)) - # Fallback on hard-coded defaults. - # TODO prefix this for the cross case instead of ignoring thing. 
- if env.machines.matches_build_machine(for_machine): + # Fallback on hard-coded defaults, if a default binary is allowed for use + # with cross targets, or if this is not a cross target + if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)): for potential_path in default_names: mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path) yield ExternalProgram(potential_path, silent=True) + else: + mlog.debug('Default target is not allowed for cross use') class Dependency: -- cgit v1.1 From f5bd3254e926087091ed4c0e5e561d9118aa114b Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 8 Jun 2020 12:21:42 -0700 Subject: dependencies: Don't allow using the default binary for host on cross compiles Otherwise we can end up finding dependencies from the build machine for the host machine, which is incorrect. This alters cmake, pkg-config, and all config-tool based dependencies. Fixes: #7276 --- mesonbuild/cmake/executor.py | 2 +- mesonbuild/dependencies/base.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index ae48918..d64286a 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -93,7 +93,7 @@ class CMakeExecutor: mlog.debug('CMake binary for %s is not cached' % self.for_machine) for potential_cmakebin in find_external_program( environment, self.for_machine, 'cmake', 'CMake', - environment.default_cmake): + environment.default_cmake, allow_default_for_cross=False): version_if_ok = self.check_cmake(potential_cmakebin) if not version_if_ok: continue diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 7bff082..828f81e 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -444,7 +444,7 @@ class ConfigToolDependency(ExternalDependency): best_match = (None, None) for potential_bin in find_external_program( self.env, self.for_machine, self.tool_name, - self.tool_name, self.tools): + self.tool_name, self.tools, allow_default_for_cross=False): if not potential_bin.found(): continue tool = potential_bin.get_command() @@ -568,9 +568,9 @@ class PkgConfigDependency(ExternalDependency): else: assert PkgConfigDependency.class_pkgbin[self.for_machine] is None mlog.debug('Pkg-config binary for %s is not cached.' 
% self.for_machine) - for potential_pkgbin in find_external_program(self.env, self.for_machine, 'pkgconfig', 'Pkg-config', environment.default_pkgconfig): - mlog.debug('Trying pkg-config binary {} for machine {} at {}' - .format(potential_pkgbin.name, self.for_machine, potential_pkgbin.command)) + for potential_pkgbin in find_external_program( + self.env, self.for_machine, 'pkgconfig', 'Pkg-config', + environment.default_pkgconfig, allow_default_for_cross=False): version_if_ok = self.check_pkgconfig(potential_pkgbin) if not version_if_ok: continue -- cgit v1.1 From 36d4ccaf806660f1a65a959e88e228bd3f5fb746 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 8 Jun 2020 13:47:33 -0700 Subject: dependencies: Add a couple of type annotations --- mesonbuild/dependencies/base.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 828f81e..0f8c8a2 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -435,13 +435,14 @@ class ConfigToolDependency(ExternalDependency): return m.group(0).rstrip('.') return version - def find_config(self, versions=None, returncode: int = 0): + def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) \ + -> T.Tuple[T.Optional[str], T.Optional[str]]: """Helper method that searches for config tool binaries in PATH and returns the one that best matches the given version requirements. """ if not isinstance(versions, list) and versions is not None: versions = listify(versions) - best_match = (None, None) + best_match = (None, None) # type: T.Tuple[T.Optional[str], T.Optional[str]] for potential_bin in find_external_program( self.env, self.for_machine, self.tool_name, self.tool_name, self.tools, allow_default_for_cross=False): -- cgit v1.1 From ec1bd22b15a5bfbd4599ebf30e8c63067f336f29 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Fri, 12 Jun 2020 02:04:38 +0530 Subject: mdist: Filter out buildtype to avoid warning Since we parse buildoptions.json to pass options, we end up passing -Dbuildtype and also -Doptimization and -Ddebug which triggers the warning: WARNING: Recommend using either -Dbuildtype or -Doptimization + -Ddebug [...] Filter out buildtype. It is redundant. 
--- mesonbuild/mdist.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py index b324f76..5ab0ad4 100644 --- a/mesonbuild/mdist.py +++ b/mesonbuild/mdist.py @@ -213,7 +213,7 @@ def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir): unpacked_src_dir = unpacked_files[0] with open(os.path.join(bld_root, 'meson-info', 'intro-buildoptions.json')) as boptions: meson_command += ['-D{name}={value}'.format(**o) for o in json.load(boptions) - if o['name'] not in ['backend', 'install_umask']] + if o['name'] not in ['backend', 'install_umask', 'buildtype']] meson_command += extra_meson_args ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_bin) -- cgit v1.1 From edcddb3a28b11a32ffc19857ceca2f5384381fd9 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 12 Jun 2020 12:15:14 +0200 Subject: cmake: Fix handling of path seperators (fixes #7294) --- mesonbuild/cmake/executor.py | 2 +- mesonbuild/dependencies/base.py | 11 ++++++++--- .../13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake | 9 +++++++++ .../13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake | 9 +++++++++ test cases/linuxlike/13 cmake dependency/meson.build | 2 ++ test cases/linuxlike/13 cmake dependency/test.json | 2 +- 6 files changed, 30 insertions(+), 5 deletions(-) create mode 100644 test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake create mode 100644 test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py index d64286a..148a999 100644 --- a/mesonbuild/cmake/executor.py +++ b/mesonbuild/cmake/executor.py @@ -69,7 +69,7 @@ class CMakeExecutor: self.environment.is_cross_build(), 'CMAKE_PREFIX_PATH') if env_pref_path is not None: - env_pref_path = env_pref_path.split(os.pathsep) + env_pref_path = re.split(r':|;', env_pref_path) env_pref_path = [x for x in env_pref_path if x] # Filter out empty strings if not self.prefix_paths: self.prefix_paths = [] diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 0f8c8a2..eed714a 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -1145,12 +1145,17 @@ class CMakeDependency(ExternalDependency): except MesonException: return None + def process_paths(l: T.List[str]) -> T.Set[str]: + l = [x.split(':') for x in l] + l = [x for sublist in l for x in sublist] + return set(l) + # Extract the variables and sanity check them - root_paths = set(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH')) - root_paths.update(set(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT'))) + root_paths = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH')) + root_paths.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT'))) root_paths = sorted(root_paths) root_paths = list(filter(lambda x: os.path.isdir(x), root_paths)) - module_paths = set(temp_parser.get_cmake_var('MESON_PATHS_LIST')) + module_paths = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST')) rooted_paths = [] for j in [Path(x) for x in root_paths]: for i in [Path(x) for x in module_paths]: diff --git a/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake b/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake new file mode 100644 index 0000000..e12aeb9 --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/cmake_fake1/cmMesonTestF1Config.cmake @@ -0,0 +1,9 @@ 
+find_package(ZLIB) + +if(ZLIB_FOUND OR ZLIB_Found) + set(cmMesonTestF1_FOUND ON) + set(cmMesonTestF1_LIBRARIES ${ZLIB_LIBRARY}) + set(cmMesonTestF1_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR}) +else() + set(cmMesonTestF1_FOUND OFF) +endif() diff --git a/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake b/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake new file mode 100644 index 0000000..a7a55d8 --- /dev/null +++ b/test cases/linuxlike/13 cmake dependency/cmake_fake2/cmMesonTestF2Config.cmake @@ -0,0 +1,9 @@ +find_package(ZLIB) + +if(ZLIB_FOUND OR ZLIB_Found) + set(cmMesonTestF2_FOUND ON) + set(cmMesonTestF2_LIBRARIES ${ZLIB_LIBRARY}) + set(cmMesonTestF2_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR}) +else() + set(cmMesonTestF2_FOUND OFF) +endif() diff --git a/test cases/linuxlike/13 cmake dependency/meson.build b/test cases/linuxlike/13 cmake dependency/meson.build index 93824ab..9918a71 100644 --- a/test cases/linuxlike/13 cmake dependency/meson.build +++ b/test cases/linuxlike/13 cmake dependency/meson.build @@ -44,6 +44,8 @@ assert(depf2.found() == false, 'Invalid CMake targets should fail') # Try to find cmMesonTestDep in a custom prefix # setup_env.json is used by run_project_tests.py:_run_test to point to ./cmake_pref_env/ depPrefEnv = dependency('cmMesonTestDep', required : true, method : 'cmake') +depPrefEnv1 = dependency('cmMesonTestF1', required : true, method : 'cmake') +depPrefEnv2 = dependency('cmMesonTestF2', required : true, method : 'cmake') # Try to find a dependency with a custom CMake module diff --git a/test cases/linuxlike/13 cmake dependency/test.json b/test cases/linuxlike/13 cmake dependency/test.json index 565713e..fc29f72 100644 --- a/test cases/linuxlike/13 cmake dependency/test.json +++ b/test cases/linuxlike/13 cmake dependency/test.json @@ -1,5 +1,5 @@ { "env": { - "CMAKE_PREFIX_PATH": "@ROOT@/cmake_pref_env" + "CMAKE_PREFIX_PATH": "@ROOT@/cmake_fake1;@ROOT@/cmake_fake2:@ROOT@/cmake_pref_env" } } -- cgit v1.1 From 96379e51edaa0bb85f2e89b42512634b87e20110 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 12 Jun 2020 20:17:45 +0200 Subject: tests: reset CMakeDependency.class_cmakeinfo --- run_project_tests.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/run_project_tests.py b/run_project_tests.py index 5fc8aa2..088241b 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -438,7 +438,10 @@ def validate_output(test: TestDef, stdo: str, stde: str) -> str: # coded to run as a batch process. 
def clear_internal_caches(): import mesonbuild.interpreterbase + from mesonbuild.dependencies import CMakeDependency + from mesonbuild.mesonlib import PerMachine mesonbuild.interpreterbase.FeatureNew.feature_registry = {} + CMakeDependency.class_cmakeinfo = PerMachine(None, None) def run_test_inprocess(testdir): old_stdout = sys.stdout -- cgit v1.1 From 4a5aec2fc02a522b783d6177c3a6f2bf18ce60af Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Sat, 13 Jun 2020 16:32:41 +0530 Subject: ci: Pin ninja to 1.9.0.post1 for now See: https://github.com/mesonbuild/meson/pull/7306#issuecomment-643606736 --- .github/workflows/unusedargs_missingreturn.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/unusedargs_missingreturn.yml b/.github/workflows/unusedargs_missingreturn.yml index fa4405b..859dec2 100644 --- a/.github/workflows/unusedargs_missingreturn.yml +++ b/.github/workflows/unusedargs_missingreturn.yml @@ -55,7 +55,11 @@ jobs: - uses: actions/setup-python@v1 with: python-version: '3.x' - - run: pip install ninja pefile + # ninja==1.10 pypi release didn't ship with windows binaries, which causes + # pip to try to build it which fails on Windows. Pin the previous version + # for now. We can update once that's fixed. + # https://pypi.org/project/ninja/1.10.0/#files + - run: pip install ninja==1.9.0.post1 pefile - run: python run_project_tests.py --only platform-windows env: CI: "1" -- cgit v1.1 From bd7122beed23ad157b8bc389cc30ee16d9f13f0f Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Thu, 4 Jun 2020 15:54:22 +0100 Subject: azure: Run gtkdoc tests for Cygwin --- azure-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index de956c8..0408342 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -83,6 +83,7 @@ jobs: gcc-objc,^ git,^ gobject-introspection,^ + gtk-doc,^ libarchive13,^ libboost-devel,^ libglib2.0-devel,^ -- cgit v1.1 From 1b7855c36d6ed40ef5bb5920e8c18b7b646809b0 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Sun, 31 May 2020 16:34:28 +0100 Subject: Handle multiple substitutions in eval_custom_target_command() Handle command arguments which contain multiple substitutions correctly in Backend.eval_custom_target_command() In particular, gnome.gtkdoc() makes arguments of the form '--cflags -I@SOURCE_ROOT@ -I@BUILD_ROOT' (where these arguments are then passed down to a compiler invocation) Normally, these are subsequently made right by NinjaBackend.replace_paths(), but if Backend.as_meson_exe_cmdline() decides that the command needs to be pickled, that doesn't happen. 
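As a minimal illustrative sketch (not Meson's actual code), the difference is that chained elif checks handle at most one template per argument, while independent if checks substitute every template present in a single argument:

# Minimal sketch, not Meson's code: one custom_target argument may carry
# several placeholders, like the gnome.gtkdoc() arguments described above.
arg = '--cflags -I@SOURCE_ROOT@ -I@BUILD_ROOT@'
subs = {'@SOURCE_ROOT@': '/path/to/src', '@BUILD_ROOT@': '/path/to/build'}

def substitute_first_only(a):
    # elif-style logic: only the first matching template gets replaced
    if '@SOURCE_ROOT@' in a:
        a = a.replace('@SOURCE_ROOT@', subs['@SOURCE_ROOT@'])
    elif '@BUILD_ROOT@' in a:
        a = a.replace('@BUILD_ROOT@', subs['@BUILD_ROOT@'])
    return a

def substitute_all(a):
    # independent checks: every template that occurs gets replaced
    for key, val in subs.items():
        if key in a:
            a = a.replace(key, val)
    return a

print(substitute_first_only(arg))  # --cflags -I/path/to/src -I@BUILD_ROOT@
print(substitute_all(arg))         # --cflags -I/path/to/src -I/path/to/build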
(Although having two places where this substitution might happen smells really bad) --- mesonbuild/backend/backends.py | 59 +++++++++++++++++++++--------------------- 1 file changed, 30 insertions(+), 29 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 3573d94..19df78b 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -1034,35 +1034,36 @@ class Backend: elif not isinstance(i, str): err_msg = 'Argument {0} is of unknown type {1}' raise RuntimeError(err_msg.format(str(i), str(type(i)))) - elif '@SOURCE_ROOT@' in i: - i = i.replace('@SOURCE_ROOT@', source_root) - elif '@BUILD_ROOT@' in i: - i = i.replace('@BUILD_ROOT@', build_root) - elif '@DEPFILE@' in i: - if target.depfile is None: - msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \ - 'keyword argument.'.format(target.name) - raise MesonException(msg) - dfilename = os.path.join(outdir, target.depfile) - i = i.replace('@DEPFILE@', dfilename) - elif '@PRIVATE_DIR@' in i: - if target.absolute_paths: - pdir = self.get_target_private_dir_abs(target) - else: - pdir = self.get_target_private_dir(target) - i = i.replace('@PRIVATE_DIR@', pdir) - elif '@PRIVATE_OUTDIR_' in i: - match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i) - if not match: - msg = 'Custom target {!r} has an invalid argument {!r}' \ - ''.format(target.name, i) - raise MesonException(msg) - source = match.group(0) - if match.group(1) is None and not target.absolute_paths: - lead_dir = '' - else: - lead_dir = self.environment.get_build_dir() - i = i.replace(source, os.path.join(lead_dir, outdir)) + else: + if '@SOURCE_ROOT@' in i: + i = i.replace('@SOURCE_ROOT@', source_root) + if '@BUILD_ROOT@' in i: + i = i.replace('@BUILD_ROOT@', build_root) + if '@DEPFILE@' in i: + if target.depfile is None: + msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \ + 'keyword argument.'.format(target.name) + raise MesonException(msg) + dfilename = os.path.join(outdir, target.depfile) + i = i.replace('@DEPFILE@', dfilename) + if '@PRIVATE_DIR@' in i: + if target.absolute_paths: + pdir = self.get_target_private_dir_abs(target) + else: + pdir = self.get_target_private_dir(target) + i = i.replace('@PRIVATE_DIR@', pdir) + if '@PRIVATE_OUTDIR_' in i: + match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i) + if not match: + msg = 'Custom target {!r} has an invalid argument {!r}' \ + ''.format(target.name, i) + raise MesonException(msg) + source = match.group(0) + if match.group(1) is None and not target.absolute_paths: + lead_dir = '' + else: + lead_dir = self.environment.get_build_dir() + i = i.replace(source, os.path.join(lead_dir, outdir)) cmd.append(i) # Substitute the rest of the template strings values = mesonlib.get_filenames_templates_dict(inputs, outputs) -- cgit v1.1 From b91355a29b2aac57a118b813d6166c0e8dea481d Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Sun, 31 May 2020 20:08:09 +0100 Subject: Include the dep itself in extra_paths for Windows This is needed in the case where a custom_target directly depends on a shared library, and somehow loads it. 
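Windows (and Cygwin) have no rpath, so a freshly built DLL can only be loaded at run time if its directory is on PATH. A rough, generic sketch of that technique follows (illustrative only, not Meson's code; the helper name is invented):

import os
import subprocess

def run_with_dll_dirs(cmd, dll_dirs):
    # Hypothetical helper: prepend the directories containing the needed
    # DLLs so the Windows loader can resolve them while 'cmd' runs.
    env = dict(os.environ)
    env['PATH'] = os.pathsep.join(list(dll_dirs) + [env.get('PATH', '')])
    return subprocess.run(cmd, env=env, check=True)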
(Specifically this can be the case with gtkdoc, when it invokes gtkdoc-scangobj, which will build and run its own code to load a shared library, to introspect it) --- mesonbuild/backend/backends.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 19df78b..fc8c8df 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -743,6 +743,7 @@ class Backend: for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False): result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath))) for bdep in extra_bdeps: + prospectives.add(bdep) prospectives.update(bdep.get_transitive_link_deps()) # Internal deps for ld in prospectives: -- cgit v1.1 From 9f0e75bb409e07f27f4f75663572c2139a860f20 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 3 Jun 2020 17:28:41 +0100 Subject: Also adjust PATH in gtkdochelper for Cygwin Also do Windows loader specific PATH adjustment (to emulate rpath) in gtkdochelper for Cygwin. --- mesonbuild/scripts/gtkdochelper.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py index 6b174a6..812604a 100644 --- a/mesonbuild/scripts/gtkdochelper.py +++ b/mesonbuild/scripts/gtkdochelper.py @@ -16,7 +16,7 @@ import sys, os import subprocess import shutil import argparse -from ..mesonlib import MesonException, Popen_safe, is_windows, split_args +from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args from . import destdir_join parser = argparse.ArgumentParser() @@ -55,16 +55,18 @@ def gtkdoc_run_check(cmd, cwd, library_paths=None): library_paths = [] env = dict(os.environ) - if is_windows(): + if is_windows() or is_cygwin(): if 'PATH' in env: library_paths.extend(env['PATH'].split(os.pathsep)) env['PATH'] = os.pathsep.join(library_paths) - cmd.insert(0, sys.executable) else: if 'LD_LIBRARY_PATH' in env: library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep)) env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths) + if is_windows(): + cmd.insert(0, sys.executable) + # Put stderr into stdout since we want to print it out anyway. # This preserves the order of messages. p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2] -- cgit v1.1 From 3babaaaeecb01ceefb25508783207f1bf7876447 Mon Sep 17 00:00:00 2001 From: Jon Turney Date: Wed, 3 Jun 2020 17:16:47 +0100 Subject: Tweak gnome.gtkdoc() documentation for clarity and grammar --- docs/markdown/Gnome-module.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index 54d89aa..0d1f269 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -343,8 +343,8 @@ of the module. Note that this has the downside of rebuilding the doc for each build, which is often very slow. It usually should be enabled only in CI. -This creates a `$module-doc` target that can be ran to build docs and -normally these are only built on install. +This also creates a `$module-doc` target that can be run to build documentation. +Normally the documentation is only built on install. *Since 0.52.0* Returns a target object that can be passed as dependency to other targets using generated doc files (e.g. in `content_files` of another doc).
-- cgit v1.1 From 6c56478ee1e15477b54d92bf25b7b4b1489bfcdb Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 12 Jun 2020 22:03:50 +0200 Subject: cmake: fix definitions with interface libraries (fixes #7299) --- mesonbuild/cmake/interpreter.py | 13 ++++++------- test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt | 2 ++ test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp | 4 ++++ test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp | 4 ++++ test cases/cmake/10 header only/main.cpp | 6 ++++++ .../cmake/10 header only/subprojects/cmMod/CMakeLists.txt | 1 + .../10 header only/subprojects/cmMod/include/cmMod.hpp | 5 +++++ 7 files changed, 28 insertions(+), 7 deletions(-) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index a5bf545..d5ec0a7 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -317,13 +317,6 @@ class ConverterTarget: tgt = trace.targets.get(self.cmake_name) if tgt: self.depends_raw = trace.targets[self.cmake_name].depends - if self.type.upper() == 'INTERFACE_LIBRARY': - props = tgt.properties - - self.includes += props.get('INTERFACE_INCLUDE_DIRECTORIES', []) - self.public_compile_opts += props.get('INTERFACE_COMPILE_DEFINITIONS', []) - self.public_compile_opts += props.get('INTERFACE_COMPILE_OPTIONS', []) - self.link_flags += props.get('INTERFACE_LINK_OPTIONS', []) # TODO refactor this copy paste from CMakeDependency for future releases reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$') @@ -342,6 +335,12 @@ class ConverterTarget: libraries = [] mlog.debug(tgt) + if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties: + self.includes += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x] + + if 'INTERFACE_LINK_OPTIONS' in tgt.properties: + self.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x] + if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties: self.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x] diff --git a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt index 9798209..9c95636 100644 --- a/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt +++ b/test cases/cmake/1 basic/subprojects/cmMod/CMakeLists.txt @@ -8,5 +8,7 @@ include_directories(${CMAKE_CURRENT_BINARY_DIR}) add_definitions("-DDO_NOTHING_JUST_A_FLAG=1") add_library(cmModLib++ SHARED cmMod.cpp) +target_compile_definitions(cmModLib++ PRIVATE MESON_MAGIC_FLAG=21) +target_compile_definitions(cmModLib++ INTERFACE MESON_MAGIC_FLAG=42) include(GenerateExportHeader) generate_export_header(cmModLib++) diff --git a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp index d3141d5..f4cbea0 100644 --- a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp +++ b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.cpp @@ -2,6 +2,10 @@ using namespace std; +#if MESON_MAGIC_FLAG != 21 +#error "Invalid MESON_MAGIC_FLAG (private)" +#endif + cmModClass::cmModClass(string foo) { str = foo + " World"; } diff --git a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp index 0e6dc04..4445e1f 100644 --- a/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp +++ b/test cases/cmake/1 basic/subprojects/cmMod/cmMod.hpp @@ -3,6 +3,10 @@ #include "cmmodlib++_export.h" #include +#if MESON_MAGIC_FLAG != 42 && MESON_MAGIC_FLAG != 21 +#error "Invalid MESON_MAGIC_FLAG" +#endif + 
class CMMODLIB___EXPORT cmModClass { private: std::string str; diff --git a/test cases/cmake/10 header only/main.cpp b/test cases/cmake/10 header only/main.cpp index 9507961..1417881 100644 --- a/test cases/cmake/10 header only/main.cpp +++ b/test cases/cmake/10 header only/main.cpp @@ -3,8 +3,14 @@ using namespace std; +#define EXPECTED "Hello World compDef 42" + int main(void) { cmModClass obj("Hello"); cout << obj.getStr() << endl; + if (obj.getStr() != EXPECTED) { + cerr << "Expected: '" << EXPECTED << "'" << endl; + return 1; + } return 0; } diff --git a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt index f5d9a47..e01b6e2 100644 --- a/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt +++ b/test cases/cmake/10 header only/subprojects/cmMod/CMakeLists.txt @@ -9,3 +9,4 @@ add_library(cmModLib INTERFACE) set_target_properties(cmModLib PROPERTIES INTERFACE_COMPILE_OPTIONS "-DCMAKE_FLAG_MUST_BE_PRESENT") target_include_directories(cmModLib INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}/include") target_compile_definitions(cmModLib INTERFACE -DCMAKE_COMPILER_DEFINE_STR="compDef") +target_compile_definitions(cmModLib INTERFACE MESON_MAGIC_FLAG=42) diff --git a/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp b/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp index 7ea72f7..fe01040 100644 --- a/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp +++ b/test cases/cmake/10 header only/subprojects/cmMod/include/cmMod.hpp @@ -6,6 +6,9 @@ #error "The flag CMAKE_FLAG_MUST_BE_PRESENT was not set" #endif +#define xstr(s) str(s) +#define str(s) #s + class cmModClass { private: std::string str; @@ -13,6 +16,8 @@ class cmModClass { cmModClass(std::string foo) { str = foo + " World "; str += CMAKE_COMPILER_DEFINE_STR; + str += ' '; + str += xstr(MESON_MAGIC_FLAG); } inline std::string getStr() const { return str; } -- cgit v1.1 From e121942fcd1a5853618a5b9ba1bc01ed10acf4c4 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Sun, 14 Jun 2020 03:45:23 +0530 Subject: Disable failing cmake jobs on VS2017 These only fail when building with msvc/clang-cl on the VS2017-Win2016 image. 
See: https://github.com/mesonbuild/meson/issues/7307 --- azure-pipelines.yml | 2 ++ test cases/cmake/2 advanced/meson.build | 7 +++++++ test cases/cmake/5 object library/meson.build | 7 +++++++ 3 files changed, 16 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0408342..65fc020 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -15,6 +15,8 @@ jobs: - job: vs2017 pool: vmImage: VS2017-Win2016 + variables: + CI_JOB_VS2017: 1 strategy: matrix: diff --git a/test cases/cmake/2 advanced/meson.build b/test cases/cmake/2 advanced/meson.build index a10db1c..4ebcd48 100644 --- a/test cases/cmake/2 advanced/meson.build +++ b/test cases/cmake/2 advanced/meson.build @@ -5,6 +5,13 @@ if not dep_test.found() error('MESON_SKIP_TEST: zlib is not installed') endif +py3 = import('python').find_installation('python3') +get_envvar = '''import os, sys; print(os.environ.get('@0@', 0), end='')''' +# Remove this env var from azure-pipelines.yml when fixed +if run_command(py3, '-c', get_envvar.format('CI_JOB_VS2017')).stdout() == '1' + error('MESON_SKIP_TEST: broken for vs2017 jobs') +endif + cm = import('cmake') # Test the "normal" subproject call diff --git a/test cases/cmake/5 object library/meson.build b/test cases/cmake/5 object library/meson.build index f38a2dd..40b909b 100644 --- a/test cases/cmake/5 object library/meson.build +++ b/test cases/cmake/5 object library/meson.build @@ -5,6 +5,13 @@ if not dep_test.found() error('MESON_SKIP_TEST: zlib is not installed') endif +py3 = import('python').find_installation('python3') +get_envvar = '''import os, sys; print(os.environ.get('@0@', 0), end='')''' +# Remove this env var from azure-pipelines.yml when fixed +if run_command(py3, '-c', get_envvar.format('CI_JOB_VS2017')).stdout() == '1' + error('MESON_SKIP_TEST: broken for vs2017 jobs') +endif + cm = import('cmake') sub_pro = cm.subproject('cmObjLib') -- cgit v1.1 From 977dc4d19f849ed5db39d1d2399e506969aee709 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vili=20V=C3=A4in=C3=B6l=C3=A4?= Date: Sun, 14 Jun 2020 14:16:52 +0300 Subject: Use cmake args also when calling get_cmake_info - vcpkg libraries are not found when given cmake_toolchain_file and vcpkg_target_triplet as cmake_args when looking for the dependency if the first call to cmake has different arguments. The libraries are found if the first call has same arguments or if the CMakeCache.txt is deleted in call_with_fake_build. 
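A toy simulation of the underlying pitfall (purely illustrative, not Meson's or CMake's code): whatever arguments reach the first configure run in the private build directory are what end up cached, and later runs that add the toolchain arguments silently reuse the cached result:

import pathlib
import tempfile

def fake_configure(build_dir, args):
    # Simulates first-run caching: only the initial arguments are recorded.
    cache = pathlib.Path(build_dir) / 'CMakeCache.txt'
    if not cache.exists():
        cache.write_text('\n'.join(args))
    return cache.read_text().splitlines()

with tempfile.TemporaryDirectory() as d:
    print(fake_configure(d, []))  # first call without the toolchain file: []
    print(fake_configure(d, ['-DCMAKE_TOOLCHAIN_FILE=vcpkg.cmake']))  # still [] (cached)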
--- mesonbuild/dependencies/base.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index eed714a..23701da 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -1078,8 +1078,9 @@ class CMakeDependency(ExternalDependency): # Setup the trace parser self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir()) + cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args')) if CMakeDependency.class_cmakeinfo[self.for_machine] is None: - CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info() + CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args) self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine] if self.cmakeinfo is None: raise self._gen_exception('Unable to obtain CMake system information') @@ -1089,10 +1090,8 @@ class CMakeDependency(ExternalDependency): modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))] cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path')) cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path] - cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args')) if cm_path: cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path)) - if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]): mlog.debug('Preliminary CMake check failed. Aborting.') return @@ -1103,7 +1102,7 @@ class CMakeDependency(ExternalDependency): return s.format(self.__class__.__name__, self.name, self.is_found, self.version_reqs) - def _get_cmake_info(self): + def _get_cmake_info(self, cm_args): mlog.debug("Extracting basic cmake information") res = {} @@ -1122,6 +1121,7 @@ class CMakeDependency(ExternalDependency): # Prepare options cmake_opts = temp_parser.trace_args() + ['.'] + cmake_opts += cm_args if len(i) > 0: cmake_opts = ['-G', i] + cmake_opts -- cgit v1.1 From e2379148a61f47107e7a84f235ca31f76fe26ac1 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Mon, 8 Jun 2020 00:13:18 -0400 Subject: fortran generated test: add code to reveal issue used with Issue #7265 --- test cases/fortran/7 generated/meson.build | 7 ++++++- test cases/fortran/7 generated/mod1.fpp | 4 ++-- test cases/fortran/7 generated/mod2.fpp | 6 +++--- test cases/fortran/7 generated/mod3.fpp | 6 ++++++ test cases/fortran/7 generated/prog.f90 | 9 +++++---- 5 files changed, 22 insertions(+), 10 deletions(-) create mode 100644 test cases/fortran/7 generated/mod3.fpp diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build index c2efe34..cbdbe4e 100644 --- a/test cases/fortran/7 generated/meson.build +++ b/test cases/fortran/7 generated/meson.build @@ -6,6 +6,11 @@ project('generated', 'fortran') conf_data = configuration_data() conf_data.set('ONE', 1) conf_data.set('TWO', 2) +conf_data.set('THREE', 3) + +outfile = configure_file( + input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data) +three = library('mod3', meson.current_build_dir() / 'mod3.f90') templates_basenames = ['mod2', 'mod1'] generated_sources = [] @@ -18,5 +23,5 @@ foreach template_basename : templates_basenames endforeach sources = ['prog.f90'] + generated_sources -exe = executable('generated', sources) +exe = executable('generated', sources, link_with: three) test('generated', exe) diff --git a/test cases/fortran/7 generated/mod1.fpp 
b/test cases/fortran/7 generated/mod1.fpp index 42d1fde..c4decf6 100644 --- a/test cases/fortran/7 generated/mod1.fpp +++ b/test cases/fortran/7 generated/mod1.fpp @@ -1,6 +1,6 @@ module mod1 - implicit none +implicit none - integer, parameter :: modval1 = @ONE@ +integer, parameter :: modval1 = @ONE@ end module mod1 diff --git a/test cases/fortran/7 generated/mod2.fpp b/test cases/fortran/7 generated/mod2.fpp index 594e9df..78ceae4 100644 --- a/test cases/fortran/7 generated/mod2.fpp +++ b/test cases/fortran/7 generated/mod2.fpp @@ -1,7 +1,7 @@ module mod2 - use mod1 - implicit none +use mod1, only : modval1 +implicit none - integer, parameter :: modval2 = @TWO@ +integer, parameter :: modval2 = @TWO@ end module mod2 diff --git a/test cases/fortran/7 generated/mod3.fpp b/test cases/fortran/7 generated/mod3.fpp new file mode 100644 index 0000000..ab3db65 --- /dev/null +++ b/test cases/fortran/7 generated/mod3.fpp @@ -0,0 +1,6 @@ +module mod3 +implicit none + +integer, parameter :: modval3 = @THREE@ + +end module mod3 diff --git a/test cases/fortran/7 generated/prog.f90 b/test cases/fortran/7 generated/prog.f90 index 8a102c0..6ee0bca 100644 --- a/test cases/fortran/7 generated/prog.f90 +++ b/test cases/fortran/7 generated/prog.f90 @@ -1,7 +1,8 @@ -program prog -use mod2 +program generated +use mod2, only : modval1, modval2 +use mod3, only : modval3 implicit none -if (modval1 + modval2 /= 3) stop 1 +if (modval1 + modval2 + modval3 /= 6) error stop -end program prog +end program generated -- cgit v1.1 From eab0e5a8b3a575ead36fa99ac9154d6e960d4525 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Sun, 14 Jun 2020 14:11:47 +0530 Subject: windows: Canonicalize `:` in filenames Fixes https://github.com/mesonbuild/meson/issues/7265 --- mesonbuild/backend/backends.py | 8 +++++++- mesonbuild/backend/ninjabackend.py | 2 +- test cases/fortran/7 generated/meson.build | 2 ++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index fc8c8df..68ae1a7 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -501,6 +501,12 @@ class Backend: target.rpath_dirs_to_remove.update([d.encode('utf8') for d in result]) return tuple(result) + @staticmethod + def canonicalize_filename(fname): + for ch in ('/', '\\', ':'): + fname = fname.replace(ch, '_') + return fname + def object_filename_from_source(self, target, source): assert isinstance(source, mesonlib.File) build_dir = self.environment.get_build_dir() @@ -531,7 +537,7 @@ class Backend: source = os.path.relpath(os.path.join(build_dir, rel_src), os.path.join(self.environment.get_source_dir(), target.get_subdir())) machine = self.environment.machines[target.for_machine] - return source.replace('/', '_').replace('\\', '_') + '.' + machine.get_object_suffix() + return self.canonicalize_filename(source) + '.' + machine.get_object_suffix() def determine_ext_objs(self, extobj, proj_dir_to_build_root): result = [] diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index cff35ee..8dbb57a 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -2160,7 +2160,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) src_filename = os.path.basename(src) else: src_filename = src - obj_basename = src_filename.replace('/', '_').replace('\\', '_') + obj_basename = self.canonicalize_filename(src_filename) rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) rel_obj += '.' 
+ self.environment.machines[target.for_machine].get_object_suffix() commands += self.get_compile_debugfile_args(compiler, target, rel_obj) diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build index cbdbe4e..b01ddc9 100644 --- a/test cases/fortran/7 generated/meson.build +++ b/test cases/fortran/7 generated/meson.build @@ -10,6 +10,8 @@ conf_data.set('THREE', 3) outfile = configure_file( input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data) +# Manually build absolute path to source file to test +# https://github.com/mesonbuild/meson/issues/7265 three = library('mod3', meson.current_build_dir() / 'mod3.f90') templates_basenames = ['mod2', 'mod1'] -- cgit v1.1 From a44dc67cb7533592f86fd2b4f3ae979f5a901405 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Sun, 14 Jun 2020 11:44:59 +0200 Subject: ci: Use test.json to skip VS2017 azure tests See also #7307 #7314 #7316 cc @nirbheek --- run_project_tests.py | 10 +++++----- test cases/cmake/2 advanced/meson.build | 7 ------- test cases/cmake/2 advanced/test.json | 5 +++++ test cases/cmake/5 object library/meson.build | 7 ------- test cases/cmake/5 object library/test.json | 7 +++++++ 5 files changed, 17 insertions(+), 19 deletions(-) create mode 100644 test cases/cmake/5 object library/test.json diff --git a/run_project_tests.py b/run_project_tests.py index 088241b..0879e2d 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -669,11 +669,6 @@ def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]: assert "val" in i skip = False - # Add an empty matrix entry - if i['val'] is None: - tmp_opts += [(None, False)] - continue - # Skip the matrix entry if environment variable is present if 'skip_on_env' in i: for skip_env_var in i['skip_on_env']: @@ -687,6 +682,11 @@ def gather_tests(testdir: Path, stdout_mandatory: bool) -> T.List[TestDef]: skip = True break + # Add an empty matrix entry + if i['val'] is None: + tmp_opts += [(None, skip)] + continue + tmp_opts += [('{}={}'.format(key, i['val']), skip)] if opt_list: diff --git a/test cases/cmake/2 advanced/meson.build b/test cases/cmake/2 advanced/meson.build index 4ebcd48..a10db1c 100644 --- a/test cases/cmake/2 advanced/meson.build +++ b/test cases/cmake/2 advanced/meson.build @@ -5,13 +5,6 @@ if not dep_test.found() error('MESON_SKIP_TEST: zlib is not installed') endif -py3 = import('python').find_installation('python3') -get_envvar = '''import os, sys; print(os.environ.get('@0@', 0), end='')''' -# Remove this env var from azure-pipelines.yml when fixed -if run_command(py3, '-c', get_envvar.format('CI_JOB_VS2017')).stdout() == '1' - error('MESON_SKIP_TEST: broken for vs2017 jobs') -endif - cm = import('cmake') # Test the "normal" subproject call diff --git a/test cases/cmake/2 advanced/test.json b/test cases/cmake/2 advanced/test.json index e12f530..f4cb58b 100644 --- a/test cases/cmake/2 advanced/test.json +++ b/test cases/cmake/2 advanced/test.json @@ -1,4 +1,9 @@ { + "matrix": { + "options": { + "_": [{"val": null, "skip_on_env": ["CI_JOB_VS2017"]}] + } + }, "installed": [ {"type": "expr", "file": "usr/?lib/libcm_cmModLib?so"}, {"type": "implib", "platform": "cygwin", "file": "usr/lib/libcm_cmModLib"}, diff --git a/test cases/cmake/5 object library/meson.build b/test cases/cmake/5 object library/meson.build index 40b909b..f38a2dd 100644 --- a/test cases/cmake/5 object library/meson.build +++ b/test cases/cmake/5 object library/meson.build @@ -5,13 +5,6 @@ if not dep_test.found() error('MESON_SKIP_TEST: 
zlib is not installed') endif -py3 = import('python').find_installation('python3') -get_envvar = '''import os, sys; print(os.environ.get('@0@', 0), end='')''' -# Remove this env var from azure-pipelines.yml when fixed -if run_command(py3, '-c', get_envvar.format('CI_JOB_VS2017')).stdout() == '1' - error('MESON_SKIP_TEST: broken for vs2017 jobs') -endif - cm = import('cmake') sub_pro = cm.subproject('cmObjLib') diff --git a/test cases/cmake/5 object library/test.json b/test cases/cmake/5 object library/test.json new file mode 100644 index 0000000..1840ce4 --- /dev/null +++ b/test cases/cmake/5 object library/test.json @@ -0,0 +1,7 @@ +{ + "matrix": { + "options": { + "_": [{"val": null, "skip_on_env": ["CI_JOB_VS2017"]}] + } + } +} -- cgit v1.1 From 399303b534ac2ddf5ffcd1d779075dd578946bdb Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Mon, 15 Jun 2020 08:50:20 +0300 Subject: Added docs for all meson commands + corresponding unit test (#7217) --- docs/markdown/Commands.md | 604 ++++++++++++++++++++++++++++++++++++++++++++++ docs/sitemap.txt | 1 + run_unittests.py | 113 +++++++++ 3 files changed, 718 insertions(+) create mode 100644 docs/markdown/Commands.md diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md new file mode 100644 index 0000000..c9e5f96 --- /dev/null +++ b/docs/markdown/Commands.md @@ -0,0 +1,604 @@ +# Command-line commands + +There are two different ways of invoking Meson. First, you can run it directly +from the source tree with the command `/path/to/source/meson.py`. Meson may +also be installed in which case the command is simply `meson`. In this manual +we only use the latter format for simplicity. + +Meson is invoked using the following syntax: +`meson [COMMAND] [COMMAND_OPTIONS]` + +This section describes all available commands and some of their Optional arguments. +The most common workflow is to run [`setup`](#setup), followed by [`compile`](#compile), and then [`install`](#install). + +For the full list of all available options for a specific command use the following syntax: +`meson COMMAND --help` + +### configure + +``` +$ meson configure [-h] [--prefix PREFIX] [--bindir BINDIR] + [--datadir DATADIR] [--includedir INCLUDEDIR] + [--infodir INFODIR] [--libdir LIBDIR] + [--libexecdir LIBEXECDIR] [--localedir LOCALEDIR] + [--localstatedir LOCALSTATEDIR] [--mandir MANDIR] + [--sbindir SBINDIR] [--sharedstatedir SHAREDSTATEDIR] + [--sysconfdir SYSCONFDIR] + [--auto-features {enabled,disabled,auto}] + [--backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode}] + [--buildtype {plain,debug,debugoptimized,release,minsize,custom}] + [--debug] [--default-library {shared,static,both}] + [--errorlogs] [--install-umask INSTALL_UMASK] + [--layout {mirror,flat}] [--optimization {0,g,1,2,3,s}] + [--stdsplit] [--strip] [--unity {on,off,subprojects}] + [--unity-size UNITY_SIZE] [--warnlevel {0,1,2,3}] + [--werror] + [--wrap-mode {default,nofallback,nodownload,forcefallback}] + [--pkg-config-path PKG_CONFIG_PATH] + [--build.pkg-config-path BUILD.PKG_CONFIG_PATH] + [--cmake-prefix-path CMAKE_PREFIX_PATH] + [--build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH] + [-D option] [--clearcache] + [builddir] +``` + +Changes options of a configured meson project. + +``` +positional arguments: + builddir + +optional arguments: + -h, --help show this help message and exit + --prefix PREFIX Installation prefix. + --bindir BINDIR Executable directory. + --datadir DATADIR Data file directory. + --includedir INCLUDEDIR Header file directory. + --infodir INFODIR Info page directory. 
+ --libdir LIBDIR Library directory. + --libexecdir LIBEXECDIR Library executable directory. + --localedir LOCALEDIR Locale data directory. + --localstatedir LOCALSTATEDIR Localstate data directory. + --mandir MANDIR Manual page directory. + --sbindir SBINDIR System executable directory. + --sharedstatedir SHAREDSTATEDIR Architecture-independent data directory. + --sysconfdir SYSCONFDIR Sysconf data directory. + --auto-features {enabled,disabled,auto} + Override value of all 'auto' features + (default: auto). + --backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode} + Backend to use (default: ninja). + --buildtype {plain,debug,debugoptimized,release,minsize,custom} + Build type to use (default: debug). + --debug Debug + --default-library {shared,static,both} + Default library type (default: shared). + --errorlogs Whether to print the logs from failing + tests + --install-umask INSTALL_UMASK Default umask to apply on permissions of + installed files (default: 022). + --layout {mirror,flat} Build directory layout (default: + mirror). + --optimization {0,g,1,2,3,s} Optimization level (default: 0). + --stdsplit Split stdout and stderr in test logs + --strip Strip targets on install + --unity {on,off,subprojects} Unity build (default: off). + --unity-size UNITY_SIZE Unity block size (default: (2, None, + 4)). + --warnlevel {0,1,2,3} Compiler warning level to use (default: + 1). + --werror Treat warnings as errors + --wrap-mode {default,nofallback,nodownload,forcefallback} + Wrap mode (default: default). + --pkg-config-path PKG_CONFIG_PATH List of additional paths for pkg-config + to search (default: []). (just for host + machine) + --build.pkg-config-path BUILD.PKG_CONFIG_PATH + List of additional paths for pkg-config + to search (default: []). (just for build + machine) + --cmake-prefix-path CMAKE_PREFIX_PATH + List of additional prefixes for cmake to + search (default: []). (just for host + machine) + --build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH + List of additional prefixes for cmake to + search (default: []). (just for build + machine) + -D option Set the value of an option, can be used + several times to set multiple options. + --clearcache Clear cached state (e.g. found + dependencies) +``` + +Most arguments are the same as in [`setup`](#setup). + +Note: reconfiguring project will not reset options to their default values (even if they were changed in `meson.build`). + +#### Examples: + +List all available options: +``` +meson configure builddir +``` + +Change value of a single option: +``` +meson configure builddir -Doption=new_value +``` + +### compile + +*(since 0.54.0)* + +``` +$ meson compile [-h] [-j JOBS] [-l LOAD_AVERAGE] [--clean] [-C BUILDDIR] +``` + +Builds a default or a specified target of a configured meson project. + +``` +optional arguments: + -h, --help show this help message and exit + -j JOBS, --jobs JOBS The number of worker jobs to run (if + supported). If the value is less than 1 + the build program will guess. + -l LOAD_AVERAGE, --load-average LOAD_AVERAGE + The system load average to try to + maintain (if supported) + --clean Clean the build directory. + -C BUILDDIR The directory containing build files to + be built. +``` + +#### Examples: + +Build the project: +``` +meson compile -C builddir +``` + +### dist + +*(since 0.52.0)* + +``` +$ meson dist [-h] [-C WD] [--formats FORMATS] [--include-subprojects] + [--no-tests] +``` + +Generates a release archive from the current source tree. 
+ +``` +optional arguments: + -h, --help show this help message and exit + -C WD directory to cd into before running + --formats FORMATS Comma separated list of archive types to create. + --include-subprojects Include source code of subprojects that have been used + for the build. + --no-tests Do not build and test generated packages. +``` + +This creates a file called `projectname-version.tar.xz` in the build +tree subdirectory `meson-dist`. This archive contains the full +contents of the latest commit in revision control including all the +submodules (recursively). All revision control metadata is removed. +Meson then takes +this archive and tests that it works by doing a full compile + test + +install cycle. If all these pass, Meson will then create a SHA-256 +checksum file next to the archive. + +**Note**: Meson behaviour is different from Autotools. The Autotools +"dist" target packages up the current source tree. Meson packages +the latest revision control commit. The reason for this is that it +prevents developers from doing accidental releases where the +distributed archive does not match any commit in revision control +(especially the one tagged for the release). + +#### Examples: + +Create a release archive: +``` +meson dist -C builddir +``` + +### init + +*(since 0.45.0)* + +``` +$ meson init [-h] [-C WD] [-n NAME] [-e EXECUTABLE] [-d DEPS] + [-l {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust}] [-b] + [--builddir BUILDDIR] [-f] [--type {executable,library}] + [--version VERSION] + [sourcefile [sourcefile ...]] +``` + +Creates a basic set of build files based on a template. + +``` +positional arguments: + sourcefile source files. default: all recognized + files in current directory + +optional arguments: + -h, --help show this help message and exit + -C WD directory to cd into before running + -n NAME, --name NAME project name. default: name of current + directory + -e EXECUTABLE, --executable EXECUTABLE + executable name. default: project name + -d DEPS, --deps DEPS dependencies, comma-separated + -l {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust}, --language {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust} + project language. default: autodetected + based on source files + -b, --build build after generation + --builddir BUILDDIR directory for build + -f, --force force overwrite of existing files and + directories. + --type {executable,library} project type. default: executable based + project + --version VERSION project version. default: 0.1 +``` + +#### Examples: + +Create a project in `sourcedir`: +``` +meson init -C sourcedir +``` + +### introspect + +``` +$ meson introspect [-h] [--ast] [--benchmarks] [--buildoptions] + [--buildsystem-files] [--dependencies] + [--scan-dependencies] [--installed] [--projectinfo] + [--targets] [--tests] + [--backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode}] + [-a] [-i] [-f] + [builddir] +``` + +Displays information about a configured meson project. + +``` +positional arguments: + builddir The build directory + +optional arguments: + -h, --help show this help message and exit + --ast Dump the AST of the meson file. + --benchmarks List all benchmarks. + --buildoptions List all build options. + --buildsystem-files List files that make up the build + system. + --dependencies List external dependencies. + --scan-dependencies Scan for dependencies used in the + meson.build file. + --installed List all installed files and + directories. + --projectinfo Information about projects. + --targets List top level targets. + --tests List all unit tests. 
+ --backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode} + The backend to use for the + --buildoptions introspection. + -a, --all Print all available information. + -i, --indent Enable pretty printed JSON. + -f, --force-object-output Always use the new JSON format for + multiple entries (even for 0 and 1 + introspection commands) +``` + +#### Examples: + +Display basic information about a configured project in `builddir`: +``` +meson introspect builddir +``` + +### install + +*(since 0.47.0)* + +``` +$ meson install [-h] [-C WD] [--no-rebuild] [--only-changed] [--quiet] +``` + +Installs the project to the prefix specified in `setup`. + +``` +optional arguments: + -h, --help show this help message and exit + -C WD directory to cd into before running + --no-rebuild Do not rebuild before installing. + --only-changed Only overwrite files that are older than the copied file. + --quiet Do not print every file that was installed. +``` + +See [the installation documentation](Installing.md) for more info. + +#### Examples: + +Install project to `prefix` (see [`setup`](#setup)): +``` +meson install -C builddir +``` + +Install project to `$DESTDIR/prefix`: +``` +DESTDIR=/path/to/staging/area meson install -C builddir +``` + +### rewrite + +*(since 0.50.0)* + +``` +$ meson rewrite [-h] [-s SRCDIR] [-V] [-S] + {target,kwargs,default-options,command} ... +``` + +Modifies the meson project. + +``` +optional arguments: + -h, --help show this help message and exit + -s SRCDIR, --sourcedir SRCDIR Path to source directory. + -V, --verbose Enable verbose output + -S, --skip-errors Skip errors instead of aborting + +Rewriter commands: + Rewrite command to execute + + {target,kwargs,default-options,command} + target Modify a target + kwargs Modify keyword arguments + default-options Modify the project default options + command Execute a JSON array of commands +``` + +See [the meson file rewriter documentation](Rewriter.md) for more info. + +### setup + +``` +$ meson setup [-h] [--prefix PREFIX] [--bindir BINDIR] [--datadir DATADIR] + [--includedir INCLUDEDIR] [--infodir INFODIR] + [--libdir LIBDIR] [--libexecdir LIBEXECDIR] + [--localedir LOCALEDIR] [--localstatedir LOCALSTATEDIR] + [--mandir MANDIR] [--sbindir SBINDIR] + [--sharedstatedir SHAREDSTATEDIR] [--sysconfdir SYSCONFDIR] + [--auto-features {enabled,disabled,auto}] + [--backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode}] + [--buildtype {plain,debug,debugoptimized,release,minsize,custom}] + [--debug] [--default-library {shared,static,both}] + [--errorlogs] [--install-umask INSTALL_UMASK] + [--layout {mirror,flat}] [--optimization {0,g,1,2,3,s}] + [--stdsplit] [--strip] [--unity {on,off,subprojects}] + [--unity-size UNITY_SIZE] [--warnlevel {0,1,2,3}] [--werror] + [--wrap-mode {default,nofallback,nodownload,forcefallback}] + [--pkg-config-path PKG_CONFIG_PATH] + [--build.pkg-config-path BUILD.PKG_CONFIG_PATH] + [--cmake-prefix-path CMAKE_PREFIX_PATH] + [--build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH] + [-D option] [--native-file NATIVE_FILE] + [--cross-file CROSS_FILE] [-v] [--fatal-meson-warnings] + [--reconfigure] [--wipe] + [builddir] [sourcedir] +``` + +Configures a build directory for the meson project. + +This is the default meson command (invoked if there was no COMMAND supplied). + +``` +positional arguments: + builddir + sourcedir + +optional arguments: + -h, --help show this help message and exit + --prefix PREFIX Installation prefix. + --bindir BINDIR Executable directory. + --datadir DATADIR Data file directory. 
+ --includedir INCLUDEDIR Header file directory. + --infodir INFODIR Info page directory. + --libdir LIBDIR Library directory. + --libexecdir LIBEXECDIR Library executable directory. + --localedir LOCALEDIR Locale data directory. + --localstatedir LOCALSTATEDIR Localstate data directory. + --mandir MANDIR Manual page directory. + --sbindir SBINDIR System executable directory. + --sharedstatedir SHAREDSTATEDIR Architecture-independent data directory. + --sysconfdir SYSCONFDIR Sysconf data directory. + --auto-features {enabled,disabled,auto} + Override value of all 'auto' features + (default: auto). + --backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode} + Backend to use (default: ninja). + --buildtype {plain,debug,debugoptimized,release,minsize,custom} + Build type to use (default: debug). + --debug Debug + --default-library {shared,static,both} + Default library type (default: shared). + --errorlogs Whether to print the logs from failing + tests + --install-umask INSTALL_UMASK Default umask to apply on permissions of + installed files (default: 022). + --layout {mirror,flat} Build directory layout (default: + mirror). + --optimization {0,g,1,2,3,s} Optimization level (default: 0). + --stdsplit Split stdout and stderr in test logs + --strip Strip targets on install + --unity {on,off,subprojects} Unity build (default: off). + --unity-size UNITY_SIZE Unity block size (default: (2, None, + 4)). + --warnlevel {0,1,2,3} Compiler warning level to use (default: + 1). + --werror Treat warnings as errors + --wrap-mode {default,nofallback,nodownload,forcefallback} + Wrap mode (default: default). + --pkg-config-path PKG_CONFIG_PATH List of additional paths for pkg-config + to search (default: []). (just for host + machine) + --build.pkg-config-path BUILD.PKG_CONFIG_PATH + List of additional paths for pkg-config + to search (default: []). (just for build + machine) + --cmake-prefix-path CMAKE_PREFIX_PATH + List of additional prefixes for cmake to + search (default: []). (just for host + machine) + --build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH + List of additional prefixes for cmake to + search (default: []). (just for build + machine) + -D option Set the value of an option, can be used + several times to set multiple options. + --native-file NATIVE_FILE File containing overrides for native + compilation environment. + --cross-file CROSS_FILE File describing cross compilation + environment. + -v, --version show program's version number and exit + --fatal-meson-warnings Make all Meson warnings fatal + --reconfigure Set options and reconfigure the project. + Useful when new options have been added + to the project and the default value is + not working. + --wipe Wipe build directory and reconfigure + using previous command line options. + Useful when build directory got + corrupted, or when rebuilding with a + newer version of meson. +``` + +See [meson introduction page](Running-Meson.md#configuring-the-build-directory) for more info. + +#### Examples: + +Configures `builddir` with default values: +``` +meson setup builddir +``` + +### subprojects + +*(since 0.49.0)* + +``` +$ meson subprojects [-h] {update,checkout,download,foreach} ... +``` + +Manages subprojects of the meson project. + +``` +optional arguments: + -h, --help show this help message and exit + +Commands: + {update,checkout,download,foreach} + update Update all subprojects from wrap files + checkout Checkout a branch (git only) + download Ensure subprojects are fetched, even if + not in use. 
Already downloaded subprojects + are not modified. This can be used to pre- + fetch all subprojects and avoid downloads + during configure. + foreach Execute a command in each subproject + directory. +``` + +### test + +``` +$ meson test [-h] [--repeat REPEAT] [--no-rebuild] [--gdb] + [--gdb-path GDB_PATH] [--list] [--wrapper WRAPPER] [-C WD] + [--suite SUITE] [--no-suite SUITE] [--no-stdsplit] + [--print-errorlogs] [--benchmark] [--logbase LOGBASE] + [--num-processes NUM_PROCESSES] [-v] [-q] + [-t TIMEOUT_MULTIPLIER] [--setup SETUP] + [--test-args TEST_ARGS] + [args [args ...]] +``` + +Run tests for the configure meson project. + +``` +positional arguments: + args Optional list of tests to run + +optional arguments: + -h, --help show this help message and exit + --repeat REPEAT Number of times to run the tests. + --no-rebuild Do not rebuild before running tests. + --gdb Run test under gdb. + --gdb-path GDB_PATH Path to the gdb binary (default: gdb). + --list List available tests. + --wrapper WRAPPER wrapper to run tests with (e.g. + Valgrind) + -C WD directory to cd into before running + --suite SUITE Only run tests belonging to the given + suite. + --no-suite SUITE Do not run tests belonging to the given + suite. + --no-stdsplit Do not split stderr and stdout in test + logs. + --print-errorlogs Whether to print failing tests' logs. + --benchmark Run benchmarks instead of tests. + --logbase LOGBASE Base name for log file. + --num-processes NUM_PROCESSES How many parallel processes to use. + -v, --verbose Do not redirect stdout and stderr + -q, --quiet Produce less output to the terminal. + -t TIMEOUT_MULTIPLIER, --timeout-multiplier TIMEOUT_MULTIPLIER + Define a multiplier for test timeout, + for example when running tests in + particular conditions they might take + more time to execute. + --setup SETUP Which test setup to use. + --test-args TEST_ARGS Arguments to pass to the specified + test(s) or all tests +``` + +See [the unit test documentation](Unit-tests.md) for more info. + +#### Examples: + +Run tests for the project: +``` +meson test -C builddir +``` + +Run only `specific_test_1` and `specific_test_2`: +``` +meson test -C builddir specific_test_1 specific_test_2 +``` + +### wrap + +``` +$ meson wrap [-h] {list,search,install,update,info,status,promote} ... +``` + +An utility to manage WrapDB dependencies. + +``` +optional arguments: + -h, --help show this help message and exit + +Commands: + {list,search,install,update,info,status,promote} + list show all available projects + search search the db by name + install install the specified project + update update the project to its newest + available release + info show available versions of a project + status show installed and available versions of + your projects + promote bring a subsubproject up to the master + project +``` + +See [the WrapDB tool documentation](Using-wraptool.md) for more info. 
diff --git a/docs/sitemap.txt b/docs/sitemap.txt index aa3f51a..1aef1c1 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -5,6 +5,7 @@ index.md Manual.md Overview.md Running-Meson.md + Commands.md Builtin-options.md Using-with-Visual-Studio.md Meson-sample.md diff --git a/run_unittests.py b/run_unittests.py index 669853e..170df88 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4702,6 +4702,119 @@ recommended as it is not supported on some platforms''') self.assertRegex(contents, r'build main(\.exe)?.*: c_LINKER') self.assertRegex(contents, r'build (lib|cyg)?mylib.*: c_LINKER') + def test_commands_documented(self): + ''' + Test that all listed meson commands are documented in Commands.md. + ''' + help_usage_start_pattern = re.compile(r'^usage:[\t ]*[\r\n]*', re.MULTILINE) + help_positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE) + help_options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE) + help_commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE) + + def get_next_start(iterators, end): + return next((i.start() for i in iterators if i), end) + + def parse_help(help): + help_len = len(help) + usage = help_usage_start_pattern.search(help) + positionals = help_positional_start_pattern.search(help) + options = help_options_start_pattern.search(help) + commands = help_commands_start_pattern.search(help) + + arguments_start = get_next_start([positionals, options, commands], None) + self.assertIsNotNone(arguments_start, 'Cmd command is missing argument list') + + return { + 'usage': help[usage.end():arguments_start], + 'arguments': help[arguments_start:help_len], + } + + md_code_pattern = re.compile(r'^```[\r\n]*', re.MULTILINE) + md_usage_pattern = re.compile(r'^\$ ', re.MULTILINE) + + def parse_section(text, section_start, section_end): + matches = [i + for i in md_code_pattern.finditer(text, pos=section_start, endpos=section_end)] + self.assertGreaterEqual(len(matches), 4, '.md command is missing usage description and/or argument list') + + usage = md_usage_pattern.search(text, pos=matches[0].end(), endpos=matches[1].start()) + + return { + 'usage': text[usage.end():matches[1].start()], + 'arguments': text[matches[2].end():matches[3].start()], + } + + def normalize_text(text): + # clean up formatting + out = re.sub(r'( {2,}|\t+)', r' ', text, flags=re.MULTILINE) # replace whitespace chars with a single space + out = re.sub(r'\r\n+', r'\r', out, flags=re.MULTILINE) # replace newlines with a single linux EOL + out = re.sub(r'(^ +| +$)', '', out, flags=re.MULTILINE) # strip lines + out = re.sub(r'(^\n)', '', out, flags=re.MULTILINE) # remove empty lines + return out + + def clean_dir_arguments(text): + # Remove platform specific defaults + args = [ + 'prefix', + 'bindir', + 'datadir', + 'includedir', + 'infodir', + 'libdir', + 'libexecdir', + 'localedir', + 'localstatedir', + 'mandir', + 'sbindir', + 'sharedstatedir', + 'sysconfdir' + ] + out = text + for a in args: + out = re.sub(r'(--' + a + r' .+?)[ |\n]\(default:.+?\)(\.)?', r'\1\2', out, flags=re.MULTILINE|re.DOTALL) + return out + + ## Get command sections + + md = None + with open('docs/markdown/Commands.md', encoding='utf-8') as f: + md = f.read() + self.assertIsNotNone(md) + + section_pattern = re.compile(r'^### (.+)$', re.MULTILINE) + md_command_section_matches = [i for i in section_pattern.finditer(md)] + md_command_sections = dict() + for i, s in enumerate(md_command_section_matches): + section_end = len(md) if 
i == len(md_command_section_matches) - 1 else md_command_section_matches[i + 1].start() + md_command_sections[s.group(1)] = (s.start(), section_end) + + ## Validate commands + + md_commands = set(k for k,v in md_command_sections.items()) + + help_output = self._run(self.meson_command + ['--help']) + help_commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(',')) + + self.assertEqual(md_commands | {'help'}, help_commands) + + ## Validate command options + + for command in md_commands: + print('Current command: {}'.format(command)) + + help_cmd_output = self._run(self.meson_command + [command, '--help'], override_envvars={'COLUMNS': '80'}) + + parsed_help = parse_help(help_cmd_output) + parsed_section = parse_section(md, *md_command_sections[command]) + + for p in [parsed_help, parsed_section]: + p['usage'] = normalize_text(p['usage']) + p['arguments'] = normalize_text(p['arguments']) + if command in ['setup', 'configure']: + parsed_help['arguments'] = clean_dir_arguments(parsed_help['arguments']) + + self.assertEqual(parsed_help['usage'], parsed_section['usage']) + self.assertEqual(parsed_help['arguments'], parsed_section['arguments']) class FailureTests(BasePlatformTests): ''' -- cgit v1.1 From 8905a637be224ded904fec7e05c9e9266e219453 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Vili=20V=C3=A4in=C3=B6l=C3=A4?= Date: Sun, 14 Jun 2020 20:11:49 +0300 Subject: Add exception handling to be also written to the VS project xml When changing meson option cpp_eh, it was passed to cl with AdditionalOptions and resulted in unsuppressable warning "cl : command line warning D9025: overriding '/EHs' with '/EHa'" --- mesonbuild/backend/vs2010backend.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 2f02213..87514c6 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -858,6 +858,18 @@ class Vs2010Backend(backends.Backend): ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck' elif '/RTCs' in buildtype_args: ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck' + # Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise + # cl will give warning D9025: overriding '/Ehs' with cpp_eh value + if 'cpp' in target.compilers: + eh = self.environment.coredata.compiler_options[target.for_machine]['cpp']['eh'] + if eh.value == 'a': + ET.SubElement(clconf, 'ExceptionHandling').text = 'Async' + elif eh.value == 's': + ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow' + elif eh.value == 'none': + ET.SubElement(clconf, 'ExceptionHandling').text = 'false' + else: # 'sc' or 'default' + ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync' # End configuration ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(target, root) -- cgit v1.1 From 4ba06fb4ddf2f314ad8bb384bf8b7e65ef73c95e Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 14 Jun 2020 23:48:25 -0400 Subject: test:fortran7: default static so Intel/VS-based compilers pass we did the same thing earlier with other Fortran tests for the same reason. 
--- test cases/fortran/7 generated/meson.build | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test cases/fortran/7 generated/meson.build b/test cases/fortran/7 generated/meson.build index b01ddc9..b555b17 100644 --- a/test cases/fortran/7 generated/meson.build +++ b/test cases/fortran/7 generated/meson.build @@ -1,15 +1,15 @@ # Tests whether fortran sources files created during configuration are properly # scanned for dependency information -project('generated', 'fortran') +project('generated', 'fortran', + default_options : ['default_library=static']) conf_data = configuration_data() conf_data.set('ONE', 1) conf_data.set('TWO', 2) conf_data.set('THREE', 3) -outfile = configure_file( - input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data) +configure_file(input : 'mod3.fpp', output : 'mod3.f90', configuration : conf_data) # Manually build absolute path to source file to test # https://github.com/mesonbuild/meson/issues/7265 three = library('mod3', meson.current_build_dir() / 'mod3.f90') -- cgit v1.1 From 0b9e8e39dd492f175679e5caab79eb8f251b6ca9 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Mon, 15 Jun 2020 00:14:20 -0400 Subject: environment: fallback regex for versions like 2020.01 add four-digit version unit_test cases --- mesonbuild/environment.py | 10 ++++++++-- run_unittests.py | 14 ++++++++++---- 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 4feb44c..c02376e 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -347,7 +347,6 @@ def detect_cpu_family(compilers: CompilersDict) -> str: elif trial in {'ip30', 'ip35'}: trial = 'mips64' - # On Linux (and maybe others) there can be any mixture of 32/64 bit code in # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only # reliable way to know is to check the compiler defines. 
@@ -443,7 +442,7 @@ def machine_info_can_run(machine_info: MachineInfo): (machine_info.cpu_family == true_build_cpu_family) or \ ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) -def search_version(text): +def search_version(text: str) -> str: # Usually of the type 4.1.4 but compiler output may contain # stuff like this: # (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease) @@ -477,6 +476,13 @@ def search_version(text): match = version_regex.search(text) if match: return match.group(0) + + # try a simpler regex that has like "blah 2020.01.100 foo" or "blah 2020.01 foo" + version_regex = re.compile(r"(\d{1,4}\.\d{1,4}\.?\d{0,4})") + match = version_regex.search(text) + if match: + return match.group(0) + return 'unknown version' class Environment: diff --git a/run_unittests.py b/run_unittests.py index 170df88..41ae710 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -40,7 +40,7 @@ from contextlib import contextmanager from glob import glob from pathlib import (PurePath, Path) from distutils.dir_util import copy_tree -import typing +import typing as T import mesonbuild.mlog import mesonbuild.depfile @@ -312,8 +312,14 @@ class InternalTests(unittest.TestCase): self.assertEqual(searchfunc('1.2.3'), '1.2.3') self.assertEqual(searchfunc('foobar 2016.10.28 1.2.3'), '1.2.3') self.assertEqual(searchfunc('2016.10.28 1.2.3'), '1.2.3') - self.assertEqual(searchfunc('foobar 2016.10.128'), 'unknown version') - self.assertEqual(searchfunc('2016.10.128'), 'unknown version') + self.assertEqual(searchfunc('foobar 2016.10.128'), '2016.10.128') + self.assertEqual(searchfunc('2016.10.128'), '2016.10.128') + self.assertEqual(searchfunc('2016.10'), '2016.10') + self.assertEqual(searchfunc('2016.10 1.2.3'), '1.2.3') + self.assertEqual(searchfunc('oops v1.2.3'), '1.2.3') + self.assertEqual(searchfunc('2016.oops 1.2.3'), '1.2.3') + self.assertEqual(searchfunc('2016.x'), 'unknown version') + def test_mode_symbolic_to_bits(self): modefunc = mesonbuild.mesonlib.FileMode.perms_s_to_bits @@ -7768,7 +7774,7 @@ class CrossFileTests(BasePlatformTests): """ def _cross_file_generator(self, *, needs_exe_wrapper: bool = False, - exe_wrapper: typing.Optional[typing.List[str]] = None) -> str: + exe_wrapper: T.Optional[T.List[str]] = None) -> str: if is_windows(): raise unittest.SkipTest('Cannot run this test on non-mingw/non-cygwin windows') if is_sunos(): -- cgit v1.1 From 0a583ac06acb34025adb74efe1c5806bb226c1e8 Mon Sep 17 00:00:00 2001 From: laolux Date: Tue, 16 Jun 2020 05:31:55 +0900 Subject: Documentation, FAQ.md, generated_headers [skip ci] --- docs/markdown/FAQ.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md index e43c857..7b5c03a 100644 --- a/docs/markdown/FAQ.md +++ b/docs/markdown/FAQ.md @@ -432,7 +432,7 @@ sources in the build target: libfoo_gen_headers = custom_target('gen-headers', ..., output: 'foo-gen.h') libfoo_sources = files('foo-utils.c', 'foo-lib.c') # Add generated headers to the list of sources for the build target -libfoo = library('foo', sources: libfoo_sources + libfoo_gen_headers) +libfoo = library('foo', sources: [libfoo_sources + libfoo_gen_headers]) ``` Now let's say you have a new target that links to `libfoo`: -- cgit v1.1 From 1ec84c570fe33309a388238c247caac9578b2191 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Thu, 14 May 2020 13:55:46 -0400 Subject: run_*tests*.py: print Meson version at start of test suite on some systems, tests may take over an hour to run--only to find you 
might have used an unintended Meson version (e.g. release instead of dev). This change prints the Meson version at the start of the run_*tests*.py scripts. Also, raise SystemExit(main()) is preferred in general over sys.exit(main()) --- run_cross_test.py | 6 ++++-- run_meson_command_tests.py | 6 ++++-- run_project_tests.py | 3 ++- run_tests.py | 5 +++-- run_unittests.py | 1 + 5 files changed, 14 insertions(+), 7 deletions(-) diff --git a/run_cross_test.py b/run_cross_test.py index abbfdac..1e67876 100755 --- a/run_cross_test.py +++ b/run_cross_test.py @@ -21,8 +21,9 @@ This is now just a wrapper around run_project_tests.py with specific arguments import argparse import subprocess -import sys from mesonbuild import mesonlib +from mesonbuild.coredata import version as meson_version + def runtests(cross_file, failfast): tests = ['--only', 'common'] @@ -37,4 +38,5 @@ def main(): return runtests(options.cross_file, options.failfast) if __name__ == '__main__': - sys.exit(main()) + print('Meson build system', meson_version, 'Cross Tests') + raise SystemExit(main()) diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index 9dfb62e..c1af758 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys import os import tempfile import unittest @@ -23,6 +22,8 @@ import zipapp from pathlib import Path from mesonbuild.mesonlib import windows_proof_rmtree, python_command, is_windows +from mesonbuild.coredata import version as meson_version + def get_pypath(): import sysconfig @@ -195,4 +196,5 @@ class CommandTests(unittest.TestCase): if __name__ == '__main__': - sys.exit(unittest.main(buffer=True)) + print('Meson build system', meson_version, ' Command Tests') + raise SystemExit(unittest.main(buffer=True)) diff --git a/run_project_tests.py b/run_project_tests.py index 0879e2d..c368253 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -42,7 +42,7 @@ from mesonbuild import mesonlib from mesonbuild import mlog from mesonbuild import mtest from mesonbuild.mesonlib import MachineChoice, Popen_safe -from mesonbuild.coredata import backendlist +from mesonbuild.coredata import backendlist, version as meson_version from run_tests import get_fake_options, run_configure, get_meson_script from run_tests import get_backend_commands, get_backend_args_for_dir, Backend @@ -1248,6 +1248,7 @@ if __name__ == '__main__': if options.cross_file: options.extra_args += ['--cross-file', options.cross_file] + print('Meson build system', meson_version, 'Project Tests') setup_commands(options.backend) detect_system_compiler(options) print_tool_versions() diff --git a/run_tests.py b/run_tests.py index 44dcf82..2648e06 100755 --- a/run_tests.py +++ b/run_tests.py @@ -33,7 +33,7 @@ from mesonbuild import mesonmain from mesonbuild import mtest from mesonbuild import mlog from mesonbuild.environment import Environment, detect_ninja -from mesonbuild.coredata import backendlist +from mesonbuild.coredata import backendlist, version as meson_version NINJA_1_9_OR_NEWER = False NINJA_CMD = None @@ -401,4 +401,5 @@ def main(): return returncode if __name__ == '__main__': - sys.exit(main()) + print('Meson build system', meson_version, 'Project and Unit Tests') + raise SystemExit(main()) diff --git a/run_unittests.py b/run_unittests.py index 41ae710..0c2980b 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -8303,4 +8303,5 @@ def main(): return 
unittest.main(defaultTest=cases, buffer=True) if __name__ == '__main__': + print('Meson build system', mesonbuild.coredata.version, 'Unit Tests') raise SystemExit(main()) -- cgit v1.1 From 6f2eb6233cca9d92f6a41a39e56f547e8e82a2b6 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Thu, 14 May 2020 14:04:32 -0400 Subject: pep8: add missing import, remove unused variable --- run_meson_command_tests.py | 2 +- run_unittests.py | 51 +++++++++++++++++++++++----------------------- 2 files changed, 27 insertions(+), 26 deletions(-) diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index c1af758..6ed3d8f 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -196,5 +196,5 @@ class CommandTests(unittest.TestCase): if __name__ == '__main__': - print('Meson build system', meson_version, ' Command Tests') + print('Meson build system', meson_version, 'Command Tests') raise SystemExit(unittest.main(buffer=True)) diff --git a/run_unittests.py b/run_unittests.py index 0c2980b..93c1659 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import typing as T import stat import subprocess import re @@ -707,7 +708,6 @@ class InternalTests(unittest.TestCase): self.assertEqual([1, [2, [3]]], listify([1, [2, [3]]], flatten=False)) # Test flattening and unholdering holder1 = ObjectHolder(1) - holder3 = ObjectHolder(3) self.assertEqual([holder1], listify(holder1)) self.assertEqual([holder1], listify([holder1])) self.assertEqual([holder1, 2], listify([holder1, 2])) @@ -1493,6 +1493,7 @@ class DataTests(unittest.TestCase): class BasePlatformTests(unittest.TestCase): prefix = '/usr' libdir = 'lib' + def setUp(self): super().setUp() self.maxDiff = None @@ -1925,48 +1926,48 @@ class AllPlatformTests(BasePlatformTests): (result, missing_variables, confdata_useless) = mesonbuild.mesonlib.do_conf_str(in_data, confdata, variable_format = vformat) return '\n'.join(result) - def check_formats (confdata, result): - self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'),result) - self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'),result) - self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'),result) + def check_formats(confdata, result): + self.assertEqual(conf_str(['#mesondefine VAR'], confdata, 'meson'), result) + self.assertEqual(conf_str(['#cmakedefine VAR ${VAR}'], confdata, 'cmake'), result) + self.assertEqual(conf_str(['#cmakedefine VAR @VAR@'], confdata, 'cmake@'), result) confdata = ConfigurationData() # Key error as they do not exists check_formats(confdata, '/* #undef VAR */\n') # Check boolean - confdata.values = {'VAR': (False,'description')} + confdata.values = {'VAR': (False, 'description')} check_formats(confdata, '#undef VAR\n') - confdata.values = {'VAR': (True,'description')} + confdata.values = {'VAR': (True, 'description')} check_formats(confdata, '#define VAR\n') # Check string - confdata.values = {'VAR': ('value','description')} + confdata.values = {'VAR': ('value', 'description')} check_formats(confdata, '#define VAR value\n') # Check integer - confdata.values = {'VAR': (10,'description')} + confdata.values = {'VAR': (10, 'description')} check_formats(confdata, '#define VAR 10\n') # Check multiple string with cmake formats - confdata.values = {'VAR': ('value','description')} - self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy 
value\n') - self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'),'#define VAR xxx value yyy value') - self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value\n') - self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'),'#define VAR xxx value yyy value') + confdata.values = {'VAR': ('value', 'description')} + self.assertEqual(conf_str(['#cmakedefine VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value\n') + self.assertEqual(conf_str(['#define VAR xxx @VAR@ yyy @VAR@'], confdata, 'cmake@'), '#define VAR xxx value yyy value') + self.assertEqual(conf_str(['#cmakedefine VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value\n') + self.assertEqual(conf_str(['#define VAR xxx ${VAR} yyy ${VAR}'], confdata, 'cmake'), '#define VAR xxx value yyy value') # Handles meson format exceptions # Unknown format - self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'unknown_format') + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'unknown_format') # More than 2 params in mesondefine - self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR xxx'], confdata, 'meson') + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR xxx'], confdata, 'meson') # Mismatched line with format - self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#cmakedefine VAR'], confdata, 'meson') - self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake') - self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'cmake@') + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#cmakedefine VAR'], confdata, 'meson') + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake') + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'cmake@') # Dict value in confdata - confdata.values = {'VAR': (['value'],'description')} - self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str,['#mesondefine VAR'], confdata, 'meson') + confdata.values = {'VAR': (['value'], 'description')} + self.assertRaises(mesonbuild.mesonlib.MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson') def test_absolute_prefix_libdir(self): ''' @@ -6659,10 +6660,10 @@ class LinuxlikeTests(BasePlatformTests): prog = os.path.join(self.installdir, 'bin', 'client') env3 = {} if is_cygwin(): - env3['PATH'] = os.path.join(val1prefix, 'bin') + \ - os.pathsep + \ - os.path.join(val2prefix, 'bin') + \ - os.pathsep + os.environ['PATH'] + env3['PATH'] = os.path.join(val1prefix, 'bin') + \ + os.pathsep + \ + os.path.join(val2prefix, 'bin') + \ + os.pathsep + os.environ['PATH'] out = self._run([prog], override_envvars=env3).strip() # Expected output is val1 + val2 = 3 self.assertEqual(out, '3') -- cgit v1.1 From c4761afa634a1c3b15c1c8229cb39b3f267fd3b1 Mon Sep 17 00:00:00 2001 From: Igor Raits Date: Mon, 15 Jun 2020 19:33:52 +0200 Subject: macros.meson: Switch to %{_smp_build_ncpus} It is available since RPM 4.15 which has been around 1 year by now. 
Signed-off-by: Igor Raits --- data/macros.meson | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/data/macros.meson b/data/macros.meson index c5b90de..25601aa 100644 --- a/data/macros.meson +++ b/data/macros.meson @@ -2,12 +2,6 @@ %__meson_wrap_mode nodownload %__meson_auto_features enabled -%_smp_mesonflags %([ -z "$MESON_BUILD_NCPUS" ] \\\ - && MESON_BUILD_NCPUS="`/usr/bin/getconf _NPROCESSORS_ONLN`"; \\\ - ncpus_max=%{?_smp_ncpus_max}; \\\ - if [ -n "$ncpus_max" ] && [ "$ncpus_max" -gt 0 ] && [ "$MESON_BUILD_NCPUS" -gt "$ncpus_max" ]; then MESON_BUILD_NCPUS="$ncpus_max"; fi; \\\ - if [ "$MESON_BUILD_NCPUS" -gt 1 ]; then echo "--num-processes $MESON_BUILD_NCPUS"; fi) - %meson \ %set_build_flags \ %{shrink:%{__meson} \ @@ -37,8 +31,8 @@ %ninja_install -C %{_vpath_builddir} %meson_test \ - %{shrink: %{__meson} test \ + %{shrink:%{__meson} test \ -C %{_vpath_builddir} \ - %{?_smp_mesonflags} \ + --num-processes %{_smp_build_ncpus} \ --print-errorlogs \ - %{nil}} + %{nil}} -- cgit v1.1 From 0a61f511aa1960ac0d3f9b5e50e35f5f603b99b7 Mon Sep 17 00:00:00 2001 From: Igor Raits Date: Mon, 15 Jun 2020 19:36:47 +0200 Subject: macros.meson: Switch to meson compile / install Signed-off-by: Igor Raits --- data/macros.meson | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/data/macros.meson b/data/macros.meson index 25601aa..8a66c96 100644 --- a/data/macros.meson +++ b/data/macros.meson @@ -22,13 +22,19 @@ --wrap-mode=%{__meson_wrap_mode} \ --auto-features=%{__meson_auto_features} \ %{_vpath_srcdir} %{_vpath_builddir} \ - %{nil}} + %{nil}} %meson_build \ - %ninja_build -C %{_vpath_builddir} + %{shrink:%{__meson} compile \ + -C %{_vpath_builddir} \ + -j %{_smp_build_ncpus} \ + %{nil}} %meson_install \ - %ninja_install -C %{_vpath_builddir} + %{shrink:DESTDIR=%{buildroot} %{__meson} install \ + -C %{_vpath_builddir} \ + --no-rebuild \ + %{nil}} %meson_test \ %{shrink:%{__meson} test \ -- cgit v1.1 From 0b4cf5a8447610a8d5a2101ecbac013fc0420811 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 9 Jun 2020 16:27:51 -0400 Subject: gnome: Add fatal_warnings kwarg to generate_gir() Fixes: #7130 --- docs/markdown/Gnome-module.md | 1 + docs/markdown/snippets/gir_fatal_warnings.md | 5 +++++ mesonbuild/modules/gnome.py | 13 +++++++++++-- 3 files changed, 17 insertions(+), 2 deletions(-) create mode 100644 docs/markdown/snippets/gir_fatal_warnings.md diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index 0d1f269..ced8a40 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -97,6 +97,7 @@ There are several keyword arguments. Many of these map directly to the * `link_with`: list of libraries to link with * `symbol_prefix`: the symbol prefix for the gir object, e.g. `gtk`, (*Since 0.43.0*) an ordered list of multiple prefixes is allowed +* `fatal_warnings`: *Since 0.55.0* turn scanner warnings into fatal errors. Returns an array of two elements which are: `[gir_target, typelib_target]` diff --git a/docs/markdown/snippets/gir_fatal_warnings.md b/docs/markdown/snippets/gir_fatal_warnings.md new file mode 100644 index 0000000..951e98e --- /dev/null +++ b/docs/markdown/snippets/gir_fatal_warnings.md @@ -0,0 +1,5 @@ +## Fatal warnings in `gnome.generate_gir()` + +`gnome.generate_gir()` now has `fatal_warnings` keyword argument to abort when +a warning is produced. This is useful for example in CI environment where it's +important to catch potential issues. 
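# A condensed sketch, not the literal hunk, of how generate_gir() handles
# the new keyword once this commit and the two follow-up fixups below are
# folded in: a boolean `fatal_warnings` appends --warn-error to the
# g-ir-scanner command line, and passing --warn-error directly becomes a
# deprecation warning.  The helper name and the bare `scan_command`/`kwargs`
# parameters are only stand-ins for the real locals of generate_gir().
from mesonbuild import mlog
from mesonbuild.mesonlib import MesonException

def apply_fatal_warnings(scan_command, kwargs):
    if '--warn-error' in scan_command:
        mlog.deprecation('Passing --warn-error is deprecated in favor of '
                         '"fatal_warnings" keyword argument since v0.55')
    fatal_warnings = kwargs.get('fatal_warnings', False)
    if not isinstance(fatal_warnings, bool):
        raise MesonException('fatal_warnings keyword argument must be a boolean')
    if fatal_warnings:
        scan_command.append('--warn-error')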
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index ea1b325..d541eee 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -721,11 +721,12 @@ class GnomeModule(ExtensionModule): if f.startswith(('-L', '-l', '--extra-library')): yield f - @FeatureNewKwargs('build target', '0.40.0', ['build_by_default']) + @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings']) + @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default']) @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix', 'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories', 'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args', - 'packages', 'header', 'build_by_default'}) + 'packages', 'header', 'build_by_default', 'fatal_warnings'}) def generate_gir(self, state, args, kwargs): if not args: raise MesonException('generate_gir takes at least one argument') @@ -798,6 +799,14 @@ class GnomeModule(ExtensionModule): scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)] scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)] + if '--warn-error' in scan_command: + mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument') + fatal_warnings = kwargs.get('fatal_warnings', False) + if not isinstance(fatal_warnings, bool): + raise MesonException('fatal_warnings keyword argument must be string.') + if fatal_warnings: + scan_command.append('--warn-error') + scan_target = self._make_gir_target(state, girfile, scan_command, depends, kwargs) typelib_output = '%s-%s.typelib' % (ns, nsversion) -- cgit v1.1 From 2ebda584ad5d4b7005bbd8810ddb276d03c3e970 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Mon, 15 Jun 2020 18:01:35 -0400 Subject: Update mesonbuild/modules/gnome.py Co-authored-by: Nirbheek Chauhan --- mesonbuild/modules/gnome.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index d541eee..bb1507c 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -800,7 +800,7 @@ class GnomeModule(ExtensionModule): scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)] if '--warn-error' in scan_command: - mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument') + mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55') fatal_warnings = kwargs.get('fatal_warnings', False) if not isinstance(fatal_warnings, bool): raise MesonException('fatal_warnings keyword argument must be string.') -- cgit v1.1 From f40e1567f5b38000b47bfbdf307f843c07645f19 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Mon, 15 Jun 2020 18:02:38 -0400 Subject: Update mesonbuild/modules/gnome.py Co-authored-by: Nirbheek Chauhan --- mesonbuild/modules/gnome.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index bb1507c..1faa128 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -803,7 +803,7 @@ class GnomeModule(ExtensionModule): mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55') fatal_warnings 
= kwargs.get('fatal_warnings', False) if not isinstance(fatal_warnings, bool): - raise MesonException('fatal_warnings keyword argument must be string.') + raise MesonException('fatal_warnings keyword argument must be a boolean') if fatal_warnings: scan_command.append('--warn-error') -- cgit v1.1 From 20709af4d2a2f7f4ece3ecd3a9c65da3075be891 Mon Sep 17 00:00:00 2001 From: Mathieu Duponchelle Date: Tue, 2 Jun 2020 23:29:33 +0200 Subject: interpreter: add support for --force-fallback-for This new command line option allows specifying dependencies for which to force fallback. See the documentation for more information Fixes: #7218 --- docs/markdown/Builtin-options.md | 1 + docs/markdown/Subprojects.md | 16 ++++++++++++++++ docs/markdown/snippets/force_fallback_for.md | 10 ++++++++++ mesonbuild/coredata.py | 1 + mesonbuild/interpreter.py | 11 +++++++++-- run_unittests.py | 6 ++++++ 6 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 docs/markdown/snippets/force_fallback_for.md diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index 0fa127a..e7101d5 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -79,6 +79,7 @@ for details. | warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | | werror | false | Treat warnings as errors | no | | wrap_mode {default, nofallback,
nodownload, forcefallback} | default | Wrap mode to use | no | +| force_fallback_for | [] | Force fallback for those dependencies | no | For setting optimization levels and toggling debug, you can either set the diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md index 8232da9..9c54d69 100644 --- a/docs/markdown/Subprojects.md +++ b/docs/markdown/Subprojects.md @@ -212,6 +212,9 @@ the following command-line options: calls, and those are meant to be used for sources that cannot be provided by the system, such as copylibs. + This option may be overriden by `--force-fallback-for` for specific + dependencies. + * **--wrap-mode=forcefallback** Meson will not look at the system for any dependencies which have @@ -220,6 +223,19 @@ the following command-line options: want to specifically build against the library sources provided by your subprojects. +* **--force-fallback-for=list,of,dependencies** + + Meson will not look at the system for any dependencies listed there, + provided a fallback was supplied when the dependency was declared. + + This option takes precedence over `--wrap-mode=nofallback`, and when + used in combination with `--wrap-mode=nodownload` will only work + if the dependency has already been downloaded. + + This is useful when your project has many fallback dependencies, + but you only want to build against the library sources for a few + of them. + ## Download subprojects *Since 0.49.0* diff --git a/docs/markdown/snippets/force_fallback_for.md b/docs/markdown/snippets/force_fallback_for.md new file mode 100644 index 0000000..b6af209 --- /dev/null +++ b/docs/markdown/snippets/force_fallback_for.md @@ -0,0 +1,10 @@ +## Force fallback for + +A newly-added `--force-fallback-for` command line option can now be used to +force fallback for specific subprojects. 
+ +Example: + +``` +meson build --force-fallback-for=foo,bar +``` diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index fdba84e..94f977f 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -1129,6 +1129,7 @@ builtin_options = OrderedDict([ ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'])), ('werror', BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)), ('wrap_mode', BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])), + ('force_fallback_for', BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])), ]) builtin_options_per_machine = OrderedDict([ diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 76dbebd..bfbb189 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3568,7 +3568,8 @@ external dependencies (including libraries) must go to "dependencies".''') return self.get_subproject_dep(name, display_name, dirname, varname, kwargs) wrap_mode = self.coredata.get_builtin_option('wrap_mode') - forcefallback = wrap_mode == WrapMode.forcefallback and has_fallback + force_fallback_for = self.coredata.get_builtin_option('force_fallback_for') + forcefallback = (wrap_mode == WrapMode.forcefallback or name in force_fallback_for) and has_fallback if name != '' and not forcefallback: self._handle_featurenew_dependencies(name) kwargs['required'] = required and not has_fallback @@ -3622,7 +3623,13 @@ external dependencies (including libraries) must go to "dependencies".''') def dependency_fallback(self, name, display_name, kwargs): required = kwargs.get('required', True) - if self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback: + + # Explicitly listed fallback preferences for specific subprojects + # take precedence over wrap-mode + if name in self.coredata.get_builtin_option('force_fallback_for'): + mlog.log('Looking for a fallback subproject for the dependency', + mlog.bold(display_name), 'because:\nUse of fallback was forced for that specific subproject') + elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback: mlog.log('Not looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback ' 'dependencies is disabled.') diff --git a/run_unittests.py b/run_unittests.py index 93c1659..827e3c8 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -2287,6 +2287,12 @@ class AllPlatformTests(BasePlatformTests): self.build() self.run_tests() + def test_force_fallback_for(self): + testdir = os.path.join(self.unit_test_dir, '31 forcefallback') + self.init(testdir, extra_args=['--force-fallback-for=zlib,foo']) + self.build() + self.run_tests() + def test_env_ops_dont_stack(self): ''' Test that env ops prepend/append do not stack, and that this usage issues a warning -- cgit v1.1 From 4180f044338e8ed8d28db62efcfa1e2639f088e2 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 16 Jun 2020 14:04:06 -0400 Subject: interpreter: Avoid new feature warning when using old has_exe_wrapper() --- mesonbuild/interpreter.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index bfbb189..7849c81 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2031,12 +2031,15 @@ class MesonMain(InterpreterObject): @permittedKwargs({}) @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use 
meson.can_run_host_binaries instead.') def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool: - return self.can_run_host_binaries_method(args, kwargs) + return self.can_run_host_binaries_impl(args, kwargs) @noPosargs @permittedKwargs({}) @FeatureNew('meson.can_run_host_binaries', '0.55.0') def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool: + return self.can_run_host_binaries_impl(args, kwargs) + + def can_run_host_binaries_impl(self, args, kwargs): if (self.is_cross_build_method(None, None) and self.build.environment.need_exe_wrapper()): if self.build.environment.exe_wrapper is None: -- cgit v1.1 From 972cac13f4220eeb58569f880991a6dd64d82ce0 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 16 Jun 2020 14:49:25 -0400 Subject: doc: Fix missing --force-fallback-for documentation --- docs/markdown/Commands.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md index c9e5f96..3aecae1 100644 --- a/docs/markdown/Commands.md +++ b/docs/markdown/Commands.md @@ -34,6 +34,7 @@ $ meson configure [-h] [--prefix PREFIX] [--bindir BINDIR] [--unity-size UNITY_SIZE] [--warnlevel {0,1,2,3}] [--werror] [--wrap-mode {default,nofallback,nodownload,forcefallback}] + [--force-fallback-for FORCE_FALLBACK_FOR] [--pkg-config-path PKG_CONFIG_PATH] [--build.pkg-config-path BUILD.PKG_CONFIG_PATH] [--cmake-prefix-path CMAKE_PREFIX_PATH] @@ -90,6 +91,9 @@ optional arguments: --werror Treat warnings as errors --wrap-mode {default,nofallback,nodownload,forcefallback} Wrap mode (default: default). + --force-fallback-for FORCE_FALLBACK_FOR + Force fallback for those subprojects + (default: []). --pkg-config-path PKG_CONFIG_PATH List of additional paths for pkg-config to search (default: []). (just for host machine) @@ -379,6 +383,7 @@ $ meson setup [-h] [--prefix PREFIX] [--bindir BINDIR] [--datadir DATADIR] [--stdsplit] [--strip] [--unity {on,off,subprojects}] [--unity-size UNITY_SIZE] [--warnlevel {0,1,2,3}] [--werror] [--wrap-mode {default,nofallback,nodownload,forcefallback}] + [--force-fallback-for FORCE_FALLBACK_FOR] [--pkg-config-path PKG_CONFIG_PATH] [--build.pkg-config-path BUILD.PKG_CONFIG_PATH] [--cmake-prefix-path CMAKE_PREFIX_PATH] @@ -440,6 +445,9 @@ optional arguments: --werror Treat warnings as errors --wrap-mode {default,nofallback,nodownload,forcefallback} Wrap mode (default: default). + --force-fallback-for FORCE_FALLBACK_FOR + Force fallback for those subprojects + (default: []). --pkg-config-path PKG_CONFIG_PATH List of additional paths for pkg-config to search (default: []). 
(just for host machine) -- cgit v1.1 From 07d2331d2354d27d895d926c5c8a8636f93077db Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 16 Jun 2020 14:42:29 -0400 Subject: interpreter: Allow dependecy or subproject name in force_fallback_for --- mesonbuild/interpreter.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 7849c81..76c8254 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3572,7 +3572,9 @@ external dependencies (including libraries) must go to "dependencies".''') wrap_mode = self.coredata.get_builtin_option('wrap_mode') force_fallback_for = self.coredata.get_builtin_option('force_fallback_for') - forcefallback = (wrap_mode == WrapMode.forcefallback or name in force_fallback_for) and has_fallback + forcefallback = has_fallback and (wrap_mode == WrapMode.forcefallback or \ + name in force_fallback_for or \ + dirname in force_fallback_for) if name != '' and not forcefallback: self._handle_featurenew_dependencies(name) kwargs['required'] = required and not has_fallback @@ -3625,11 +3627,13 @@ external dependencies (including libraries) must go to "dependencies".''') return fbinfo def dependency_fallback(self, name, display_name, kwargs): + dirname, varname = self.get_subproject_infos(kwargs) required = kwargs.get('required', True) # Explicitly listed fallback preferences for specific subprojects # take precedence over wrap-mode - if name in self.coredata.get_builtin_option('force_fallback_for'): + force_fallback_for = self.coredata.get_builtin_option('force_fallback_for') + if name in force_fallback_for or dirname in force_fallback_for: mlog.log('Looking for a fallback subproject for the dependency', mlog.bold(display_name), 'because:\nUse of fallback was forced for that specific subproject') elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.nofallback: @@ -3646,7 +3650,6 @@ external dependencies (including libraries) must go to "dependencies".''') else: mlog.log('Looking for a fallback subproject for the dependency', mlog.bold(display_name)) - dirname, varname = self.get_subproject_infos(kwargs) sp_kwargs = { 'default_options': kwargs.get('default_options', []), 'required': required, -- cgit v1.1 From f7b751401409fb8036b084a7d1729d45e73fe6e6 Mon Sep 17 00:00:00 2001 From: Michael Date: Tue, 16 Jun 2020 17:46:29 -0700 Subject: update Project-templates.md --- docs/markdown/Project-templates.md | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/docs/markdown/Project-templates.md b/docs/markdown/Project-templates.md index 5f323bd..832cd1d 100644 --- a/docs/markdown/Project-templates.md +++ b/docs/markdown/Project-templates.md @@ -20,11 +20,30 @@ project. The result can be compiled as usual. For example compiling it with Ninja could be done like this: ``` -$ meson builddir -$ ninja -C builddir +$ meson setup builddir +$ meson compile -C builddir ``` The generator has many different projects and settings. They can all be listed by invoking the command `meson init --help`. This feature is available since Meson version 0.45.0. + +# Generate a build script for an existing project + +With `meson init` you can generate a build script for an existing +project with existing project files by running the command in the +root directory of your project. Meson currently supports this +feature for `executable`, and `jar` projects. 
+ +# Build after generation of template + +It is possible to have Meson generate a build directory from the +`meson init` command without running `meson setup`. This is done +by passing `-b` or `--build` switch. + +```console +$ mkdir project_name +$ cd project_name +$ meson init --language=c --name=myproject --version=0.1 --build +``` \ No newline at end of file -- cgit v1.1 From 804cefc94cd334fccccdf3015eb0b3f589f87515 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 17 Jun 2020 20:45:54 +0530 Subject: ci: Remove PostgreSQL from PATH It's one of the causes of the cmake test failures, and it's also plaguing the VS2019 jobs now because of the image update. --- azure-pipelines.yml | 2 -- ci/run.ps1 | 3 ++- test cases/cmake/2 advanced/test.json | 5 ----- test cases/cmake/5 object library/test.json | 7 ------- 4 files changed, 2 insertions(+), 15 deletions(-) delete mode 100644 test cases/cmake/5 object library/test.json diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 65fc020..0408342 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -15,8 +15,6 @@ jobs: - job: vs2017 pool: vmImage: VS2017-Win2016 - variables: - CI_JOB_VS2017: 1 strategy: matrix: diff --git a/ci/run.ps1 b/ci/run.ps1 index 34856c0..5065b87 100644 --- a/ci/run.ps1 +++ b/ci/run.ps1 @@ -4,7 +4,8 @@ if ($LastExitCode -ne 0) { } # remove Chocolately, MinGW, Strawberry Perl from path, so we don't find gcc/gfortran and try to use it -$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey' }) -join ';' +# remove PostgreSQL from path so we don't pickup a broken zlib from it +$env:Path = ($env:Path.Split(';') | Where-Object { $_ -notmatch 'mingw|Strawberry|Chocolatey|PostgreSQL' }) -join ';' # Rust puts its shared stdlib in a secret place, but it is needed to run tests. 
$env:Path += ";$HOME/.rustup/toolchains/stable-x86_64-pc-windows-msvc/bin" diff --git a/test cases/cmake/2 advanced/test.json b/test cases/cmake/2 advanced/test.json index f4cb58b..e12f530 100644 --- a/test cases/cmake/2 advanced/test.json +++ b/test cases/cmake/2 advanced/test.json @@ -1,9 +1,4 @@ { - "matrix": { - "options": { - "_": [{"val": null, "skip_on_env": ["CI_JOB_VS2017"]}] - } - }, "installed": [ {"type": "expr", "file": "usr/?lib/libcm_cmModLib?so"}, {"type": "implib", "platform": "cygwin", "file": "usr/lib/libcm_cmModLib"}, diff --git a/test cases/cmake/5 object library/test.json b/test cases/cmake/5 object library/test.json deleted file mode 100644 index 1840ce4..0000000 --- a/test cases/cmake/5 object library/test.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "matrix": { - "options": { - "_": [{"val": null, "skip_on_env": ["CI_JOB_VS2017"]}] - } - } -} -- cgit v1.1 From a198e5d191820fda9142d248cd5d134e5f2a5b93 Mon Sep 17 00:00:00 2001 From: Cary Converse Date: Sun, 29 Mar 2020 11:28:02 -0400 Subject: coverage: llvm-cov support --- azure-pipelines.yml | 9 ++-- ci/ciimage/arch/install.sh | 2 +- ci/ciimage/bionic/install.sh | 3 +- ci/ciimage/eoan/install.sh | 3 +- ci/ciimage/fedora/install.sh | 2 +- ci/ciimage/opensuse/install.sh | 4 +- docs/markdown/howtox.md | 3 +- docs/markdown/snippets/clang_coverage.md | 4 ++ mesonbuild/backend/ninjabackend.py | 10 +++- mesonbuild/environment.py | 11 ++++- mesonbuild/scripts/coverage.py | 46 +++++++++++++----- run_unittests.py | 83 ++++++++++++++++++++++++++------ 12 files changed, 140 insertions(+), 40 deletions(-) create mode 100644 docs/markdown/snippets/clang_coverage.md diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 0408342..3822110 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -91,6 +91,7 @@ jobs: libjsoncpp19,^ librhash0,^ libuv1,^ + libxml2,^ ninja,^ python2-devel,^ python3-devel,^ @@ -102,8 +103,8 @@ jobs: displayName: Install Dependencies - script: | set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 - env.exe -- python3 -m pip --disable-pip-version-check install pefile pytest-xdist jsonschema - displayName: pip install pefile pytest-xdist jsonschema + env.exe -- python3 -m pip --disable-pip-version-check install gcovr pefile pytest-xdist jsonschema + displayName: pip install gcovr pefile pytest-xdist jsonschema - script: | set BOOST_ROOT= set PATH=%CYGWIN_ROOT%\bin;%SYSTEMROOT%\system32 @@ -165,6 +166,8 @@ jobs: git ^ mercurial ^ mingw-w64-$(MSYS2_ARCH)-cmake ^ + mingw-w64-$(MSYS2_ARCH)-lcov ^ + mingw-w64-$(MSYS2_ARCH)-libxml2 ^ mingw-w64-$(MSYS2_ARCH)-ninja ^ mingw-w64-$(MSYS2_ARCH)-pkg-config ^ mingw-w64-$(MSYS2_ARCH)-python2 ^ @@ -172,7 +175,7 @@ jobs: mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^ mingw-w64-$(MSYS2_ARCH)-python3-pip ^ %TOOLCHAIN% - %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install pefile jsonschema" + %MSYS2_ROOT%\usr\bin\bash -lc "python3 -m pip --disable-pip-version-check install gcovr jsonschema pefile" displayName: Install Dependencies - script: | set BOOST_ROOT= diff --git a/ci/ciimage/arch/install.sh b/ci/ciimage/arch/install.sh index 6cbbb27..fb27c26 100755 --- a/ci/ciimage/arch/install.sh +++ b/ci/ciimage/arch/install.sh @@ -17,7 +17,7 @@ pkgs=( ) aur_pkgs=(scalapack) -pip_pkgs=(hotdoc) +pip_pkgs=(hotdoc gcovr) cleanup_pkgs=(go) AUR_USER=docker diff --git a/ci/ciimage/bionic/install.sh b/ci/ciimage/bionic/install.sh index 47deb2a..0bfcdfb 100755 --- a/ci/ciimage/bionic/install.sh +++ b/ci/ciimage/bionic/install.sh @@ -15,6 +15,7 @@ pkgs=( 
qt4-linguist-tools qt5-default qtbase5-private-dev python-dev libomp-dev + llvm lcov ldc libclang-dev libgcrypt20-dev @@ -45,7 +46,7 @@ done # packages eatmydata apt-get -y install "${pkgs[@]}" -eatmydata python3 -m pip install codecov jsonschema +eatmydata python3 -m pip install codecov gcovr jsonschema # Install the ninja 0.10 wget https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-linux.zip diff --git a/ci/ciimage/eoan/install.sh b/ci/ciimage/eoan/install.sh index 7d7a1fd..36dec72 100755 --- a/ci/ciimage/eoan/install.sh +++ b/ci/ciimage/eoan/install.sh @@ -18,6 +18,7 @@ pkgs=( qt4-linguist-tools python-dev libomp-dev + llvm lcov dub ldc mingw-w64 mingw-w64-tools nim libclang-dev @@ -42,7 +43,7 @@ eatmydata apt-get -y build-dep meson eatmydata apt-get -y install "${pkgs[@]}" eatmydata apt-get -y install --no-install-recommends wine-stable # Wine is special -eatmydata python3 -m pip install hotdoc codecov jsonschema +eatmydata python3 -m pip install hotdoc codecov gcovr jsonschema # dub stuff dub_fetch urld diff --git a/ci/ciimage/fedora/install.sh b/ci/ciimage/fedora/install.sh index f61d97e..3beb11c 100755 --- a/ci/ciimage/fedora/install.sh +++ b/ci/ciimage/fedora/install.sh @@ -21,7 +21,7 @@ dnf -y upgrade # Install deps dnf -y install "${pkgs[@]}" -python3 -m pip install hotdoc gobject PyGObject +python3 -m pip install hotdoc gcovr gobject PyGObject # Cleanup dnf -y clean all diff --git a/ci/ciimage/opensuse/install.sh b/ci/ciimage/opensuse/install.sh index b9e440d..4c8e770 100755 --- a/ci/ciimage/opensuse/install.sh +++ b/ci/ciimage/opensuse/install.sh @@ -7,7 +7,7 @@ source /ci/common.sh pkgs=( python3-setuptools python3-wheel python3-pip python3-pytest-xdist python3 python3-lxml ninja make git autoconf automake patch python3-Cython python3-jsonschema - elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl + elfutils gcc gcc-c++ gcc-fortran gcc-objc gcc-obj-c++ vala rust bison flex curl lcov mono-core gtkmm3-devel gtest gmock protobuf-devel wxGTK3-3_2-devel gobject-introspection-devel itstool gtk3-devel java-15-openjdk-devel gtk-doc llvm-devel clang-devel libSDL2-devel graphviz-devel zlib-devel zlib-devel-static #hdf5-devel netcdf-devel libscalapack2-openmpi3-devel libscalapack2-gnu-openmpi3-hpc-devel openmpi3-devel @@ -26,7 +26,7 @@ zypper --non-interactive update # Install deps zypper install -y "${pkgs[@]}" -python3 -m pip install hotdoc gobject PyGObject +python3 -m pip install hotdoc gcovr gobject PyGObject echo 'export PKG_CONFIG_PATH="/usr/lib64/mpi/gcc/openmpi3/lib64/pkgconfig:$PKG_CONFIG_PATH"' >> /ci/env_vars.sh diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index 84546b7..0832060 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -155,8 +155,7 @@ $ ninja coverage-html (or coverage-xml) The coverage report can be found in the meson-logs subdirectory. -Note: Currently, Meson does not support generating coverage reports -with Clang. +*New in 0.55.0* llvm-cov support for use with clang ## Add some optimization to debug builds diff --git a/docs/markdown/snippets/clang_coverage.md b/docs/markdown/snippets/clang_coverage.md new file mode 100644 index 0000000..733a3d9 --- /dev/null +++ b/docs/markdown/snippets/clang_coverage.md @@ -0,0 +1,4 @@ +## Clang coverage support + +llvm-cov is now used to generate coverage information when clang is used as +the compiler. 
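# The mechanism behind this release note, pulled together as a sketch (it
# is not part of any hunk; the real logic is spread across environment.py
# and scripts/coverage.py below, and the helper name here is illustrative
# only): the gcov-based tools are pointed at `llvm-cov gcov`.  gcovr takes
# the command string directly, while lcov expects a single executable, so
# a one-line shim script is generated for it (the patch also writes a .bat
# variant on Windows).
import os
import stat

def llvm_cov_as_gcov(llvm_cov_exe, shim_path):
    gcovr_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
    with open(shim_path, 'w') as shim:
        shim.write('#!/usr/bin/env sh\nexec "{}" gcov $@'.format(llvm_cov_exe))
    os.chmod(shim_path, os.stat(shim_path).st_mode | stat.S_IEXEC)
    lcov_args = ['--gcov-tool', shim_path]
    return gcovr_args, lcov_args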
\ No newline at end of file diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 8dbb57a..252f646 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -968,6 +968,13 @@ int dummy; self.processed_targets[target.get_id()] = True def generate_coverage_command(self, elem, outputs): + targets = self.build.get_targets().values() + use_llvm_cov = False + for target in targets: + for compiler in target.compilers.values(): + if compiler.get_id() == 'clang' and not compiler.info.is_darwin(): + use_llvm_cov = True + break elem.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'coverage'] + outputs + @@ -975,7 +982,8 @@ int dummy; os.path.join(self.environment.get_source_dir(), self.build.get_subproject_dir()), self.environment.get_build_dir(), - self.environment.get_log_dir()]) + self.environment.get_log_dir()] + + ['--use_llvm_cov'] if use_llvm_cov else []) def generate_coverage_rules(self): e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY') diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index c02376e..afc2a63 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -134,9 +134,18 @@ def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False): return gcovr_exe, mesonlib.version_compare(found, '>=' + new_rootdir_version) return None, None +def detect_llvm_cov(): + tools = get_llvm_tool_names('llvm-cov') + for tool in tools: + if mesonlib.exe_exists([tool, '--version']): + return tool + return None + def find_coverage_tools(): gcovr_exe, gcovr_new_rootdir = detect_gcovr() + llvm_cov_exe = detect_llvm_cov() + lcov_exe = 'lcov' genhtml_exe = 'genhtml' @@ -145,7 +154,7 @@ def find_coverage_tools(): if not mesonlib.exe_exists([genhtml_exe, '--version']): genhtml_exe = None - return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe + return gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe def detect_ninja(version: str = '1.7', log: bool = False) -> str: r = detect_ninja_command_and_version(version, log) diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py index 4bd41fe..7231972 100644 --- a/mesonbuild/scripts/coverage.py +++ b/mesonbuild/scripts/coverage.py @@ -12,15 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from mesonbuild import environment +from mesonbuild import environment, mesonlib -import argparse, sys, os, subprocess, pathlib +import argparse, sys, os, subprocess, pathlib, stat -def coverage(outputs, source_root, subproject_root, build_root, log_dir): +def coverage(outputs, source_root, subproject_root, build_root, log_dir, use_llvm_cov): outfiles = [] exitcode = 0 - (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe) = environment.find_coverage_tools() + (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools() # gcovr >= 4.2 requires a different syntax for out of source builds if gcovr_new_rootdir: @@ -28,13 +28,18 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir): else: gcovr_base_cmd = [gcovr_exe, '-r', build_root] + if use_llvm_cov: + gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov'] + else: + gcov_exe_args = [] + if not outputs or 'xml' in outputs: if gcovr_exe: subprocess.check_call(gcovr_base_cmd + ['-x', '-e', subproject_root, - '-o', os.path.join(log_dir, 'coverage.xml'), - ]) + '-o', os.path.join(log_dir, 'coverage.xml') + ] + gcov_exe_args) outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml'))) elif outputs: print('gcovr >= 3.3 needed to generate Xml coverage report') @@ -44,8 +49,8 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir): if gcovr_exe: subprocess.check_call(gcovr_base_cmd + ['-e', subproject_root, - '-o', os.path.join(log_dir, 'coverage.txt'), - ]) + '-o', os.path.join(log_dir, 'coverage.txt') + ] + gcov_exe_args) outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt'))) elif outputs: print('gcovr >= 3.3 needed to generate text coverage report') @@ -58,19 +63,34 @@ def coverage(outputs, source_root, subproject_root, build_root, log_dir): initial_tracefile = covinfo + '.initial' run_tracefile = covinfo + '.run' raw_tracefile = covinfo + '.raw' + if use_llvm_cov: + # Create a shim to allow using llvm-cov as a gcov tool. + if mesonlib.is_windows(): + llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat') + with open(llvm_cov_shim_path, 'w') as llvm_cov_bat: + llvm_cov_bat.write('@"{}" gcov %*'.format(llvm_cov_exe)) + else: + llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh') + with open(llvm_cov_shim_path, 'w') as llvm_cov_sh: + llvm_cov_sh.write('#!/usr/bin/env sh\nexec "{}" gcov $@'.format(llvm_cov_exe)) + os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC) + gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path] + else: + gcov_tool_args = [] subprocess.check_call([lcov_exe, '--directory', build_root, '--capture', '--initial', '--output-file', - initial_tracefile]) + initial_tracefile] + + gcov_tool_args) subprocess.check_call([lcov_exe, '--directory', build_root, '--capture', '--output-file', run_tracefile, '--no-checksum', - '--rc', 'lcov_branch_coverage=1', - ]) + '--rc', 'lcov_branch_coverage=1'] + + gcov_tool_args) # Join initial and test results. 
subprocess.check_call([lcov_exe, '-a', initial_tracefile, @@ -137,6 +157,8 @@ def run(args): const='xml', help='generate Xml report') parser.add_argument('--html', dest='outputs', action='append_const', const='html', help='generate Html report') + parser.add_argument('--use_llvm_cov', action='store_true', + help='use llvm-cov') parser.add_argument('source_root') parser.add_argument('subproject_root') parser.add_argument('build_root') @@ -144,7 +166,7 @@ def run(args): options = parser.parse_args(args) return coverage(options.outputs, options.source_root, options.subproject_root, options.build_root, - options.log_dir) + options.log_dir, options.use_llvm_cov) if __name__ == '__main__': sys.exit(run(sys.argv[1:])) diff --git a/run_unittests.py b/run_unittests.py index 827e3c8..0a61935 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4829,6 +4829,74 @@ recommended as it is not supported on some platforms''') self.assertEqual(parsed_help['usage'], parsed_section['usage']) self.assertEqual(parsed_help['arguments'], parsed_section['arguments']) + def test_coverage(self): + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage') + + def test_coverage_html(self): + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage-html') + + def test_coverage_text(self): + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage-text') + + def test_coverage_xml(self): + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(MachineChoice.HOST) + if cc.get_id() == 'clang': + if not 
mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage-xml') + class FailureTests(BasePlatformTests): ''' Tests that test failure conditions. Build files here should be dynamically @@ -6301,21 +6369,6 @@ class LinuxlikeTests(BasePlatformTests): for i in compdb: self.assertIn("-fsanitize=address", i["command"]) - def test_coverage(self): - gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() - if not gcovr_exe: - raise unittest.SkipTest('gcovr not found') - if not shutil.which('genhtml') and not gcovr_new_rootdir: - raise unittest.SkipTest('genhtml not found and gcovr is too old') - if 'clang' in os.environ.get('CC', ''): - # We need to use llvm-cov instead of gcovr with clang - raise unittest.SkipTest('Coverage does not work with clang right now, help wanted!') - testdir = os.path.join(self.common_test_dir, '1 trivial') - self.init(testdir, extra_args=['-Db_coverage=true']) - self.build() - self.run_tests() - self.run_target('coverage-html') - def test_cross_find_program(self): testdir = os.path.join(self.unit_test_dir, '11 cross prog') crossfile = tempfile.NamedTemporaryFile(mode='w') -- cgit v1.1 From 5bb7f743fd94d28d48a30551302c3afbd9e9ab7c Mon Sep 17 00:00:00 2001 From: Cary Converse Date: Tue, 2 Jun 2020 14:22:23 -0400 Subject: add missing gcovr dependencies for cygwin ci --- azure-pipelines.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 3822110..3be1975 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -92,9 +92,14 @@ jobs: librhash0,^ libuv1,^ libxml2,^ + libxml2-devel,^ + libxslt,^ + libxslt-devel,^ ninja,^ python2-devel,^ python3-devel,^ + python3-libxml2,^ + python3-libxslt,^ python36-pip,^ vala,^ wget,^ -- cgit v1.1 From f3e2e4c63ee75413501856fb026d478f82ddef0d Mon Sep 17 00:00:00 2001 From: Cary Converse Date: Tue, 2 Jun 2020 14:38:36 -0400 Subject: add missing gcovr dependencies for msys2 ci --- azure-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 3be1975..45d85b7 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -177,6 +177,7 @@ jobs: mingw-w64-$(MSYS2_ARCH)-pkg-config ^ mingw-w64-$(MSYS2_ARCH)-python2 ^ mingw-w64-$(MSYS2_ARCH)-python3 ^ + mingw-w64-$(MSYS2_ARCH)-python3-lxml ^ mingw-w64-$(MSYS2_ARCH)-python3-setuptools ^ mingw-w64-$(MSYS2_ARCH)-python3-pip ^ %TOOLCHAIN% -- cgit v1.1 From 8620ca2066d04b0d61c8f9fc60c218c779bd11fb Mon Sep 17 00:00:00 2001 From: Cary Converse Date: Tue, 16 Jun 2020 22:56:08 -0400 Subject: disable coverage tests on msys2 --- run_unittests.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index 0a61935..b663e83 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4830,6 +4830,8 @@ recommended as it is not supported on some platforms''') self.assertEqual(parsed_help['arguments'], parsed_section['arguments']) def test_coverage(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() if not gcovr_exe: raise unittest.SkipTest('gcovr not found, or too old') @@ -4847,6 +4849,8 @@ recommended as it is not supported on some platforms''') 
self.run_target('coverage') def test_coverage_html(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() if not gcovr_exe: raise unittest.SkipTest('gcovr not found, or too old') @@ -4864,6 +4868,8 @@ recommended as it is not supported on some platforms''') self.run_target('coverage-html') def test_coverage_text(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() if not gcovr_exe: raise unittest.SkipTest('gcovr not found, or too old') @@ -4881,6 +4887,8 @@ recommended as it is not supported on some platforms''') self.run_target('coverage-text') def test_coverage_xml(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() if not gcovr_exe: raise unittest.SkipTest('gcovr not found, or too old') -- cgit v1.1 From e191cbf6e990caa0e912996977584909aab21da8 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 17 Jun 2020 18:12:20 -0400 Subject: mconf: Fix regression when printing all options This is a regression introduced by #5489 --- mesonbuild/mconf.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 05e9518..2e03cab 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -97,9 +97,9 @@ class Conf: else: print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths)) - def split_options_per_subproject(self, options_iter): + def split_options_per_subproject(self, options): result = {} - for k, o in options_iter: + for k, o in options.items(): subproject = '' if ':' in k: subproject, optname = k.split(':') @@ -211,15 +211,15 @@ class Conf: return 'build.' + k return k[:idx + 1] + 'build.' 
+ k[idx + 1:] - core_options = self.split_options_per_subproject(core_options.items()) + core_options = self.split_options_per_subproject(core_options) host_compiler_options = self.split_options_per_subproject( - self.coredata.flatten_lang_iterator( - self.coredata.compiler_options.host.items())) + dict(self.coredata.flatten_lang_iterator( + self.coredata.compiler_options.host.items()))) build_compiler_options = self.split_options_per_subproject( - self.coredata.flatten_lang_iterator( + dict(self.coredata.flatten_lang_iterator( (insert_build_prefix(k), o) - for k, o in self.coredata.compiler_options.build.items())) - project_options = self.split_options_per_subproject(self.coredata.user_options.items()) + for k, o in self.coredata.compiler_options.build.items()))) + project_options = self.split_options_per_subproject(self.coredata.user_options) show_build_options = self.default_values_only or self.build.environment.is_cross_build() self.add_section('Main project options') -- cgit v1.1 From 96609da8d26fc05be8845c1edf05c8aa38af047d Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Mon, 15 Jun 2020 18:36:08 +0300 Subject: docs: replaced `ninja` with appropriate `meson` commands [skip ci] --- docs/markdown/Commands.md | 20 ++-------- docs/markdown/Configuring-a-build-directory.md | 4 +- docs/markdown/Continuous-Integration.md | 18 ++++----- docs/markdown/Creating-OSX-packages.md | 2 +- docs/markdown/Creating-releases.md | 12 +++++- docs/markdown/Cross-compilation.md | 2 +- docs/markdown/Design-rationale.md | 4 +- docs/markdown/FAQ.md | 2 +- docs/markdown/Feature-autodetection.md | 2 +- docs/markdown/Gnome-module.md | 4 +- docs/markdown/IDE-integration.md | 4 +- docs/markdown/IndepthTutorial.md | 6 +-- docs/markdown/Installing.md | 20 +++++++--- docs/markdown/Meson-sample.md | 2 +- docs/markdown/Project-templates.md | 4 +- docs/markdown/Qt5-module.md | 2 +- docs/markdown/Quick-guide.md | 16 ++++---- docs/markdown/Running-Meson.md | 45 ++++++++++++++++++----- docs/markdown/Tutorial.md | 4 +- docs/markdown/Unit-tests.md | 5 +-- docs/markdown/Using-multiple-build-directories.md | 4 +- docs/markdown/Vs-External.md | 6 +-- docs/markdown/howtox.md | 10 ++--- 23 files changed, 113 insertions(+), 85 deletions(-) diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md index 3aecae1..615b302 100644 --- a/docs/markdown/Commands.md +++ b/docs/markdown/Commands.md @@ -183,21 +183,7 @@ optional arguments: --no-tests Do not build and test generated packages. ``` -This creates a file called `projectname-version.tar.xz` in the build -tree subdirectory `meson-dist`. This archive contains the full -contents of the latest commit in revision control including all the -submodules (recursively). All revision control metadata is removed. -Meson then takes -this archive and tests that it works by doing a full compile + test + -install cycle. If all these pass, Meson will then create a SHA-256 -checksum file next to the archive. - -**Note**: Meson behaviour is different from Autotools. The Autotools -"dist" target packages up the current source tree. Meson packages -the latest revision control commit. The reason for this is that it -prevents developers from doing accidental releases where the -distributed archive does not match any commit in revision control -(especially the one tagged for the release). +See [notes about creating releases](Creating-releases.md) for more info. 
#### Examples: @@ -310,7 +296,7 @@ meson introspect builddir $ meson install [-h] [-C WD] [--no-rebuild] [--only-changed] [--quiet] ``` -Installs the project to the prefix specified in `setup`. +Installs the project to the prefix specified in [`setup`](#setup). ``` optional arguments: @@ -325,7 +311,7 @@ See [the installation documentation](Installing.md) for more info. #### Examples: -Install project to `prefix` (see [`setup`](#setup)): +Install project to `prefix`: ``` meson install -C builddir ``` diff --git a/docs/markdown/Configuring-a-build-directory.md b/docs/markdown/Configuring-a-build-directory.md index 1387a46..330899f 100644 --- a/docs/markdown/Configuring-a-build-directory.md +++ b/docs/markdown/Configuring-a-build-directory.md @@ -109,11 +109,11 @@ you would issue the following command. meson configure -Dprefix=/tmp/testroot -Then you would run your build command (usually `ninja`), which would +Then you would run your build command (usually `meson compile`), which would cause Meson to detect that the build setup has changed and do all the work required to bring your build tree up to date. Since 0.50.0, it is also possible to get a list of all build options -by invoking `meson configure` with the project source directory or +by invoking [`meson configure`](Commands.md#configure) with the project source directory or the path to the root `meson.build`. In this case, meson will print the default values of all options similar to the example output from above. diff --git a/docs/markdown/Continuous-Integration.md b/docs/markdown/Continuous-Integration.md index 0846f2d..76a05a3 100644 --- a/docs/markdown/Continuous-Integration.md +++ b/docs/markdown/Continuous-Integration.md @@ -36,8 +36,8 @@ script: - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM YOUR/REPO:eoan > Dockerfile; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo ADD . 
/root >> Dockerfile; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker build -t withgit .; fi - - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && ninja -C builddir test"; fi - - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && ninja -C builddir test; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX meson builddir && meson test -C builddir"; fi + - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then SDKROOT=$(xcodebuild -version -sdk macosx Path) meson builddir && meson test -C builddir; fi ``` ## CircleCi for Linux (with Docker) @@ -69,7 +69,7 @@ jobs: steps: - checkout - run: meson setup builddir --backend ninja - - run: ninja -C builddir + - run: meson compile -C builddir - run: meson test -C builddir meson_debian_build: @@ -77,7 +77,7 @@ jobs: steps: - checkout - run: meson setup builddir --backend ninja - - run: ninja -C builddir + - run: meson compile -C builddir - run: meson test -C builddir meson_fedora_build: @@ -85,7 +85,7 @@ jobs: steps: - checkout - run: meson setup builddir --backend ninja - - run: ninja -C builddir + - run: meson compile -C builddir - run: meson test -C builddir workflows: @@ -138,10 +138,10 @@ install: build_script: - cmd: echo Building on %arch% with %compiler% - cmd: meson --backend=ninja builddir - - cmd: ninja -C builddir + - cmd: meson compile -C builddir test_script: - - cmd: ninja -C builddir test + - cmd: meson test -C builddir ``` ### Qt @@ -187,8 +187,8 @@ install: script: - meson builddir - - ninja -C builddir - - ninja -C builddir test + - meson compile -C builddir + - meson test -C builddir ``` ## GitHub Actions diff --git a/docs/markdown/Creating-OSX-packages.md b/docs/markdown/Creating-OSX-packages.md index bda06a3..849d5fd 100644 --- a/docs/markdown/Creating-OSX-packages.md +++ b/docs/markdown/Creating-OSX-packages.md @@ -39,7 +39,7 @@ $ meson --prefix=/tmp/myapp.app \ ``` -Now when we do `ninja install` the bundle is properly staged. If you +Now when we do `meson install` the bundle is properly staged. If you have any resource files or data, you need to install them into `Contents/Resources` either by custom install commands or specifying more install paths to the Meson command. diff --git a/docs/markdown/Creating-releases.md b/docs/markdown/Creating-releases.md index 45c4b4e..efaa85a 100644 --- a/docs/markdown/Creating-releases.md +++ b/docs/markdown/Creating-releases.md @@ -10,9 +10,17 @@ or zip format) of the source code. They do not contain any revision control metadata, only the source code. Meson provides a simple way of generating these. It consists of a -single command: +single command *(available since 0.52.0)*: - ninja dist +```sh +meson dist +``` + +or alternatively (on older meson versions with `ninja` backend): + +```sh +ninja dist +``` This creates a file called `projectname-version.tar.xz` in the build tree subdirectory `meson-dist`. 
This archive contains the full diff --git a/docs/markdown/Cross-compilation.md b/docs/markdown/Cross-compilation.md index 1c53dcf..d86d417 100644 --- a/docs/markdown/Cross-compilation.md +++ b/docs/markdown/Cross-compilation.md @@ -222,7 +222,7 @@ Once you have the cross file, starting a build is simple $ meson srcdir builddir --cross-file cross_file.txt ``` -Once configuration is done, compilation is started by invoking Ninja +Once configuration is done, compilation is started by invoking `meson compile` in the usual way. ## Introspection and system checks diff --git a/docs/markdown/Design-rationale.md b/docs/markdown/Design-rationale.md index 57aaee4..7121192 100644 --- a/docs/markdown/Design-rationale.md +++ b/docs/markdown/Design-rationale.md @@ -223,11 +223,11 @@ add_test('test library', exe) ``` First we build a shared library named foobar. It is marked -installable, so running `ninja install` installs it to the library +installable, so running `meson install` installs it to the library directory (the system knows which one so the user does not have to care). Then we build a test executable which is linked against the library. It will not be installed, but instead it is added to the list -of unit tests, which can be run with the command `ninja test`. +of unit tests, which can be run with the command `meson test`. Above we mentioned precompiled headers as a feature not supported by other build systems. Here's how you would use them. diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md index 7b5c03a..7a41443 100644 --- a/docs/markdown/FAQ.md +++ b/docs/markdown/FAQ.md @@ -51,7 +51,7 @@ $ /path/to/meson.py After this you don't have to care about invoking Meson any more. It remembers where it was originally invoked from and calls itself appropriately. As a user the only thing you need to do is to `cd` into -your build directory and invoke `ninja`. +your build directory and invoke `meson compile`. ## Why can't I specify target files with a wildcard? diff --git a/docs/markdown/Feature-autodetection.md b/docs/markdown/Feature-autodetection.md index c1b7659..a568313 100644 --- a/docs/markdown/Feature-autodetection.md +++ b/docs/markdown/Feature-autodetection.md @@ -34,6 +34,6 @@ also generated which will produce all 3 coverage report types, if possible. Note that generating any of the coverage reports described above -requires the tests (i.e. `ninja test`) to finish running so the +requires the tests (i.e. `meson test`) to finish running so the information about the functions that are called in the tests can be gathered for the report. diff --git a/docs/markdown/Gnome-module.md b/docs/markdown/Gnome-module.md index ced8a40..3d06233 100644 --- a/docs/markdown/Gnome-module.md +++ b/docs/markdown/Gnome-module.md @@ -223,7 +223,7 @@ directory. Note that this is not for installing schemas and is only useful when running the application locally for example during tests. * `build_by_default`: causes, when set to true, to have this target be - built by default, that is, when invoking plain `ninja`, the default + built by default, that is, when invoking plain `meson compile`, the default value is true for all built target types * `depend_files`: files ([`string`](Reference-manual.md#string-object), [`files()`](Reference-manual.md#files), or @@ -246,7 +246,7 @@ one XML file. 
* `annotations`: *(Added 0.43.0)* list of lists of 3 strings for the annotation for `'ELEMENT', 'KEY', 'VALUE'` * `docbook`: *(Added 0.43.0)* prefix to generate `'PREFIX'-NAME.xml` docbooks * `build_by_default`: causes, when set to true, to have this target be - built by default, that is, when invoking plain `ninja`, the default + built by default, that is, when invoking plain `meson compile`, the default value is true for all built target types * `install_dir`: (*Added 0.46.0*) location to install the header or bundle depending on previous options diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index f51075e..2cc4f4f 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -227,8 +227,8 @@ in the `meson.build`. ## Tests -Compilation and unit tests are done as usual by running the `ninja` and -`ninja test` commands. A JSON formatted result log can be found in +Compilation and unit tests are done as usual by running the `meson compile` and +`meson test` commands. A JSON formatted result log can be found in `workspace/project/builddir/meson-logs/testlog.json`. When these tests fail, the user probably wants to run the failing test in a diff --git a/docs/markdown/IndepthTutorial.md b/docs/markdown/IndepthTutorial.md index dd93f82..d2e2662 100644 --- a/docs/markdown/IndepthTutorial.md +++ b/docs/markdown/IndepthTutorial.md @@ -79,12 +79,12 @@ With these four files we are done. To configure, build and run the test suite, w ```console $ meson builddir && cd builddir -$ ninja -$ ninja test +$ meson compile +$ meson test ``` To then install the project you only need one command. ```console -$ ninja install +$ meson install ``` diff --git a/docs/markdown/Installing.md b/docs/markdown/Installing.md index 5abfdd4..9dc2ad4 100644 --- a/docs/markdown/Installing.md +++ b/docs/markdown/Installing.md @@ -4,6 +4,18 @@ short-description: Installing targets # Installing +Invoked via the [following command](Commands.md#install) *(available since 0.47.0)*: + +```sh +meson install +``` + +or alternatively (on older meson versions with `ninja` backend): + +```sh +ninja install +``` + By default Meson will not install anything. Build targets can be installed by tagging them as installable in the definition. @@ -97,15 +109,13 @@ packages. This is done with the `DESTDIR` environment variable and it is used just like with other build systems: ```console -$ DESTDIR=/path/to/staging/area ninja install +$ DESTDIR=/path/to/staging/area meson install ``` ## Custom install behaviour -The default install target (executed via, e.g., `ninja install`) does -installing with reasonable default options. More control over the -install behaviour can be achieved with the `meson install` command, -that has been available since 0.47.0. +Installation behaviour can be further customized using +additional arguments. For example, if you wish to install the current setup without rebuilding the code (which the default install target always does) and diff --git a/docs/markdown/Meson-sample.md b/docs/markdown/Meson-sample.md index 6f26f36..f98e022 100644 --- a/docs/markdown/Meson-sample.md +++ b/docs/markdown/Meson-sample.md @@ -50,7 +50,7 @@ exe = executable('myexe', src) test('simple test', exe) ``` -Here we create a unit test called *simple test*, and which uses the built executable. When the tests are run with the `ninja test` command, the built executable is run. If it returns zero, the test passes. 
A non-zero return value indicates an error, which Meson will then report to the user. +Here we create a unit test called *simple test*, and which uses the built executable. When the tests are run with the `meson test` command, the built executable is run. If it returns zero, the test passes. A non-zero return value indicates an error, which Meson will then report to the user. A note to Visual Studio users ----- diff --git a/docs/markdown/Project-templates.md b/docs/markdown/Project-templates.md index 832cd1d..7ded318 100644 --- a/docs/markdown/Project-templates.md +++ b/docs/markdown/Project-templates.md @@ -16,8 +16,8 @@ $ meson init --language=c --name=myproject --version=0.1 ``` This would create the build definitions for a helloworld type -project. The result can be compiled as usual. For example compiling it -with Ninja could be done like this: +project. The result can be compiled as usual. For example it +could be done like this: ``` $ meson setup builddir diff --git a/docs/markdown/Qt5-module.md b/docs/markdown/Qt5-module.md index f1c2f6c..0d9a6b6 100644 --- a/docs/markdown/Qt5-module.md +++ b/docs/markdown/Qt5-module.md @@ -21,7 +21,7 @@ This method generates the necessary targets to build translation files with lrel - `ts_files`, the list of input translation files produced by Qt's lupdate tool. - `install` when true, this target is installed during the install step (optional). - `install_dir` directory to install to (optional). - - `build_by_default` when set to true, to have this target be built by default, that is, when invoking plain ninja; the default value is false (optional). + - `build_by_default` when set to true, to have this target be built by default, that is, when invoking `meson compile`; the default value is false (optional). ## has_tools diff --git a/docs/markdown/Quick-guide.md b/docs/markdown/Quick-guide.md index 0bed683..74636e5 100644 --- a/docs/markdown/Quick-guide.md +++ b/docs/markdown/Quick-guide.md @@ -93,8 +93,8 @@ are working on. The steps to take are very simple. ```console $ cd /path/to/source/root $ meson builddir && cd builddir -$ ninja -$ ninja test +$ meson compile +$ meson test ``` The only thing to note is that you need to create a separate build @@ -104,14 +104,14 @@ directory. This allows you to have multiple build trees with different configurations at the same time. This way generated files are not added into revision control by accident. -To recompile after code changes, just type `ninja`. The build command +To recompile after code changes, just type `meson compile`. The build command is always the same. You can do arbitrary changes to source code and build system files and Meson will detect those and will do the right thing. If you want to build optimized binaries, just use the argument `--buildtype=debugoptimized` when running Meson. It is recommended that you keep one build directory for unoptimized builds and one for optimized ones. To compile any given configuration, just go into the -corresponding build directory and run `ninja`. +corresponding build directory and run `meson compile`. Meson will automatically add compiler flags to enable debug information and compiler warnings (i.e. `-g` and `-Wall`). This means @@ -128,9 +128,9 @@ build and install Meson projects are the following. ```console $ cd /path/to/source/root $ meson --prefix /usr --buildtype=plain builddir -Dc_args=... -Dcpp_args=... -Dc_link_args=... -Dcpp_link_args=... 
-$ ninja -v -C builddir -$ ninja -C builddir test -$ DESTDIR=/path/to/staging/root ninja -C builddir install +$ meson compile -C builddir +$ meson test -C builddir +$ DESTDIR=/path/to/staging/root meson install -C builddir ``` The command line switch `--buildtype=plain` tells Meson not to add its @@ -139,7 +139,7 @@ on used flags. This is very similar to other build systems. The only difference is that the `DESTDIR` variable is passed as an environment variable -rather than as an argument to `ninja install`. +rather than as an argument to `meson install`. As distro builds happen always from scratch, you might consider enabling [unity builds](Unity-builds.md) on your packages because they diff --git a/docs/markdown/Running-Meson.md b/docs/markdown/Running-Meson.md index 910513c..326ecb9 100644 --- a/docs/markdown/Running-Meson.md +++ b/docs/markdown/Running-Meson.md @@ -9,13 +9,12 @@ from the source tree with the command `/path/to/source/meson.py`. Meson may also be installed in which case the command is simply `meson`. In this manual we only use the latter format for simplicity. -Additionally, the invocation can pass options to meson. The list of options is -documented [here](Builtin-options.md). - At the time of writing only a command line version of Meson is available. This means that Meson must be invoked using the terminal. If you wish to use the MSVC compiler, you need to run Meson under "Visual Studio command prompt". +All available meson commands are listed on the [commands reference page](Commands.md). + ## Configuring the build directory Let us assume that we have a source tree that has a Meson build system. This @@ -41,6 +40,9 @@ build backend in the build directory. By default Meson generates a *debug build*, which turns on basic warnings and debug information and disables compiler optimizations. +Additionally, the invocation can pass options to meson. The list of options is +documented [here](Builtin-options.md). + You can specify a different type of build with the `--buildtype` command line argument. It can have one of the following values. @@ -83,7 +85,7 @@ during configuration time. As an example, here is how you would use Meson to generate a Visual studio solution. ```sh -meson setup --backend=vs2010 +meson setup --backend=vs ``` You can then open the generated solution with Visual Studio and compile it in @@ -105,9 +107,18 @@ linker arguments needed. ## Building from the source -If you are not using an IDE, Meson uses the [Ninja build -system](https://ninja-build.org/) to actually build the code. To start the -build, simply type the following command. +To start the build, simply type the following command. + +```sh +meson compile -C builddir +``` + +See [`meson compile` description](Commands.md#compile) for more info. + +### Building directly with ninja + +By default Meson uses the [Ninja build system](https://ninja-build.org/) to +actually build the code. To start the build, simply type the following command. ```sh ninja -C builddir @@ -133,20 +144,29 @@ Meson provides native support for running tests. The command to do that is simple. ```sh -ninja -C builddir test +meson test -C builddir ``` +See [`meson test` description](Commands.md#test) for more info. + Meson does not force the use of any particular testing framework. You are free to use GTest, Boost Test, Check or even custom executables. +Note: it can be also invoked directly with ninja with the following command: +```sh +ninja -C builddir test +``` + ## Installing Installing the built software is just as simple. 
```sh -ninja -C builddir install +meson install -C builddir ``` +See [`meson install` description](Commands.md#install) for more info. + Note that Meson will only install build targets explicitly tagged as installable, as detailed in the [installing targets documentation](Installing.md). @@ -157,7 +177,12 @@ Meson also supports the `DESTDIR` variable used in e.g. building packages. It is used like this: ```sh -DESTDIR=/path/to/staging ninja -C builddir install +DESTDIR=/path/to/staging meson install -C builddir +``` + +Note: it can be also invoked directly with ninja with the following command: +```sh +ninja -C builddir install ``` ## Command line help diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md index 6b248b5..be6888d 100644 --- a/docs/markdown/Tutorial.md +++ b/docs/markdown/Tutorial.md @@ -124,12 +124,12 @@ or the like. Instead we just type the exact same command as if we were rebuilding our code without any build system changes. ``` -$ ninja +$ meson compile ``` Once you have set up your build directory the first time, you don't ever need to run the `meson` command again. You always just run -`ninja`. Meson will automatically detect when you have done changes to +`meson compile`. Meson will automatically detect when you have done changes to build definitions and will take care of everything so users don't have to care. In this case the following output is produced. diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index 06664db6..4f51d35 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -75,7 +75,7 @@ test machine. You can override this with the environment variable `MESON_TESTTHREADS` like this. ```console -$ MESON_TESTTHREADS=5 ninja test +$ MESON_TESTTHREADS=5 meson test ``` ## Priorities @@ -122,8 +122,7 @@ The goal of the meson test tool is to provide a simple way to run tests in a variety of different ways. The tool is designed to be run in the build directory. -The simplest thing to do is just to run all tests, which is equivalent to -running `ninja test`. +The simplest thing to do is just to run all tests. ```console $ meson test diff --git a/docs/markdown/Using-multiple-build-directories.md b/docs/markdown/Using-multiple-build-directories.md index 2455640..ab6cf3c 100644 --- a/docs/markdown/Using-multiple-build-directories.md +++ b/docs/markdown/Using-multiple-build-directories.md @@ -32,9 +32,9 @@ You can add cross builds, too. As an example, let's set up a Linux -> Windows cr mkdir buildwine meson --cross-file=mingw-cross.txt buildwine -The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `ninja test`. +The cross compilation file sets up Wine so that not only can you compile your application, you can also run the unit test suite just by issuing the command `meson test`. -To compile any of these build types, just cd into the corresponding build directory and run `ninja` or instruct your IDE to do the same. Note that once you have set up your build directory once, you can just run Ninja and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). 
This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories. +To compile any of these build types, just cd into the corresponding build directory and run `meson compile` or instruct your IDE to do the same. Note that once you have set up your build directory once, you can just run Ninja and Meson will ensure that the resulting build is fully up to date according to the source. Even if you have not touched one of the directories in weeks and have done major changes to your build configuration, Meson will detect this and bring the build directory up to date (or print an error if it can't do that). This allows you to do most of your work in the default directory and use the others every now and then without having to babysit your build directories. ## Specialized uses diff --git a/docs/markdown/Vs-External.md b/docs/markdown/Vs-External.md index add089e..ab3d191 100644 --- a/docs/markdown/Vs-External.md +++ b/docs/markdown/Vs-External.md @@ -23,9 +23,9 @@ as follows: | entry | value | | ----- | ----- | -|build | `ninja -C $(Configuration)` | -|clean | `ninja -C $(Configuration) clean` | -|rebuild| `ninja -C $(Configuration) clean all| +|build | `meson compile -C $(Configuration)` | +|clean | `meson compile -C $(Configuration) --clean` | +|rebuild| `meson compile -C $(Configuration) --clean && meson compile -C $(Configuration)` | |Output | `$(Configuration)\name_of_your_executable.exe| diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index 84546b7..b48aeab 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -148,8 +148,8 @@ $ meson -Db_coverage=true Then issue the following commands. ```console -$ ninja -$ ninja test +$ meson compile +$ meson test $ ninja coverage-html (or coverage-xml) ``` @@ -209,8 +209,8 @@ operation. First we set up the project with profile measurements enabled and compile it. ```console -$ meson -Db_pgo=generate -$ ninja -C builddir +$ meson setup -Db_pgo=generate +$ meson compile -C builddir ``` Then we need to run the program with some representative input. This @@ -221,7 +221,7 @@ information and rebuild. ```console $ meson configure -Db_pgo=use -$ ninja +$ meson compile ``` After these steps the resulting binary is fully optimized. -- cgit v1.1 From 189d3b051393271a938e78e159da1ee6476a34c9 Mon Sep 17 00:00:00 2001 From: Cyril Richard Date: Fri, 19 Jun 2020 16:32:18 +0200 Subject: Update Users.md [skip ci] Update Users.md with the add of Siril (www.siril.org) that is migrating from autotools to meson. 
--- docs/markdown/Users.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index 34e8e71..49d30a4 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -117,6 +117,7 @@ format files - [RxDock](https://gitlab.com/rxdock/rxdock), a protein-ligand docking software designed for high throughput virtual screening (fork of rDock) - [scrcpy](https://github.com/Genymobile/scrcpy), a cross platform application that provides display and control of Android devices connected on USB or over TCP/IP - [Sequeler](https://github.com/Alecaddd/sequeler), a friendly SQL client for Linux, built with Vala and Gtk + - [Siril](https://gitlab.com/free-astro/siril), an image processing software for amateur astronomy - [SSHFS](https://github.com/libfuse/sshfs), allows you to mount a remote filesystem using SFTP - [sway](https://github.com/swaywm/sway), i3-compatible Wayland compositor - [Sysprof](https://git.gnome.org/browse/sysprof), a profiling tool -- cgit v1.1 From 348b0ef671223dec2560adaec5fb7328bca61b25 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 19 Jun 2020 10:54:18 -0700 Subject: docs: Alphabetically sort the cpu_family reference table It's close, but not quite. This should make it easier to read --- docs/markdown/Reference-tables.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index 81ce921..560630c 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -82,14 +82,16 @@ set in the cross file. | arc | 32 bit ARC processor | | arm | 32 bit ARM processor | | avr | Atmel AVR processor | -| e2k | MCST Elbrus processor | | c2000 | 32 bit C2000 processor | +| dspic | 16 bit Microchip dsPIC | +| e2k | MCST Elbrus processor | | ia64 | Itanium processor | | m68k | Motorola 68000 processor | | microblaze | MicroBlaze processor | | mips | 32 bit MIPS processor | | mips64 | 64 bit MIPS processor | | parisc | HP PA-RISC processor | +| pic24 | 16 bit Microchip PIC24 | | ppc | 32 bit PPC processors | | ppc64 | 64 bit PPC processors | | riscv32 | 32 bit RISC-V Open ISA | @@ -102,8 +104,6 @@ set in the cross file. | sparc64 | SPARC v9 processor | | wasm32 | 32 bit Webassembly | | wasm64 | 64 bit Webassembly | -| pic24 | 16 bit Microchip PIC24 | -| dspic | 16 bit Microchip dsPIC | | x86 | 32 bit x86 processor | | x86_64 | 64 bit x86 processor | -- cgit v1.1 From b384f82b9a69d488f660dca37d14abed1f4c6500 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 19 Jun 2020 10:56:09 -0700 Subject: envconfig: Add support SuperH SH-4 Fixes: #7358 --- docs/markdown/Reference-tables.md | 1 + mesonbuild/envconfig.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index 560630c..48f43f9 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -100,6 +100,7 @@ set in the cross file. 
| rx | Renesas RX 32 bit MCU | | s390 | IBM zSystem s390 | | s390x | IBM zSystem s390x | +| sh4 | SuperH SH-4 | | sparc | 32 bit SPARC | | sparc64 | SPARC v9 processor | | wasm32 | 32 bit Webassembly | diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index b0dde65..10464a2 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -42,6 +42,7 @@ known_cpu_families = ( 'arm', 'avr', 'c2000', + 'dspic', 'e2k', 'ia64', 'm68k', @@ -49,6 +50,7 @@ known_cpu_families = ( 'mips', 'mips64', 'parisc', + 'pic24', 'ppc', 'ppc64', 'riscv32', @@ -57,10 +59,9 @@ known_cpu_families = ( 'rx', 's390', 's390x', + 'sh4', 'sparc', 'sparc64', - 'pic24', - 'dspic', 'wasm32', 'wasm64', 'x86', -- cgit v1.1 From 52a36a552139047f8cffe76452bcdeb06ae74b93 Mon Sep 17 00:00:00 2001 From: Michael Brockus <55331536+michaelbadcrumble@users.noreply.github.com> Date: Sun, 21 Jun 2020 12:34:12 -0700 Subject: update meson init info message. [skip ci] --- mesonbuild/minit.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py index bdbe69b..d0aff49 100644 --- a/mesonbuild/minit.py +++ b/mesonbuild/minit.py @@ -44,8 +44,8 @@ class DEFAULT_TYPES(Enum): INFO_MESSAGE = '''Sample project created. To build it run the following commands: -meson builddir -ninja -C builddir +meson setup builddir +meson compile -C builddir ''' -- cgit v1.1 From 3f1108c9239abcf51dbccdd1810b0c6f63435a2e Mon Sep 17 00:00:00 2001 From: Michael Brockus <55331536+michaelbadcrumble@users.noreply.github.com> Date: Sun, 21 Jun 2020 12:35:33 -0700 Subject: Update Creating-releases.md [skip ci] --- docs/markdown/Creating-releases.md | 55 ++++++++++++++++++++++++++------------ 1 file changed, 38 insertions(+), 17 deletions(-) diff --git a/docs/markdown/Creating-releases.md b/docs/markdown/Creating-releases.md index efaa85a..040fb53 100644 --- a/docs/markdown/Creating-releases.md +++ b/docs/markdown/Creating-releases.md @@ -5,9 +5,10 @@ short-description: Creating releases # Creating releases In addition to development, almost all projects provide periodical -source releases. These are standalone packages (usually either in tar -or zip format) of the source code. They do not contain any revision -control metadata, only the source code. +source releases. These are standalone packages (usually either in +tar or zip format) of the source code. They do not contain any +revision control metadata, only the source code. Meson provides +a simple way of generating these, with the `meson dist` command. Meson provides a simple way of generating these. It consists of a single command *(available since 0.52.0)*: @@ -23,17 +24,37 @@ ninja dist ``` This creates a file called `projectname-version.tar.xz` in the build -tree subdirectory `meson-dist`. This archive contains the full -contents of the latest commit in revision control including all the -submodules (recursively). All revision control metadata is removed. -Meson then takes -this archive and tests that it works by doing a full compile + test + -install cycle. If all these pass, Meson will then create a SHA-256 -checksum file next to the archive. - -**Note**: Meson behaviour is different from Autotools. The Autotools -"dist" target packages up the current source tree. Meson packages -the latest revision control commit. The reason for this is that it -prevents developers from doing accidental releases where the -distributed archive does not match any commit in revision control -(especially the one tagged for the release). 
+tree subdirectory `meson-dist`. This archive contains the full contents +of the latest commit in revision control including all the submodules +(recursively). All revision control metadata is removed. Meson then +takes this archive and tests that it works by doing a full +`compile` + `test` + `install` cycle. If all these pass, Meson will +then create a `SHA-256` checksum file next to the archive. + + +## Autotools dist VS Meson dist + +Meson behaviour is different from Autotools. The Autotools "dist" +target packages up the current source tree. Meson packages the latest +revision control commit. The reason for this is that it prevents developers +from doing accidental releases where the distributed archive does not match +any commit in revision control (especially the one tagged for the release). + + +## Include subprojects in your release + +The `meson dist` command has `--include-subprojects` command line option. +When enabled, the source tree of all subprojects used by the current build +will also be included in the final tarball. This is useful to distribute +self contained tarball that can be built offline (i.e. `--wrap-mode=nodownload`). + + +## Skip build and test with `--no-tests` + +The `meson dist` command has a `--no-tests` option to skip build and +tests steps of generated packages. It can be used to not waste time +for example when done in CI that already does its own testing. + +So with `--no-tests` you can tell Meson "Do not build and test generated +packages.". + -- cgit v1.1 From 91680324035dd19ccf256cc1efba4ea7352e80c5 Mon Sep 17 00:00:00 2001 From: Michael Brockus <55331536+michaelbadcrumble@users.noreply.github.com> Date: Sun, 21 Jun 2020 13:06:48 -0700 Subject: Update Style-guide.md [skip ci] --- docs/markdown/Style-guide.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/markdown/Style-guide.md b/docs/markdown/Style-guide.md index ee2ecfe..960e60c 100644 --- a/docs/markdown/Style-guide.md +++ b/docs/markdown/Style-guide.md @@ -11,6 +11,12 @@ Meson build files. Always spaces. +## Naming Variable + +The most consistent naming convention is the snake case. Let say you would +like to refer to your executable so something like `my_exe` would work or +just `exe`. + ## Naming options There are two ways of naming project options. As an example for -- cgit v1.1 From 246e5437aaf213401a22361a55c46e70a4eb505d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim-Philipp=20M=C3=BCller?= Date: Sun, 21 Jun 2020 17:31:59 +0100 Subject: compiler: add 'force_align_arg_pointer' function attribute --- docs/markdown/Reference-tables.md | 89 ++++++++++++++------------- mesonbuild/compilers/c_function_attributes.py | 2 + 2 files changed, 48 insertions(+), 43 deletions(-) diff --git a/docs/markdown/Reference-tables.md b/docs/markdown/Reference-tables.md index 48f43f9..3be129f 100644 --- a/docs/markdown/Reference-tables.md +++ b/docs/markdown/Reference-tables.md @@ -198,49 +198,50 @@ These values are supported using the GCC style `__attribute__` annotations, which are supported by GCC, Clang, and other compilers. 
-| Name | -|----------------------| -| alias | -| aligned | -| alloc_size | -| always_inline | -| artificial | -| cold | -| const | -| constructor | -| constructor_priority | -| deprecated | -| destructor | -| error | -| externally_visible | -| fallthrough | -| flatten | -| format | -| format_arg | -| gnu_inline | -| hot | -| ifunc | -| malloc | -| noclone | -| noinline | -| nonnull | -| noreturn | -| nothrow | -| optimize | -| packed | -| pure | -| returns_nonnull | -| unused | -| used | -| visibility* | -| visibility:default† | -| visibility:hidden† | -| visibility:internal† | -| visibility:protected†| -| warning | -| warn_unused_result | -| weak | -| weakreaf | +| Name | +|--------------------------| +| alias | +| aligned | +| alloc_size | +| always_inline | +| artificial | +| cold | +| const | +| constructor | +| constructor_priority | +| deprecated | +| destructor | +| error | +| externally_visible | +| fallthrough | +| flatten | +| format | +| format_arg | +| force_align_arg_pointer³ | +| gnu_inline | +| hot | +| ifunc | +| malloc | +| noclone | +| noinline | +| nonnull | +| noreturn | +| nothrow | +| optimize | +| packed | +| pure | +| returns_nonnull | +| unused | +| used | +| visibility* | +| visibility:default† | +| visibility:hidden† | +| visibility:internal† | +| visibility:protected† | +| warning | +| warn_unused_result | +| weak | +| weakreaf | \* *Changed in 0.52.0* the "visibility" target no longer includes "protected", which is not present in Apple's clang. @@ -248,6 +249,8 @@ which are supported by GCC, Clang, and other compilers. † *New in 0.52.0* These split visibility attributes are preferred to the plain "visibility" as they provide narrower checks. +³ *New in 0.55.0* + ### MSVC __declspec These values are supported using the MSVC style `__declspec` annotation, diff --git a/mesonbuild/compilers/c_function_attributes.py b/mesonbuild/compilers/c_function_attributes.py index e5de485..f31229e 100644 --- a/mesonbuild/compilers/c_function_attributes.py +++ b/mesonbuild/compilers/c_function_attributes.py @@ -56,6 +56,8 @@ C_FUNC_ATTRIBUTES = { 'int foo(const char * p, ...) __attribute__((format(printf, 1, 2)));', 'format_arg': 'char * foo(const char * p) __attribute__((format_arg(1)));', + 'force_align_arg_pointer': + '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }', 'gnu_inline': 'inline __attribute__((gnu_inline)) int foo(void) { return 0; }', 'hot': -- cgit v1.1 From e353b2e8d48c8ffce579342fac9ccfc62127bec8 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Thu, 28 May 2020 13:22:57 -0400 Subject: wrap: Add patch_directory support Copy a tree instead of extracting an archive. 
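As a rough sketch (the names, URL and hash placeholder below are
hypothetical, not part of this change), a wrap file using the new key
could look like:

  [wrap-file]
  directory = foo-1.0
  source_url = https://example.invalid/foo-1.0.tar.xz
  source_filename = foo-1.0.tar.xz
  source_hash = <sha256 of the archive>
  patch_directory = foo-1.0

with the overlay files placed under subprojects/packagefiles/foo-1.0/,
for example to provide meson.build files for an upstream that does not
use Meson. Every file in that directory (and its subdirectories) is
copied into the subproject directory instead of extracting a patch
archive.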
Closes: #7216 --- docs/markdown/Wrap-dependency-system-manual.md | 3 ++ docs/markdown/snippets/wrap_patch.md | 8 ++++++ mesonbuild/wrap/wrap.py | 32 ++++++++++++++-------- .../157 wrap file should not failed/meson.build | 2 ++ .../subprojects/packagefiles/foo-1.0/meson.build | 2 ++ .../subprojects/patchdir.wrap | 9 ++++++ 6 files changed, 44 insertions(+), 12 deletions(-) create mode 100644 test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build create mode 100644 test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index 868263c..f6c658f 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -79,6 +79,9 @@ revision = head - `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0* - `patch_filename` - filename of the downloaded overlay archive - `patch_hash` - sha256 checksum of the downloaded overlay archive +- `patch_directory` - *Since 0.55.0* Overlay directory, alternative to `patch_filename` in the case + files are local instead of a downloaded archive. The directory must be placed in + `subprojects/packagefiles`. - `lead_directory_missing` - for `wrap-file` create the leading directory name. Needed when the source file does not have a leading directory. diff --git a/docs/markdown/snippets/wrap_patch.md b/docs/markdown/snippets/wrap_patch.md index 7d6d9c2..d5a1f5f 100644 --- a/docs/markdown/snippets/wrap_patch.md +++ b/docs/markdown/snippets/wrap_patch.md @@ -4,3 +4,11 @@ It is now possible to use the `patch_filename` and `source_filename` value in a `.wrap` file without `*_url` to specify a local source / patch file. All local files must be located in the `subprojects/packagefiles` directory. The `*_hash` entries are optional with this setup. + +## Local wrap patch directory + +Wrap files can now specify `patch_directory` instead of `patch_filename` in the +case overlay files are local. Every files in that directory, and subdirectories, +will be copied to the subproject directory. This can be used for example to add +`meson.build` files to a project not using Meson build system upstream. +The patch directory must be placed in `subprojects/packagefiles` directory. 
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 9d95bff..44173f7 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -126,9 +126,6 @@ class PackageDefinition: m = 'Missing key {!r} in {}' raise WrapException(m.format(key, self.basename)) - def has_patch(self) -> bool: - return 'patch_filename' in self.values - def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition: fname = os.path.join(subdir_root, packagename + '.wrap') if os.path.isfile(fname): @@ -253,8 +250,7 @@ class Resolver: os.mkdir(self.dirname) extract_dir = self.dirname shutil.unpack_archive(path, extract_dir) - if self.wrap.has_patch(): - self.apply_patch() + self.apply_patch() def get_git(self) -> None: if not GIT: @@ -422,13 +418,25 @@ class Resolver: return path.as_posix() def apply_patch(self) -> None: - path = self.get_file_internal('patch') - try: - shutil.unpack_archive(path, self.subdir_root) - except Exception: - with tempfile.TemporaryDirectory() as workdir: - shutil.unpack_archive(path, workdir) - self.copy_tree(workdir, self.subdir_root) + if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values: + m = 'Wrap file {!r} must not have both "patch_filename" and "patch_directory"' + raise WrapException(m.format(self.wrap.basename)) + if 'patch_filename' in self.wrap.values: + path = self.get_file_internal('patch') + try: + shutil.unpack_archive(path, self.subdir_root) + except Exception: + with tempfile.TemporaryDirectory() as workdir: + shutil.unpack_archive(path, workdir) + self.copy_tree(workdir, self.subdir_root) + elif 'patch_directory' in self.wrap.values: + from ..interpreterbase import FeatureNew + FeatureNew('patch_directory', '0.55.0').use(self.current_subproject) + patch_dir = self.wrap.values['patch_directory'] + src_dir = os.path.join(self.filesdir, patch_dir) + if not os.path.isdir(src_dir): + raise WrapException('patch directory does not exists: {}'.format(patch_dir)) + self.copy_tree(src_dir, self.dirname) def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None: """ diff --git a/test cases/common/157 wrap file should not failed/meson.build b/test cases/common/157 wrap file should not failed/meson.build index cffce2f..48d1068 100644 --- a/test cases/common/157 wrap file should not failed/meson.build +++ b/test cases/common/157 wrap file should not failed/meson.build @@ -12,3 +12,5 @@ libbar = bar.get_variable('libbar') executable('grabprog', files('src/subprojects/prog.c')) executable('grabprog2', files('src/subprojects/foo/prog2.c')) subdir('src') + +subproject('patchdir') diff --git a/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build new file mode 100644 index 0000000..dbaf91f --- /dev/null +++ b/test cases/common/157 wrap file should not failed/subprojects/packagefiles/foo-1.0/meson.build @@ -0,0 +1,2 @@ +project('static lib patchdir', 'c') +libfoo = static_library('foo', 'foo.c') diff --git a/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap b/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap new file mode 100644 index 0000000..1a2134c --- /dev/null +++ b/test cases/common/157 wrap file should not failed/subprojects/patchdir.wrap @@ -0,0 +1,9 @@ +[wrap-file] +directory = foo-1.0-patchdir + +source_url = http://something.invalid +source_filename = foo-1.0.tar.xz +source_hash = 
9ed8f67d75e43d3be161efb6eddf30dd01995a958ca83951ea64234bac8908c1 +lead_directory_missing = true + +patch_directory = foo-1.0 -- cgit v1.1 From d6c6b933c464d2689751da4f78cdd6463a4bc041 Mon Sep 17 00:00:00 2001 From: Igor Raits Date: Thu, 18 Jun 2020 16:45:27 +0200 Subject: mcompile: Add --verbose mode Closes: https://github.com/mesonbuild/meson/issues/7352 Signed-off-by: Igor Raits --- data/macros.meson | 1 + docs/markdown/Commands.md | 2 ++ docs/markdown/Release-notes-for-0.54.0.md | 5 +++++ mesonbuild/mcompile.py | 11 ++++++++++- 4 files changed, 18 insertions(+), 1 deletion(-) diff --git a/data/macros.meson b/data/macros.meson index 8a66c96..cc4953c 100644 --- a/data/macros.meson +++ b/data/macros.meson @@ -28,6 +28,7 @@ %{shrink:%{__meson} compile \ -C %{_vpath_builddir} \ -j %{_smp_build_ncpus} \ + --verbose \ %{nil}} %meson_install \ diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md index 615b302..dbcfee4 100644 --- a/docs/markdown/Commands.md +++ b/docs/markdown/Commands.md @@ -137,6 +137,7 @@ meson configure builddir -Doption=new_value ``` $ meson compile [-h] [-j JOBS] [-l LOAD_AVERAGE] [--clean] [-C BUILDDIR] + [--verbose] ``` Builds a default or a specified target of a configured meson project. @@ -153,6 +154,7 @@ optional arguments: --clean Clean the build directory. -C BUILDDIR The directory containing build files to be built. + --verbose Show more verbose output. ``` #### Examples: diff --git a/docs/markdown/Release-notes-for-0.54.0.md b/docs/markdown/Release-notes-for-0.54.0.md index 3202b57..2f215de 100644 --- a/docs/markdown/Release-notes-for-0.54.0.md +++ b/docs/markdown/Release-notes-for-0.54.0.md @@ -359,3 +359,8 @@ target that has eight source files, Meson will generate two unity files each of which includes four source files. The old behaviour can be replicated by setting `unity_size` to a large value, such as 10000. +## Verbose mode for `meson compile` + +The new option `--verbose` has been added to `meson compile` that will enable +more verbose compilation logs. Note that for VS backend it means that logs will +be less verbose by default (without `--verbose` option). diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index e457623..0bcb56e 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -54,6 +54,8 @@ def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path): cmd.extend(['-j', str(options.jobs)]) if options.load_average > 0: cmd.extend(['-l', str(options.load_average)]) + if options.verbose: + cmd.append('-v') if options.clean: cmd.append('clean') @@ -74,8 +76,10 @@ def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path): if options.load_average: mlog.warning('Msbuild does not have a load-average switch, ignoring.') + if not options.verbose: + cmd.append('/v:minimal') if options.clean: - cmd.extend(['/t:Clean']) + cmd.append('/t:Clean') return cmd @@ -108,6 +112,11 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None: default='.', help='The directory containing build files to be built.' ) + parser.add_argument( + '--verbose', + action='store_true', + help='Show more verbose output.' 
+ ) def run(options: 'argparse.Namespace') -> int: -- cgit v1.1 From b887212bee8ec86086670f31ff7dff3e39cf7bf8 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 11 Jun 2020 11:42:31 -0700 Subject: compilers: Use enum for for deupdlication returns in CompilerArgs --- mesonbuild/compilers/compilers.py | 63 ++++++++++++++++++++++----------------- 1 file changed, 36 insertions(+), 27 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index c80ffb0..cfe0fdf 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -18,6 +18,7 @@ from collections import deque import itertools import typing as T from functools import lru_cache +import enum from ..linkers import ( GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker, @@ -409,6 +410,28 @@ class RunResult: self.stdout = stdout self.stderr = stderr + +class Dedup(enum.Enum): + + """What kind of deduplication can be done to compiler args. + + OVERRIDEN - Whether an argument can be 'overridden' by a later argument. + For example, -DFOO defines FOO and -UFOO undefines FOO. In this case, + we can safely remove the previous occurrence and add a new one. The + same is true for include paths and library paths with -I and -L. + UNIQUE - Arguments that once specified cannot be undone, such as `-c` or + `-pipe`. New instances of these can be completely skipped. + NO_DEDUP - Whether it matters where or how many times on the command-line + a particular argument is present. This can matter for symbol + resolution in static or shared libraries, so we cannot de-dup or + reorder them. + """ + + NO_DEDUP = 0 + UNIQUE = 1 + OVERRIDEN = 2 + + class CompilerArgs(collections.abc.MutableSequence): ''' List-like class that manages a list of compiler arguments. Should be used @@ -486,13 +509,13 @@ class CompilerArgs(collections.abc.MutableSequence): dedup = self._can_dedup(a) if a not in pre_flush_set: pre_flush.append(a) - if dedup == 2: + if dedup is Dedup.OVERRIDEN: pre_flush_set.add(a) for a in reversed(self.post): dedup = self._can_dedup(a) if a not in post_flush_set: post_flush.appendleft(a) - if dedup == 2: + if dedup is Dedup.OVERRIDEN: post_flush_set.add(a) #pre and post will overwrite every element that is in the container @@ -551,29 +574,15 @@ class CompilerArgs(collections.abc.MutableSequence): @classmethod @lru_cache(maxsize=None) - def _can_dedup(cls, arg): - ''' - Returns whether the argument can be safely de-duped. This is dependent - on three things: - - a) Whether an argument can be 'overridden' by a later argument. For - example, -DFOO defines FOO and -UFOO undefines FOO. In this case, we - can safely remove the previous occurrence and add a new one. The same - is true for include paths and library paths with -I and -L. For - these we return `2`. See `dedup2_prefixes` and `dedup2_args`. - b) Arguments that once specified cannot be undone, such as `-c` or - `-pipe`. New instances of these can be completely skipped. For these - we return `1`. See `dedup1_prefixes` and `dedup1_args`. - c) Whether it matters where or how many times on the command-line - a particular argument is present. This can matter for symbol - resolution in static or shared libraries, so we cannot de-dup or - reorder them. For these we return `0`. This is the default. + def _can_dedup(cls, arg: str) -> Dedup: + """Returns whether the argument can be safely de-duped. In addition to these, we handle library arguments specially. 
- With GNU ld, we surround library arguments with -Wl,--start/end-group + With GNU ld, we surround library arguments with -Wl,--start/end-gr -> Dedupoup to recursively search for symbols in the libraries. This is not needed with other linkers. - ''' + """ + # A standalone argument must never be deduplicated because it is # defined by what comes _after_ it. Thus dedupping this: # -D FOO -D BAR @@ -583,7 +592,7 @@ class CompilerArgs(collections.abc.MutableSequence): # FOO -D BAR # both of which are invalid. if arg in cls.dedup2_prefixes: - return 0 + return Dedup.NO_DEDUP if arg.startswith('-L='): # DMD and LDC proxy all linker arguments using -L=; in conjunction # with ld64 on macOS this can lead to command line arguments such @@ -592,17 +601,17 @@ class CompilerArgs(collections.abc.MutableSequence): # spaces and quoting does not work. if we deduplicate these then # one of the -L=0 arguments will be removed and the version # argument will consume the next argument instead. - return 0 + return Dedup.NO_DEDUP if arg in cls.dedup2_args or \ arg.startswith(cls.dedup2_prefixes) or \ arg.endswith(cls.dedup2_suffixes): - return 2 + return Dedup.OVERRIDEN if arg in cls.dedup1_args or \ arg.startswith(cls.dedup1_prefixes) or \ arg.endswith(cls.dedup1_suffixes) or \ re.search(cls.dedup1_regex, arg): - return 1 - return 0 + return Dedup.UNIQUE + return Dedup.NO_DEDUP @classmethod @lru_cache(maxsize=None) @@ -725,7 +734,7 @@ class CompilerArgs(collections.abc.MutableSequence): # previous occurrence of it and adding a new one, or not adding the # new occurrence. dedup = self._can_dedup(arg) - if dedup == 1: + if dedup is Dedup.UNIQUE: # Argument already exists and adding a new instance is useless if arg in self.__container or arg in self.pre or arg in self.post: continue -- cgit v1.1 From 386721f7fd3261ba15004a7f527e9db83e216ace Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 11 Jun 2020 11:46:22 -0700 Subject: compilers: Add missing annotations to CompilerArgs class --- mesonbuild/compilers/compilers.py | 30 ++++++++++++++---------------- 1 file changed, 14 insertions(+), 16 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index cfe0fdf..c34071c 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -491,18 +491,18 @@ class CompilerArgs(collections.abc.MutableSequence): iterable: T.Optional[T.Iterable[str]] = None): self.compiler = compiler self.__container = list(iterable) if iterable is not None else [] # type: T.List[str] - self.pre = deque() - self.post = deque() + self.pre = deque() # type: T.Deque[str] + self.post = deque() # type: T.Deque[str] # Flush the saved pre and post list into the __container list # # This correctly deduplicates the entries after _can_dedup definition # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot. 
- def flush_pre_post(self): - pre_flush = deque() - pre_flush_set = set() - post_flush = deque() - post_flush_set = set() + def flush_pre_post(self) -> None: + pre_flush = deque() # type: T.Deque[str] + pre_flush_set = set() # type: T.Set[str] + post_flush = deque() # type: T.Deque[str] + post_flush_set = set() # type: T.Set[str] #The two lists are here walked from the front to the back, in order to not need removals for deduplication for a in self.pre: @@ -529,9 +529,9 @@ class CompilerArgs(collections.abc.MutableSequence): self.pre.clear() self.post.clear() - def __iter__(self): + def __iter__(self) -> T.Iterator[str]: self.flush_pre_post() - return iter(self.__container); + return iter(self.__container) @T.overload # noqa: F811 def __getitem__(self, index: int) -> str: # noqa: F811 @@ -615,12 +615,10 @@ class CompilerArgs(collections.abc.MutableSequence): @classmethod @lru_cache(maxsize=None) - def _should_prepend(cls, arg): - if arg.startswith(cls.prepend_prefixes): - return True - return False + def _should_prepend(cls, arg: str) -> bool: + return arg.startswith(cls.prepend_prefixes) - def need_to_split_linker_args(self): + def need_to_split_linker_args(self) -> bool: return isinstance(self.compiler, Compiler) and self.compiler.get_language() == 'd' def to_native(self, copy: bool = False) -> T.List[str]: @@ -726,7 +724,7 @@ class CompilerArgs(collections.abc.MutableSequence): Add two CompilerArgs while taking into account overriding of arguments and while preserving the order of arguments as much as possible ''' - tmp_pre = deque() + tmp_pre = deque() # type: T.Deque[str] if not isinstance(args, collections.abc.Iterable): raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) for arg in args: @@ -746,7 +744,7 @@ class CompilerArgs(collections.abc.MutableSequence): #pre and post is going to be merged later before a iter call return self - def __radd__(self, args: T.Iterable[str]): + def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs': self.flush_pre_post() new = CompilerArgs(self.compiler, args) new += self -- cgit v1.1 From 9d0ad66c29fccd2ff72c2b40da02cdb2b03ccba6 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 11 Jun 2020 12:06:29 -0700 Subject: compilers: Split CompilerArgs into a separate module I've also moved this out of the compilers pacakge because we're soon going to need it in linkers, and that creates some serious spagetti --- mesonbuild/arglist.py | 400 +++++++++++++++++++++++++++++++++++ mesonbuild/backend/backends.py | 2 +- mesonbuild/backend/ninjabackend.py | 3 +- mesonbuild/backend/vs2010backend.py | 2 +- mesonbuild/compilers/__init__.py | 2 - mesonbuild/compilers/compilers.py | 372 +------------------------------- mesonbuild/compilers/d.py | 2 +- mesonbuild/compilers/mixins/clike.py | 7 +- run_unittests.py | 10 +- 9 files changed, 416 insertions(+), 384 deletions(-) create mode 100644 mesonbuild/arglist.py diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py new file mode 100644 index 0000000..ac97a41 --- /dev/null +++ b/mesonbuild/arglist.py @@ -0,0 +1,400 @@ +# Copyright 2012-2020 The Meson development team +# Copyright © 2020 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from functools import lru_cache +import collections +import enum +import os +import re +import typing as T + +from . import mesonlib +from .linkers import ( + GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker, +) + +if T.TYPE_CHECKING: + from .linkers import StaticLinker + from .compilers import Compiler + + +UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str] +# execinfo is a compiler lib on FreeBSD and NetBSD +if mesonlib.is_freebsd() or mesonlib.is_netbsd(): + UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo') +SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + +class Dedup(enum.Enum): + + """What kind of deduplication can be done to compiler args. + + OVERRIDEN - Whether an argument can be 'overridden' by a later argument. + For example, -DFOO defines FOO and -UFOO undefines FOO. In this case, + we can safely remove the previous occurrence and add a new one. The + same is true for include paths and library paths with -I and -L. + UNIQUE - Arguments that once specified cannot be undone, such as `-c` or + `-pipe`. New instances of these can be completely skipped. + NO_DEDUP - Whether it matters where or how many times on the command-line + a particular argument is present. This can matter for symbol + resolution in static or shared libraries, so we cannot de-dup or + reorder them. + """ + + NO_DEDUP = 0 + UNIQUE = 1 + OVERRIDEN = 2 + + +class CompilerArgs(collections.abc.MutableSequence): + ''' + List-like class that manages a list of compiler arguments. Should be used + while constructing compiler arguments from various sources. Can be + operated with ordinary lists, so this does not need to be used + everywhere. + + All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc) + and can converted to the native type of each compiler by using the + .to_native() method to which you must pass an instance of the compiler or + the compiler class. + + New arguments added to this class (either with .append(), .extend(), or +=) + are added in a way that ensures that they override previous arguments. + For example: + + >>> a = ['-Lfoo', '-lbar'] + >>> a += ['-Lpho', '-lbaz'] + >>> print(a) + ['-Lpho', '-Lfoo', '-lbar', '-lbaz'] + + Arguments will also be de-duped if they can be de-duped safely. + + Note that because of all this, this class is not commutative and does not + preserve the order of arguments if it is safe to not. For example: + >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror'] + >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar'] + ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror'] + + ''' + # NOTE: currently this class is only for C-like compilers, but it can be + # extended to other languages easily. Just move the following to the + # compiler class and initialize when self.compiler is set. 
+ + # Arg prefixes that override by prepending instead of appending + prepend_prefixes = ('-I', '-L') + # Arg prefixes and args that must be de-duped by returning 2 + dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') + dedup2_suffixes = () + dedup2_args = () + # Arg prefixes and args that must be de-duped by returning 1 + # + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') + # Match a .so of the form path/to/libfoo.so.0.1.0 + # Only UNIX shared libraries require this. Others have a fixed extension. + dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') + # In generate_link() we add external libs without de-dup, but we must + # *always* de-dup these because they're special arguments to the linker + always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) + + def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'], + iterable: T.Optional[T.Iterable[str]] = None): + self.compiler = compiler + self.__container = list(iterable) if iterable is not None else [] # type: T.List[str] + self.pre = collections.deque() # type: T.Deque[str] + self.post = collections.deque() # type: T.Deque[str] + + # Flush the saved pre and post list into the __container list + # + # This correctly deduplicates the entries after _can_dedup definition + # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot. + def flush_pre_post(self) -> None: + pre_flush = collections.deque() # type: T.Deque[str] + pre_flush_set = set() # type: T.Set[str] + post_flush = collections.deque() # type: T.Deque[str] + post_flush_set = set() # type: T.Set[str] + + #The two lists are here walked from the front to the back, in order to not need removals for deduplication + for a in self.pre: + dedup = self._can_dedup(a) + if a not in pre_flush_set: + pre_flush.append(a) + if dedup is Dedup.OVERRIDEN: + pre_flush_set.add(a) + for a in reversed(self.post): + dedup = self._can_dedup(a) + if a not in post_flush_set: + post_flush.appendleft(a) + if dedup is Dedup.OVERRIDEN: + post_flush_set.add(a) + + #pre and post will overwrite every element that is in the container + #only copy over args that are in __container but not in the post flush or pre flush set + + for a in self.__container: + if a not in post_flush_set and a not in pre_flush_set: + pre_flush.append(a) + + self.__container = list(pre_flush) + list(post_flush) + self.pre.clear() + self.post.clear() + + def __iter__(self) -> T.Iterator[str]: + self.flush_pre_post() + return iter(self.__container) + + @T.overload # noqa: F811 + def __getitem__(self, index: int) -> str: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __getitem__(self, index: slice) -> T.List[str]: # noqa: F811 + pass + + def __getitem__(self, index): # noqa: F811 + self.flush_pre_post() + return self.__container[index] + + @T.overload # noqa: F811 + def __setitem__(self, index: int, value: str) -> None: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __setitem__(self, index: slice, value: T.List[str]) -> None: # noqa: F811 + pass + + def __setitem__(self, index, value) -> None: # noqa: F811 + self.flush_pre_post() + self.__container[index] = value + + def __delitem__(self, index: T.Union[int, slice]) -> None: + 
self.flush_pre_post() + del self.__container[index] + + def __len__(self) -> int: + return len(self.__container) + len(self.pre) + len(self.post) + + def insert(self, index: int, value: str) -> None: + self.flush_pre_post() + self.__container.insert(index, value) + + def copy(self) -> 'CompilerArgs': + self.flush_pre_post() + return CompilerArgs(self.compiler, self.__container.copy()) + + @classmethod + @lru_cache(maxsize=None) + def _can_dedup(cls, arg: str) -> Dedup: + """Returns whether the argument can be safely de-duped. + + In addition to these, we handle library arguments specially. + With GNU ld, we surround library arguments with -Wl,--start/end-gr -> Dedupoup + to recursively search for symbols in the libraries. This is not needed + with other linkers. + """ + + # A standalone argument must never be deduplicated because it is + # defined by what comes _after_ it. Thus dedupping this: + # -D FOO -D BAR + # would yield either + # -D FOO BAR + # or + # FOO -D BAR + # both of which are invalid. + if arg in cls.dedup2_prefixes: + return Dedup.NO_DEDUP + if arg.startswith('-L='): + # DMD and LDC proxy all linker arguments using -L=; in conjunction + # with ld64 on macOS this can lead to command line arguments such + # as: `-L=-compatibility_version -L=0 -L=current_version -L=0`. + # These cannot be combined, ld64 insists they must be passed with + # spaces and quoting does not work. if we deduplicate these then + # one of the -L=0 arguments will be removed and the version + # argument will consume the next argument instead. + return Dedup.NO_DEDUP + if arg in cls.dedup2_args or \ + arg.startswith(cls.dedup2_prefixes) or \ + arg.endswith(cls.dedup2_suffixes): + return Dedup.OVERRIDEN + if arg in cls.dedup1_args or \ + arg.startswith(cls.dedup1_prefixes) or \ + arg.endswith(cls.dedup1_suffixes) or \ + re.search(cls.dedup1_regex, arg): + return Dedup.UNIQUE + return Dedup.NO_DEDUP + + @classmethod + @lru_cache(maxsize=None) + def _should_prepend(cls, arg: str) -> bool: + return arg.startswith(cls.prepend_prefixes) + + def need_to_split_linker_args(self) -> bool: + # XXX: gross + from .compilers import Compiler + return isinstance(self.compiler, Compiler) and self.compiler.get_language() == 'd' + + def to_native(self, copy: bool = False) -> T.List[str]: + # XXX: gross + from .compilers import Compiler + + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. 
+ self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + # To proxy these arguments with D you need to split the + # arguments, thus you get `-L=-soname -L=lib.so` we don't + # want to put the lib in a link -roup + split_linker_args = self.need_to_split_linker_args() + # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which + # all act like (or are) gnu ld + # TODO: this could probably be added to the DynamicLinker instead + if (isinstance(self.compiler, Compiler) and + self.compiler.linker is not None and + isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))): + group_start = -1 + group_end = -1 + is_soname = False + for i, each in enumerate(new): + if is_soname: + is_soname = False + continue + elif split_linker_args and '-soname' in each: + is_soname = True + continue + if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \ + not SOREGEX.match(each): + continue + group_end = i + if group_start < 0: + # First occurrence of a library + group_start = i + if group_start >= 0: + # Last occurrence of a library + new.insert(group_end + 1, '-Wl,--end-group') + new.insert(group_start, '-Wl,--start-group') + # Remove system/default include paths added with -isystem + if hasattr(self.compiler, 'get_default_include_dirs'): + default_dirs = self.compiler.get_default_include_dirs() + bad_idx_list = [] # type: T.List[int] + for i, each in enumerate(new): + # Remove the -isystem and the path if the path is a default path + if (each == '-isystem' and + i < (len(new) - 1) and + new[i + 1] in default_dirs): + bad_idx_list += [i, i + 1] + elif each.startswith('-isystem=') and each[9:] in default_dirs: + bad_idx_list += [i] + elif each.startswith('-isystem') and each[8:] in default_dirs: + bad_idx_list += [i] + for i in reversed(bad_idx_list): + new.pop(i) + return self.compiler.unix_args_to_native(new.__container) + + def append_direct(self, arg: str) -> None: + ''' + Append the specified argument without any reordering or de-dup except + for absolute paths to libraries, etc, which can always be de-duped + safely. 
+ ''' + self.flush_pre_post() + if os.path.isabs(arg): + self.append(arg) + else: + self.__container.append(arg) + + def extend_direct(self, iterable: T.Iterable[str]) -> None: + ''' + Extend using the elements in the specified iterable without any + reordering or de-dup except for absolute paths where the order of + include search directories is not relevant + ''' + self.flush_pre_post() + for elem in iterable: + self.append_direct(elem) + + def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None: + normal_flags = [] + lflags = [] + for i in iterable: + if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')): + lflags.append(i) + else: + normal_flags.append(i) + self.extend(normal_flags) + self.extend_direct(lflags) + + def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = self.copy() + new += args + return new + + def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + ''' + Add two CompilerArgs while taking into account overriding of arguments + and while preserving the order of arguments as much as possible + ''' + tmp_pre = collections.deque() # type: T.Deque[str] + if not isinstance(args, collections.abc.Iterable): + raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) + for arg in args: + # If the argument can be de-duped, do it either by removing the + # previous occurrence of it and adding a new one, or not adding the + # new occurrence. + dedup = self._can_dedup(arg) + if dedup is Dedup.UNIQUE: + # Argument already exists and adding a new instance is useless + if arg in self.__container or arg in self.pre or arg in self.post: + continue + if self._should_prepend(arg): + tmp_pre.appendleft(arg) + else: + self.post.append(arg) + self.pre.extendleft(tmp_pre) + #pre and post is going to be merged later before a iter call + return self + + def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = CompilerArgs(self.compiler, args) + new += self + return new + + def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: + self.flush_pre_post() + # Only allow equality checks against other CompilerArgs and lists instances + if isinstance(other, CompilerArgs): + return self.compiler == other.compiler and self.__container == other.__container + elif isinstance(other, list): + return self.__container == other + return NotImplemented + + def append(self, arg: str) -> None: + self.__iadd__([arg]) + + def extend(self, args: T.Iterable[str]) -> None: + self.__iadd__(args) + + def __repr__(self) -> str: + self.flush_pre_post() + return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 68ae1a7..bca0304 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -28,7 +28,7 @@ from .. import build from .. import dependencies from .. import mesonlib from .. import mlog -from ..compilers import CompilerArgs +from ..arglist import CompilerArgs from ..mesonlib import ( File, MachineChoice, MesonException, OrderedSet, OptionOverrideProxy, classify_unity_sources, unholder diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 252f646..b326e3b 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -30,8 +30,9 @@ from .. import build from .. import mlog from .. import dependencies from .. 
import compilers +from ..arglist import CompilerArgs from ..compilers import ( - Compiler, CompilerArgs, CCompiler, + Compiler, CCompiler, DmdDCompiler, FortranCompiler, PGICCompiler, VisualStudioCsCompiler, diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 87514c6..bd77132 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -26,7 +26,7 @@ from .. import build from .. import dependencies from .. import mlog from .. import compilers -from ..compilers import CompilerArgs +from ..arglist import CompilerArgs from ..interpreter import Interpreter from ..mesonlib import ( MesonException, File, python_command, replace_if_different diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py index af7e519..fd47545 100644 --- a/mesonbuild/compilers/__init__.py +++ b/mesonbuild/compilers/__init__.py @@ -48,7 +48,6 @@ __all__ = [ 'ClangObjCPPCompiler', 'ClangClCCompiler', 'ClangClCPPCompiler', - 'CompilerArgs', 'CPPCompiler', 'DCompiler', 'DmdDCompiler', @@ -123,7 +122,6 @@ from .compilers import ( is_known_suffix, lang_suffixes, sort_clink, - CompilerArgs, ) from .c import ( CCompiler, diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index c34071c..bf66982 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -13,20 +13,14 @@ # limitations under the License. import contextlib, os.path, re, tempfile -import collections.abc -from collections import deque import itertools import typing as T from functools import lru_cache -import enum -from ..linkers import ( - GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker, - StaticLinker, -) from .. import coredata from .. import mlog from .. import mesonlib +from ..linkers import LinkerEnvVarsMixin from ..mesonlib import ( EnvironmentException, MachineChoice, MesonException, Popen_safe, split_args @@ -34,6 +28,7 @@ from ..mesonlib import ( from ..envconfig import ( Properties, get_env_var ) +from ..arglist import CompilerArgs if T.TYPE_CHECKING: from ..coredata import OptionDictType @@ -100,11 +95,6 @@ cflags_mapping = {'c': 'CFLAGS', 'vala': 'VALAFLAGS', 'rust': 'RUSTFLAGS'} -unixy_compiler_internal_libs = ('m', 'c', 'pthread', 'dl', 'rt') -# execinfo is a compiler lib on FreeBSD and NetBSD -if mesonlib.is_freebsd() or mesonlib.is_netbsd(): - unixy_compiler_internal_libs += ('execinfo',) - # All these are only for C-linkable languages; see `clink_langs` above. def sort_clink(lang): @@ -411,364 +401,6 @@ class RunResult: self.stderr = stderr -class Dedup(enum.Enum): - - """What kind of deduplication can be done to compiler args. - - OVERRIDEN - Whether an argument can be 'overridden' by a later argument. - For example, -DFOO defines FOO and -UFOO undefines FOO. In this case, - we can safely remove the previous occurrence and add a new one. The - same is true for include paths and library paths with -I and -L. - UNIQUE - Arguments that once specified cannot be undone, such as `-c` or - `-pipe`. New instances of these can be completely skipped. - NO_DEDUP - Whether it matters where or how many times on the command-line - a particular argument is present. This can matter for symbol - resolution in static or shared libraries, so we cannot de-dup or - reorder them. - """ - - NO_DEDUP = 0 - UNIQUE = 1 - OVERRIDEN = 2 - - -class CompilerArgs(collections.abc.MutableSequence): - ''' - List-like class that manages a list of compiler arguments. 
Should be used - while constructing compiler arguments from various sources. Can be - operated with ordinary lists, so this does not need to be used - everywhere. - - All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc) - and can converted to the native type of each compiler by using the - .to_native() method to which you must pass an instance of the compiler or - the compiler class. - - New arguments added to this class (either with .append(), .extend(), or +=) - are added in a way that ensures that they override previous arguments. - For example: - - >>> a = ['-Lfoo', '-lbar'] - >>> a += ['-Lpho', '-lbaz'] - >>> print(a) - ['-Lpho', '-Lfoo', '-lbar', '-lbaz'] - - Arguments will also be de-duped if they can be de-duped safely. - - Note that because of all this, this class is not commutative and does not - preserve the order of arguments if it is safe to not. For example: - >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror'] - ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror'] - >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar'] - ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror'] - - ''' - # NOTE: currently this class is only for C-like compilers, but it can be - # extended to other languages easily. Just move the following to the - # compiler class and initialize when self.compiler is set. - - # Arg prefixes that override by prepending instead of appending - prepend_prefixes = ('-I', '-L') - # Arg prefixes and args that must be de-duped by returning 2 - dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') - dedup2_suffixes = () - dedup2_args = () - # Arg prefixes and args that must be de-duped by returning 1 - # - # NOTE: not thorough. A list of potential corner cases can be found in - # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 - dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') - dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') - # Match a .so of the form path/to/libfoo.so.0.1.0 - # Only UNIX shared libraries require this. Others have a fixed extension. - dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') - dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') - # In generate_link() we add external libs without de-dup, but we must - # *always* de-dup these because they're special arguments to the linker - always_dedup_args = tuple('-l' + lib for lib in unixy_compiler_internal_libs) - - def __init__(self, compiler: T.Union['Compiler', StaticLinker], - iterable: T.Optional[T.Iterable[str]] = None): - self.compiler = compiler - self.__container = list(iterable) if iterable is not None else [] # type: T.List[str] - self.pre = deque() # type: T.Deque[str] - self.post = deque() # type: T.Deque[str] - - # Flush the saved pre and post list into the __container list - # - # This correctly deduplicates the entries after _can_dedup definition - # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot. 
- def flush_pre_post(self) -> None: - pre_flush = deque() # type: T.Deque[str] - pre_flush_set = set() # type: T.Set[str] - post_flush = deque() # type: T.Deque[str] - post_flush_set = set() # type: T.Set[str] - - #The two lists are here walked from the front to the back, in order to not need removals for deduplication - for a in self.pre: - dedup = self._can_dedup(a) - if a not in pre_flush_set: - pre_flush.append(a) - if dedup is Dedup.OVERRIDEN: - pre_flush_set.add(a) - for a in reversed(self.post): - dedup = self._can_dedup(a) - if a not in post_flush_set: - post_flush.appendleft(a) - if dedup is Dedup.OVERRIDEN: - post_flush_set.add(a) - - #pre and post will overwrite every element that is in the container - #only copy over args that are in __container but not in the post flush or pre flush set - - for a in self.__container: - if a not in post_flush_set and a not in pre_flush_set: - pre_flush.append(a) - - self.__container = list(pre_flush) + list(post_flush) - self.pre.clear() - self.post.clear() - - def __iter__(self) -> T.Iterator[str]: - self.flush_pre_post() - return iter(self.__container) - - @T.overload # noqa: F811 - def __getitem__(self, index: int) -> str: # noqa: F811 - pass - - @T.overload # noqa: F811 - def __getitem__(self, index: slice) -> T.List[str]: # noqa: F811 - pass - - def __getitem__(self, index): # noqa: F811 - self.flush_pre_post() - return self.__container[index] - - @T.overload # noqa: F811 - def __setitem__(self, index: int, value: str) -> None: # noqa: F811 - pass - - @T.overload # noqa: F811 - def __setitem__(self, index: slice, value: T.List[str]) -> None: # noqa: F811 - pass - - def __setitem__(self, index, value) -> None: # noqa: F811 - self.flush_pre_post() - self.__container[index] = value - - def __delitem__(self, index: T.Union[int, slice]) -> None: - self.flush_pre_post() - del self.__container[index] - - def __len__(self) -> int: - return len(self.__container) + len(self.pre) + len(self.post) - - def insert(self, index: int, value: str) -> None: - self.flush_pre_post() - self.__container.insert(index, value) - - def copy(self) -> 'CompilerArgs': - self.flush_pre_post() - return CompilerArgs(self.compiler, self.__container.copy()) - - @classmethod - @lru_cache(maxsize=None) - def _can_dedup(cls, arg: str) -> Dedup: - """Returns whether the argument can be safely de-duped. - - In addition to these, we handle library arguments specially. - With GNU ld, we surround library arguments with -Wl,--start/end-gr -> Dedupoup - to recursively search for symbols in the libraries. This is not needed - with other linkers. - """ - - # A standalone argument must never be deduplicated because it is - # defined by what comes _after_ it. Thus dedupping this: - # -D FOO -D BAR - # would yield either - # -D FOO BAR - # or - # FOO -D BAR - # both of which are invalid. - if arg in cls.dedup2_prefixes: - return Dedup.NO_DEDUP - if arg.startswith('-L='): - # DMD and LDC proxy all linker arguments using -L=; in conjunction - # with ld64 on macOS this can lead to command line arguments such - # as: `-L=-compatibility_version -L=0 -L=current_version -L=0`. - # These cannot be combined, ld64 insists they must be passed with - # spaces and quoting does not work. if we deduplicate these then - # one of the -L=0 arguments will be removed and the version - # argument will consume the next argument instead. 
- return Dedup.NO_DEDUP - if arg in cls.dedup2_args or \ - arg.startswith(cls.dedup2_prefixes) or \ - arg.endswith(cls.dedup2_suffixes): - return Dedup.OVERRIDEN - if arg in cls.dedup1_args or \ - arg.startswith(cls.dedup1_prefixes) or \ - arg.endswith(cls.dedup1_suffixes) or \ - re.search(cls.dedup1_regex, arg): - return Dedup.UNIQUE - return Dedup.NO_DEDUP - - @classmethod - @lru_cache(maxsize=None) - def _should_prepend(cls, arg: str) -> bool: - return arg.startswith(cls.prepend_prefixes) - - def need_to_split_linker_args(self) -> bool: - return isinstance(self.compiler, Compiler) and self.compiler.get_language() == 'd' - - def to_native(self, copy: bool = False) -> T.List[str]: - # Check if we need to add --start/end-group for circular dependencies - # between static libraries, and for recursively searching for symbols - # needed by static libraries that are provided by object files or - # shared libraries. - self.flush_pre_post() - if copy: - new = self.copy() - else: - new = self - # To proxy these arguments with D you need to split the - # arguments, thus you get `-L=-soname -L=lib.so` we don't - # want to put the lib in a link -roup - split_linker_args = self.need_to_split_linker_args() - # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which - # all act like (or are) gnu ld - # TODO: this could probably be added to the DynamicLinker instead - if (isinstance(self.compiler, Compiler) and - self.compiler.linker is not None and - isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))): - group_start = -1 - group_end = -1 - is_soname = False - for i, each in enumerate(new): - if is_soname: - is_soname = False - continue - elif split_linker_args and '-soname' in each: - is_soname = True - continue - if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \ - not soregex.match(each): - continue - group_end = i - if group_start < 0: - # First occurrence of a library - group_start = i - if group_start >= 0: - # Last occurrence of a library - new.insert(group_end + 1, '-Wl,--end-group') - new.insert(group_start, '-Wl,--start-group') - # Remove system/default include paths added with -isystem - if hasattr(self.compiler, 'get_default_include_dirs'): - default_dirs = self.compiler.get_default_include_dirs() - bad_idx_list = [] # type: T.List[int] - for i, each in enumerate(new): - # Remove the -isystem and the path if the path is a default path - if (each == '-isystem' and - i < (len(new) - 1) and - new[i + 1] in default_dirs): - bad_idx_list += [i, i + 1] - elif each.startswith('-isystem=') and each[9:] in default_dirs: - bad_idx_list += [i] - elif each.startswith('-isystem') and each[8:] in default_dirs: - bad_idx_list += [i] - for i in reversed(bad_idx_list): - new.pop(i) - return self.compiler.unix_args_to_native(new.__container) - - def append_direct(self, arg: str) -> None: - ''' - Append the specified argument without any reordering or de-dup except - for absolute paths to libraries, etc, which can always be de-duped - safely. 
- ''' - self.flush_pre_post() - if os.path.isabs(arg): - self.append(arg) - else: - self.__container.append(arg) - - def extend_direct(self, iterable: T.Iterable[str]) -> None: - ''' - Extend using the elements in the specified iterable without any - reordering or de-dup except for absolute paths where the order of - include search directories is not relevant - ''' - self.flush_pre_post() - for elem in iterable: - self.append_direct(elem) - - def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None: - normal_flags = [] - lflags = [] - for i in iterable: - if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')): - lflags.append(i) - else: - normal_flags.append(i) - self.extend(normal_flags) - self.extend_direct(lflags) - - def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs': - self.flush_pre_post() - new = self.copy() - new += args - return new - - def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs': - ''' - Add two CompilerArgs while taking into account overriding of arguments - and while preserving the order of arguments as much as possible - ''' - tmp_pre = deque() # type: T.Deque[str] - if not isinstance(args, collections.abc.Iterable): - raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) - for arg in args: - # If the argument can be de-duped, do it either by removing the - # previous occurrence of it and adding a new one, or not adding the - # new occurrence. - dedup = self._can_dedup(arg) - if dedup is Dedup.UNIQUE: - # Argument already exists and adding a new instance is useless - if arg in self.__container or arg in self.pre or arg in self.post: - continue - if self._should_prepend(arg): - tmp_pre.appendleft(arg) - else: - self.post.append(arg) - self.pre.extendleft(tmp_pre) - #pre and post is going to be merged later before a iter call - return self - - def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs': - self.flush_pre_post() - new = CompilerArgs(self.compiler, args) - new += self - return new - - def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: - self.flush_pre_post() - # Only allow equality checks against other CompilerArgs and lists instances - if isinstance(other, CompilerArgs): - return self.compiler == other.compiler and self.__container == other.__container - elif isinstance(other, list): - return self.__container == other - return NotImplemented - - def append(self, arg: str) -> None: - self.__iadd__([arg]) - - def extend(self, args: T.Iterable[str]) -> None: - self.__iadd__(args) - - def __repr__(self) -> str: - self.flush_pre_post() - return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container) - class Compiler: # Libraries to ignore in find_library() since they are provided by the # compiler or the C library. Currently only used for MSVC. 
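The class removed from compilers.py here is moved, essentially unchanged, to mesonbuild/arglist.py above, so its documented append/prepend and de-duplication semantics stay the same; only the import path changes. As a minimal illustration of that behaviour (a sketch only, assuming a checkout with this patch applied and borrowing the stand-in CCompiler fixture that run_unittests.py itself uses), the example from the class docstring still holds from the new location:

    from unittest import mock

    from mesonbuild.arglist import CompilerArgs      # new home of the class
    from mesonbuild.compilers import CCompiler
    from mesonbuild.mesonlib import MachineChoice

    # Stand-in compiler, built the way the project's own unit tests build one.
    cc = CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())

    args = CompilerArgs(cc, ['-Lfoo', '-lbar'])
    args += ['-Lpho', '-lbaz']    # -L search paths prepend, libraries append and de-dup
    assert args == ['-Lpho', '-Lfoo', '-lbar', '-lbaz']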
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index 777fa19..924ac90 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -19,13 +19,13 @@ from ..mesonlib import ( EnvironmentException, MachineChoice, version_compare, ) +from ..arglist import CompilerArgs from .compilers import ( d_dmd_buildtype_args, d_gdc_buildtype_args, d_ldc_buildtype_args, clike_debug_args, Compiler, - CompilerArgs, ) from .mixins.gnu import GnuCompiler diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index b088fde..b32ac29 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -29,9 +29,10 @@ import subprocess import typing as T from pathlib import Path +from ... import arglist from ... import mesonlib -from ...mesonlib import LibType from ... import mlog +from ...mesonlib import LibType from .. import compilers from .visualstudio import VisualStudioLikeCompiler @@ -48,7 +49,7 @@ class CLikeCompiler: program_dirs_cache = {} find_library_cache = {} find_framework_cache = {} - internal_libs = compilers.unixy_compiler_internal_libs + internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS def __init__(self, is_cross: bool, exe_wrapper: T.Optional[str] = None): # If a child ObjC or CPP class has already set it, don't set it ourselves @@ -338,7 +339,7 @@ class CLikeCompiler: elif not isinstance(dependencies, list): dependencies = [dependencies] # Collect compiler arguments - cargs = compilers.CompilerArgs(self) + cargs = arglist.CompilerArgs(self) largs = [] for d in dependencies: # Add compile flags needed by dependencies diff --git a/run_unittests.py b/run_unittests.py index b663e83..b1ad965 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -358,7 +358,7 @@ class InternalTests(unittest.TestCase): stat.S_IRGRP | stat.S_IXGRP) def test_compiler_args_class_none_flush(self): - cargsfunc = mesonbuild.compilers.CompilerArgs + cargsfunc = mesonbuild.arglist.CompilerArgs cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock()) a = cargsfunc(cc, ['-I.']) #first we are checking if the tree construction deduplicates the correct -I argument @@ -377,7 +377,7 @@ class InternalTests(unittest.TestCase): def test_compiler_args_class(self): - cargsfunc = mesonbuild.compilers.CompilerArgs + cargsfunc = mesonbuild.arglist.CompilerArgs cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock()) # Test that empty initialization works a = cargsfunc(cc) @@ -458,7 +458,7 @@ class InternalTests(unittest.TestCase): self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a']) def test_compiler_args_class_gnuld(self): - cargsfunc = mesonbuild.compilers.CompilerArgs + cargsfunc = mesonbuild.arglist.CompilerArgs ## Test --start/end-group linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', []) gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) @@ -487,7 +487,7 @@ class InternalTests(unittest.TestCase): self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group']) def test_compiler_args_remove_system(self): - cargsfunc = mesonbuild.compilers.CompilerArgs + cargsfunc = mesonbuild.arglist.CompilerArgs ## Test --start/end-group linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', []) gcc = 
mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) @@ -4764,7 +4764,7 @@ recommended as it is not supported on some platforms''') out = re.sub(r'(^ +| +$)', '', out, flags=re.MULTILINE) # strip lines out = re.sub(r'(^\n)', '', out, flags=re.MULTILINE) # remove empty lines return out - + def clean_dir_arguments(text): # Remove platform specific defaults args = [ -- cgit v1.1 From 93c3ec7e2dd6d425baff5fd80e5a46c88d152cb0 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 11 Jun 2020 12:44:53 -0700 Subject: compilers: Return CompilerArgs from compiler instance Since the CompileArgs class already needs to know about the compiler, and we really need at least per-lanaguage if not per-compiler CompilerArgs classes, let's get the CompilerArgs instance from the compiler using a method. --- mesonbuild/arglist.py | 43 ++++++++++++++---------------------- mesonbuild/backend/backends.py | 3 +-- mesonbuild/backend/ninjabackend.py | 10 ++++----- mesonbuild/backend/vs2010backend.py | 9 ++++---- mesonbuild/compilers/compilers.py | 9 ++++++-- mesonbuild/compilers/d.py | 3 +-- mesonbuild/compilers/mixins/clike.py | 18 ++++++++++++++- mesonbuild/linkers.py | 4 ++++ run_unittests.py | 20 +++++++---------- 9 files changed, 63 insertions(+), 56 deletions(-) diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py index ac97a41..dcc670b 100644 --- a/mesonbuild/arglist.py +++ b/mesonbuild/arglist.py @@ -21,9 +21,6 @@ import re import typing as T from . import mesonlib -from .linkers import ( - GnuLikeDynamicLinkerMixin, LinkerEnvVarsMixin, SolarisDynamicLinker, -) if T.TYPE_CHECKING: from .linkers import StaticLinker @@ -36,6 +33,7 @@ if mesonlib.is_freebsd() or mesonlib.is_netbsd(): UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo') SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + class Dedup(enum.Enum): """What kind of deduplication can be done to compiler args. @@ -88,29 +86,28 @@ class CompilerArgs(collections.abc.MutableSequence): ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror'] ''' - # NOTE: currently this class is only for C-like compilers, but it can be - # extended to other languages easily. Just move the following to the - # compiler class and initialize when self.compiler is set. - # Arg prefixes that override by prepending instead of appending - prepend_prefixes = ('-I', '-L') + prepend_prefixes = () # type: T.Tuple[str, ...] + # Arg prefixes and args that must be de-duped by returning 2 - dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') - dedup2_suffixes = () - dedup2_args = () + dedup2_prefixes = () # type: T.Tuple[str, ...] + dedup2_suffixes = () # type: T.Tuple[str, ...] + dedup2_args = () # type: T.Tuple[str, ...] + # Arg prefixes and args that must be de-duped by returning 1 # # NOTE: not thorough. A list of potential corner cases can be found in # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 - dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') - dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') + dedup1_prefixes = () # type: T.Tuple[str, ...] + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...] # Match a .so of the form path/to/libfoo.so.0.1.0 # Only UNIX shared libraries require this. Others have a fixed extension. dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') - dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') + dedup1_args = () # type: T.Tuple[str, ...] 
# In generate_link() we add external libs without de-dup, but we must # *always* de-dup these because they're special arguments to the linker - always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) + # TODO: these should probably move too + always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type : T.Tuple[str, ...] def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'], iterable: T.Optional[T.Iterable[str]] = None): @@ -195,7 +192,7 @@ class CompilerArgs(collections.abc.MutableSequence): def copy(self) -> 'CompilerArgs': self.flush_pre_post() - return CompilerArgs(self.compiler, self.__container.copy()) + return type(self)(self.compiler, self.__container.copy()) @classmethod @lru_cache(maxsize=None) @@ -218,15 +215,6 @@ class CompilerArgs(collections.abc.MutableSequence): # both of which are invalid. if arg in cls.dedup2_prefixes: return Dedup.NO_DEDUP - if arg.startswith('-L='): - # DMD and LDC proxy all linker arguments using -L=; in conjunction - # with ld64 on macOS this can lead to command line arguments such - # as: `-L=-compatibility_version -L=0 -L=current_version -L=0`. - # These cannot be combined, ld64 insists they must be passed with - # spaces and quoting does not work. if we deduplicate these then - # one of the -L=0 arguments will be removed and the version - # argument will consume the next argument instead. - return Dedup.NO_DEDUP if arg in cls.dedup2_args or \ arg.startswith(cls.dedup2_prefixes) or \ arg.endswith(cls.dedup2_suffixes): @@ -251,6 +239,7 @@ class CompilerArgs(collections.abc.MutableSequence): def to_native(self, copy: bool = False) -> T.List[str]: # XXX: gross from .compilers import Compiler + from .linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker # Check if we need to add --start/end-group for circular dependencies # between static libraries, and for recursively searching for symbols @@ -376,7 +365,7 @@ class CompilerArgs(collections.abc.MutableSequence): def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs': self.flush_pre_post() - new = CompilerArgs(self.compiler, args) + new = type(self)(self.compiler, args) new += self return new @@ -397,4 +386,4 @@ class CompilerArgs(collections.abc.MutableSequence): def __repr__(self) -> str: self.flush_pre_post() - return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self.__container) + return '{}({!r}, {!r})'.format(self.__name__, self.compiler, self.__container) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index bca0304..cfd3a39 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -28,7 +28,6 @@ from .. import build from .. import dependencies from .. import mesonlib from .. import mlog -from ..arglist import CompilerArgs from ..mesonlib import ( File, MachineChoice, MesonException, OrderedSet, OptionOverrideProxy, classify_unity_sources, unholder @@ -626,7 +625,7 @@ class Backend: # Create an empty commands list, and start adding arguments from # various sources in the order in which they must override each other # starting from hard-coded defaults followed by build options and so on. - commands = CompilerArgs(compiler) + commands = compiler.compiler_args() copt_proxy = self.get_compiler_options_for_target(target)[compiler.language] # First, the trivial ones that are impossible to override. 
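With this change, call sites like the backends.py hunk above no longer construct CompilerArgs directly; they ask the compiler for an instance, which lets each compiler family hand back its own subclass. A rough usage sketch of the new entry point (illustrative only, again leaning on the fake-compiler pattern from run_unittests.py):

    from unittest import mock

    from mesonbuild.compilers import CCompiler
    from mesonbuild.mesonlib import MachineChoice

    cc = CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock())

    # For a C-like compiler this returns the CLikeCompilerArgs added later in
    # this patch, so '-I' arguments still de-duplicate and prepend as before.
    args = cc.compiler_args(['-I.', '-I..'])
    args.append('-I..')
    assert args == ['-I..', '-I.']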
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index b326e3b..62bda1a 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -1202,7 +1202,7 @@ int dummy; compiler = target.compilers['cs'] rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list] deps = [] - commands = CompilerArgs(compiler, target.extra_args.get('cs', [])) + commands = compiler.compiler_args(target.extra_args.get('cs', [])) commands += compiler.get_buildtype_args(buildtype) commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target)) commands += compiler.get_debug_args(self.get_option_for_target('debug', target)) @@ -2156,7 +2156,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) def generate_llvm_ir_compile(self, target, src): compiler = get_compiler_for_source(target.compilers.values(), src) - commands = CompilerArgs(compiler) + commands = compiler.compiler_args() # Compiler args for compiling this target commands += compilers.get_base_compile_args(self.environment.coredata.base_options, compiler) @@ -2245,7 +2245,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) base_proxy = self.get_base_options_for_target(target) # Create an empty commands list, and start adding arguments from # various sources in the order in which they must override each other - commands = CompilerArgs(compiler) + commands = compiler.compiler_args() # Start with symbol visibility. commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility) # Add compiler args for compiling this target derived from 'base' build @@ -2325,7 +2325,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) compiler = get_compiler_for_source(target.compilers.values(), src) commands = self._generate_single_compile(target, compiler, is_generated) - commands = CompilerArgs(commands.compiler, commands) + commands = commands.compiler.compiler_args(commands) # Create introspection information if is_generated is False: @@ -2674,7 +2674,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) # # Once all the linker options have been passed, we will start passing # libraries and library paths from internal and external sources. - commands = CompilerArgs(linker) + commands = linker.compiler_args() # First, the trivial ones that are impossible to override. # # Add linker args for linking this target derived from 'base' build diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index bd77132..f282d02 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -26,7 +26,6 @@ from .. import build from .. import dependencies from .. import mlog from .. 
import compilers -from ..arglist import CompilerArgs from ..interpreter import Interpreter from ..mesonlib import ( MesonException, File, python_command, replace_if_different @@ -899,9 +898,9 @@ class Vs2010Backend(backends.Backend): # # file_args is also later split out into defines and include_dirs in # case someone passed those in there - file_args = dict((lang, CompilerArgs(comp)) for lang, comp in target.compilers.items()) - file_defines = dict((lang, []) for lang in target.compilers) - file_inc_dirs = dict((lang, []) for lang in target.compilers) + file_args = {l: c.compiler_args() for l, c in target.compilers.items()} + file_defines = {l: [] for l in target.compilers} + file_inc_dirs = {l: [] for l in target.compilers} # The order in which these compile args are added must match # generate_single_compile() and generate_basic_compiler_args() for l, comp in target.compilers.items(): @@ -1084,7 +1083,7 @@ class Vs2010Backend(backends.Backend): # Linker options link = ET.SubElement(compiles, 'Link') - extra_link_args = CompilerArgs(compiler) + extra_link_args = compiler.compiler_args() # FIXME: Can these buildtype linker args be added as tags in the # vcxproj file (similar to buildtype compiler args) instead of in # AdditionalOptions? diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index bf66982..8ecb972 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import abc import contextlib, os.path, re, tempfile import itertools import typing as T @@ -401,7 +402,7 @@ class RunResult: self.stderr = stderr -class Compiler: +class Compiler(metaclass=abc.ABCMeta): # Libraries to ignore in find_library() since they are provided by the # compiler or the C library. Currently only used for MSVC. 
ignore_libs = () @@ -624,6 +625,10 @@ class Compiler: args += self.get_preprocess_only_args() return args + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs: + """Return an appropriate CompilerArgs instance for this class.""" + return CompilerArgs(self, args) + @contextlib.contextmanager def compile(self, code: str, extra_args: list = None, *, mode: str = 'link', want_output: bool = False, temp_dir: str = None): if extra_args is None: @@ -642,7 +647,7 @@ class Compiler: srcname = code.fname # Construct the compiler command-line - commands = CompilerArgs(self) + commands = self.compiler_args() commands.append(srcname) # Preprocess mode outputs to stdout, so no output args if mode != 'preprocess': diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index 924ac90..32919e4 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -19,7 +19,6 @@ from ..mesonlib import ( EnvironmentException, MachineChoice, version_compare, ) -from ..arglist import CompilerArgs from .compilers import ( d_dmd_buildtype_args, d_gdc_buildtype_args, @@ -582,7 +581,7 @@ class DCompiler(Compiler): elif not isinstance(dependencies, list): dependencies = [dependencies] # Collect compiler arguments - args = CompilerArgs(self) + args = self.compiler_args() for d in dependencies: # Add compile flags needed by dependencies args += d.get_compile_args() diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index b32ac29..455fbe2 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -32,6 +32,7 @@ from pathlib import Path from ... import arglist from ... import mesonlib from ... import mlog +from ...arglist import CompilerArgs from ...mesonlib import LibType from .. import compilers from .visualstudio import VisualStudioLikeCompiler @@ -40,6 +41,18 @@ if T.TYPE_CHECKING: from ...environment import Environment +class CLikeCompilerArgs(CompilerArgs): + prepend_prefixes = ('-I', '-L') + dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') + + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') + + dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') + + class CLikeCompiler: """Shared bits for the C and CPP Compilers.""" @@ -62,6 +75,9 @@ class CLikeCompiler: else: self.exe_wrapper = exe_wrapper.get_command() + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs: + return CLikeCompilerArgs(self, args) + def needs_static_linker(self): return True # When compiling static libraries, so yes. @@ -339,7 +355,7 @@ class CLikeCompiler: elif not isinstance(dependencies, list): dependencies = [dependencies] # Collect compiler arguments - cargs = arglist.CompilerArgs(self) + cargs = self.compiler_args() largs = [] for d in dependencies: # Add compile flags needed by dependencies diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index bb3229d..805bbc7 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -17,6 +17,7 @@ import os import typing as T from . 
import mesonlib +from .arglist import CompilerArgs from .envconfig import get_env_var if T.TYPE_CHECKING: @@ -29,6 +30,9 @@ class StaticLinker: def __init__(self, exelist: T.List[str]): self.exelist = exelist + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs: + return CompilerArgs(self, args) + def can_linker_accept_rsp(self) -> bool: """ Determines whether the linker can accept arguments using the @rsp syntax. diff --git a/run_unittests.py b/run_unittests.py index b1ad965..72ca809 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -358,9 +358,8 @@ class InternalTests(unittest.TestCase): stat.S_IRGRP | stat.S_IXGRP) def test_compiler_args_class_none_flush(self): - cargsfunc = mesonbuild.arglist.CompilerArgs cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock()) - a = cargsfunc(cc, ['-I.']) + a = cc.compiler_args(['-I.']) #first we are checking if the tree construction deduplicates the correct -I argument a += ['-I..'] a += ['-I./tests/'] @@ -377,16 +376,15 @@ class InternalTests(unittest.TestCase): def test_compiler_args_class(self): - cargsfunc = mesonbuild.arglist.CompilerArgs cc = mesonbuild.compilers.CCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock()) # Test that empty initialization works - a = cargsfunc(cc) + a = cc.compiler_args() self.assertEqual(a, []) # Test that list initialization works - a = cargsfunc(cc, ['-I.', '-I..']) + a = cc.compiler_args(['-I.', '-I..']) self.assertEqual(a, ['-I.', '-I..']) # Test that there is no de-dup on initialization - self.assertEqual(cargsfunc(cc, ['-I.', '-I.']), ['-I.', '-I.']) + self.assertEqual(cc.compiler_args(['-I.', '-I.']), ['-I.', '-I.']) ## Test that appending works a.append('-I..') @@ -432,7 +430,7 @@ class InternalTests(unittest.TestCase): self.assertEqual(a, ['-Ibar', '-Ifoo', '-Ibaz', '-I..', '-I.', '-Ldir', '-Lbah', '-Werror', '-O3', '-O2', '-Wall']) ## Test that adding libraries works - l = cargsfunc(cc, ['-Lfoodir', '-lfoo']) + l = cc.compiler_args(['-Lfoodir', '-lfoo']) self.assertEqual(l, ['-Lfoodir', '-lfoo']) # Adding a library and a libpath appends both correctly l += ['-Lbardir', '-lbar'] @@ -442,7 +440,7 @@ class InternalTests(unittest.TestCase): self.assertEqual(l, ['-Lbardir', '-Lfoodir', '-lfoo', '-lbar']) ## Test that 'direct' append and extend works - l = cargsfunc(cc, ['-Lfoodir', '-lfoo']) + l = cc.compiler_args(['-Lfoodir', '-lfoo']) self.assertEqual(l, ['-Lfoodir', '-lfoo']) # Direct-adding a library and a libpath appends both correctly l.extend_direct(['-Lbardir', '-lbar']) @@ -458,14 +456,13 @@ class InternalTests(unittest.TestCase): self.assertEqual(l, ['-Lfoodir', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a']) def test_compiler_args_class_gnuld(self): - cargsfunc = mesonbuild.arglist.CompilerArgs ## Test --start/end-group linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', []) gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) ## Ensure that the fake compiler is never called by overriding the relevant function gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include'] ## Test that 'direct' append and extend works - l = cargsfunc(gcc, ['-Lfoodir', '-lfoo']) + l = gcc.compiler_args(['-Lfoodir', '-lfoo']) self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group']) # Direct-adding a library and a libpath appends both correctly l.extend_direct(['-Lbardir', 
'-lbar']) @@ -487,14 +484,13 @@ class InternalTests(unittest.TestCase): self.assertEqual(l.to_native(copy=True), ['-Lfoo', '-Lfoodir', '-Wl,--start-group', '-lfoo', '-Lbardir', '-lbar', '-lbar', '/libbaz.a', '-Wl,--export-dynamic', '-Wl,-ldl', '-Wl,--end-group']) def test_compiler_args_remove_system(self): - cargsfunc = mesonbuild.arglist.CompilerArgs ## Test --start/end-group linker = mesonbuild.linkers.GnuDynamicLinker([], MachineChoice.HOST, 'fake', '-Wl,', []) gcc = mesonbuild.compilers.GnuCCompiler([], 'fake', False, MachineChoice.HOST, mock.Mock(), linker=linker) ## Ensure that the fake compiler is never called by overriding the relevant function gcc.get_default_include_dirs = lambda: ['/usr/include', '/usr/share/include', '/usr/local/include'] ## Test that 'direct' append and extend works - l = cargsfunc(gcc, ['-Lfoodir', '-lfoo']) + l = gcc.compiler_args(['-Lfoodir', '-lfoo']) self.assertEqual(l.to_native(copy=True), ['-Lfoodir', '-Wl,--start-group', '-lfoo', '-Wl,--end-group']) ## Test that to_native removes all system includes l += ['-isystem/usr/include', '-isystem=/usr/share/include', '-DSOMETHING_IMPORTANT=1', '-isystem', '/usr/local/include'] -- cgit v1.1 From 4f6bd29ac9c697e042a2a808344f1db3efd1d6cb Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 11 Jun 2020 14:05:14 -0700 Subject: arglist: Split the C/C++ specifics parts into a subclass for CLike This means that we don't need work arounds for D-like compilers, as the special c-like hanlding wont be used for D compilers. --- mesonbuild/arglist.py | 93 +++++++----------------------------- mesonbuild/compilers/mixins/clike.py | 56 ++++++++++++++++++++-- 2 files changed, 71 insertions(+), 78 deletions(-) diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py index dcc670b..bb16f38 100644 --- a/mesonbuild/arglist.py +++ b/mesonbuild/arglist.py @@ -31,7 +31,6 @@ UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List # execinfo is a compiler lib on FreeBSD and NetBSD if mesonlib.is_freebsd() or mesonlib.is_netbsd(): UNIXY_COMPILER_INTERNAL_LIBS.append('execinfo') -SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') class Dedup(enum.Enum): @@ -112,11 +111,11 @@ class CompilerArgs(collections.abc.MutableSequence): def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'], iterable: T.Optional[T.Iterable[str]] = None): self.compiler = compiler - self.__container = list(iterable) if iterable is not None else [] # type: T.List[str] + self._container = list(iterable) if iterable is not None else [] # type: T.List[str] self.pre = collections.deque() # type: T.Deque[str] self.post = collections.deque() # type: T.Deque[str] - # Flush the saved pre and post list into the __container list + # Flush the saved pre and post list into the _container list # # This correctly deduplicates the entries after _can_dedup definition # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot. 
@@ -141,19 +140,19 @@ class CompilerArgs(collections.abc.MutableSequence): post_flush_set.add(a) #pre and post will overwrite every element that is in the container - #only copy over args that are in __container but not in the post flush or pre flush set + #only copy over args that are in _container but not in the post flush or pre flush set - for a in self.__container: + for a in self._container: if a not in post_flush_set and a not in pre_flush_set: pre_flush.append(a) - self.__container = list(pre_flush) + list(post_flush) + self._container = list(pre_flush) + list(post_flush) self.pre.clear() self.post.clear() def __iter__(self) -> T.Iterator[str]: self.flush_pre_post() - return iter(self.__container) + return iter(self._container) @T.overload # noqa: F811 def __getitem__(self, index: int) -> str: # noqa: F811 @@ -165,7 +164,7 @@ class CompilerArgs(collections.abc.MutableSequence): def __getitem__(self, index): # noqa: F811 self.flush_pre_post() - return self.__container[index] + return self._container[index] @T.overload # noqa: F811 def __setitem__(self, index: int, value: str) -> None: # noqa: F811 @@ -177,22 +176,22 @@ class CompilerArgs(collections.abc.MutableSequence): def __setitem__(self, index, value) -> None: # noqa: F811 self.flush_pre_post() - self.__container[index] = value + self._container[index] = value def __delitem__(self, index: T.Union[int, slice]) -> None: self.flush_pre_post() - del self.__container[index] + del self._container[index] def __len__(self) -> int: - return len(self.__container) + len(self.pre) + len(self.post) + return len(self._container) + len(self.pre) + len(self.post) def insert(self, index: int, value: str) -> None: self.flush_pre_post() - self.__container.insert(index, value) + self._container.insert(index, value) def copy(self) -> 'CompilerArgs': self.flush_pre_post() - return type(self)(self.compiler, self.__container.copy()) + return type(self)(self.compiler, self._container.copy()) @classmethod @lru_cache(maxsize=None) @@ -231,16 +230,7 @@ class CompilerArgs(collections.abc.MutableSequence): def _should_prepend(cls, arg: str) -> bool: return arg.startswith(cls.prepend_prefixes) - def need_to_split_linker_args(self) -> bool: - # XXX: gross - from .compilers import Compiler - return isinstance(self.compiler, Compiler) and self.compiler.get_language() == 'd' - def to_native(self, copy: bool = False) -> T.List[str]: - # XXX: gross - from .compilers import Compiler - from .linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker - # Check if we need to add --start/end-group for circular dependencies # between static libraries, and for recursively searching for symbols # needed by static libraries that are provided by object files or @@ -250,54 +240,7 @@ class CompilerArgs(collections.abc.MutableSequence): new = self.copy() else: new = self - # To proxy these arguments with D you need to split the - # arguments, thus you get `-L=-soname -L=lib.so` we don't - # want to put the lib in a link -roup - split_linker_args = self.need_to_split_linker_args() - # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which - # all act like (or are) gnu ld - # TODO: this could probably be added to the DynamicLinker instead - if (isinstance(self.compiler, Compiler) and - self.compiler.linker is not None and - isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker))): - group_start = -1 - group_end = -1 - is_soname = False - for i, each in enumerate(new): - if is_soname: - is_soname = False - continue - elif 
split_linker_args and '-soname' in each: - is_soname = True - continue - if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \ - not SOREGEX.match(each): - continue - group_end = i - if group_start < 0: - # First occurrence of a library - group_start = i - if group_start >= 0: - # Last occurrence of a library - new.insert(group_end + 1, '-Wl,--end-group') - new.insert(group_start, '-Wl,--start-group') - # Remove system/default include paths added with -isystem - if hasattr(self.compiler, 'get_default_include_dirs'): - default_dirs = self.compiler.get_default_include_dirs() - bad_idx_list = [] # type: T.List[int] - for i, each in enumerate(new): - # Remove the -isystem and the path if the path is a default path - if (each == '-isystem' and - i < (len(new) - 1) and - new[i + 1] in default_dirs): - bad_idx_list += [i, i + 1] - elif each.startswith('-isystem=') and each[9:] in default_dirs: - bad_idx_list += [i] - elif each.startswith('-isystem') and each[8:] in default_dirs: - bad_idx_list += [i] - for i in reversed(bad_idx_list): - new.pop(i) - return self.compiler.unix_args_to_native(new.__container) + return self.compiler.unix_args_to_native(new._container) def append_direct(self, arg: str) -> None: ''' @@ -309,7 +252,7 @@ class CompilerArgs(collections.abc.MutableSequence): if os.path.isabs(arg): self.append(arg) else: - self.__container.append(arg) + self._container.append(arg) def extend_direct(self, iterable: T.Iterable[str]) -> None: ''' @@ -353,7 +296,7 @@ class CompilerArgs(collections.abc.MutableSequence): dedup = self._can_dedup(arg) if dedup is Dedup.UNIQUE: # Argument already exists and adding a new instance is useless - if arg in self.__container or arg in self.pre or arg in self.post: + if arg in self._container or arg in self.pre or arg in self.post: continue if self._should_prepend(arg): tmp_pre.appendleft(arg) @@ -373,9 +316,9 @@ class CompilerArgs(collections.abc.MutableSequence): self.flush_pre_post() # Only allow equality checks against other CompilerArgs and lists instances if isinstance(other, CompilerArgs): - return self.compiler == other.compiler and self.__container == other.__container + return self.compiler == other.compiler and self._container == other._container elif isinstance(other, list): - return self.__container == other + return self._container == other return NotImplemented def append(self, arg: str) -> None: @@ -386,4 +329,4 @@ class CompilerArgs(collections.abc.MutableSequence): def __repr__(self) -> str: self.flush_pre_post() - return '{}({!r}, {!r})'.format(self.__name__, self.compiler, self.__container) + return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self._container) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 455fbe2..47e97d2 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -32,7 +32,7 @@ from pathlib import Path from ... import arglist from ... import mesonlib from ... import mlog -from ...arglist import CompilerArgs +from ...linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker from ...mesonlib import LibType from .. 
import compilers from .visualstudio import VisualStudioLikeCompiler @@ -40,8 +40,9 @@ from .visualstudio import VisualStudioLikeCompiler if T.TYPE_CHECKING: from ...environment import Environment +SOREGEX = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') -class CLikeCompilerArgs(CompilerArgs): +class CLikeCompilerArgs(arglist.CompilerArgs): prepend_prefixes = ('-I', '-L') dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') @@ -49,9 +50,58 @@ class CLikeCompilerArgs(CompilerArgs): # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') - dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') + def to_native(self, copy: bool = False) -> T.List[str]: + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. + self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which + # all act like (or are) gnu ld + # TODO: this could probably be added to the DynamicLinker instead + if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker)): + group_start = -1 + group_end = -1 + for i, each in enumerate(new): + if not each.startswith(('-Wl,-l', '-l')) and not each.endswith('.a') and \ + not SOREGEX.match(each): + continue + group_end = i + if group_start < 0: + # First occurrence of a library + group_start = i + if group_start >= 0: + # Last occurrence of a library + new.insert(group_end + 1, '-Wl,--end-group') + new.insert(group_start, '-Wl,--start-group') + # Remove system/default include paths added with -isystem + if hasattr(self.compiler, 'get_default_include_dirs'): + default_dirs = self.compiler.get_default_include_dirs() + bad_idx_list = [] # type: T.List[int] + for i, each in enumerate(new): + # Remove the -isystem and the path if the path is a default path + if (each == '-isystem' and + i < (len(new) - 1) and + new[i + 1] in default_dirs): + bad_idx_list += [i, i + 1] + elif each.startswith('-isystem=') and each[9:] in default_dirs: + bad_idx_list += [i] + elif each.startswith('-isystem') and each[8:] in default_dirs: + bad_idx_list += [i] + for i in reversed(bad_idx_list): + new.pop(i) + return self.compiler.unix_args_to_native(new._container) + + def __repr__(self) -> str: + self.flush_pre_post() + return 'CLikeCompilerArgs({!r}, {!r})'.format(self.compiler, self._container) + class CLikeCompiler: -- cgit v1.1 From d42cd735a4dc894d8e898a5f9e81029f6eb5364c Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Mon, 15 Jun 2020 11:49:40 -0700 Subject: arglist: Fix remaining mypy errors and warnings So we can lint it with mypy --- .github/workflows/lint_mypy.yml | 2 +- mesonbuild/arglist.py | 9 ++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/lint_mypy.yml b/.github/workflows/lint_mypy.yml index 7afee2e..056f96e 100644 --- a/.github/workflows/lint_mypy.yml +++ b/.github/workflows/lint_mypy.yml @@ -31,4 +31,4 @@ jobs: with: python-version: '3.x' - run: python -m pip install mypy - - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py 
mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py + - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py mesonbuild/arglist.py diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py index bb16f38..fd4de96 100644 --- a/mesonbuild/arglist.py +++ b/mesonbuild/arglist.py @@ -26,7 +26,6 @@ if T.TYPE_CHECKING: from .linkers import StaticLinker from .compilers import Compiler - UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt'] # type: T.List[str] # execinfo is a compiler lib on FreeBSD and NetBSD if mesonlib.is_freebsd() or mesonlib.is_netbsd(): @@ -158,8 +157,8 @@ class CompilerArgs(collections.abc.MutableSequence): def __getitem__(self, index: int) -> str: # noqa: F811 pass - @T.overload # noqa: F811 - def __getitem__(self, index: slice) -> T.List[str]: # noqa: F811 + @T.overload # noqa: F811 + def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811 pass def __getitem__(self, index): # noqa: F811 @@ -171,7 +170,7 @@ class CompilerArgs(collections.abc.MutableSequence): pass @T.overload # noqa: F811 - def __setitem__(self, index: slice, value: T.List[str]) -> None: # noqa: F811 + def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811 pass def __setitem__(self, index, value) -> None: # noqa: F811 @@ -312,7 +311,7 @@ class CompilerArgs(collections.abc.MutableSequence): new += self return new - def __eq__(self, other: T.Any) -> T.Union[bool, type(NotImplemented)]: + def __eq__(self, other: T.Any) -> T.Union[bool]: self.flush_pre_post() # Only allow equality checks against other CompilerArgs and lists instances if isinstance(other, CompilerArgs): -- cgit v1.1 From 45793b6ee21021b69ddac5aa4560055b9b11f57b Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Mon, 8 Jun 2020 16:41:52 -0700 Subject: symbolextractor: Add support for Solaris Signed-off-by: Alan Coopersmith --- mesonbuild/scripts/symbolextractor.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index 41cca26..5240275 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -121,6 +121,13 @@ def gnu_syms(libfilename: str, outfilename: str): result += [' '.join(entry)] write_if_changed('\n'.join(result) + '\n', outfilename) +def solaris_syms(libfilename: str, outfilename: str): + # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump + origpath = os.environ['PATH'] + os.environ['PATH'] = '/usr/gnu/bin:' + origpath + gnu_syms(libfilename, outfilename) + os.environ['PATH'] = origpath + def osx_syms(libfilename: str, outfilename: str): # Get the name of the library output = call_tool('otool', ['-l', libfilename]) @@ -270,6 +277,8 @@ def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host # No import library. Not sure how the DLL is being used, so just # rebuild everything that links to it every time. 
dummy_syms(outfilename) + elif mesonlib.is_sunos(): + solaris_syms(libfilename, outfilename) else: if not os.path.exists(TOOL_WARNING_FILE): mlog.warning('Symbol extracting has not been implemented for this ' -- cgit v1.1 From e06b04dc809935fdd5865d59ce8a8bd28a5039f6 Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Mon, 8 Jun 2020 16:08:19 -0700 Subject: unittests: Fix test_compiler_detection on Solaris Accept Solaris linker in addition to GNU linker. Previously using the system provided gcc (which calls the Solaris linker) caused it to fail with: ====================================================================== FAIL: test_compiler_detection (__main__.AllPlatformTests) ---------------------------------------------------------------------- Traceback (most recent call last): File "run_unittests.py", line 2525, in test_compiler_detection self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin) AssertionError: is not an instance of Signed-off-by: Alan Coopersmith --- run_unittests.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index 72ca809..237203f 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -2582,6 +2582,8 @@ class AllPlatformTests(BasePlatformTests): self.assertIsInstance(linker, ar) if is_osx(): self.assertIsInstance(cc.linker, mesonbuild.linkers.AppleDynamicLinker) + elif is_sunos(): + self.assertIsInstance(cc.linker, (mesonbuild.linkers.SolarisDynamicLinker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin)) else: self.assertIsInstance(cc.linker, mesonbuild.linkers.GnuLikeDynamicLinkerMixin) if isinstance(cc, clangcl): -- cgit v1.1 From 77935db7b0a36b499d07a142f7a97cf219d1da5a Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Mon, 8 Jun 2020 11:39:57 -0700 Subject: test: Fix linuxlike/3 linker script on Solaris 11.4 Solaris linker added support for GNU-style --version-script in Solaris 11.4, but requires adding the -z gnu-version-script-compat flag to enable it. Signed-off-by: Alan Coopersmith --- test cases/linuxlike/3 linker script/meson.build | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/test cases/linuxlike/3 linker script/meson.build b/test cases/linuxlike/3 linker script/meson.build index 63765e7..5901bf7 100644 --- a/test cases/linuxlike/3 linker script/meson.build +++ b/test cases/linuxlike/3 linker script/meson.build @@ -1,5 +1,11 @@ project('linker script', 'c') +# Solaris 11.4 ld supports --version-script only when you also specify +# -z gnu-version-script-compat +if meson.get_compiler('c').get_linker_id() == 'ld.solaris' + add_project_link_arguments('-Wl,-z,gnu-version-script-compat', language: 'C') +endif + # Static map file mapfile = 'bob.map' vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile) -- cgit v1.1 From f71c828d99fcfc6961882b3c37fbe2d0a712a1e6 Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Mon, 8 Jun 2020 18:58:48 -0700 Subject: unittests: Fix test_identity_cross on Solaris Since mesonbuild/environment.py doesn't recognize Studio compilers, force use of gcc on Solaris for now. 
Signed-off-by: Alan Coopersmith --- test cases/unit/61 identity cross/build_wrapper.py | 10 ++++++++-- test cases/unit/61 identity cross/host_wrapper.py | 10 ++++++++-- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/test cases/unit/61 identity cross/build_wrapper.py b/test cases/unit/61 identity cross/build_wrapper.py index b5fe7bb..15d5c07 100755 --- a/test cases/unit/61 identity cross/build_wrapper.py +++ b/test cases/unit/61 identity cross/build_wrapper.py @@ -1,5 +1,11 @@ #!/usr/bin/env python3 -import subprocess, sys +import subprocess, sys, platform -subprocess.call(["cc", "-DEXTERNAL_BUILD"] + sys.argv[1:]) +# Meson does not yet support Studio cc on Solaris, only gcc or clang +if platform.system() == 'SunOS': + cc = 'gcc' +else: + cc = 'cc' + +subprocess.call([cc, "-DEXTERNAL_BUILD"] + sys.argv[1:]) diff --git a/test cases/unit/61 identity cross/host_wrapper.py b/test cases/unit/61 identity cross/host_wrapper.py index e88577c..a3a694a 100755 --- a/test cases/unit/61 identity cross/host_wrapper.py +++ b/test cases/unit/61 identity cross/host_wrapper.py @@ -1,5 +1,11 @@ #!/usr/bin/env python3 -import subprocess, sys +import subprocess, sys, platform -subprocess.call(["cc", "-DEXTERNAL_HOST"] + sys.argv[1:]) +# Meson does not yet support Studio cc on Solaris, only gcc or clang +if platform.system() == 'SunOS': + cc = 'gcc' +else: + cc = 'cc' + +subprocess.call([cc, "-DEXTERNAL_HOST"] + sys.argv[1:]) -- cgit v1.1 From feef5a1c064af7d9e3143c6df559e112a371e7fe Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Mon, 8 Jun 2020 12:15:39 -0700 Subject: unittests: Fix 51 ldflagdedup on Solaris If the linker doesn't support --export-dynamic, skip test to deduplicate it, since it always fails finding 0 copies instead of 1. --- run_unittests.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index 237203f..1a96f8f 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -6772,6 +6772,11 @@ class LinuxlikeTests(BasePlatformTests): testdir = os.path.join(self.unit_test_dir, '52 ldflagdedup') if is_cygwin() or is_osx(): raise unittest.SkipTest('Not applicable on Cygwin or OSX.') + env = get_fake_env() + cc = env.detect_c_compiler(MachineChoice.HOST) + linker = cc.linker + if not linker.export_dynamic_args(env): + raise unittest.SkipTest('Not applicable for linkers without --export-dynamic') self.init(testdir) build_ninja = os.path.join(self.builddir, 'build.ninja') max_count = 0 -- cgit v1.1 From e4330eff6d340e1a2990f1f0ef7a2e328866ce05 Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Sat, 29 Jun 2019 19:09:52 -0700 Subject: Fix "test cases/common/125 object only target" on Solaris If object is not built pic, trying to link it into libshr.so fails: [6/8] Linking target libshr.so. FAILED: libshr.so gcc -o libshr.so 'shr@sha/source2.o' -Wl,--no-undefined -Wl,--as-needed -shared -fPIC -Wl,--start-group -Wl,-soname,libshr.so -Wl,--end-group Text relocation remains referenced against symbol offset in file .text (section) 0x20 shr@sha/source2.o ld: fatal: relocations remain against allocatable but non-writable sections collect2: error: ld returned 1 exit status ninja: build stopped: subcommand failed. 
Signed-off-by: Alan Coopersmith --- test cases/common/125 object only target/obj_generator.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test cases/common/125 object only target/obj_generator.py b/test cases/common/125 object only target/obj_generator.py index a33872a..afdbc09 100755 --- a/test cases/common/125 object only target/obj_generator.py +++ b/test cases/common/125 object only target/obj_generator.py @@ -13,6 +13,8 @@ if __name__ == '__main__': ofile = sys.argv[3] if compiler.endswith('cl'): cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile] + elif sys.platform == 'sunos5': + cmd = [compiler, '-fpic', '-c', ifile, '-o', ofile] else: cmd = [compiler, '-c', ifile, '-o', ofile] sys.exit(subprocess.call(cmd)) -- cgit v1.1 From 28d1bb90162931a5fcce7470b22c622b903e6dca Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Sun, 21 Jun 2020 08:39:51 -0700 Subject: SolarisDynamicLinker: add get_asneeded_args() & get_pie_args() Signed-off-by: Alan Coopersmith --- mesonbuild/linkers.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 805bbc7..4264e7d 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -1098,6 +1098,13 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): return args return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive') + def get_pie_args(self) -> T.List[str]: + # Available in Solaris 11.2 and later + return ['-z', 'type=pie'] + + def get_asneeded_args(self) -> T.List[str]: + return self._apply_prefix(['-z', 'ignore']) + def no_undefined_args(self) -> T.List[str]: return ['-z', 'defs'] -- cgit v1.1 From 83df219747088ec1278cb66ca29d09645185218f Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 24 Jun 2020 20:48:16 +0530 Subject: docs: Document string path building with examples Also document that line continuation didn't work before 0.50. 90c9b868b20b11bb089fc5e0c634d5ed76fea0cb fixed it. --- docs/markdown/Syntax.md | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index 666d50e..8db7bb3 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -16,9 +16,10 @@ statements* and *includes*. Usually one Meson statement takes just one line. There is no way to have multiple statements on one line as in e.g. *C*. Function and method calls' argument lists can be split over multiple lines. Meson -will autodetect this case and do the right thing. In other cases you -can get multi-line statements by ending the line with a `\`. Apart -from line ending whitespace has no syntactic meaning. +will autodetect this case and do the right thing. + +In other cases, *(added 0.50)* you can get multi-line statements by ending the +line with a `\`. Apart from line ending whitespace has no syntactic meaning. Variables -- @@ -136,6 +137,24 @@ str2 = 'xyz' combined = str1 + '_' + str2 # combined is now abc_xyz ``` +#### String path building + +*(Added 0.49)* + +You can concatenate any two strings using `/` as an operator to build paths. +This will always use `/` as the path separator on all platforms. 
+ +```meson +joined = '/usr/share' / 'projectname' # => /usr/share/projectname +joined = '/usr/local' / '/etc/name' # => /etc/name + +joined = 'C:\\foo\\bar' / 'builddir' # => C:/foo/bar/builddir +joined = 'C:\\foo\\bar' / 'D:\\builddir' # => D:/builddir +``` + +Note that this is equivalent to using [`join_paths()`](Reference-manual.md#join_paths), +which was obsoleted by this operator. + #### Strings running over multiple lines Strings running over multiple lines can be declared with three single -- cgit v1.1 From 08f29c18123f1c77866638b9c595a6c2fff163b3 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Mon, 22 Jun 2020 22:27:28 +0200 Subject: docs: Add initial docs for the Meson CI [skip ci] --- docs/markdown/MesonCI.md | 53 ++++++++++++++++++++++++++++++++++++++++++++++++ docs/sitemap.txt | 1 + 2 files changed, 54 insertions(+) create mode 100644 docs/markdown/MesonCI.md diff --git a/docs/markdown/MesonCI.md b/docs/markdown/MesonCI.md new file mode 100644 index 0000000..73b979b --- /dev/null +++ b/docs/markdown/MesonCI.md @@ -0,0 +1,53 @@ +# Meson CI setup + +This document is aimed for Meson contributors and documents +the CI setup used for testing Meson itself. The Meson +project uses multiple CI platforms for covering a wide +range of target systems. + +## Travis CI + +The travis configuration file is the `.travis.yml` in the +the project root. This platform tests cross compilation and +unity builds on a [linux docker image](#docker-images) and +on OSX. + +## GitHub actions + +The configuration files for GitHub actions are located in +`.github/workflows`. Here, all [images](#docker-images) +are tested with the full `run_tests.py` run. Additionally, +some other, smaller, tests are run. + +## Docker images + +The Linux docker images are automatically built and +uploaded by GitHub actions. An image rebuild is triggerd +when any of the image definition files are changed (in +`ci/ciimage`) in the master branch. Additionally, the +images are also updated weekly. + +Each docker image has one corresponding dirctory in +`ci/ciimage` with an `image.json` and an `install.sh`. + +### Image generation + +There are no manual Dockerfiles. Instead the Dockerfile is +automatically generated by the `build.py` script. This is +done to ensure that all images have the same layout and can +all be built and tested automatically. + +The Dockerfile is generated from the `image.json` file and +basically only adds a few common files and runs the +`install.sh` script which should contain all distribution +specific setup steps. The `common.sh` can be sourced via +`source /ci/common.sh` to access some shared functionalety. + +To generate the image run `build.py -t build `. A +generated image can be tested with `build.py -t test `. + +### Common image setup + +Each docker image has a `/ci` directory with an +`env_vars.sh` script. This script has to be sourced before +running the meson test suite. 
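To make the image-generation flow described in MesonCI.md concrete, the following is a rough sketch of what a generator along the lines of `build.py` could do with the per-image files. It is only an illustration of the documented layout, not the real `ci/ciimage/build.py`, and the `base_image` key is an assumption rather than the actual `image.json` schema:

```python
#!/usr/bin/env python3
# Hypothetical sketch of the described flow, not the real ci/ciimage/build.py.
import json
from pathlib import Path

def generate_dockerfile(image_dir: Path) -> str:
    # 'base_image' is an assumed key; the real image.json schema may differ.
    config = json.loads((image_dir / 'image.json').read_text())
    return '\n'.join([
        'FROM {}'.format(config['base_image']),
        # Every image gets the same common files, then runs its install.sh.
        'ADD install.sh common.sh env_vars.sh /ci/',
        'RUN /ci/install.sh',
    ])

if __name__ == '__main__':
    print(generate_dockerfile(Path('ci/ciimage/fedora')))
```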
diff --git a/docs/sitemap.txt b/docs/sitemap.txt index 1aef1c1..be1d908 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -118,5 +118,6 @@ index.md Using-multiple-build-directories.md Vs-External.md Contributing.md + MesonCI.md legal.md Videos.md -- cgit v1.1 From d0f468fef11f576cbbb2e704517f7b2d010750a1 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Sun, 21 Jun 2020 19:48:39 -0400 Subject: pkgconfig: Add missing cflags in uninstalled files Fixes: #7365 --- mesonbuild/modules/pkgconfig.py | 27 +++++++++------------- run_unittests.py | 5 ++-- .../common/47 pkgconfig-gen/dependencies/main.c | 4 ++++ test cases/common/47 pkgconfig-gen/meson.build | 3 ++- 4 files changed, 20 insertions(+), 19 deletions(-) diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 18baf0c..b7a12ff 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -372,18 +372,18 @@ class PkgConfigModule(ExtensionModule): if len(deps.priv_libs) > 0: ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs)))) - def generate_compiler_flags(): - cflags_buf = [] - for f in deps.cflags: - cflags_buf.append(self._escape(f)) - return cflags_buf - - cflags = generate_compiler_flags() - ofile.write('Cflags: ') + cflags = [] if uninstalled: - ofile.write(' '.join(generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs))) - elif not dataonly and cflags: - ofile.write('{}\n'.format(' '.join(cflags))) + cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs) + else: + for d in subdirs: + if d == '.': + cflags.append('-I${includedir}') + else: + cflags.append(self._escape(PurePath('-I${includedir}') / d)) + cflags += [self._escape(f) for f in deps.cflags] + if cflags and not dataonly: + ofile.write('Cflags: {}\n'.format(' '.join(cflags))) @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables']) @FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags']) @@ -448,11 +448,6 @@ class PkgConfigModule(ExtensionModule): libraries = [mainlib] + libraries deps = DependenciesHelper(state, filebase) - for d in subdirs: - if d == '.': - deps.add_cflags(['-I${includedir}']) - else: - deps.add_cflags(self._escape(PurePath('-I${includedir}') / d)) deps.add_pub_libs(libraries) deps.add_priv_libs(kwargs.get('libraries_private', [])) deps.add_pub_reqs(kwargs.get('requires', [])) diff --git a/run_unittests.py b/run_unittests.py index 1a96f8f..9e8aa50 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -3177,8 +3177,9 @@ int main(int argc, char **argv) { self.assertEqual(foo_dep.get_link_args(), link_args) # Ensure include args are properly quoted incdir = PurePath(prefix) / PurePath('include') - cargs = ['-I' + incdir.as_posix()] - self.assertEqual(foo_dep.get_compile_args(), cargs) + cargs = ['-I' + incdir.as_posix(), '-DLIBFOO'] + # pkg-config and pkgconf does not respect the same order + self.assertEqual(sorted(foo_dep.get_compile_args()), sorted(cargs)) def test_array_option_change(self): def get_opt(): diff --git a/test cases/common/47 pkgconfig-gen/dependencies/main.c b/test cases/common/47 pkgconfig-gen/dependencies/main.c index 61708d3..397d40c 100644 --- a/test cases/common/47 pkgconfig-gen/dependencies/main.c +++ b/test cases/common/47 pkgconfig-gen/dependencies/main.c @@ -1,5 +1,9 @@ #include +#ifndef LIBFOO +#error LIBFOO should be defined in pkgconfig cflags +#endif + int main(int argc, char *argv[]) { return simple_function() == 42 ? 
0 : 1; diff --git a/test cases/common/47 pkgconfig-gen/meson.build b/test cases/common/47 pkgconfig-gen/meson.build index c251b9f..eb2afe4 100644 --- a/test cases/common/47 pkgconfig-gen/meson.build +++ b/test cases/common/47 pkgconfig-gen/meson.build @@ -43,7 +43,8 @@ pkgg.generate( name : 'libfoo', version : libver, description : 'A foo library.', - variables : ['foo=bar', 'datadir=${prefix}/data'] + variables : ['foo=bar', 'datadir=${prefix}/data'], + extra_cflags : ['-DLIBFOO'], ) pkgg.generate( -- cgit v1.1 From e9dbe82d3ebcdc873946fe84f8ab213377b1561d Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 25 Jun 2020 03:20:42 +0530 Subject: ci: Bump the cygwin timeout to 120 minutes The default timeout is 60 min and we're timing out, but the maximum we can have is 6 hours. Bump it to 120 min for now. --- azure-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 45d85b7..f482b8e 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -64,6 +64,7 @@ jobs: - template: ci/azure-steps.yml - job: cygwin + timeoutInMinutes: 120 pool: vmImage: VS2017-Win2016 strategy: -- cgit v1.1 From 6aabc205f71c44475d364b10f4bcca5fcaf38a7e Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Thu, 25 Jun 2020 03:30:54 +0530 Subject: ci: Workaround MSYS2 update breakage Also kill all MSYS2 processes after the first update, and constantly print update status. --- azure-pipelines.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index f482b8e..85fedab 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -161,7 +161,13 @@ jobs: displayName: Install MSYS2 - script: | set PATH=%MSYS2_ROOT%\usr\bin;%SystemRoot%\system32;%SystemRoot%;%SystemRoot%\System32\Wbem - %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syyuu + # Remove this line when https://github.com/msys2/MSYS2-packages/pull/2022 is merged + %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Sy dash + echo Updating msys2 + %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu || echo system update failed, ignoring + echo Killing all msys2 processes + taskkill /F /FI "MODULES eq msys-2.0.dll" + echo Updating msys2 (again) %MSYS2_ROOT%\usr\bin\pacman --noconfirm -Syuu displayName: Update MSYS2 - script: | -- cgit v1.1 From 33167d40a29fea6b2b09fe0bc8a9eff5904857de Mon Sep 17 00:00:00 2001 From: Eli Schwartz Date: Thu, 25 Jun 2020 11:28:39 -0400 Subject: wrap: add user agent when downloading files This lets servers know when they're being used by meson. It also avoids issues where the Independent JPEG Group decided to ban the "Python-urllib" default user agent. Fixes https://github.com/mesonbuild/libjpeg/issues/9 --- mesonbuild/wrap/wrap.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 44173f7..5afbe8f 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -29,6 +29,7 @@ import typing as T from pathlib import Path from . import WrapMode +from .. 
import coredata from ..mesonlib import git, GIT, ProgressBar, MesonException if T.TYPE_CHECKING: @@ -329,7 +330,8 @@ class Resolver: raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring)) else: try: - resp = urllib.request.urlopen(urlstring, timeout=REQ_TIMEOUT) + req = urllib.request.Request(urlstring, headers={'User-Agent': 'mesonbuild/{}'.format(coredata.version)}) + resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT) except urllib.error.URLError as e: mlog.log(str(e)) raise WrapException('could not get {} is the internet available?'.format(urlstring)) -- cgit v1.1 From 1e140c002bcf0e76ea096c081373330106433391 Mon Sep 17 00:00:00 2001 From: "Michael Hirsch, Ph.D" Date: Sun, 28 Jun 2020 12:52:25 -0400 Subject: syntax.md: correct markdown heading syntax [skip ci] --- docs/markdown/Syntax.md | 79 +++++++++++++++++++++---------------------------- 1 file changed, 33 insertions(+), 46 deletions(-) diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index 8db7bb3..002ed01 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -21,8 +21,7 @@ will autodetect this case and do the right thing. In other cases, *(added 0.50)* you can get multi-line statements by ending the line with a `\`. Apart from line ending whitespace has no syntactic meaning. -Variables --- +## Variables Variables in Meson work just like in other high level programming languages. A variable can contain a value of any type, such as an @@ -47,8 +46,7 @@ var2 += [4] # var1 is still [1, 2, 3] ``` -Numbers --- +## Numbers Meson supports only integer numbers. They are declared simply by writing them out. Basic arithmetic operations are supported. @@ -86,8 +84,7 @@ int_var = 42 string_var = int_var.to_string() ``` -Booleans --- +## Booleans A boolean is either `true` or `false`. @@ -95,8 +92,7 @@ A boolean is either `true` or `false`. truth = true ``` -Strings --- +## Strings Strings in Meson are declared with single quotes. To enter a literal single quote do it like this: @@ -127,7 +123,7 @@ As in python and C, up to three octal digits are accepted in `\ooo`. Unrecognized escape sequences are left in the string unchanged, i.e., the backslash is left in the string. -#### String concatenation +### String concatenation Strings can be concatenated to form a new string using the `+` symbol. @@ -137,7 +133,7 @@ str2 = 'xyz' combined = str1 + '_' + str2 # combined is now abc_xyz ``` -#### String path building +### String path building *(Added 0.49)* @@ -155,7 +151,7 @@ joined = 'C:\\foo\\bar' / 'D:\\builddir' # => D:/builddir Note that this is equivalent to using [`join_paths()`](Reference-manual.md#join_paths), which was obsoleted by this operator. -#### Strings running over multiple lines +### Strings running over multiple lines Strings running over multiple lines can be declared with three single quotes, like this: @@ -171,7 +167,7 @@ These are raw strings that do not support the escape sequences listed above. These strings can also be combined with the string formatting functionality described below. -#### String formatting +### String formatting Strings can be built using the string formatting functionality. @@ -184,12 +180,12 @@ res = template.format('text', 1, true) As can be seen, the formatting works by replacing placeholders of type `@number@` with the corresponding argument. -#### String methods +### String methods Strings also support a number of other methods that return transformed copies. -**.strip()** +#### .strip() ```meson # Similar to the Python str.strip(). 
Removes leading/ending spaces and newlines @@ -198,7 +194,7 @@ stripped_define = define.strip() # 'stripped_define' now has the value '-Dsomedefine' ``` -**.to_upper()**, **.to_lower()** +#### .to_upper(), .to_lower() ```meson target = 'x86_FreeBSD' @@ -206,7 +202,7 @@ upper = target.to_upper() # t now has the value 'X86_FREEBSD' lower = target.to_lower() # t now has the value 'x86_freebsd' ``` -**.to_int()** +#### .to_int() ```meson version = '1' @@ -214,7 +210,7 @@ version = '1' ver_int = version.to_int() ``` -**.contains()**, **.startswith()**, **.endswith()** +#### .contains(), .startswith(), .endswith() ```meson target = 'x86_FreeBSD' @@ -224,7 +220,7 @@ is_x86 = target.startswith('x86') # boolean value 'true' is_bsd = target.to_lower().endswith('bsd') # boolean value 'true' ``` -**.split()**, **.join()** +#### .split(), .join() ```meson # Similar to the Python str.split() @@ -265,7 +261,7 @@ api_version = '@0@.@1@'.format(version_array[0], version_array[1]) # api_version now (again) has the value '0.2' ``` -**.underscorify()** +#### .underscorify() ```meson name = 'Meson Docs.txt#Reference-manual' @@ -275,7 +271,7 @@ underscored = name.underscorify() # underscored now has the value 'Meson_Docs_txt_Reference_manual' ``` -**.version_compare()** +#### .version_compare() ```meson version = '1.2.3' @@ -285,8 +281,7 @@ is_new = version.version_compare('>=2.0') # Supports the following operators: '>', '<', '>=', '<=', '!=', '==', '=' ``` -Arrays --- +## Arrays Arrays are delimited by brackets. An array can contain an arbitrary number of objects of any type. @@ -321,6 +316,7 @@ assign it to `my_array` instead of modifying the original since all objects in Meson are immutable. Since 0.49.0, you can check if an array contains an element like this: + ```meson my_array = [1, 2] if 1 in my_array @@ -331,7 +327,7 @@ if 1 not in my_array endif ``` -#### Array methods +### Array methods The following methods are defined for all arrays: @@ -339,8 +335,7 @@ The following methods are defined for all arrays: - `contains`, returns `true` if the array contains the object given as argument, `false` otherwise - `get`, returns the object at the given index, negative indices count from the back of the array, indexing out of bounds is a fatal error. Provided for backwards-compatibility, it is identical to array indexing. -Dictionaries --- +## Dictionaries Dictionaries are delimited by curly braces. A dictionary can contain an arbitrary number of key value pairs. Keys are required to be strings, values can @@ -365,6 +360,7 @@ Visit the [Reference Manual](Reference-manual.md#dictionary-object) to read about the methods exposed by dictionaries. Since 0.49.0, you can check if a dictionary contains a key like this: + ```meson my_dict = {'foo': 42, 'bar': 43} if 'foo' in my_dict @@ -380,14 +376,14 @@ endif *Since 0.53.0* Keys can be any expression evaluating to a string value, not limited to string literals any more. + ```meson d = {'a' + 'b' : 42} k = 'cd' d += {k : 43} ``` -Function calls --- +## Function calls Meson provides a set of usable functions. The most common use case is creating build objects. @@ -432,8 +428,7 @@ executable('progname', 'prog.c', Attempting to do this causes Meson to immediately exit with an error. -Method calls --- +## Method calls Objects can have methods, which are called with the dot operator. The exact methods it provides depends on the object. 
@@ -443,8 +438,7 @@ myobj = some_function() myobj.do_something('now') ``` -If statements --- +## If statements If statements work just like in other languages. @@ -465,8 +459,7 @@ if opt != 'foo' endif ``` -Logical operations --- +## Logical operations Meson has the standard range of logical operations which can be used in `if` statements. @@ -556,8 +549,7 @@ endforeach # result is ['a', 'b'] ``` -Comments --- +## Comments A comment starts with the `#` character and extends until the end of the line. @@ -566,8 +558,7 @@ some_function() # This is a comment some_other_function() ``` -Ternary operator --- +## Ternary operator The ternary operator works just like in other languages. @@ -579,8 +570,7 @@ The only exception is that nested ternary operators are forbidden to improve legibility. If your branching needs are more complex than this you need to write an `if/else` construct. -Includes --- +## Includes Most source trees have multiple subdirectories to process. These can be handled by Meson's `subdir` command. It changes to the given @@ -595,8 +585,7 @@ test_data_dir = 'data' subdir('tests') ``` -User-defined functions and methods --- +## User-defined functions and methods Meson does not currently support user-defined functions or methods. The addition of user-defined functions would make Meson @@ -608,8 +597,7 @@ because of this limitation you find yourself copying and pasting code a lot you may be able to use a [`foreach` loop instead](#foreach-statements). -Stability Promises --- +## Stability Promises Meson is very actively developed and continuously improved. There is a possibility that future enhancements to the Meson build system will require @@ -618,8 +606,7 @@ keywords, changing the meaning of existing keywords or additions around the basic building blocks like statements and fundamental types. It is planned to stabilize the syntax with the 1.0 release. -Grammar --- +## Grammar This is the full Meson grammar, as it is used to parse Meson build definition files: @@ -641,7 +628,7 @@ equality_expression: relational_expression | (equality_expression equality_opera equality_operator: "==" | "!=" expression: assignment_expression expression_list: expression ("," expression)* -expression_statememt: expression +expression_statememt: expression function_expression: id_expression "(" [argument_list] ")" hex_literal: "0x" HEX_NUMBER HEX_NUMBER: /[a-fA-F0-9]+/ -- cgit v1.1 From b9b15816e85c3bc596a44650837af1c687fd6398 Mon Sep 17 00:00:00 2001 From: "Michael Hirsch, Ph.D" Date: Sun, 28 Jun 2020 12:56:09 -0400 Subject: syntax.md: document version_compare ambiguities [skip ci] --- docs/markdown/Syntax.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index 002ed01..7cb39e9 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -281,6 +281,14 @@ is_new = version.version_compare('>=2.0') # Supports the following operators: '>', '<', '>=', '<=', '!=', '==', '=' ``` +Meson version comparison conventions include: + +```meson +'3.6'.version_compare('>=3.6.0') == false +``` + +It is best to be unambiguous and specify the full revision level to compare. + ## Arrays Arrays are delimited by brackets. An array can contain an arbitrary number of objects of any type. 
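The comparison convention documented in this hunk can be illustrated with plain component-wise comparison. This is only a sketch of the convention the docs describe, not the exact `mesonlib.version_compare()` implementation:

```python
# Sketch of the convention: a shorter version string compares as "less than"
# one that spells out the trailing .0, hence '3.6'.version_compare('>=3.6.0')
# evaluating to false in the example above.
def components(version: str) -> list:
    return [int(p) for p in version.split('.') if p.isdigit()]

print(components('3.6') >= components('3.6.0'))   # False
print(components('3.6.0') >= components('3.6'))   # True -> spell out the full revision
```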
-- cgit v1.1 From 4d0233540f15c686c199d8f464fc7499a094645e Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Thu, 21 May 2020 17:05:04 +0300 Subject: Added ability to pass arguments to backend in `meson compile` --- docs/markdown/Commands.md | 31 +++++++++++++++++++- docs/markdown/snippets/add_compile_backend_arg.md | 26 +++++++++++++++++ mesonbuild/mcompile.py | 35 ++++++++++++++++------- run_unittests.py | 15 ++++++++++ 4 files changed, 96 insertions(+), 11 deletions(-) create mode 100644 docs/markdown/snippets/add_compile_backend_arg.md diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md index dbcfee4..4d3de55 100644 --- a/docs/markdown/Commands.md +++ b/docs/markdown/Commands.md @@ -137,7 +137,7 @@ meson configure builddir -Doption=new_value ``` $ meson compile [-h] [-j JOBS] [-l LOAD_AVERAGE] [--clean] [-C BUILDDIR] - [--verbose] + [--verbose] [--ninja-args NINJA_ARGS] [--vs-args VS_ARGS] ``` Builds a default or a specified target of a configured meson project. @@ -155,6 +155,30 @@ optional arguments: -C BUILDDIR The directory containing build files to be built. --verbose Show more verbose output. + --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied + only on `ninja` backend). + --vs-args VS_ARGS Arguments to pass to `msbuild` (applied + only on `vs` backend). +``` + +#### Backend specific arguments + +*(since 0.55.0)* + +`BACKEND-args` use the following syntax: + +If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command: + +``` +$ meson compile --ninja-args=-n,-d,explain +``` + +would add `-n`, `-d` and `explain` arguments to ninja invocation. + +If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this: + +``` +$ meson compile "--ninja-args=['a,b', 'c d']" ``` #### Examples: @@ -164,6 +188,11 @@ Build the project: meson compile -C builddir ``` +Execute a dry run on ninja backend with additional debug info: +``` +meson compile --ninja-args=-n,-d,explain +``` + ### dist *(since 0.52.0)* diff --git a/docs/markdown/snippets/add_compile_backend_arg.md b/docs/markdown/snippets/add_compile_backend_arg.md new file mode 100644 index 0000000..76e2abb --- /dev/null +++ b/docs/markdown/snippets/add_compile_backend_arg.md @@ -0,0 +1,26 @@ +## Added ability to specify backend arguments in `meson compile` + +It's now possible to specify backend specific arguments in `meson compile`. + +Usage: `meson compile [--vs-args=args] [--ninja-args=args]` + +``` + --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied only on `ninja` backend). + --vs-args VS_ARGS Arguments to pass to `msbuild` (applied only on `vs` backend). +``` + +These arguments use the following syntax: + +If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command: + +``` +$ meson compile --ninja-args=-n,-d,explain +``` + +would add `-n`, `-d` and `explain` arguments to ninja invocation. + +If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this: + +``` +$ meson compile "--ninja-args=['a,b', 'c d']" +``` diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index 0bcb56e..3799ce3 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -14,6 +14,7 @@ """Entrypoint script for backend agnostic compile.""" +import argparse import sys import typing as T from pathlib import Path @@ -23,10 +24,11 @@ from . 
import mesonlib from . import coredata from .mesonlib import MesonException from mesonbuild.environment import detect_ninja +from mesonbuild.coredata import UserArrayOption + +def array_arg(value: str) -> T.List[str]: + return UserArrayOption(None, value, allow_dups=True, user_input=True).value -if T.TYPE_CHECKING: - import argparse - def validate_builddir(builddir: Path): if not (builddir / 'meson-private' / 'coredata.dat' ).is_file(): raise MesonException('Current directory is not a meson build directory: `{}`.\n' @@ -58,31 +60,31 @@ def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path): cmd.append('-v') if options.clean: cmd.append('clean') - + return cmd def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path): slns = list(builddir.glob('*.sln')) assert len(slns) == 1, 'More than one solution in a project?' - + sln = slns[0] cmd = ['msbuild', str(sln.resolve())] - + # In msbuild `-m` with no number means "detect cpus", the default is `-m1` if options.jobs > 0: cmd.append('-m{}'.format(options.jobs)) else: cmd.append('-m') - + if options.load_average: mlog.warning('Msbuild does not have a load-average switch, ignoring.') if not options.verbose: cmd.append('/v:minimal') if options.clean: cmd.append('/t:Clean') - + return cmd - + def add_arguments(parser: 'argparse.ArgumentParser') -> None: """Add compile specific arguments.""" parser.add_argument( @@ -117,7 +119,18 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None: action='store_true', help='Show more verbose output.' ) - + parser.add_argument( + '--ninja-args', + type=array_arg, + default=[], + help='Arguments to pass to `ninja` (applied only on `ninja` backend).' + ) + parser.add_argument( + '--vs-args', + type=array_arg, + default=[], + help='Arguments to pass to `msbuild` (applied only on `vs` backend).' + ) def run(options: 'argparse.Namespace') -> int: bdir = options.builddir # type: Path @@ -128,8 +141,10 @@ def run(options: 'argparse.Namespace') -> int: backend = get_backend_from_coredata(bdir) if backend == 'ninja': cmd = get_parsed_args_ninja(options, bdir) + cmd += options.ninja_args elif backend.startswith('vs'): cmd = get_parsed_args_vs(options, bdir) + cmd += options.vs_args else: # TODO: xcode? 
raise MesonException( diff --git a/run_unittests.py b/run_unittests.py index 9e8aa50..1af0d8b 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4636,6 +4636,7 @@ recommended as it is not supported on some platforms''') testdir = os.path.join(self.common_test_dir, '1 trivial') self.init(testdir) + self._run([*self.meson_command, 'compile', '-C', self.builddir]) # If compile worked then we should get a program self.assertPathExists(os.path.join(self.builddir, prog)) @@ -4643,6 +4644,20 @@ recommended as it is not supported on some platforms''') self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean']) self.assertPathDoesNotExist(os.path.join(self.builddir, prog)) + # `--$BACKEND-args` + + if self.backend is Backend.ninja: + self.init(testdir, extra_args=['--wipe']) + # Dry run - should not create a program + self._run([*self.meson_command, 'compile', '-C', self.builddir, '--ninja-args=-n']) + self.assertPathDoesNotExist(os.path.join(self.builddir, prog)) + elif self.backend is Backend.vs: + self.init(testdir, extra_args=['--wipe']) + self._run([*self.meson_command, 'compile', '-C', self.builddir]) + # Explicitly clean the target through msbuild interface + self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', prog))]) + self.assertPathDoesNotExist(os.path.join(self.builddir, prog)) + def test_spurious_reconfigure_built_dep_file(self): testdir = os.path.join(self.unit_test_dir, '75 dep files') -- cgit v1.1 From 5696a5abbaaff75279d9c50d431de47f35dc6228 Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Thu, 21 May 2020 17:05:04 +0300 Subject: Added ability to specify target in `meson compile` --- docs/markdown/Commands.md | 39 +++- docs/markdown/snippets/add_meson_compile_target.md | 19 ++ mesonbuild/mcompile.py | 209 ++++++++++++++++++--- run_unittests.py | 68 ++++++- 4 files changed, 291 insertions(+), 44 deletions(-) create mode 100644 docs/markdown/snippets/add_meson_compile_target.md diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md index 4d3de55..e2a352a 100644 --- a/docs/markdown/Commands.md +++ b/docs/markdown/Commands.md @@ -136,24 +136,30 @@ meson configure builddir -Doption=new_value *(since 0.54.0)* ``` -$ meson compile [-h] [-j JOBS] [-l LOAD_AVERAGE] [--clean] [-C BUILDDIR] +$ meson compile [-h] [--clean] [-C BUILDDIR] [-j JOBS] [-l LOAD_AVERAGE] [--verbose] [--ninja-args NINJA_ARGS] [--vs-args VS_ARGS] + [TARGET [TARGET ...]] ``` Builds a default or a specified target of a configured meson project. ``` +positional arguments: + TARGET Targets to build. Target has the + following format: [PATH_TO_TARGET/]TARGE + T_NAME[:TARGET_TYPE]. + optional arguments: -h, --help show this help message and exit + --clean Clean the build directory. + -C BUILDDIR The directory containing build files to + be built. -j JOBS, --jobs JOBS The number of worker jobs to run (if supported). If the value is less than 1 the build program will guess. -l LOAD_AVERAGE, --load-average LOAD_AVERAGE The system load average to try to - maintain (if supported) - --clean Clean the build directory. - -C BUILDDIR The directory containing build files to - be built. + maintain (if supported). --verbose Show more verbose output. --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied only on `ninja` backend). @@ -161,6 +167,19 @@ optional arguments: only on `vs` backend). ``` +`--verbose` argument is available since 0.55.0. 
+ +#### Targets + +*(since 0.55.0)* + +`TARGET` has the following syntax `[PATH/]NAME[:TYPE]`, where: +- `NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`). +- `PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`. +- `TYPE`: type of the target. Can be one of the following: 'executable', 'static_library', 'shared_library', 'shared_module', 'custom', 'run', 'jar'. + +`PATH` and/or `TYPE` can be ommited if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`. + #### Backend specific arguments *(since 0.55.0)* @@ -193,6 +212,16 @@ Execute a dry run on ninja backend with additional debug info: meson compile --ninja-args=-n,-d,explain ``` +Build three targets: two targets that have the same `foo` name, but different type, and a `bar` target: +``` +meson compile foo:shared_library foo:static_library bar +``` + +Produce a coverage html report (if available): +``` +meson compile coverage-html +``` + ### dist *(since 0.52.0)* diff --git a/docs/markdown/snippets/add_meson_compile_target.md b/docs/markdown/snippets/add_meson_compile_target.md new file mode 100644 index 0000000..d75862f --- /dev/null +++ b/docs/markdown/snippets/add_meson_compile_target.md @@ -0,0 +1,19 @@ +## Added ability to specify targets in `meson compile` + +It's now possible to specify targets in `meson compile`, which will result in building only the requested targets. + +Usage: `meson compile [TARGET [TARGET...]]` +`TARGET` has the following syntax: `[PATH/]NAME[:TYPE]`. +`NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`). +`PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`. +`TYPE`: type of the target (e.g. `shared_library`, `executable` and etc) + +`PATH` and/or `TYPE` can be ommited if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`. + +For example targets from the following code: +```meson +shared_library('foo', ...) +static_library('foo', ...) +executable('bar', ...) +``` +can be invoked with `meson compile foo:shared_library foo:static_library bar`. diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index 3799ce3..9fe3a65 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -14,9 +14,11 @@ """Entrypoint script for backend agnostic compile.""" -import argparse +import json +import re import sys import typing as T +from collections import defaultdict from pathlib import Path from . 
import mlog @@ -26,10 +28,13 @@ from .mesonlib import MesonException from mesonbuild.environment import detect_ninja from mesonbuild.coredata import UserArrayOption +if T.TYPE_CHECKING: + import argparse + def array_arg(value: str) -> T.List[str]: return UserArrayOption(None, value, allow_dups=True, user_input=True).value -def validate_builddir(builddir: Path): +def validate_builddir(builddir: Path) -> None: if not (builddir / 'meson-private' / 'coredata.dat' ).is_file(): raise MesonException('Current directory is not a meson build directory: `{}`.\n' 'Please specify a valid build dir or change the working directory to it.\n' @@ -42,7 +47,93 @@ def get_backend_from_coredata(builddir: Path) -> str: """ return coredata.load(str(builddir)).get_builtin_option('backend') -def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path): +def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]: + """ + Converts a List of name-to-dict to a dict of name-to-dicts (since names are not unique) + """ + path_to_intro = builddir / 'meson-info' / 'intro-targets.json' + if not path_to_intro.exists(): + raise MesonException('`{}` is missing! Directory is not configured yet?'.format(path_to_intro.name)) + with path_to_intro.open() as f: + schema = json.load(f) + + parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]] + for target in schema: + parsed_data[target['name']] += [target] + return parsed_data + +class ParsedTargetName: + full_name = '' + name = '' + type = '' + path = '' + + def __init__(self, target: str): + self.full_name = target + split = target.rsplit(':', 1) + if len(split) > 1: + self.type = split[1] + if not self._is_valid_type(self.type): + raise MesonException('Can\'t invoke target `{}`: unknown target type: `{}`'.format(target, self.type)) + + split = split[0].rsplit('/', 1) + if len(split) > 1: + self.path = split[0] + self.name = split[1] + else: + self.name = split[0] + + @staticmethod + def _is_valid_type(type: str) -> bool: + # Ammend docs in Commands.md when editing this list + allowed_types = { + 'executable', + 'static_library', + 'shared_library', + 'shared_module', + 'custom', + 'run', + 'jar', + } + return type in allowed_types + +def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> dict: + if target.name not in introspect_data: + raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name)) + + intro_targets = introspect_data[target.name] + found_targets = [] + + resolved_bdir = builddir.resolve() + + if not target.type and not target.path: + found_targets = intro_targets + else: + for intro_target in intro_targets: + if (intro_target['subproject'] or + (target.type and target.type != intro_target['type'].replace(' ', '_')) or + (target.path + and intro_target['filename'] != 'no_name' + and Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)): + continue + found_targets += [intro_target] + + if not found_targets: + raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name)) + elif len(found_targets) > 1: + raise MesonException('Can\'t invoke target `{}`: ambigious name. 
Add target type and/or path: `PATH/NAME:TYPE`'.format(target.full_name)) + + return found_targets[0] + +def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]: + intro_target = get_target_from_intro_data(target, builddir, introspect_data) + + if intro_target['type'] == 'run': + return [target.name] + else: + return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']] + +def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.List[str]: runner = detect_ninja() if runner is None: raise MesonException('Cannot find ninja.') @@ -50,57 +141,100 @@ def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path): cmd = [runner, '-C', builddir.as_posix()] + if options.targets: + intro_data = parse_introspect_data(builddir) + for t in options.targets: + cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data)) + if options.clean: + cmd.append('clean') + # If the value is set to < 1 then don't set anything, which let's # ninja/samu decide what to do. if options.jobs > 0: cmd.extend(['-j', str(options.jobs)]) if options.load_average > 0: cmd.extend(['-l', str(options.load_average)]) + if options.verbose: - cmd.append('-v') - if options.clean: - cmd.append('clean') + cmd.append('--verbose') + + cmd += options.ninja_args return cmd -def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path): +def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str: + intro_target = get_target_from_intro_data(target, builddir, introspect_data) + + assert intro_target['type'] != 'run', 'Should not reach here: `run` targets must be handle above' + + # Normalize project name + # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe + target_name = re.sub('[\%\$\@\;\.\(\)\']', '_', intro_target['id']) + rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent + if rel_path != '.': + target_name = str(rel_path / target_name) + return target_name + +def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.List[str]: slns = list(builddir.glob('*.sln')) assert len(slns) == 1, 'More than one solution in a project?' - sln = slns[0] - cmd = ['msbuild', str(sln.resolve())] - # In msbuild `-m` with no number means "detect cpus", the default is `-m1` + cmd = ['msbuild'] + + if options.targets: + intro_data = parse_introspect_data(builddir) + has_run_target = any(map( + lambda t: + get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] == 'run', + options.targets + )) + + if has_run_target: + # `run` target can't be used the same way as other targets on `vs` backend. + # They are defined as disabled projects, which can't be invoked as `.sln` + # target and have to be invoked directly as project instead. 
+ # Issue: https://github.com/microsoft/msbuild/issues/4772 + + if len(options.targets) > 1: + raise MesonException('Only one target may be specified when `run` target type is used on this backend.') + intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data) + proj_dir = Path(intro_target['filename'][0]).parent + proj = proj_dir/'{}.vcxproj'.format(intro_target['id']) + cmd += [str(proj.resolve())] + else: + cmd += [str(sln.resolve())] + cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets]) + else: + cmd += [str(sln.resolve())] + + if options.clean: + cmd.extend(['-target:Clean']) + + # In msbuild `-maxCpuCount` with no number means "detect cpus", the default is `-maxCpuCount:1` if options.jobs > 0: - cmd.append('-m{}'.format(options.jobs)) + cmd.append('-maxCpuCount:{}'.format(options.jobs)) else: - cmd.append('-m') + cmd.append('-maxCpuCount') if options.load_average: mlog.warning('Msbuild does not have a load-average switch, ignoring.') + if not options.verbose: - cmd.append('/v:minimal') - if options.clean: - cmd.append('/t:Clean') + cmd.append('-verbosity:minimal') + + cmd += options.vs_args return cmd def add_arguments(parser: 'argparse.ArgumentParser') -> None: """Add compile specific arguments.""" parser.add_argument( - '-j', '--jobs', - action='store', - default=0, - type=int, - help='The number of worker jobs to run (if supported). If the value is less than 1 the build program will guess.' - ) - parser.add_argument( - '-l', '--load-average', - action='store', - default=0, - type=int, - help='The system load average to try to maintain (if supported)' - ) + 'targets', + metavar='TARGET', + nargs='*', + default=None, + help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].') parser.add_argument( '--clean', action='store_true', @@ -115,6 +249,20 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None: help='The directory containing build files to be built.' ) parser.add_argument( + '-j', '--jobs', + action='store', + default=0, + type=int, + help='The number of worker jobs to run (if supported). If the value is less than 1 the build program will guess.' + ) + parser.add_argument( + '-l', '--load-average', + action='store', + default=0, + type=int, + help='The system load average to try to maintain (if supported).' + ) + parser.add_argument( '--verbose', action='store_true', help='Show more verbose output.' @@ -138,13 +286,14 @@ def run(options: 'argparse.Namespace') -> int: cmd = [] # type: T.List[str] + if options.targets and options.clean: + raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously') + backend = get_backend_from_coredata(bdir) if backend == 'ninja': cmd = get_parsed_args_ninja(options, bdir) - cmd += options.ninja_args elif backend.startswith('vs'): cmd = get_parsed_args_vs(options, bdir) - cmd += options.vs_args else: # TODO: xcode? 
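# --- Editor's aside (illustrative only, not part of the patch): the TARGET
# strings accepted by `meson compile` have the form [PATH/]NAME[:TYPE] and are
# decomposed by the ParsedTargetName class added above, e.g.
#     ParsedTargetName('sub/foo:static_library')  ->  path='sub', name='foo', type='static_library'
#     ParsedTargetName('trivialprog')             ->  path='',    name='trivialprog', type=''
# An unknown TYPE suffix is rejected with a MesonException. ---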
raise MesonException( diff --git a/run_unittests.py b/run_unittests.py index 1af0d8b..8a12180 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4630,33 +4630,83 @@ recommended as it is not supported on some platforms''') def test_meson_compile(self): """Test the meson compile command.""" - prog = 'trivialprog' - if is_windows(): - prog = '{}.exe'.format(prog) + + def get_exe_name(basename: str) -> str: + if is_windows(): + return '{}.exe'.format(basename) + else: + return basename + + def get_shared_lib_name(basename: str) -> str: + if mesonbuild.environment.detect_msys2_arch(): + return 'lib{}.dll'.format(basename) + elif is_windows(): + return '{}.dll'.format(basename) + elif is_cygwin(): + return 'cyg{}.dll'.format(basename) + elif is_osx(): + return 'lib{}.dylib'.format(basename) + else: + return 'lib{}.so'.format(basename) + + def get_static_lib_name(basename: str) -> str: + return 'lib{}.a'.format(basename) + + # Base case (no targets or additional arguments) testdir = os.path.join(self.common_test_dir, '1 trivial') self.init(testdir) self._run([*self.meson_command, 'compile', '-C', self.builddir]) - # If compile worked then we should get a program - self.assertPathExists(os.path.join(self.builddir, prog)) + self.assertPathExists(os.path.join(self.builddir, get_exe_name('trivialprog'))) + + # `--clean` self._run([*self.meson_command, 'compile', '-C', self.builddir, '--clean']) - self.assertPathDoesNotExist(os.path.join(self.builddir, prog)) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog'))) + + # Target specified in a project with unique names + + testdir = os.path.join(self.common_test_dir, '6 linkshared') + self.init(testdir, extra_args=['--wipe']) + # Multiple targets and target type specified + self._run([*self.meson_command, 'compile', '-C', self.builddir, 'mylib', 'mycpplib:shared_library']) + # Check that we have a shared lib, but not an executable, i.e. 
check that target actually worked + self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mylib'))) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('prog'))) + self.assertPathExists(os.path.join(self.builddir, get_shared_lib_name('mycpplib'))) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('cppprog'))) + + # Target specified in a project with non unique names + + testdir = os.path.join(self.common_test_dir, '190 same target name') + self.init(testdir, extra_args=['--wipe']) + self._run([*self.meson_command, 'compile', '-C', self.builddir, './foo']) + self.assertPathExists(os.path.join(self.builddir, get_static_lib_name('foo'))) + self._run([*self.meson_command, 'compile', '-C', self.builddir, 'sub/foo']) + self.assertPathExists(os.path.join(self.builddir, 'sub', get_static_lib_name('foo'))) + + # run_target + + testdir = os.path.join(self.common_test_dir, '54 run target') + self.init(testdir, extra_args=['--wipe']) + out = self._run([*self.meson_command, 'compile', '-C', self.builddir, 'py3hi']) + self.assertIn('I am Python3.', out) # `--$BACKEND-args` + testdir = os.path.join(self.common_test_dir, '1 trivial') if self.backend is Backend.ninja: self.init(testdir, extra_args=['--wipe']) # Dry run - should not create a program self._run([*self.meson_command, 'compile', '-C', self.builddir, '--ninja-args=-n']) - self.assertPathDoesNotExist(os.path.join(self.builddir, prog)) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog'))) elif self.backend is Backend.vs: self.init(testdir, extra_args=['--wipe']) self._run([*self.meson_command, 'compile', '-C', self.builddir]) # Explicitly clean the target through msbuild interface - self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', prog))]) - self.assertPathDoesNotExist(os.path.join(self.builddir, prog)) + self._run([*self.meson_command, 'compile', '-C', self.builddir, '--vs-args=-t:{}:Clean'.format(re.sub(r'[\%\$\@\;\.\(\)\']', '_', get_exe_name('trivialprog')))]) + self.assertPathDoesNotExist(os.path.join(self.builddir, get_exe_name('trivialprog'))) def test_spurious_reconfigure_built_dep_file(self): testdir = os.path.join(self.unit_test_dir, '75 dep files') -- cgit v1.1 From 1c8731a10018e8ba1e6b30411a290ca50fa45d81 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Thu, 11 Jun 2020 16:04:50 -0400 Subject: envconfig: Add [constants] section in machine files Machine files already supports `+` operator as an implementation detail, since it's using eval(). Now make it an officially supported feature and add a way to define constants that are used while evaluating an entry value. --- docs/markdown/Machine-files.md | 73 ++++++++++++++++++++++++ docs/markdown/snippets/machine_file_constants.md | 20 +++++++ mesonbuild/coredata.py | 73 +++++++++++++++++++++--- mesonbuild/envconfig.py | 29 +--------- mesonbuild/environment.py | 8 +-- run_unittests.py | 34 ++++++++++- 6 files changed, 193 insertions(+), 44 deletions(-) create mode 100644 docs/markdown/snippets/machine_file_constants.md diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index 404c3d2..9011f79 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -8,10 +8,83 @@ environments](Native-environments.md). 
## Sections The following sections are allowed: +- constants - binaries - paths - properties +### constants + +*Since 0.55.0* + +String and list concatenation is supported using the `+` operator, joining paths +is supported using the `/` operator. +Entries defined in the `[constants]` section can be used in any other section +(they are always parsed first), entries in any other section can be used only +within that same section and only after it has been defined. + +```ini +[constants] +toolchain = '/toolchain' +common_flags = ['--sysroot=' + toolchain / 'sysroot'] + +[properties] +c_args = common_flags + ['-DSOMETHING'] +cpp_args = c_args + ['-DSOMETHING_ELSE'] + +[binaries] +c = toolchain / 'gcc' +``` + +This can be useful with cross file composition as well. A generic cross file +could be composed with a platform specific file where constants are defined: +```ini +# aarch64.ini +[constants] +arch = 'aarch64-linux-gnu' +``` + +```ini +# cross.ini +[binaries] +c = arch + '-gcc' +cpp = arch + '-g++' +strip = arch + '-strip' +pkgconfig = arch + '-pkg-config' +... +``` + +This can be used as `meson setup --cross-file aarch64.ini --cross-file cross.ini builddir`. + +Note that file composition happens before the parsing of values. The example +below results in `b` being `'HelloWorld'`: +```ini +# file1.ini: +[constants] +a = 'Foo' +b = a + 'World' +``` + +```ini +#file2.ini: +[constants] +a = 'Hello' +``` + +The example below results in an error when file1.ini is included before file2.ini +because `b` would be defined before `a`: +```ini +# file1.ini: +[constants] +b = a + 'World' +``` + +```ini +#file2.ini: +[constants] +a = 'Hello' +``` + ### Binaries The binaries section contains a list of binaries. These can be used diff --git a/docs/markdown/snippets/machine_file_constants.md b/docs/markdown/snippets/machine_file_constants.md new file mode 100644 index 0000000..84b0848 --- /dev/null +++ b/docs/markdown/snippets/machine_file_constants.md @@ -0,0 +1,20 @@ +## Machine file constants + +Native and cross files now support string and list concatenation using the `+` +operator, and joining paths using the `/` operator. +Entries defined in the `[constants]` section can be used in any other section. +An entry defined in any other section can be used only within that same section and only +after it has been defined. + +```ini +[constants] +toolchain = '/toolchain' +common_flags = ['--sysroot=' + toolchain + '/sysroot'] + +[properties] +c_args = common_flags + ['-DSOMETHING'] +cpp_args = c_args + ['-DSOMETHING_ELSE'] + +[binaries] +c = toolchain + '/gcc' +``` diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 94f977f..329c333 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from . import mlog +from . import mlog, mparser import pickle, os, uuid import sys from itertools import chain @@ -229,14 +229,6 @@ class UserFeatureOption(UserComboOption): def is_auto(self): return self.value == 'auto' - -def load_configs(filenames: T.List[str]) -> configparser.ConfigParser: - """Load configuration files from a named subdirectory.""" - config = configparser.ConfigParser(interpolation=None) - config.read(filenames) - return config - - if T.TYPE_CHECKING: CacheKeyType = T.Tuple[T.Tuple[T.Any, ...], ...] SubCacheKeyType = T.Tuple[T.Any, ...] 
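To make the `[constants]` semantics documented in Machine-files.md above concrete before the parser change below: this is an editor's sketch in plain Python (not Meson code), where `+` concatenates strings and lists and `/` is modelled as a path join. The resulting values match what the new `test_cross_file_constants` unit test further down asserts.

```python
# Illustrative sketch only: plain-Python rendering of the machine-file
# expressions, with '/' modelled as os.path.join.
import os

toolchain = '/toolchain/'
common_flags = ['--sysroot=' + os.path.join(toolchain, 'sysroot')]
c_args = common_flags + ['-DSOMETHING']
cpp_args = c_args + ['-DSOMETHING_ELSE']
c = os.path.join(toolchain, 'gcc')

assert c == '/toolchain/gcc'
assert c_args == ['--sysroot=/toolchain/sysroot', '-DSOMETHING']
assert cpp_args == ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE']
```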
@@ -879,6 +871,69 @@ class CmdLineFileParser(configparser.ConfigParser): # storing subproject options like "subproject:option=value" super().__init__(delimiters=['='], interpolation=None) +class MachineFileParser(): + def __init__(self, filenames: T.List[str]): + self.parser = CmdLineFileParser() + self.constants = {'True': True, 'False': False} + self.sections = {} + + self.parser.read(filenames) + + # Parse [constants] first so they can be used in other sections + if self.parser.has_section('constants'): + self.constants.update(self._parse_section('constants')) + + for s in self.parser.sections(): + if s == 'constants': + continue + self.sections[s] = self._parse_section(s) + + def _parse_section(self, s): + self.scope = self.constants.copy() + section = {} + for entry, value in self.parser.items(s): + if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry: + raise EnvironmentException('Malformed variable name {!r} in machine file.'.format(entry)) + # Windows paths... + value = value.replace('\\', '\\\\') + try: + ast = mparser.Parser(value, 'machinefile').parse() + res = self._evaluate_statement(ast.lines[0]) + except MesonException: + raise EnvironmentException('Malformed value in machine file variable {!r}.'.format(entry)) + except KeyError as e: + raise EnvironmentException('Undefined constant {!r} in machine file variable {!r}.'.format(e.args[0], entry)) + section[entry] = res + self.scope[entry] = res + return section + + def _evaluate_statement(self, node): + if isinstance(node, (mparser.StringNode)): + return node.value + elif isinstance(node, mparser.BooleanNode): + return node.value + elif isinstance(node, mparser.NumberNode): + return node.value + elif isinstance(node, mparser.ArrayNode): + return [self._evaluate_statement(arg) for arg in node.args.arguments] + elif isinstance(node, mparser.IdNode): + return self.scope[node.value] + elif isinstance(node, mparser.ArithmeticNode): + l = self._evaluate_statement(node.left) + r = self._evaluate_statement(node.right) + if node.operation == 'add': + if (isinstance(l, str) and isinstance(r, str)) or \ + (isinstance(l, list) and isinstance(r, list)): + return l + r + elif node.operation == 'div': + if isinstance(l, str) and isinstance(r, str): + return os.path.join(l, r) + raise EnvironmentException('Unsupported node type') + +def parse_machine_files(filenames): + parser = MachineFileParser(filenames) + return parser.sections + def get_cmd_line_file(build_dir): return os.path.join(build_dir, 'meson-private', 'cmd_line.txt') diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index 10464a2..219b62e 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import configparser, os, subprocess +import os, subprocess import typing as T from . import mesonlib @@ -83,33 +83,6 @@ CPU_FAMILES_64_BIT = [ 'x86_64', ] -class MesonConfigFile: - @classmethod - def from_config_parser(cls, parser: configparser.ConfigParser) -> T.Dict[str, T.Dict[str, T.Dict[str, str]]]: - out = {} - # This is a bit hackish at the moment. - for s in parser.sections(): - section = {} - for entry in parser[s]: - value = parser[s][entry] - # Windows paths... 
- value = value.replace('\\', '\\\\') - if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry: - raise EnvironmentException('Malformed variable name {} in cross file..'.format(entry)) - try: - res = eval(value, {'__builtins__': None}, {'true': True, 'false': False}) - except Exception: - raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry)) - - for i in (res if isinstance(res, list) else [res]): - if not isinstance(i, (str, int, bool)): - raise EnvironmentException('Malformed value in cross file variable {}.'.format(entry)) - - section[entry] = res - - out[s] = section - return out - def get_env_var_pair(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T.Tuple[T.Optional[str], T.Optional[str]]: diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index afc2a63..d1cbfe7 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -27,7 +27,7 @@ from .mesonlib import ( from . import mlog from .envconfig import ( - BinaryTable, Directories, MachineInfo, MesonConfigFile, + BinaryTable, Directories, MachineInfo, Properties, known_cpu_families, ) from . import compilers @@ -563,8 +563,7 @@ class Environment: ## Read in native file(s) to override build machine configuration if self.coredata.config_files is not None: - config = MesonConfigFile.from_config_parser( - coredata.load_configs(self.coredata.config_files)) + config = coredata.parse_machine_files(self.coredata.config_files) binaries.build = BinaryTable(config.get('binaries', {})) paths.build = Directories(**config.get('paths', {})) properties.build = Properties(config.get('properties', {})) @@ -572,8 +571,7 @@ class Environment: ## Read in cross file(s) to override host machine configuration if self.coredata.cross_files: - config = MesonConfigFile.from_config_parser( - coredata.load_configs(self.coredata.cross_files)) + config = coredata.parse_machine_files(self.coredata.cross_files) properties.host = Properties(config.get('properties', {})) binaries.host = BinaryTable(config.get('binaries', {})) if 'host_machine' in config: diff --git a/run_unittests.py b/run_unittests.py index 8a12180..6cc6302 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -3915,7 +3915,7 @@ recommended as it is not supported on some platforms''') with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile: crossfile.write(textwrap.dedent( '''[binaries] - pkgconfig = r'{0}' + pkgconfig = '{0}' [properties] @@ -3945,7 +3945,7 @@ recommended as it is not supported on some platforms''') pkgconfig = 'pkg-config' [properties] - pkg_config_libdir = [r'{0}'] + pkg_config_libdir = ['{0}'] [host_machine] system = 'linux' @@ -4969,6 +4969,36 @@ recommended as it is not supported on some platforms''') self.run_tests() self.run_target('coverage-xml') + def test_cross_file_constants(self): + with temp_filename() as crossfile1, temp_filename() as crossfile2: + with open(crossfile1, 'w') as f: + f.write(textwrap.dedent( + ''' + [constants] + compiler = 'gcc' + ''')) + with open(crossfile2, 'w') as f: + f.write(textwrap.dedent( + ''' + [constants] + toolchain = '/toolchain/' + common_flags = ['--sysroot=' + toolchain / 'sysroot'] + + [properties] + c_args = common_flags + ['-DSOMETHING'] + cpp_args = c_args + ['-DSOMETHING_ELSE'] + + [binaries] + c = toolchain / compiler + ''')) + + values = mesonbuild.coredata.parse_machine_files([crossfile1, crossfile2]) + self.assertEqual(values['binaries']['c'], '/toolchain/gcc') + self.assertEqual(values['properties']['c_args'], + 
['--sysroot=/toolchain/sysroot', '-DSOMETHING']) + self.assertEqual(values['properties']['cpp_args'], + ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE']) + class FailureTests(BasePlatformTests): ''' Tests that test failure conditions. Build files here should be dynamically -- cgit v1.1 From eb3b35ae024651743bc1e2b8e50566b6f53d95ae Mon Sep 17 00:00:00 2001 From: Mike Gorse Date: Fri, 26 Jun 2020 11:45:30 -0500 Subject: test: fix enum generation for GNOME Fixes: #7252 --- test cases/frameworks/7 gnome/mkenums/meson.build | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test cases/frameworks/7 gnome/mkenums/meson.build b/test cases/frameworks/7 gnome/mkenums/meson.build index af4a901..3d7adf0 100644 --- a/test cases/frameworks/7 gnome/mkenums/meson.build +++ b/test cases/frameworks/7 gnome/mkenums/meson.build @@ -126,6 +126,14 @@ enums5 = gnome.mkenums_simple('enums5', sources : 'meson-sample.h', install_header : true, decorator : 'MESON_EXPORT', header_prefix : '#include "meson-decls.h"') + +conf = configuration_data() +conf.set('ENUM_FILE', 'enums5.h') +main = configure_file( + input : 'main.c', + output : 'main5.c', + configuration : conf) + enumexe5 = executable('enumprog5', main, enums5, dependencies : gobj) # Generate template then use as input to mkenums -- cgit v1.1 From 4a371c97f422d0dc68fece2fe56d544ccd6558e9 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 17 Oct 2018 20:07:04 -0400 Subject: wrap: Apply patch even in VCS cases --- docs/markdown/Wrap-dependency-system-manual.md | 12 +++++++----- docs/markdown/snippets/wrap_patch.md | 5 +++++ mesonbuild/wrap/wrap.py | 2 +- run_unittests.py | 19 +++++++++++++++++++ test cases/unit/78 wrap-git/meson.build | 4 ++++ .../packagefiles/wrap_git_builddef/meson.build | 3 +++ .../78 wrap-git/subprojects/wrap_git_upstream/main.c | 4 ++++ 7 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 test cases/unit/78 wrap-git/meson.build create mode 100644 test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build create mode 100644 test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index f6c658f..e59a6be 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -70,11 +70,7 @@ revision = head ## Accepted configuration properties for wraps - `directory` - name of the subproject root directory, defaults to the name of the wrap. -### Specific to wrap-file -- `source_url` - download url to retrieve the wrap-file source archive -- `source_fallback_url` - fallback URL to be used when download from `source_url` fails *Since: 0.55.0* -- `source_filename` - filename of the downloaded source archive -- `source_hash` - sha256 checksum of the downloaded source archive +Since *0.55.0* those can be used in all wrap types, they were previously reserved to `wrap-file`: - `patch_url` - download url to retrieve an optional overlay archive - `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0* - `patch_filename` - filename of the downloaded overlay archive @@ -82,6 +78,12 @@ revision = head - `patch_directory` - *Since 0.55.0* Overlay directory, alternative to `patch_filename` in the case files are local instead of a downloaded archive. The directory must be placed in `subprojects/packagefiles`. 
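Because the `patch_*` keys above now apply to every wrap type, a `wrap-git` entry can ship its Meson build files through `patch_directory`. The sketch below is an illustration only: the URL is a placeholder and the names mirror the `78 wrap-git` unit test added later in this patch.

```python
# Sketch only: compose a wrap-git file whose build definitions are overlaid
# from subprojects/packagefiles/wrap_git_builddef (placeholder URL).
import os
import textwrap

os.makedirs('subprojects', exist_ok=True)
with open('subprojects/wrap_git.wrap', 'w') as f:
    f.write(textwrap.dedent('''\
        [wrap-git]
        url = https://example.com/wrap_git_upstream.git
        patch_directory = wrap_git_builddef
        revision = master
        '''))
```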
+ +### Specific to wrap-file +- `source_url` - download url to retrieve the wrap-file source archive +- `source_fallback_url` - fallback URL to be used when download from `source_url` fails *Since: 0.55.0* +- `source_filename` - filename of the downloaded source archive +- `source_hash` - sha256 checksum of the downloaded source archive - `lead_directory_missing` - for `wrap-file` create the leading directory name. Needed when the source file does not have a leading directory. diff --git a/docs/markdown/snippets/wrap_patch.md b/docs/markdown/snippets/wrap_patch.md index d5a1f5f..ae66bbd 100644 --- a/docs/markdown/snippets/wrap_patch.md +++ b/docs/markdown/snippets/wrap_patch.md @@ -12,3 +12,8 @@ case overlay files are local. Every files in that directory, and subdirectories, will be copied to the subproject directory. This can be used for example to add `meson.build` files to a project not using Meson build system upstream. The patch directory must be placed in `subprojects/packagefiles` directory. + +## Patch on all wrap types + +`patch_*` keys are not limited to `wrap-file` any more, they can be specified for +all wrap types. diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 5afbe8f..689fb4f 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -192,6 +192,7 @@ class Resolver: self.get_svn() else: raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type)) + self.apply_patch() # A meson.build or CMakeLists.txt file is required in the directory if method == 'meson' and not os.path.exists(meson_file): @@ -251,7 +252,6 @@ class Resolver: os.mkdir(self.dirname) extract_dir = self.dirname shutil.unpack_archive(path, extract_dir) - self.apply_patch() def get_git(self) -> None: if not GIT: diff --git a/run_unittests.py b/run_unittests.py index 6cc6302..a02284c 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4999,6 +4999,25 @@ recommended as it is not supported on some platforms''') self.assertEqual(values['properties']['cpp_args'], ['--sysroot=/toolchain/sysroot', '-DSOMETHING', '-DSOMETHING_ELSE']) + @unittest.skipIf(is_windows(), 'Directory cleanup fails for some reason') + def test_wrap_git(self): + with tempfile.TemporaryDirectory() as tmpdir: + srcdir = os.path.join(tmpdir, 'src') + shutil.copytree(os.path.join(self.unit_test_dir, '78 wrap-git'), srcdir) + upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream') + upstream_uri = Path(upstream).as_uri() + _git_init(upstream) + with open(os.path.join(srcdir, 'subprojects', 'wrap_git.wrap'), 'w') as f: + f.write(textwrap.dedent(''' + [wrap-git] + url = {} + patch_directory = wrap_git_builddef + revision = master + '''.format(upstream_uri))) + self.init(srcdir) + self.build() + self.run_tests() + class FailureTests(BasePlatformTests): ''' Tests that test failure conditions. 
Build files here should be dynamically diff --git a/test cases/unit/78 wrap-git/meson.build b/test cases/unit/78 wrap-git/meson.build new file mode 100644 index 0000000..b0af30a --- /dev/null +++ b/test cases/unit/78 wrap-git/meson.build @@ -0,0 +1,4 @@ +project('test-wrap-git') + +exe = subproject('wrap_git').get_variable('exe') +test('test1', exe) diff --git a/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build b/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build new file mode 100644 index 0000000..2570f77 --- /dev/null +++ b/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build @@ -0,0 +1,3 @@ +project('foo', 'c') + +exe = executable('app', 'main.c') diff --git a/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c b/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c new file mode 100644 index 0000000..8488f4e --- /dev/null +++ b/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c @@ -0,0 +1,4 @@ +int main(void) +{ + return 0; +} -- cgit v1.1 From 5acd8acd51859ab3189f43d6ba2fcd3e4b27518d Mon Sep 17 00:00:00 2001 From: georgev93 Date: Sun, 28 Jun 2020 21:59:17 -0400 Subject: Move mesonbuild/cmake/data/run_ctgt.py to mesonbuild/scripts/cmake_run_ctgt.py, as well as enclose everything in a run() function so it can be called by `meson --internal cmake_run_ctgt ...`. Also, include mesonbuild/cmake/data/ in the msi package. --- mesonbuild/cmake/data/run_ctgt.py | 96 --------------------------------- mesonbuild/cmake/interpreter.py | 8 ++- mesonbuild/scripts/cmake_run_ctgt.py | 100 +++++++++++++++++++++++++++++++++++ msi/createmsi.py | 2 + 4 files changed, 105 insertions(+), 101 deletions(-) delete mode 100755 mesonbuild/cmake/data/run_ctgt.py create mode 100755 mesonbuild/scripts/cmake_run_ctgt.py diff --git a/mesonbuild/cmake/data/run_ctgt.py b/mesonbuild/cmake/data/run_ctgt.py deleted file mode 100755 index 9d5d437..0000000 --- a/mesonbuild/cmake/data/run_ctgt.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python3 - -import argparse -import subprocess -import shutil -import os -import sys -from pathlib import Path - -commands = [[]] -SEPARATOR = ';;;' - -# Generate CMD parameters -parser = argparse.ArgumentParser(description='Wrapper for add_custom_command') -parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to') -parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files') -parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake') -parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" seperated list of commands'.format(SEPARATOR)) - -# Parse -args = parser.parse_args() - -dummy_target = None -if len(args.outputs) == 1 and len(args.original_outputs) == 0: - dummy_target = args.outputs[0] -elif len(args.outputs) != len(args.original_outputs): - print('Length of output list and original output list differ') - sys.exit(1) - -for i in args.commands: - if i == SEPARATOR: - commands += [[]] - continue - - i = i.replace('"', '') # Remove lefover quotes - commands[-1] += [i] - -# Execute -for i in commands: - # Skip empty lists - if not i: - continue - - cmd = [] - stdout = None - stderr = None - capture_file = '' - - for j in i: - if j in ['>', '>>']: - stdout = subprocess.PIPE - continue - elif j in ['&>', '&>>']: - stdout = subprocess.PIPE - stderr = subprocess.STDOUT - continue 
- - if stdout is not None or stderr is not None: - capture_file += j - else: - cmd += [j] - - try: - os.makedirs(args.directory, exist_ok=True) - - res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True) - if capture_file: - out_file = Path(args.directory) / capture_file - out_file.write_bytes(res.stdout) - except subprocess.CalledProcessError: - exit(1) - -if dummy_target: - with open(dummy_target, 'a'): - os.utime(dummy_target, None) - exit(0) - -# Copy outputs -zipped_outputs = zip(args.outputs, args.original_outputs) -for expected, generated in zipped_outputs: - do_copy = False - if not os.path.exists(expected): - if not os.path.exists(generated): - print('Unable to find generated file. This can cause the build to fail:') - print(generated) - do_copy = False - else: - do_copy = True - elif os.path.exists(generated): - if os.path.getmtime(generated) > os.path.getmtime(expected): - do_copy = True - - if do_copy: - if os.path.exists(expected): - os.remove(expected) - shutil.copyfile(generated, expected) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index d5ec0a7..2857527 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -22,7 +22,7 @@ from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCo from .fileapi import CMakeFileAPI from .executor import CMakeExecutor from .traceparser import CMakeTraceParser, CMakeGeneratorTarget -from .. import mlog +from .. import mlog, mesonlib from ..environment import Environment from ..mesonlib import MachineChoice, OrderedSet, version_compare from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header @@ -1059,9 +1059,6 @@ class CMakeInterpreter: root_cb.lines += [function('project', [self.project_name] + self.languages)] # Add the run script for custom commands - run_script = pkg_resources.resource_filename('mesonbuild', 'cmake/data/run_ctgt.py') - run_script_var = 'ctgt_run_script' - root_cb.lines += [assign(run_script_var, function('find_program', [[run_script]], {'required': True}))] # Add the targets processing = [] @@ -1243,7 +1240,8 @@ class CMakeInterpreter: # Generate the command list command = [] - command += [id_node(run_script_var)] + command += mesonlib.meson_command + command += ['--internal', 'cmake_run_ctgt'] command += ['-o', '@OUTPUT@'] if tgt.original_outputs: command += ['-O'] + tgt.original_outputs diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py new file mode 100755 index 0000000..5c0b31f --- /dev/null +++ b/mesonbuild/scripts/cmake_run_ctgt.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python3 + +import argparse +import subprocess +import shutil +import os +import sys +from pathlib import Path + +def run(argsv): + commands = [[]] + SEPARATOR = ';;;' + + # Generate CMD parameters + parser = argparse.ArgumentParser(description='Wrapper for add_custom_command') + parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to') + parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files') + parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake') + parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" seperated list of commands'.format(SEPARATOR)) + + # Parse + args = parser.parse_args(argsv) + + dummy_target = None + if len(args.outputs) == 1 and 
len(args.original_outputs) == 0: + dummy_target = args.outputs[0] + elif len(args.outputs) != len(args.original_outputs): + print('Length of output list and original output list differ') + sys.exit(1) + + for i in args.commands: + if i == SEPARATOR: + commands += [[]] + continue + + i = i.replace('"', '') # Remove lefover quotes + commands[-1] += [i] + + # Execute + for i in commands: + # Skip empty lists + if not i: + continue + + cmd = [] + stdout = None + stderr = None + capture_file = '' + + for j in i: + if j in ['>', '>>']: + stdout = subprocess.PIPE + continue + elif j in ['&>', '&>>']: + stdout = subprocess.PIPE + stderr = subprocess.STDOUT + continue + + if stdout is not None or stderr is not None: + capture_file += j + else: + cmd += [j] + + try: + os.makedirs(args.directory, exist_ok=True) + + res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=args.directory, check=True) + if capture_file: + out_file = Path(args.directory) / capture_file + out_file.write_bytes(res.stdout) + except subprocess.CalledProcessError: + sys.exit(1) + + if dummy_target: + with open(dummy_target, 'a'): + os.utime(dummy_target, None) + sys.exit(0) + + # Copy outputs + zipped_outputs = zip(args.outputs, args.original_outputs) + for expected, generated in zipped_outputs: + do_copy = False + if not os.path.exists(expected): + if not os.path.exists(generated): + print('Unable to find generated file. This can cause the build to fail:') + print(generated) + do_copy = False + else: + do_copy = True + elif os.path.exists(generated): + if os.path.getmtime(generated) > os.path.getmtime(expected): + do_copy = True + + if do_copy: + if os.path.exists(expected): + os.remove(expected) + shutil.copyfile(generated, expected) + +if __name__ == '__main__': + sys.run(sys.argv[1:]) diff --git a/msi/createmsi.py b/msi/createmsi.py index 4d03593..76cb520 100644 --- a/msi/createmsi.py +++ b/msi/createmsi.py @@ -154,6 +154,7 @@ class PackageGenerator: shutil.rmtree(sdir) main_stage, ninja_stage = self.staging_dirs dep_data_dir = 'mesonbuild/dependencies/data' + cmake_data_dir = 'mesonbuild/cmake/data' modules = self.get_all_modules_from_dir('mesonbuild/modules') modules += self.get_all_modules_from_dir('mesonbuild/scripts') modules += self.get_more_modules() @@ -176,6 +177,7 @@ class PackageGenerator: subprocess.check_call(pyinst_cmd) shutil.move(pyinstaller_tmpdir + '/meson', main_stage) shutil.copytree(dep_data_dir, main_stage + '/mesonbuild/dependencies/data') + shutil.copytree(cmake_data_dir, main_stage + '/mesonbuild/cmake/data') if not os.path.exists(os.path.join(main_stage, 'meson.exe')): sys.exit('Meson exe missing from staging dir.') os.mkdir(ninja_stage) -- cgit v1.1 From 9e0db0a05ed146b48dc56cc95a6a85b99f93fc8a Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Tue, 30 Jun 2020 18:33:22 +0300 Subject: Cleaned up Reference docs [skip ci] --- docs/markdown/Reference-manual.md | 956 +++++++++++++++++++------------------- 1 file changed, 470 insertions(+), 486 deletions(-) diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index ae49f9e..dad8c12 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -15,19 +15,19 @@ afterwards](#returned-objects). Adds the positional arguments to the compiler command line. This function has two keyword arguments: -- `language` specifies the language(s) that the arguments should be +- `language`: specifies the language(s) that the arguments should be applied to. 
If a list of languages is given, the arguments are added to each of the corresponding compiler command lines. Note that there is no way to remove an argument set in this way. If you have an argument that is only used in a subset of targets, you have to specify it in per-target flags. -- `native` is a boolean specifying whether the arguments should be +- `native` *(since 0.48.0)*: a boolean specifying whether the arguments should be applied to the native or cross compilation. If `true` the arguments will only be used for native compilations. If `false` the arguments will only be used in cross compilations. If omitted, the flags are added to native compilations if compiling natively and cross - compilations (only) when cross compiling. Available since 0.48.0 + compilations (only) when cross compiling. The arguments are used in all compiler invocations with the exception of compile tests, because you might need to run a compile test with @@ -70,12 +70,12 @@ endif Takes the following keyword arguments: -- `required` defaults to `true`, which means that if any of the languages -specified is not found, Meson will halt. Since *0.47.0* the value of a +- `required`: defaults to `true`, which means that if any of the languages +specified is not found, Meson will halt. *(since 0.47.0)* The value of a [`feature`](Build-options.md#features) option can also be passed. -- `native` if set to `true`, the language will be used to compile for the build - machine, if `false`, for the host machine. Since *0.54.0*. +- `native` *(since 0.54.0)*: if set to `true`, the language will be used to compile for the build + machine, if `false`, for the host machine. Returns `true` if all languages specified were found and `false` otherwise. @@ -113,16 +113,16 @@ Add a custom test setup that can be used to run the tests with a custom setup, for example under Valgrind. The keyword arguments are the following: -- `env` environment variables to set, such as `['NAME1=value1', +- `env`: environment variables to set, such as `['NAME1=value1', 'NAME2=value2']`, or an [`environment()` object](#environment-object) which allows more sophisticated - environment juggling. *Since 0.52.0* a dictionary is also accepted. -- `exe_wrapper` a list containing the wrapper command or script followed by the arguments to it -- `gdb` if `true`, the tests are also run under `gdb` -- `timeout_multiplier` a number to multiply the test timeout with -- `is_default` a bool to set whether this is the default test setup. + environment juggling. *(since 0.52.0)* A dictionary is also accepted. +- `exe_wrapper`: a list containing the wrapper command or script followed by the arguments to it +- `gdb`: if `true`, the tests are also run under `gdb` +- `timeout_multiplier`: a number to multiply the test timeout with +- `is_default` *(since 0.49.0)*: a bool to set whether this is the default test setup. If `true`, the setup will be used whenever `meson test` is run - without the `--setup` option. Since 0.49.0 + without the `--setup` option. To use the test setup, run `meson test --setup=*name*` inside the build dir. @@ -137,11 +137,11 @@ Note that all these options are also available while running the runtarget alias_target(target_name, dep1, ...) ``` -Since *0.52.0* +*(since 0.52.0)* This function creates a new top-level target. Like all top-level targets, this -integrates with the selected backend. For instance, with Ninja you can -run it as `ninja target_name`. This is a dummy target that does not execute any +integrates with the selected backend. 
For instance, with you can +run it as `meson compile target_name`. This is a dummy target that does not execute any command, but ensures that all dependencies are built. Dependencies can be any build target (e.g. return value of [executable()](#executable), custom_target(), etc) @@ -153,7 +153,7 @@ build target (e.g. return value of [executable()](#executable), custom_target(), Abort with an error message if `condition` evaluates to `false`. -*Since 0.53.0* `message` argument is optional and defaults to print the condition +*(since 0.53.0)* `message` argument is optional and defaults to print the condition statement instead. ### benchmark() @@ -169,7 +169,7 @@ run. The behavior of this function is identical to [`test()`](#test) except for: * benchmark() does not automatically add the `MALLOC_PERTURB_` environment variable *Note:* Prior to 0.52.0 benchmark would warn that `depends` and `priority` -were unsupported, this is incorrect +were unsupported, this is incorrect. ### both_libraries() @@ -177,6 +177,8 @@ were unsupported, this is incorrect buildtarget = both_libraries(library_name, list_of_sources, ...) ``` +*(since 0.46.0)* + Builds both a static and shared library with the given sources. Positional and keyword arguments are otherwise the same as for [`library`](#library). Source files will be compiled only once and @@ -190,8 +192,6 @@ shared library. In addition it supports the following extra methods: - `get_shared_lib()` returns the shared library build target - `get_static_lib()` returns the static library build target -*Added 0.46.0* - ### build_target() Creates a build target whose type can be set dynamically with the @@ -234,7 +234,7 @@ Creates an empty configuration object. You should add your configuration with [its method calls](#configuration-data-object) and finally use it in a call to `configure_file`. -Since *0.49.0* takes an optional dictionary as first argument. If +*(since 0.49.0)* Takes an optional dictionary as first argument. If provided, each key/value pair is added into the `configuration_data` as if `set()` method was called for each of them. @@ -251,7 +251,7 @@ When a [`configuration_data()`](#configuration_data) object is passed to the `configuration:` keyword argument, it takes a template file as the `input:` (optional) and produces the `output:` (required) by substituting values from the configuration data as detailed in [the -configuration file documentation](Configuration.md). Since *0.49.0* a +configuration file documentation](Configuration.md). *(since 0.49.0)* A dictionary can be passed instead of a [`configuration_data()`](#configuration_data) object. @@ -259,53 +259,53 @@ When a list of strings is passed to the `command:` keyword argument, it takes any source or configured file as the `input:` and assumes that the `output:` is produced when the specified command is run. -Since *0.47.0*, when the `copy:` keyword argument is set to `true`, +*(since 0.47.0)* When the `copy:` keyword argument is set to `true`, this function will copy the file provided in `input:` to a file in the build directory with the name `output:` in the current directory. These are all the supported keyword arguments: -- `capture` when this argument is set to true, Meson captures `stdout` - of the `command` and writes it to the target file specified as - `output`. Available since v0.41.0. 
-- `command` as explained above, if specified, Meson does not create +- `capture` *(since 0.41.0)*: when this argument is set to true, + Meson captures `stdout` of the `command` and writes it to the target + file specified as `output`. +- `command`: as explained above, if specified, Meson does not create the file itself but rather runs the specified command, which allows - you to do fully custom file generation. Since *0.52.0* the command can contain + you to do fully custom file generation. *(since 0.52.0)* The command can contain file objects and more than one file can be passed to the `input` keyword argument, see [`custom_target()`](#custom_target) for details about string substitutions. -- `copy` *(added 0.47.0)* as explained above, if specified Meson only +- `copy` *(since 0.47.0)*: as explained above, if specified Meson only copies the file from input to output. -- `depfile` *(added 0.52.0)* is a dependency file that the command can write listing +- `depfile` *(since 0.52.0)*: a dependency file that the command can write listing all the additional files this target depends on. A change in any one of these files triggers a reconfiguration. -- `format` *(added 0.46.0)* the format of defines. It defaults to `meson`, and so substitutes +- `format` *(since 0.46.0)*: the format of defines. It defaults to `meson`, and so substitutes `#mesondefine` statements and variables surrounded by `@` characters, you can also use `cmake` to replace `#cmakedefine` statements and variables with the `${variable}` syntax. Finally you can use `cmake@` in which case substitutions will apply on `#cmakedefine` statements and variables with the `@variable@` syntax. -- `input` the input file name. If it's not specified in configuration +- `input`: the input file name. If it's not specified in configuration mode, all the variables in the `configuration:` object (see above) are written to the `output:` file. -- `install` *(added 0.50.0)* When true, this generated file is installed during +- `install` *(since 0.50.0)*: when true, this generated file is installed during the install step, and `install_dir` must be set and not empty. When false, this generated file is not installed regardless of the value of `install_dir`. When omitted it defaults to true when `install_dir` is set and not empty, false otherwise. -- `install_dir` the subdirectory to install the generated file to +- `install_dir`: the subdirectory to install the generated file to (e.g. `share/myproject`), if omitted or given the value of empty string, the file is not installed. -- `install_mode` *(added 0.47.0)* specify the file mode in symbolic format +- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format and optionally the owner/uid and group/gid for the installed files. -- `output` the output file name (since v0.41.0, may contain - `@PLAINNAME@` or `@BASENAME@` substitutions). In configuration mode, +- `output`: the output file name. *(since 0.41.0)* may contain + `@PLAINNAME@` or `@BASENAME@` substitutions. In configuration mode, the permissions of the input file (if it is specified) are copied to the output file. -- `output_format` *(added 0.47.0)* the format of the output to generate when no input +- `output_format` *(since 0.47.0)*: the format of the output to generate when no input was specified. It defaults to `c`, in which case preprocessor directives will be prefixed with `#`, you can also use `nasm`, in which case the prefix will be `%`. 
-- `encoding` *(added v0.47.0)* set the file encoding for the input and output file, +- `encoding` *(since 0.47.0)*: set the file encoding for the input and output file, defaults to utf-8. The supported encodings are those of python3, see [standard-encodings](https://docs.python.org/3/library/codecs.html#standard-encodings). @@ -319,34 +319,30 @@ Create a custom top level build target. The only positional argument is the name of this target and the keyword arguments are the following. -- `build_by_default` *(added 0.38)* causes, when set to true, to +- `build_by_default` *(since 0.38.0)*: causes, when set to true, to have this target be built by default. This means it will be built when - `ninja` is called without any arguments or asked to build a target - like `ninja test` that depends on ninja's [default - target](https://ninja-build.org/manual.html#_default_target_statements) - set to `all` by meson. The same behavior applies for backends other - than `ninja`. The default value is `false`. - *(changed in 0.50)* if `build_by_default` is explicitly set to false, `install` + `meson compile` is called without any arguments. The default value is `false`. + *(since 0.50.0)* If `build_by_default` is explicitly set to false, `install` will no longer override it. If `build_by_default` is not set, `install` will still determine its default. -- `build_always` (deprecated) if `true` this target is always considered out of +- `build_always` **(deprecated)**: if `true` this target is always considered out of date and is rebuilt every time. Equivalent to setting both `build_always_stale` and `build_by_default` to true. -- `build_always_stale` *(added 0.47)* if `true` the target is always considered out of date. +- `build_always_stale` *(since 0.47.0)*: if `true` the target is always considered out of date. Useful for things such as build timestamps or revision control tags. The associated command is run even if the outputs are up to date. -- `capture`, there are some compilers that can't be told to write +- `capture`: there are some compilers that can't be told to write their output to a file but instead write it to standard output. When this argument is set to true, Meson captures `stdout` and writes it to the target file. Note that your command argument list may not contain `@OUTPUT@` when capture mode is active. -- `console` *(added 0.48)* keyword argument conflicts with `capture`, and is meant +- `console` *(since 0.48.0)*: keyword argument conflicts with `capture`, and is meant for commands that are resource-intensive and take a long time to finish. With the Ninja backend, setting this will add this target to [Ninja's `console` pool](https://ninja-build.org/manual.html#_the_literal_console_literal_pool), which has special properties such as not buffering stdout and serializing all targets in this pool. -- `command` command to run to create outputs from inputs. The command +- `command`: command to run to create outputs from inputs. The command may be strings or the return value of functions that return file-like objects such as [`find_program()`](#find_program), [`executable()`](#executable), [`configure_file()`](#configure_file), @@ -356,48 +352,47 @@ following. Note: always specify commands in array form `['commandname', '-arg1', '-arg2']` rather than as a string `'commandname -arg1 -arg2'` as the latter will *not* work. 
-- `depend_files` files ([`string`](#string-object), +- `depend_files`: files ([`string`](#string-object), [`files()`](#files), or [`configure_file()`](#configure_file)) that this target depends on but are not listed in the `command` keyword argument. Useful for adding regen dependencies. -- `depends` specifies that this target depends on the specified +- `depends`: specifies that this target depends on the specified target(s), even though it does not take any of them as a command line argument. This is meant for cases where you have a tool that e.g. does globbing internally. Usually you should just put the generated sources as inputs and Meson will set up all dependencies automatically. -- `depfile` is a dependency file that the command can write listing +- `depfile`: a dependency file that the command can write listing all the additional files this target depends on, for example a C compiler would list all the header files it included, and a change in any one of these files triggers a recompilation -- `input` list of source files. As of 0.41.0 the list will be flattened. -- `install` when true, this target is installed during the install step -- `install_dir` directory to install to -- `install_mode` *(added 0.47.0)* the file mode and optionally the +- `input`: list of source files. *(since 0.41.0)* the list is flattened. +- `install`: when true, this target is installed during the install step +- `install_dir`: directory to install to +- `install_mode` *(since 0.47.0)*: the file mode and optionally the owner/uid and group/gid -- `output` list of output files +- `output`: list of output files The list of strings passed to the `command` keyword argument accept the following special string substitutions: -- `@INPUT@` the full path to the input passed to `input`. If more than +- `@INPUT@`: the full path to the input passed to `input`. If more than one input is specified, all of them will be substituted as separate arguments only if the command uses `'@INPUT@'` as a standalone-argument. For instance, this would not work: `command : ['cp', './@INPUT@']`, but this would: `command : ['cp', '@INPUT@']`. -- `@OUTPUT@` the full path to the output passed to `output`. If more +- `@OUTPUT@`: the full path to the output passed to `output`. If more than one outputs are specified, the behavior is the same as `@INPUT@`. -- `@INPUT0@` `@INPUT1@` `...` the full path to the input with the specified array index in `input` -- `@OUTPUT0@` `@OUTPUT1@` `...` the full path to the output with the specified array index in `output` -- `@OUTDIR@` the full path to the directory where the output(s) must be written -- `@DEPFILE@` the full path to the dependency file passed to `depfile` +- `@INPUT0@` `@INPUT1@` `...`: the full path to the input with the specified array index in `input` +- `@OUTPUT0@` `@OUTPUT1@` `...`: the full path to the output with the specified array index in `output` +- `@OUTDIR@`: the full path to the directory where the output(s) must be written +- `@DEPFILE@`: the full path to the dependency file passed to `depfile` - `@PLAINNAME@`: the input filename, without a path - `@BASENAME@`: the input filename, with extension removed -- `@PRIVATE_DIR@`: path to a directory where the custom target must store all its intermediate files, available since 0.50.1 +- `@PRIVATE_DIR@` *(since 0.50.1)*: path to a directory where the custom target must store all its intermediate files. -The `depfile` keyword argument also accepts the `@BASENAME@` and `@PLAINNAME@` -substitutions. 
*(since 0.47)* +*(since 0.47.0)* The `depfile` keyword argument also accepts the `@BASENAME@` and `@PLAINNAME@` substitutions. The returned object also has methods that are documented in the [object methods section](#custom-target-object) below. @@ -414,22 +409,21 @@ internal to the current build. The main use case for this is in subprojects. This allows a subproject to easily specify how it should be used. This makes it interchangeable with the same dependency that is provided externally by the system. This function has the following -keyword arguments. - - - `compile_args`, compile arguments to use - - `dependencies`, other dependencies needed to use this dependency - - `include_directories`, the directories to add to header search path, - must be include_directories objects or, since 0.50.0, plain strings - - `link_args`, link arguments to use - - `link_with`, libraries to link against - - `link_whole`, libraries to link fully, same as [`executable`](#executable) - Since 0.46.0 - - `sources`, sources to add to targets (or generated header files - that should be built before sources including them are built) - - `version`, the version of this dependency, such as `1.2.3` - - `variables`, a dictionary of arbitrary strings, this is meant to be used - in subprojects where special variables would be provided via cmake or - pkg-config. Since 0.54.0 +keyword arguments: + +- `compile_args`: compile arguments to use. +- `dependencies`: other dependencies needed to use this dependency. +- `include_directories`: the directories to add to header search path, + must be include_directories objects or *(since 0.50.0)* plain strings +- `link_args`: link arguments to use. +- `link_with`: libraries to link against. +- `link_whole` *(since 0.46.0)*: libraries to link fully, same as [`executable`](#executable). +- `sources`: sources to add to targets (or generated header files + that should be built before sources including them are built) +- `version`: the version of this dependency, such as `1.2.3` +- `variables` *(since 0.54.0)*: a dictionary of arbitrary strings, this is meant to be used + in subprojects where special variables would be provided via cmake or + pkg-config. ### dependency() @@ -445,12 +439,12 @@ logic](Dependencies.md#dependencies-with-custom-lookup-functionality) are also supported. This function supports the following keyword arguments: -- `default_options` *(added 0.37.0)* an array of default option values +- `default_options` *(since 0.37.0)*: an array of default option values that override those set in the subproject's `meson_options.txt` (like `default_options` in [`project()`](#project), they only have effect when Meson is run for the first time, and command line arguments override any default options in build files) -- `fallback` specifies a subproject fallback to use in case the +- `fallback`: specifies a subproject fallback to use in case the dependency is not found in the system. The value is an array `['subproj_name', 'subproj_dep']` where the first value is the name of the subproject and the second is the variable name in that @@ -459,36 +453,36 @@ arguments: [`dependency()`](#dependency), etc. Note that this means the fallback dependency may be a not-found dependency, in which case the value of the `required:` kwarg will be obeyed. - *Since 0.54.0* `'subproj_dep'` argument can be omitted in the case the + *(since 0.54.0)* `'subproj_dep'` argument can be omitted in the case the subproject used `meson.override_dependency('dependency_name', subproj_dep)`. 
In that case, the `fallback` keyword argument can be a single string instead of a list of 2 strings. -- `language` *(added 0.42.0)* defines what language-specific +- `language` *(since 0.42.0)*: defines what language-specific dependency to find if it's available for multiple languages. -- `method` defines the way the dependency is detected, the default is +- `method`: defines the way the dependency is detected, the default is `auto` but can be overridden to be e.g. `qmake` for Qt development, and [different dependencies support different values]( Dependencies.md#dependencies-with-custom-lookup-functionality) for this (though `auto` will work on all of them) -- `native` if set to `true`, causes Meson to find the dependency on +- `native`: if set to `true`, causes Meson to find the dependency on the build machine system rather than the host system (i.e. where the cross compiled binary will run on), usually only needed if you build a tool to be used during compilation. -- `not_found_message` *(added 0.50.0)* is an optional string that will +- `not_found_message` *(since 0.50.0)*: an optional string that will be printed as a `message()` if the dependency was not found. -- `required`, when set to false, Meson will proceed with the build - even if the dependency is not found. Since *0.47.0* the value of a +- `required`: when set to false, Meson will proceed with the build + even if the dependency is not found. *(since 0.47.0)* The value of a [`feature`](Build-options.md#features) option can also be passed. -- `static` tells the dependency provider to try to get static +- `static`: tells the dependency provider to try to get static libraries instead of dynamic ones (note that this is not supported by all dependency backends) -- `version` specifies the required version, a string containing a +- `version` *(since 0.37.0)*: specifies the required version, a string containing a comparison operator followed by the version string, examples include - `>1.0.0`, `<=2.3.5` or `3.1.4` for exact matching. *(Added 0.37.0)* + `>1.0.0`, `<=2.3.5` or `3.1.4` for exact matching. You can also specify multiple restrictions by passing a list to this keyword argument, such as: `['>=3.14.0', '<=4.1.0']`. These requirements are never met if the version is unknown. -- `include_type` *(added 0.52.0)* is an enum flag, marking how the dependency +- `include_type` *(since 0.52.0)*: an enum flag, marking how the dependency flags should be converted. Supported values are `'preserve'`, `'system'` and `'non-system'`. System dependencies may be handled differently on some platforms, for instance, using `-isystem` instead of `-I`, where possible. @@ -499,9 +493,8 @@ arguments: keywords may also be accepted (e.g. `modules` specifies submodules to use for dependencies such as Qt5 or Boost. `components` allows the user to manually add CMake `COMPONENTS` for the `find_package` lookup) -- `disabler` if `true` and the dependency couldn't be found, return a - [disabler object](#disabler-object) instead of a not-found dependency. - *Since 0.49.0* +- `disabler` *(since 0.49.0)*: if `true` and the dependency couldn't be found, + returns a [disabler object](#disabler-object) instead of a not-found dependency. If dependency_name is `''`, the dependency is always not found. So with `required: false`, this always returns a dependency object for @@ -518,7 +511,9 @@ The returned object also has methods that are documented in the ### disabler() -Returns a [disabler object](#disabler-object). Added in 0.44.0. 
+*(since 0.44.0)*
+
+Returns a [disabler object](#disabler-object).

### error()

@@ -534,10 +529,11 @@ Print the argument string and halts the build process.
 environment_object environment(...)
```

-Returns an empty [environment variable
-object](#environment-object). Added in 0.35.0.
+*(since 0.35.0)*
+
+Returns an empty [environment variable object](#environment-object).

-Since *0.52.0* takes an optional dictionary as first argument. If
+*(since 0.52.0)* Takes an optional dictionary as first argument. If
provided, each key/value pair is added into the `environment_object`
as if `set()` method was called for each of them.

@@ -577,100 +573,99 @@ Executable supports the following keyword arguments. Note that just
like the positional arguments above, these keyword arguments can also
be passed to [shared and static libraries](#library).

-- `<lang>_pch` precompiled header file to use for the given language
-- `<lang>_args` compiler flags to use for the given language;
+- `<lang>_pch`: precompiled header file to use for the given language
+- `<lang>_args`: compiler flags to use for the given language;
 eg: `cpp_args` for C++
-- `build_by_default` causes, when set to true, to have this target be
- built by default, that is, when invoking plain `ninja`, the default
- value is true for all built target types, since 0.38.0
-- `build_rpath` a string to add to target's rpath definition in the
+- `build_by_default` *(since 0.38.0)*: causes this target to be built
+ by default when set to true. This means it will be built when
+ `meson compile` is called without any arguments. The default value is
+ `true` for all built target types.
+- `build_rpath`: a string to add to target's rpath definition in the
 build dir, but which will be removed on install
-- `dependencies` one or more objects created with
+- `dependencies`: one or more objects created with
 [`dependency`](#dependency) or [`find_library`](#compiler-object)
 (for external deps) or [`declare_dependency`](#declare_dependency)
 (for deps built by the project)
-- `extra_files` are not used for the build itself but are shown as
+- `extra_files`: not used for the build itself but shown as
 source files in IDEs that group files by targets (such as Visual
 Studio)
-- `gui_app` when set to true flags this target as a GUI application on
- platforms where this makes a difference (e.g. Windows)
-- `link_args` flags to use during linking. You can use UNIX-style
+- `gui_app`: when set to true, flags this target as a GUI application on
+ platforms where this makes a difference (e.g. Windows).
+- `link_args`: flags to use during linking. You can use UNIX-style
 flags here for all platforms.
-- `link_depends` strings, files, or custom targets the link step
+- `link_depends`: strings, files, or custom targets the link step
 depends on such as a symbol visibility map. The purpose is to
 automatically trigger a re-link (but not a re-compile) of the target
 when this file changes.
-- `link_language` since 0.51.0 (broken until 0.55.0) makes the linker for this
+- `link_language` *(since 0.51.0)* *(broken until 0.55.0)*: makes the linker for this
 target be for the specified language. It is generally unnecessary to set
 this, as meson will detect the right linker to use in most cases. There are
 only two cases where this is needed. One, your main function in an
 executable is not in the language meson picked, or two, you want to force
 a library to use only one ABI.
-- `link_whole` links all contents of the given static libraries
- whether they are used by not, equivalent to the
- `-Wl,--whole-archive` argument flag of GCC, available since 0.40.0.
- As of 0.41.0 if passed a list that list will be flattened. Starting
- from version 0.51.0 this argument also accepts outputs produced by
+- `link_whole` *(since 0.40.0)*: links all contents of the given static libraries
+ whether they are used or not, equivalent to the `-Wl,--whole-archive` argument flag of GCC.
+ *(since 0.41.0)* If passed a list that list will be flattened.
+ *(since 0.51.0)* This argument also accepts outputs produced by
 custom targets. The user must ensure that the output is a library in
 the correct format.
-- `link_with`, one or more shared or static libraries (built by this
- project) that this target should be linked with, If passed a list
- this list will be flattened as of 0.41.0. Starting with version
- 0.51.0, the arguments can also be custom targets. In this case Meson
- will assume that merely adding the output file in the linker command
+- `link_with`: one or more shared or static libraries (built by this
+ project) that this target should be linked with. *(since 0.41.0)* If passed a
+ list this list will be flattened. *(since 0.51.0)* The arguments can also be custom targets.
+ In this case Meson will assume that merely adding the output file to the linker command
 line is sufficient to make linking work. If this is not sufficient,
 then the build system writer must write all other steps manually.
-- `export_dynamic` when set to true causes the target's symbols to be
+- `export_dynamic` *(since 0.45.0)*: when set to true, causes the target's symbols to be
 dynamically exported, allowing modules built using the
 [`shared_module`](#shared_module) function to refer to functions,
 variables and other symbols defined in the executable itself. Implies
- the `implib` argument. Since 0.45.0
-- `implib` when set to true, an import library is generated for the
+ the `implib` argument.
+- `implib` *(since 0.42.0)*: when set to true, an import library is generated for the
 executable (the name of the import library is based on *exe_name*).
 Alternatively, when set to a string, that gives the base name for
 the import library. The import library is used when the returned
 build target object appears in `link_with:` elsewhere. Only has any
 effect on platforms where that is meaningful (e.g. Windows). Implies
- the `export_dynamic` argument. Since 0.42.0
-- `implicit_include_directories` is a boolean telling whether Meson
+ the `export_dynamic` argument.
+- `implicit_include_directories` *(since 0.42.0)*: a boolean telling whether Meson
 adds the current source and build directories to the include path,
- defaults to `true`, since 0.42.0
-- `include_directories` one or more objects created with the
- `include_directories` function, or, since 0.50.0, strings, which
+ defaults to `true`.
+- `include_directories`: one or more objects created with the
+ `include_directories` function, or *(since 0.50.0)* strings, which
 will be transparently expanded to include directory objects
-- `install`, when set to true, this executable should be installed, defaults to `false`
-- `install_dir` override install directory for this file. The value is
+- `install`: when set to true, this executable should be installed, defaults to `false`
+- `install_dir`: override install directory for this file. The value is
 relative to the `prefix` specified.
F.ex, if you want to install plugins into a subdir, you'd use something like this: `install_dir : get_option('libdir') / 'projectname-1.0'`. -- `install_mode` *(added 0.47.0)* specify the file mode in symbolic format +- `install_mode` *(since 0.47.0)*: specify the file mode in symbolic format and optionally the owner/uid and group/gid for the installed files. -- `install_rpath` a string to set the target's rpath to after install +- `install_rpath`: a string to set the target's rpath to after install (but *not* before that). On Windows, this argument has no effect. -- `objects` list of prebuilt object files (usually for third party +- `objects`: list of prebuilt object files (usually for third party products you don't have source to) that should be linked in this target, **never** use this for object files that you build yourself. -- `name_suffix` the string that will be used as the extension for the +- `name_suffix`: the string that will be used as the extension for the target by overriding the default. By default on Windows this is `exe` and on other platforms it is omitted. Set this to `[]`, or omit the keyword argument for the default behaviour. -- `override_options` takes an array of strings in the same format as +- `override_options` *(since 0.40.0)*: takes an array of strings in the same format as `project`'s `default_options` overriding the values of these options - for this target only, since 0.40.0. -- `gnu_symbol_visibility` specifies how symbols should be exported, see + for this target only. +- `gnu_symbol_visibility` *(since 0.48.0)*: specifies how symbols should be exported, see e.g [the GCC Wiki](https://gcc.gnu.org/wiki/Visibility) for more information. This value can either be an empty string or one of `default`, `internal`, `hidden`, `protected` or `inlineshidden`, which is the same as `hidden` but also includes things like C++ implicit constructors as specified in the GCC manual. Ignored on compilers that - do not support GNU visibility arguments. Available since 0.48.0. -- `d_import_dirs` list of directories to look in for string imports used + do not support GNU visibility arguments. +- `d_import_dirs`: list of directories to look in for string imports used in the D programming language -- `d_unittest`, when set to true, the D modules are compiled in debug mode -- `d_module_versions` list of module version identifiers set when compiling D sources -- `d_debug` list of module debug identifiers set when compiling D sources -- `pie` *(added 0.49.0)* build a position-independent executable -- `native`, is a boolean controlling whether the target is compiled for the +- `d_unittest`: when set to true, the D modules are compiled in debug mode +- `d_module_versions`: list of module version identifiers set when compiling D sources +- `d_debug`: list of module debug identifiers set when compiling D sources +- `pie` *(since 0.49.0)*: build a position-independent executable +- `native`: is a boolean controlling whether the target is compiled for the build or host machines. Defaults to false, building for the host machine. The list of `sources`, `objects`, and `dependencies` is always @@ -682,7 +677,7 @@ The returned object also has methods that are documented in the ### find_library() -This function is deprecated and in the 0.31.0 release it was moved to +*(since 0.31.0)* **(deprecated)** Use `find_library()` method of [the compiler object](#compiler-object) as obtained from `meson.get_compiler(lang)`. 
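+
+As a minimal sketch of the replacement API (the `m` library and the
+target name below are only placeholders):
+
+```meson
+cc = meson.get_compiler('c')
+# required : false keeps configuration going when the library is absent.
+m_dep = cc.find_library('m', required : false)
+executable('demo', 'demo.c', dependencies : m_dep)
+```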
@@ -696,12 +691,11 @@ This function is deprecated and in the 0.31.0 release it was moved to
 to be searched for in `PATH`, or a script in the current source
 directory.

-`program_name2` and later positional arguments are used as fallback
+*(since 0.37.0)* `program_name2` and later positional arguments are used as fallback
 strings to search for. This is meant to be used for cases where the
 program may have many alternative names, such as `foo` and
 `foo.py`. The function will check for the arguments one by one and the
-first one that is found is returned. Meson versions earlier than
-0.37.0 only accept one argument.
+first one that is found is returned.

Keyword arguments are the following:

@@ -709,21 +703,21 @@ Keyword arguments are the following:
 abort if no program can be found. If `required` is set to `false`,
 Meson will continue even if none of the programs can be found. You can
 then use the `.found()` method on the [returned object](#external-program-object) to check
- whether it was found or not. Since *0.47.0* the value of a
+ whether it was found or not. *(since 0.47.0)* The value of a
 [`feature`](Build-options.md#features) option can also be passed to
 the `required` keyword argument.

-- `native` *(since 0.43)* defines how this executable should be searched. By default
+- `native` *(since 0.43.0)*: defines how this executable should be searched. By default
 it is set to `false`, which causes Meson to first look for the
 executable in the cross file (when cross building) and if it is not
 defined there, then from the system. If set to `true`, the cross
 file is ignored and the program is only searched from the system.

-- `disabler` if `true` and the program couldn't be found, return a
+- `disabler` *(since 0.49.0)*: if `true` and the program couldn't be found, return a
 [disabler object](#disabler-object) instead of a not-found object.
- *Since 0.49.0*
+

-- `version` *(since 0.52.0)* Specifies the required version, see
+- `version` *(since 0.52.0)*: specifies the required version, see
 [`dependency()`](#dependency) for argument format. The version of the program
 is determined by running `program_name --version` command. If stdout is empty it
 falls back to stderr. If the output contains more text than simply a version
@@ -731,7 +725,7 @@ Keyword arguments are the following:
 If the output is more complicated than that, the version checking will
 have to be done manually using [`run_command()`](#run_command).

-- `dirs` *(since 0.53.0)* Extra list of absolute paths where to look for program
+- `dirs` *(since 0.53.0)*: extra list of absolute paths where to look for program
 names.

Meson will also autodetect scripts with a shebang line and run them
@@ -799,22 +793,22 @@ argument is the executable to use. It can either be a self-built
executable or one returned by find_program.

Keyword arguments are the following:

-- `arguments` a list of template strings that will be the command line
+- `arguments`: a list of template strings that will be the command line
 arguments passed to the executable
-- `depends` is an array of build targets that must be built before this
+- `depends` *(since 0.51.0)*: is an array of build targets that must be built before this
 generator can be run. This is used if you have a generator that calls
- a second executable that is built in this project.
+- `depfile`: is a template string pointing to a dependency file that a generator can write listing all the additional files this target depends on, for example a C compiler would list all the header files it included, and a change in any one of these files triggers a recompilation -- `output` a template string (or list of template strings) defining +- `output`: a template string (or list of template strings) defining how an output file name is (or multiple output names are) generated from a single source file name -- `capture` when this argument is set to true, Meson captures `stdout` - of the `executable` and writes it to the target file specified as - `output`. Available since v0.43.0. +- `capture` *(since 0.43.0)*: when this argument is set to true, Meson + captures `stdout` of the `executable` and writes it to the target file + specified as `output`. The returned object also has methods that are documented in the [object methods section](#generator-object) below. @@ -977,13 +971,13 @@ except Visual Studio). Installs files from the source tree that are listed as positional arguments. The following keyword arguments are supported: -- `install_dir` the absolute or relative path to the installation +- `install_dir`: the absolute or relative path to the installation directory. If this is a relative path, it is assumed to be relative to the prefix. - If omitted, the directory defaults to `{datadir}/{projectname}` *(added 0.45.0)*. + If omitted, the directory defaults to `{datadir}/{projectname}` *(since 0.45.0)*. -- `install_mode` specify the file mode in symbolic format and +- `install_mode`: specify the file mode in symbolic format and optionally the owner/uid and group/gid for the installed files. For example: @@ -995,10 +989,10 @@ arguments. The following keyword arguments are supported: To leave any of these three as the default, specify `false`. -- `rename` if specified renames each source file into corresponding +- `rename` *(since 0.46.0)*: if specified renames each source file into corresponding file from `rename` list. Nested paths are allowed and they are joined with `install_dir`. Length of `rename` list must be equal to - the number of sources. *(added 0.46.0)* + the number of sources. See [Installing](Installing.md) for more examples. @@ -1035,10 +1029,11 @@ This will install `common.h` and `kola.h` into `/{prefix}/cust/myproj`: install_headers('common.h', 'proj/kola.h', install_dir : 'cust', subdir : 'myproj') ``` -The `install_mode` argument can be used to specify the file mode in symbolic -format and optionally the owner/uid and group/gid for the installed files. -An example value could be `['rwxr-sr-x', 'root', 'root']`. -*(Added 0.47.0)*. +Accepts the following keywords: + +- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic + format and optionally the owner/uid and group/gid for the installed files. + An example value could be `['rwxr-sr-x', 'root', 'root']`. ### install_man() @@ -1051,12 +1046,13 @@ man directory during the install step. This directory can be overridden by specifying it with the `install_dir` keyword argument. -The `install_mode` argument can be used to specify the file mode in symbolic -format and optionally the owner/uid and group/gid for the installed files. -An example value could be `['rwxr-sr-x', 'root', 'root']`. -*(Added 0.47.0)*. 
+Accepts the following keywords: + +- `install_mode` *(since 0.47.0)*: can be used to specify the file mode in symbolic + format and optionally the owner/uid and group/gid for the installed files. + An example value could be `['rwxr-sr-x', 'root', 'root']`. -Since 0.49.0, [manpages are no longer compressed implicitly][install_man_49]. +*(since 0.49.0)* [manpages are no longer compressed implicitly][install_man_49]. [install_man_49]: https://mesonbuild.com/Release-notes-for-0-49-0.html#manpages-are-no-longer-compressed-implicitly @@ -1077,11 +1073,10 @@ The following keyword arguments are supported: - `exclude_directories`: a list of directory names that should not be installed. Names are interpreted as paths relative to the `subdir_name` location. - `install_dir`: the location to place the installed subdirectory. -- `install_mode`: the file mode in symbolic format and optionally - the owner/uid and group/gid for the installed files. *(Added 0.47.0)* -- `strip_directory`: install directory contents. `strip_directory=false` by default. +- `install_mode` *(since 0.47.0)*: the file mode in symbolic format and optionally + the owner/uid and group/gid for the installed files. +- `strip_directory` *(since 0.45.0)*: install directory contents. `strip_directory=false` by default. If `strip_directory=true` only the last component of the source path is used. - Since 0.45.0 For a given directory `foo`: ```text @@ -1126,7 +1121,9 @@ share/ bool is_disabler(var) ``` -Returns true if a variable is a disabler and false otherwise. Added in 0.52.0. +*(since 0.52.0)* + +Returns true if a variable is a disabler and false otherwise. ### is_variable() @@ -1153,6 +1150,8 @@ the jar with `java -jar file.jar`. string join_paths(string1, string2, ...) ``` +*(since 0.36.0)* + Joins the given strings into a file system path segment. For example `join_paths('foo', 'bar')` results in `foo/bar`. If any one of the individual segments is an absolute path, all segments before it are @@ -1161,9 +1160,7 @@ dropped. That means that `join_paths('foo', '/bar')` returns `/bar`. **Warning** Don't use `join_paths()` for sources in [`library`](#library) and [`executable`](#executable), you should use [`files`](#files) instead. -*Added 0.36.0* - -Since 0.49.0 using the`/` operator on strings is equivalent to calling +*(since 0.49.0)* Using the`/` operator on strings is equivalent to calling `join_paths`. ```meson @@ -1193,12 +1190,12 @@ library basis using the [dependency()](#dependency)) `static` keyword. The keyword arguments for this are the same as for [`executable`](#executable) with the following additions: -- `name_prefix` the string that will be used as the prefix for the +- `name_prefix`: the string that will be used as the prefix for the target output filename by overriding the default (only used for libraries). By default this is `lib` on all platforms and compilers, except for MSVC shared libraries where it is omitted to follow convention, and Cygwin shared libraries where it is `cyg`. -- `name_suffix` the string that will be used as the suffix for the +- `name_suffix`: the string that will be used as the suffix for the target output filename by overriding the default (see also: [executable()](#executable)). By default, for shared libraries this is `dylib` on macOS, `dll` on Windows, and `so` everywhere else. 
@@ -1206,7 +1203,7 @@ The keyword arguments for this are the same as for
 static libraries use the `lib` suffix, but we use `a` to avoid a
 potential name clash with shared libraries which also generate import
 libraries with a `lib` suffix.
-- `rust_crate_type` specifies the crate type for Rust
+- `rust_crate_type`: specifies the crate type for Rust
 libraries. Defaults to `dylib` for shared libraries and `rlib` for
 static libraries.

@@ -1224,7 +1221,7 @@ them for the default behaviour for each platform.

This function prints its argument to stdout.

-**Since 0.54.0** Can take more more than one argument that will be separated by
+*(since 0.54.0)* Can take more than one argument that will be separated by
space.

### warning()

``` meson
void warning(text)
```

-This function prints its argument to stdout prefixed with WARNING:.
+*(since 0.44.0)*

-*Added 0.44.0*
+This function prints its argument to stdout prefixed with WARNING:.

-**Since 0.54.0** Can take more more than one argument that will be separated by
+*(since 0.54.0)* Can take more than one argument that will be separated by
space.

### summary()

@@ -1247,6 +1244,8 @@ space.
 void summary(dictionary)
```

+*(since 0.53.0)*
+
This function is used to summarize build configuration at the end of the build
process. This function provides a way for projects (and subprojects) to report
this information in a clear way.

@@ -1262,10 +1261,10 @@ pair doesn't appear twice. All sections will be collected and printed at
the end of the configuration in the same order as they have been called.

Keyword arguments:
-- `section` title to group a set of key/value pairs.
-- `bool_yn` if set to true, all boolean values will be replaced by green YES
+- `section`: title to group a set of key/value pairs.
+- `bool_yn`: if set to true, all boolean values will be replaced by green YES
 or red NO.
-- `list_sep` *Since 0.54.0* string used to separate list values (e.g. `', '`).
+- `list_sep` *(since 0.54.0)*: string used to separate list values (e.g. `', '`).

Example:
```meson
project('My Project', version : '1.0')
@@ -1300,8 +1299,6 @@ My Project 1.0
 True

-*Added 0.53.0*
-
### project()

``` meson
@@ -1312,7 +1309,7 @@ The first argument to this function must be a string defining the name
of this project. It is followed by programming languages that the
project uses. Supported values for languages are `c`, `cpp` (for
`C++`), `cuda`, `d`, `objc`, `objcpp`, `fortran`, `java`, `cs` (for `C#`),
-`vala` and `rust`. Since version `0.40.0` the list of languages
+`vala` and `rust`. *(since 0.40.0)* The list of languages
is optional.

The project name can be any string you want, it's not used for
anything except descriptive purposes.

@@ -1324,40 +1321,40 @@ Library_.

Project supports the following keyword arguments.

- - `default_options` takes an array of strings. The strings are in the
- form `key=value` and have the same format as options to
- `meson configure`. For example to set the default project type you would
- set this: `default_options : ['buildtype=debugoptimized']`. Note
- that these settings are only used when running Meson for the first
- time. Global options such as `buildtype` can only be specified in
- the master project, settings in subprojects are ignored. Project
- specific options are used normally even in subprojects.
-
-
- - `license` takes a string or array of strings describing the
- license(s) the code is under. Usually this would be something like
- `license : 'GPL2+'`, but if the code has multiple licenses you can
- specify them as an array like this: `license : ['proprietary',
- 'GPL3']`.
Note that the text is informal and is only written to
- the dependency manifest. Meson does not do any license validation,
- you are responsible for verifying that you abide by all licensing
- terms. You can access the value in your Meson build files with
- `meson.project_license()`.
-
- - `meson_version` takes a string describing which Meson version the
- project requires. Usually something like `>=0.28.0`.
-
- - `subproject_dir` specifies the top level directory name that holds
- Meson subprojects. This is only meant as a compatibility option
- for existing code bases that house their embedded source code in a
- custom directory. All new projects should not set this but instead
- use the default value. It should be noted that this keyword
- argument is ignored inside subprojects. There can be only one
- subproject dir and it is set in the top level Meson file.
-
- - `version`, which is a free form string describing the version of
- this project. You can access the value in your Meson build files
- with `meson.project_version()`.
+- `default_options`: takes an array of strings. The strings are in the
+ form `key=value` and have the same format as options to
+ `meson configure`. For example to set the default project type you would
+ set this: `default_options : ['buildtype=debugoptimized']`. Note
+ that these settings are only used when running Meson for the first
+ time. Global options such as `buildtype` can only be specified in
+ the master project, settings in subprojects are ignored. Project
+ specific options are used normally even in subprojects.
+
+
+- `license`: takes a string or array of strings describing the
+ license(s) the code is under. Usually this would be something like
+ `license : 'GPL2+'`, but if the code has multiple licenses you can
+ specify them as an array like this: `license : ['proprietary',
+ 'GPL3']`. Note that the text is informal and is only written to
+ the dependency manifest. Meson does not do any license validation,
+ you are responsible for verifying that you abide by all licensing
+ terms. You can access the value in your Meson build files with
+ `meson.project_license()`.
+
+- `meson_version`: takes a string describing which Meson version the
+ project requires. Usually something like `>=0.28.0`.
+
+- `subproject_dir`: specifies the top level directory name that holds
+ Meson subprojects. This is only meant as a compatibility option
+ for existing code bases that house their embedded source code in a
+ custom directory. New projects should not set this but instead
+ use the default value. It should be noted that this keyword
+ argument is ignored inside subprojects. There can be only one
+ subproject dir and it is set in the top level Meson file.
+
+- `version`: a free form string describing the version of
+ this project. You can access the value in your Meson build files
+ with `meson.project_version()`.

### run_command()

``` meson
@@ -1379,15 +1376,13 @@ respectively.

This function supports the following keyword arguments:

- - `check` takes a boolean. If `true`, the exit status code of the command will
+ - `check` *(since 0.47.0)*: takes a boolean. If `true`, the exit status code of the command will
 be checked, and the configuration will fail if it is non-zero. The default is
 `false`.
- Since 0.47.0 - - `env` environment variables to set, such as `['NAME1=value1', + - `env` *(since 0.50.0)*: environment variables to set, such as `['NAME1=value1', 'NAME2=value2']`, or an [`environment()` object](#environment-object) which allows more sophisticated - environment juggling. *Since 0.52.0* a dictionary is also accepted. - Since 0.50.0 + environment juggling. *(since 0.52.0)* A dictionary is also accepted. See also [External commands](External-commands.md). @@ -1399,8 +1394,8 @@ runtarget run_target(target_name, ...) This function creates a new top-level target that runs a specified command with the specified arguments. Like all top-level targets, this -integrates with the selected backend. For instance, with Ninja you can -run it as `ninja target_name`. Note that a run target produces no +integrates with the selected backend. For instance, you can +run it as `meson compile target_name`. Note that a run target produces no output as far as Meson is concerned. It is only meant for tasks such as running a code formatter or flashing an external device's firmware with a built file. @@ -1428,8 +1423,7 @@ and subdirectory the target was defined in, respectively. Assigns a value to the given variable name. Calling `set_variable('foo', bar)` is equivalent to `foo = bar`. -**Note:** Prior to v0.46.1, the `value` parameter could not be an -array type, due to flattening of the function parameters. +*(since 0.46.1)* The `value` parameter can be an array type. ### shared_library() @@ -1441,7 +1435,7 @@ Builds a shared library with the given sources. Positional and keyword arguments are the same as for [`library`](#library) with the following extra keyword arguments. -- `soversion` a string specifying the soversion of this shared +- `soversion`: a string specifying the soversion of this shared library, such as `0`. On Linux and Windows this is used to set the soversion (or equivalent) in the filename. For example, if `soversion` is `4`, a Windows DLL will be called `foo-4.dll` and one @@ -1449,19 +1443,19 @@ extra keyword arguments. `libfoo.so.4`. If this is not specified, the first part of `version` is used instead (see below). For example, if `version` is `3.6.0` and `soversion` is not defined, it is set to `3`. -- `version` a string specifying the version of this shared library, +- `version`: a string specifying the version of this shared library, such as `1.1.0`. On Linux and OS X, this is used to set the shared library version in the filename, such as `libfoo.so.1.1.0` and `libfoo.1.1.0.dylib`. If this is not specified, `soversion` is used instead (see above). -- `darwin_versions` *(added 0.48)* an integer, string, or a list of +- `darwin_versions` *(since 0.48.0)*: an integer, string, or a list of versions to use for setting dylib `compatibility version` and `current version` on macOS. If a list is specified, it must be either zero, one, or two elements. If only one element is specified or if it's not a list, the specified value will be used for setting both compatibility version and current version. If unspecified, the `soversion` will be used as per the aforementioned rules. -- `vs_module_defs` a string, a File object, or Custom Target for a +- `vs_module_defs`: a string, a File object, or Custom Target for a Microsoft module definition file for controlling symbol exports, etc., on platforms where that is possible (e.g. Windows). @@ -1471,6 +1465,8 @@ extra keyword arguments. buildtarget shared_module(module_name, list_of_sources, ...) 
```

+*(since 0.37.0)*
+
Builds a shared module with the given sources. Positional and keyword
arguments are the same as for [`library`](#library).

@@ -1485,7 +1481,7 @@ you will need to set the `export_dynamic` argument of the executable to

Supports the following extra keyword arguments:

-- `vs_module_defs`, *(Added 0.52.0)*, a string, a File object, or
+- `vs_module_defs` *(since 0.52.0)*: a string, a File object, or
 Custom Target for a Microsoft module definition file for controlling
 symbol exports, etc., on platforms where that is possible (e.g. Windows).

@@ -1495,8 +1491,6 @@ platforms, notably OSX. Consider using a
[`shared_library`](#shared_library)
instead, if you need to both `dlopen()` and link with a library.

-*Added 0.37.0*
-
### static_library()

``` meson
@@ -1507,7 +1501,7 @@ Builds a static library with the given sources. Positional and keyword
arguments are otherwise the same as for [`library`](#library), but it
has one argument the others don't have:

- - `pic`, *(Added 0.36.0)* builds the library as positional
+ - `pic` *(since 0.36.0)*: builds the library as position
 independent code (so it can be linked into a shared library). This
 option has no effect on Windows and OS X since it doesn't make sense on
 Windows and PIC cannot be disabled on OS X.

@@ -1530,7 +1524,7 @@ and must only be executed once.

This function has one keyword argument.

- - `if_found` takes one or several dependency objects and will only
+ - `if_found`: takes one or several dependency objects and will only
 recurse in the subdir if they all return `true` when queried with
 `.found()`

@@ -1575,16 +1569,15 @@ example a subproject called `foo` must be located in
`${MESON_SOURCE_ROOT}/subprojects/foo`. Supports the following keyword
arguments:

- - `default_options` *(added 0.37.0)* an array of default option values
+ - `default_options` *(since 0.37.0)*: an array of default option values
 that override those set in the subproject's `meson_options.txt`
 (like `default_options` in `project`, they only have effect when
 Meson is run for the first time, and command line arguments override
- any default options in build files). *Since 0.54.0* `default_library`
+ any default options in build files). *(since 0.54.0)* `default_library`
 built-in option can also be overridden.
- - `version` keyword argument that works just like the one in
- `dependency`. It specifies what version the subproject should be,
- as an example `>=1.0.1`
- - `required` *(added 0.48.0)* By default, `required` is `true` and
+ - `version`: works just like the one in `dependency`.
+ It specifies what version the subproject should be, as an example `>=1.0.1`
+ - `required` *(since 0.48.0)*: By default, `required` is `true` and
 Meson will abort if the subproject could not be set up. You can set
 this to `false` and then use the `.found()` method on the
 [returned object](#subproject-object). You may also pass the value of a

@@ -1613,12 +1606,12 @@ object](#build-target-object) returned by
object](#external-program-object) returned by
[`find_program()`](#find_program).

-*Since 0.55.0* When cross compiling, if an exe_wrapper is needed and defined
+*(since 0.55.0)* When cross compiling, if an exe_wrapper is needed and defined,
the environment variable `MESON_EXE_WRAPPER` will be set to the string value of
that wrapper (implementation detail: using `mesonlib.join_args`). Test scripts
may use this to run cross built binaries.
If your test needs `MESON_EXE_WRAPPER` in cross build situations, it is your responsibility to
-return code 77 to tell the harness to report "skip"
+return code 77 to tell the harness to report "skip".

By default, environment variable
[`MALLOC_PERTURB_`](http://man7.org/linux/man-pages/man3/mallopt.3.html)
@@ -1640,53 +1633,52 @@ test(..., env: nomalloc, ...)

#### test() Keyword arguments

-- `args` arguments to pass to the executable
+- `args`: arguments to pass to the executable

-- `env` environment variables to set, such as `['NAME1=value1',
+- `env`: environment variables to set, such as `['NAME1=value1',
 'NAME2=value2']`, or an [`environment()`
 object](#environment-object) which allows more sophisticated
 environment juggling. *Since 0.52.0* a dictionary is also accepted.
+ environment juggling. *(since 0.52.0)* A dictionary is also accepted.

-- `is_parallel` when false, specifies that no other test must be
+- `is_parallel`: when false, specifies that no other test must be
 running at the same time as this test

-- `should_fail` when true the test is considered passed if the
+- `should_fail`: when true, the test is considered passed if the
 executable returns a non-zero return value (i.e. reports an error)

-- `suite` `'label'` (or list of labels `['label1', 'label2']`)
+- `suite`: `'label'` (or list of labels `['label1', 'label2']`)
 attached to this test. The suite name is qualified by a
 (sub)project name resulting in `(sub)project_name:label`. In the
 case of a list of strings, the suite names will be
 `(sub)project_name:label1`, `(sub)project_name:label2`, etc.

-- `timeout` the amount of seconds the test is allowed to run, a test
+- `timeout`: the number of seconds the test is allowed to run, a test
 that exceeds its time limit is always considered failed, defaults to
 30 seconds

-- `workdir` absolute path that will be used as the working directory
+- `workdir`: absolute path that will be used as the working directory
 for the test

-- `depends` specifies that this test depends on the specified
+- `depends` *(since 0.46.0)*: specifies that this test depends on the specified
 target(s), even though it does not take any of them as a command
 line argument. This is meant for cases where test finds those
 targets internally, e.g. plugins or globbing. Those targets are
 built before test is executed even if they have `build_by_default : false`.
- Since 0.46.0

-- `protocol` *(Since 0.50.0)* specifies how the test results are parsed and can
+- `protocol` *(since 0.50.0)*: specifies how the test results are parsed and can
 be one of `exitcode`, `tap`, or `gtest`. For more information about test
 harness protocol read [Unit Tests](Unit-tests.md). The following values are
 accepted:
 - `exitcode`: the executable's exit code is used by the test harness
- to record the outcome of the test)
- - `tap` ([Test Anything Protocol](https://www.testanything.org/))
- - `gtest`. *(Since 0.55.0)* for Google Tests.
+ to record the outcome of the test).
+ - `tap`: [Test Anything Protocol](https://www.testanything.org/).
+ - `gtest` *(since 0.55.0)*: for Google Tests.

-- `priority` specifies the priority of a test. Tests with a
+- `priority` *(since 0.52.0)*: specifies the priority of a test. Tests with a
 higher priority are *started* before tests with a lower priority.
 The starting order of tests with identical priorities is
 implementation-defined. The default priority is 0, negative numbers are
- permitted. Since 0.52.0
+ permitted.
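+
+As a quick sketch of how several of these keyword arguments combine
+(the executable, source file and suite names below are only
+illustrative):
+
+```meson
+exe = executable('unit_tests', 'tests/main.c')
+test('unit tests', exe,
+     args : ['--verbose'],
+     env : ['TEST_DATA_DIR=' + meson.current_source_dir()],
+     suite : 'fast',
+     timeout : 60,
+     is_parallel : true)
+```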
Defined tests can be run in a backend-agnostic way by calling
`meson test` inside the build dir, or by using backend-specific

@@ -1702,15 +1694,15 @@ This command detects revision control commit information at build
time and places it in the specified output file. This file is
guaranteed to be up to date on every build. Keywords are similar to
`custom_target`.

-- `command` string list with the command to execute, see
+- `command`: string list with the command to execute, see
 [`custom_target`](#custom_target) for details on how this command
 must be specified
-- `fallback` version number to use when no revision control
+- `fallback`: version number to use when no revision control
 information is present, such as when building from a release tarball
 (defaults to `meson.project_version()`)
-- `input` file to modify (e.g. `version.c.in`) (required)
-- `output` file to write the results to (e.g. `version.c`) (required)
-- `replace_string` string in the input file to substitute with the
+- `input`: file to modify (e.g. `version.c.in`) (required)
+- `output`: file to write the results to (e.g. `version.c`) (required)
+- `replace_string`: string in the input file to substitute with the
 commit information (defaults to `@VCS_TAG@`)

Meson will read the contents of `input`, substitute the
@@ -1738,31 +1730,30 @@ The `meson` object allows you to introspect various properties of the
system. This object is always mapped in the `meson` variable. It has
the following methods.

-- `add_dist_script(script_name, arg1, arg, ...)` causes the script
+- `add_dist_script(script_name, arg1, arg2, ...)` *(since 0.48.0)*: causes the script
 given as argument to run during `dist` operation after the
 distribution source has been generated but before it is
 archived. Note that this runs the script file that is in the
 _staging_ directory, not the one in the source directory. If the
 script file can not be found in the staging directory, it is a hard
 error. This command can only be invoked from the main project, calling
- it from a subproject is a hard error. Available since 0.48.0. Before
- 0.49.0, the function only accepted a single argument. Since 0.54.0
- the `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` environment variables
- are set when dist scripts are run.
- *(Since 0.55.0)* The output of `configure_file`, `files`, and `find_program`
+ it from a subproject is a hard error. *(since 0.49.0)* Accepts multiple arguments
+ for the script. *(since 0.54.0)* The `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT`
+ environment variables are set when dist scripts are run.
+ *(since 0.55.0)* The following may be passed: the output of `configure_file`, `files`, and `find_program`
 as well as strings.

-- `add_install_script(script_name, arg1, arg2, ...)` causes the script
+- `add_install_script(script_name, arg1, arg2, ...)`: causes the script
 given as an argument to be run during the install step, this script
 will have the environment variables `MESON_SOURCE_ROOT`,
 `MESON_BUILD_ROOT`, `MESON_INSTALL_PREFIX`,
 `MESON_INSTALL_DESTDIR_PREFIX`, and `MESONINTROSPECT` set.
 All positional arguments are passed as parameters.
- *(Since 0.55.0)* The output of `configure_file`, `files`, `find_program`,
+ *(since 0.55.0)* The following may be passed: the output of `configure_file`, `files`, `find_program`,
 `custom_target`, indexes of `custom_target`, `executable`, `library`,
 and other built targets as well as strings.
- *(added 0.54)* If `meson install` is called with the `--quiet` option, the + *(since 0.54.0)* If `meson install` is called with the `--quiet` option, the environment variable `MESON_INSTALL_QUIET` will be set. Meson uses the `DESTDIR` environment variable as set by the @@ -1787,23 +1778,23 @@ the following methods. shell would. If your script uses Python, `shlex.split()` is the easiest correct way to do this. -- `add_postconf_script(script_name, arg1, arg2, ...)` will run the +- `add_postconf_script(script_name, arg1, arg2, ...)`: runs the executable given as an argument after all project files have been generated. This script will have the environment variables `MESON_SOURCE_ROOT` and `MESON_BUILD_ROOT` set. - *(Since 0.55.0)* The output of `configure_file`, `files`, and `find_program` + *(since 0.55.0)* The output of `configure_file`, `files`, and `find_program` as well as strings. -- `backend()` *(added 0.37.0)* returns a string representing the +- `backend()` *(since 0.37.0)*: returns a string representing the current backend: `ninja`, `vs2010`, `vs2015`, `vs2017`, `vs2019`, or `xcode`. -- `build_root()` returns a string with the absolute path to the build +- `build_root()`: returns a string with the absolute path to the build root directory. Note: this function will return the build root of the parent project if called from a subproject, which is usually not what you want. Try using `current_build_dir()`. -- `source_root()` returns a string with the absolute path to the +- `source_root()`: returns a string with the absolute path to the source root directory. Note: you should use the `files()` function to refer to files in the root source directory instead of constructing paths manually with `meson.source_root()`. This @@ -1811,17 +1802,17 @@ the following methods. from a subproject, which is usually not what you want. Try using `current_source_dir()`. -- `current_build_dir()` returns a string with the absolute path to the +- `current_build_dir()`: returns a string with the absolute path to the current build directory. -- `current_source_dir()` returns a string to the current source +- `current_source_dir()`: returns a string to the current source directory. Note: **you do not need to use this function** when passing files from the current source directory to a function since that is the default. Also, you can use the `files()` function to refer to files in the current or any other source directory instead of constructing paths manually with `meson.current_source_dir()`. -- `get_compiler(language)` returns [an object describing a +- `get_compiler(language)`: returns [an object describing a compiler](#compiler-object), takes one positional argument which is the language to use. It also accepts one keyword argument, `native` which when set to true makes Meson return the compiler for the build @@ -1830,55 +1821,52 @@ the following methods. returns the "cross" compiler if we're currently cross-compiling and the "native" compiler if we're not. -- `get_cross_property(propname, fallback_value)` - *Consider get_external_property() instead*. Returns the given +- `get_cross_property(propname, fallback_value)`: + *Consider `get_external_property()` instead*. Returns the given property from a cross file, the optional fallback_value is returned if not cross compiling or the given property is not found. - `get_external_property(propname, fallback_value, native: true/false)` - *(added 0.54.0)* returns the given property from a native or cross file. 
+ *(since 0.54.0)*: returns the given property from a native or cross file. The optional fallback_value is returned if the given property is not found. The optional `native: true` forces retrieving a variable from the native file, even when cross-compiling. If `native: false` or not specified, variable is retrieved from the cross-file if cross-compiling, and from the native-file when not cross-compiling. -- `can_run_host_binaries()` returns true if the build machine can run +- `can_run_host_binaries()` *(since 0.55.0)*: returns true if the build machine can run binaries compiled for the host. This returns true unless you are cross compiling, need a helper to run host binaries, and don't have one. For example when cross compiling from Linux to Windows, one can use `wine` - as the helper. *New in 0.55.0* + as the helper. -- `has_exe_wrapper()` alias of `can_run_host_binaries` - *Deprecated since 0.55.0* +- `has_exe_wrapper()`: *(since 0.55.0)* **(deprecated)**. Use `can_run_host_binaries` instead. -- `install_dependency_manifest(output_name)` installs a manifest file +- `install_dependency_manifest(output_name)`: installs a manifest file containing a list of all subprojects, their versions and license files to the file name given as the argument. -- `is_cross_build()` returns `true` if the current build is a [cross +- `is_cross_build()`: returns `true` if the current build is a [cross build](Cross-compilation.md) and `false` otherwise. -- `is_subproject()` returns `true` if the current project is being +- `is_subproject()`: returns `true` if the current project is being built as a subproject of some other project and `false` otherwise. -- `is_unity()` returns `true` when doing a [unity +- `is_unity()`: returns `true` when doing a [unity build](Unity-builds.md) (multiple sources are combined before compilation to reduce build time) and `false` otherwise. -- `override_find_program(progname, program)` [*(Added - 0.46.0)*](Release-notes-for-0.46.0.md#can-override-find_program) +- `override_find_program(progname, program)` *(since 0.46.0)*: specifies that whenever `find_program` is used to find a program named `progname`, Meson should not look it up on the system but instead return `program`, which may either be the result of - `find_program`, `configure_file` or `executable`. *Since 0.55.0* if a version + `find_program`, `configure_file` or `executable`. *(since 0.55.0)* If a version check is passed to `find_program` for a program that has been overridden with an executable, the current project version is used. If `program` is an `executable`, it cannot be used during configure. -- `override_dependency(name, dep_object)` [*(Added - 0.54.0)*](Release-notes-for-0.54.0.md#override-dependency) +- `override_dependency(name, dep_object)` *(since 0.54.0)*: specifies that whenever `dependency(name, ...)` is used, Meson should not look it up on the system but instead return `dep_object`, which may either be the result of `dependency()` or `declare_dependency()`. It takes optional @@ -1886,16 +1874,16 @@ the following methods. project to retrieve the dependency without having to know the dependency variable name: `dependency(name, fallback : subproject_name)`. -- `project_version()` returns the version string specified in +- `project_version()`: returns the version string specified in `project` function call. -- `project_license()` returns the array of licenses specified in +- `project_license()`: returns the array of licenses specified in `project` function call. 
-- `project_name()` returns the project name specified in the `project`
+- `project_name()`: returns the project name specified in the `project`
 function call.

-- `version()` return a string with the version of Meson.
+- `version()`: returns a string with the version of Meson.

### `build_machine` object

Provides information about the build machine
doing the actual compilation. See
[Cross-compilation](Cross-compilation.md).
It has the following methods:

-- `cpu_family()` returns the CPU family name. [This
+- `cpu_family()`: returns the CPU family name. [This
 table](Reference-tables.md#cpu-families) contains all known CPU
 families. These are guaranteed to continue working.

-- `cpu()` returns a more specific CPU name, such as `i686`, `amd64`,
+- `cpu()`: returns a more specific CPU name, such as `i686`, `amd64`,
 etc.

-- `system()` returns the operating system name. [This
+- `system()`: returns the operating system name. [This
 table](Reference-tables.md#operating-system-names) lists all of the
 currently known operating system names; these are guaranteed to
 continue working.

-- `endian()` returns `big` on big-endian systems and `little` on
+- `endian()`: returns `big` on big-endian systems and `little` on
 little-endian systems.

Currently, these values are populated using
@@ -1964,58 +1952,58 @@ the cross-info file, `host_machine` values are returned instead.

All [strings](Syntax.md#strings) have the following methods. Strings
are immutable, all operations return their results as a new string.

-- `contains(string)` returns true if string contains the string
- specified as the argument
+- `contains(string)`: returns true if string contains the string
+ specified as the argument.

-- `endswith(string)` returns true if string ends with the string
- specified as the argument
+- `endswith(string)`: returns true if string ends with the string
+ specified as the argument.

-- `format()` formats text, see the [Syntax
- manual](Syntax.md#string-formatting) for usage info
+- `format()`: formats text, see the [Syntax
+ manual](Syntax.md#string-formatting) for usage info.

-- `join(list_of_strings)` is the opposite of split, for example
- `'.'.join(['a', 'b', 'c']` yields `'a.b.c'`
+- `join(list_of_strings)`: the opposite of split, for example
+ `'.'.join(['a', 'b', 'c'])` yields `'a.b.c'`.

-- `split(split_character)` splits the string at the specified
+- `split(split_character)`: splits the string at the specified
 character (or whitespace if not set) and returns the parts in an
- array
+ array.

-- `startswith(string)` returns true if string starts with the string
+- `startswith(string)`: returns true if string starts with the string
 specified as the argument

-- `strip()` removes whitespace at the beginning and end of the string
- *(added 0.43.0)* optionally can take one positional string argument,
- and all characters in that string will be stripped
+- `strip()`: removes whitespace at the beginning and end of the string.
+ *(since 0.43.0)* Optionally can take one positional string argument,
+ and all characters in that string will be stripped.

-- `to_int` returns the string converted to an integer (error if string
- is not a number)
+- `to_int()`: returns the string converted to an integer (error if string
+ is not a number).

-- `to_lower()` creates a lower case version of the string
+- `to_lower()`: creates a lower case version of the string.

-- `to_upper()` creates an upper case version of the string
+- `to_upper()`: creates an upper case version of the string.
-- `underscorify()` creates a string where every non-alphabetical - non-number character is replaced with `_` +- `underscorify()`: creates a string where every non-alphabetical + non-number character is replaced with `_`. -- `version_compare(comparison_string)` does semantic version +- `version_compare(comparison_string)`: does semantic version comparison, if `x = '1.2.3'` then `x.version_compare('>1.0.0')` - returns `true` + returns `true`. ### `Number` object [Numbers](Syntax.md#numbers) support these methods: - - `is_even()` returns true if the number is even - - `is_odd()` returns true if the number is odd - - `to_string()` returns the value of the number as a string. +- `is_even()`: returns true if the number is even +- `is_odd()`: returns true if the number is odd +- `to_string()`: returns the value of the number as a string. ### `boolean` object A [boolean](Syntax.md#booleans) object has two simple methods: -- `to_int()` as above, but returns either `1` or `0` +- `to_int()`: returns either `1` or `0`. -- `to_string()` returns the string `'true'` if the boolean is true or +- `to_string()`: returns the string `'true'` if the boolean is true or `'false'` otherwise. You can also pass it two strings as positional arguments to specify what to return for true/false. For instance, `bool.to_string('yes', 'no')` will return `yes` if the boolean is @@ -2025,27 +2013,29 @@ A [boolean](Syntax.md#booleans) object has two simple methods: The following methods are defined for all [arrays](Syntax.md#arrays): -- `contains(item)`, returns `true` if the array contains the object +- `contains(item)`: returns `true` if the array contains the object given as argument, `false` otherwise -- `get(index, fallback)`, returns the object at the given index, +- `get(index, fallback)`: returns the object at the given index, negative indices count from the back of the array, indexing out of - bounds returns the `fallback` value *(added 0.38.0)* or, if it is + bounds returns the `fallback` value *(since 0.38.0)* or, if it is not specified, causes a fatal error -- `length()`, the size of the array +- `length()`: the size of the array You can also iterate over arrays with the [`foreach` statement](Syntax.md#foreach-statements). ### `dictionary` object +*(since 0.47.0)* + The following methods are defined for all [dictionaries](Syntax.md#dictionaries): -- `has_key(key)` returns `true` if the dictionary contains the key +- `has_key(key)`: returns `true` if the dictionary contains the key given as argument, `false` otherwise -- `get(key, fallback)`, returns the value for the key given as first +- `get(key, fallback)`: returns the value for the key given as first argument if it is present in the dictionary, or the optional fallback value given as the second argument. If a single argument was given and the key was not found, causes a fatal error @@ -2053,9 +2043,7 @@ The following methods are defined for all [dictionaries](Syntax.md#dictionaries) You can also iterate over dictionaries with the [`foreach` statement](Syntax.md#foreach-statements). -Dictionaries are available since 0.47.0. - -Since 0.48.0 dictionaries can be added (e.g. `d1 = d2 + d3` and `d1 += d2`). +*(since 0.48.0)* Dictionaries can be added (e.g. `d1 = d2 + d3` and `d1 += d2`). Values from the second dictionary overrides values from the first. ## Returned objects @@ -2069,204 +2057,202 @@ This object is returned by for a given language and allows you to query its properties. 
It has the following methods:

-- `alignment(typename)` returns the alignment of the type specified in
+- `alignment(typename)`: returns the alignment of the type specified in
 the positional argument, you can specify external dependencies to
 use with `dependencies` keyword argument.

-- `cmd_array()` returns an array containing the command arguments for
+- `cmd_array()`: returns an array containing the command arguments for
 the current compiler.

-- `compiles(code)` returns true if the code fragment given in the
+- `compiles(code)`: returns true if the code fragment given in the
 positional argument compiles, you can specify external dependencies
 to use with `dependencies` keyword argument, `code` can be either a
 string containing source code or a `file` object pointing to the
 source code.

-- `compute_int(expr, ...')` computes the value of the given expression
+- `compute_int(expr, ...)`: computes the value of the given expression
 (as an example `1 + 2`). When cross compiling this is evaluated with
 an iterative algorithm, you can specify keyword arguments `low`
 (defaults to -1024), `high` (defaults to 1024) and `guess` to
 specify max and min values for the search and the value to try
 first.

-- `find_library(lib_name, ...)` tries to find the library specified in
+- `find_library(lib_name, ...)`: tries to find the library specified in
 the positional argument. The [result
 object](#external-library-object) can be used just like the return
 value of `dependency`. If the keyword argument `required` is false,
 Meson will proceed even if the library is not found. By default the
 library is searched for in the system library directory
 (e.g. /usr/lib). This can be overridden with the `dirs` keyword
- argument, which can be either a string or a list of strings. Since
- *0.47.0* the value of a [`feature`](Build-options.md#features)
+ argument, which can be either a string or a list of strings.
+ *(since 0.47.0)* The value of a [`feature`](Build-options.md#features)
 option can also be passed to the `required` keyword argument.
- *Since 0.49.0* if the keyword argument `disabler` is `true` and the
+ *(since 0.49.0)* If the keyword argument `disabler` is `true` and the
 dependency couldn't be found, return a [disabler object](#disabler-object)
- instead of a not-found dependency. *Since 0.50.0* the `has_headers` keyword
+ instead of a not-found dependency. *(since 0.50.0)* The `has_headers` keyword
 argument can be a list of header files that must be found as well, using
 `has_header()` method. All keyword arguments prefixed with `header_` will be
- passed down to `has_header()` method with the prefix removed. *Since 0.51.0*
- the `static` keyword (boolean) can be set to `true` to limit the search to
+ passed down to `has_header()` method with the prefix removed. *(since 0.51.0)*
+ The `static` keyword (boolean) can be set to `true` to limit the search to
 static libraries and `false` for dynamic/shared.

-- `first_supported_argument(list_of_strings)`, given a list of
+- `first_supported_argument(list_of_strings)`: given a list of
 strings, returns the first argument that passes the `has_argument`
 test or an empty array if none pass.

-- `first_supported_link_argument(list_of_strings)` *(added 0.46.0)*,
+- `first_supported_link_argument(list_of_strings)` *(since 0.46.0)*:
 given a list of strings, returns the first argument that passes the
 `has_link_argument` test or an empty array if none pass.
-- `get_define(definename)` returns the given preprocessor symbol's +- `get_define(definename)`: returns the given preprocessor symbol's value as a string or empty string if it is not defined. - Starting with 0.47.0, this method will concatenate string literals as + *(since 0.47.0)* This method will concatenate string literals as the compiler would. E.g. `"a" "b"` will become `"ab"`. -- `get_id()` returns a string identifying the compiler. For example, +- `get_id()`: returns a string identifying the compiler. For example, `gcc`, `msvc`, [and more](Reference-tables.md#compiler-ids). -- `get_argument_syntax()` *(new in 0.49.0)* returns a string identifying the type +- `get_argument_syntax()` *(since 0.49.0)*: returns a string identifying the type of arguments the compiler takes. Can be one of `gcc`, `msvc`, or an undefined string value. This method is useful for identifying compilers that are not gcc or msvc, but use the same argument syntax as one of those two compilers such as clang or icc, especially when they use different syntax on different operating systems. -- `get_linker_id()` *(added 0.53.0)* returns a string identifying the linker. +- `get_linker_id()` *(since 0.53.0)*: returns a string identifying the linker. For example, `ld.bfd`, `link`, [and more](Reference-tables.md#linker-ids). -- `get_supported_arguments(list_of_string)` *(added 0.43.0)* returns +- `get_supported_arguments(list_of_string)` *(since 0.43.0)*: returns an array containing only the arguments supported by the compiler, as if `has_argument` were called on them individually. -- `get_supported_link_arguments(list_of_string)` *(added 0.46.0)* returns +- `get_supported_link_arguments(list_of_string)` *(since 0.46.0)*: returns an array containing only the arguments supported by the linker, as if `has_link_argument` were called on them individually. -- `has_argument(argument_name)` returns true if the compiler accepts +- `has_argument(argument_name)`: returns true if the compiler accepts the specified command line argument, that is, can compile code without erroring out or printing a warning about an unknown flag. -- `has_link_argument(argument_name)` *(added 0.46.0)* returns true if +- `has_link_argument(argument_name)` *(since 0.46.0)*: returns true if the linker accepts the specified command line argument, that is, can compile and link code without erroring out or printing a warning about an unknown flag. Link arguments will be passed to the compiler, so should usually have the `-Wl,` prefix. On VisualStudio a `/link` argument will be prepended. -- `has_function(funcname)` returns true if the given function is +- `has_function(funcname)`: returns true if the given function is provided by the standard library or a library passed in with the `args` keyword, you can specify external dependencies to use with `dependencies` keyword argument. -- `check_header` *(added 0.47.0)* returns true if the specified header is *usable* with +- `check_header` *(since 0.47.0)*: returns true if the specified header is *usable* with the specified prefix, dependencies, and arguments. You can specify external dependencies to use with `dependencies` keyword argument and extra code to put above the header test with the `prefix` keyword. In order to look for headers in a specific directory you can use `args : '-I/extra/include/dir`, but this should only be used in exceptional cases for includes that can't be - detected via pkg-config and passed via `dependencies`. Since *0.50.0* the + detected via pkg-config and passed via `dependencies`. 
*(since 0.50.0)* The `required` keyword argument can be used to abort if the header cannot be found. -- `has_header` returns true if the specified header *exists*, and is +- `has_header`: returns true if the specified header *exists*, and is faster than `check_header()` since it only does a pre-processor check. You can specify external dependencies to use with `dependencies` keyword argument and extra code to put above the header test with the `prefix` keyword. In order to look for headers in a specific directory you can use `args : '-I/extra/include/dir`, but this should only be used in exceptional cases for includes that can't be - detected via pkg-config and passed via `dependencies`. Since *0.50.0* the + detected via pkg-config and passed via `dependencies`. *(since 0.50.0)* The `required` keyword argument can be used to abort if the header cannot be found. -- `has_header_symbol(headername, symbolname)` allows one to detect +- `has_header_symbol(headername, symbolname)`: detects whether a particular symbol (function, variable, #define, type definition, etc) is declared in the specified header, you can specify external dependencies to use with `dependencies` keyword - argument. Since *0.50.0* the `required` keyword argument can be used to abort - if the symbol cannot be found. + argument. *(since 0.50.0)* The `required` keyword argument can be + used to abort if the symbol cannot be found. -- `has_member(typename, membername)` takes two arguments, type name +- `has_member(typename, membername)`: takes two arguments, type name and member name and returns true if the type has the specified member, you can specify external dependencies to use with `dependencies` keyword argument. -- `has_members(typename, membername1, membername2, ...)` takes at +- `has_members(typename, membername1, membername2, ...)`: takes at least two arguments, type name and one or more member names, returns true if the type has all the specified members, you can specify external dependencies to use with `dependencies` keyword argument. -- `has_multi_arguments(arg1, arg2, arg3, ...)` is the same as +- `has_multi_arguments(arg1, arg2, arg3, ...)` *(since 0.37.0)*: the same as `has_argument` but takes multiple arguments and uses them all in a - single compiler invocation, available since 0.37.0. + single compiler invocation. -- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(added 0.46.0)* - is the same as `has_link_argument` but takes multiple arguments and +- `has_multi_link_arguments(arg1, arg2, arg3, ...)` *(since 0.46.0)*: + the same as `has_link_argument` but takes multiple arguments and uses them all in a single compiler invocation. -- `has_type(typename)` returns true if the specified token is a type, +- `has_type(typename)`: returns true if the specified token is a type, you can specify external dependencies to use with `dependencies` keyword argument. -- `links(code)` returns true if the code fragment given in the +- `links(code)`: returns true if the code fragment given in the positional argument compiles and links, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code. 
-- `run(code)` attempts to compile and execute the given code fragment, +- `run(code)`: attempts to compile and execute the given code fragment, returns a run result object, you can specify external dependencies to use with `dependencies` keyword argument, `code` can be either a string containing source code or a `file` object pointing to the source code. -- `symbols_have_underscore_prefix()` returns `true` if the C symbol - mangling is one underscore (`_`) prefixed to the symbol, available - since 0.37.0. +- `symbols_have_underscore_prefix()` *(since 0.37.0)*: returns `true` + if the C symbol mangling is one underscore (`_`) prefixed to the symbol. -- `sizeof(typename, ...)` returns the size of the given type +- `sizeof(typename, ...)`: returns the size of the given type (e.g. `'int'`) or -1 if the type is unknown, to add includes set them in the `prefix` keyword argument, you can specify external dependencies to use with `dependencies` keyword argument. -- `version()` returns the compiler's version number as a string. +- `version()`: returns the compiler's version number as a string. -- `has_function_attribute(name)` *(added in 0.48.0)* returns `true` if the +- `has_function_attribute(name)` *(since 0.48.0)*: returns `true` if the compiler supports the GNU style (`__attribute__(...)`) `name`. This is preferable to manual compile checks as it may be optimized for compilers that do not support such attributes. [This table](Reference-tables.md#gcc-__attribute__) lists all of the supported attributes. -- `get_supported_function_attributes(list_of_names)` *(added in 0.48.0)* +- `get_supported_function_attributes(list_of_names)` *(since 0.48.0)*: returns an array containing any names that are supported GCC style attributes. Equivalent to `has_function_attribute` was called on each of them individually. The following keyword arguments can be used: -- `args` can be used to pass a list of compiler arguments that are +- `args`: used to pass a list of compiler arguments that are required to find the header or symbol. For example, you might need to pass the include path `-Isome/path/to/header` if a header is not - in the default include path. In versions newer than 0.38.0 you - should use the `include_directories` keyword described below. You - may also want to pass a library name `-lfoo` for `has_function` to - check for a function. Supported by all methods except `get_id`, - `version`, and `find_library`. + in the default include path. *(since 0.38.0)* you should use the + `include_directories` keyword described below. You may also want to + pass a library name `-lfoo` for `has_function` to check for a function. + Supported by all methods except `get_id`, `version`, and `find_library`. -- `include_directories` specifies extra directories for header - searches. *(added 0.38.0)* +- `include_directories` *(since 0.38.0)*: specifies extra directories for + header searches. -- `name` the name to use for printing a message about the compiler +- `name`: the name to use for printing a message about the compiler check. Supported by the methods `compiles()`, `links()`, and `run()`. If this keyword argument is not passed to those methods, no message will be printed about the check. -- `no_builtin_args` when set to true, the compiler arguments controlled +- `no_builtin_args`: when set to true, the compiler arguments controlled by built-in configuration options are not added. 
-- `prefix` can be used to add #includes and other things that are
+- `prefix`: adds #includes and other things that are
   required for the symbol to be declared. System definitions should be
   passed via compiler args (eg: `_GNU_SOURCE` is often required for
   some symbols to be exposed on Linux, and it should be passed via
@@ -2297,15 +2283,15 @@ A build target is either an [executable](#executable),
 [both shared and static library](#both_libraries) or
 [shared module](#shared_module).

-- `extract_all_objects()` is same as `extract_objects` but returns all
-  object files generated by this target. Since 0.46.0 keyword argument
+- `extract_all_objects()`: is the same as `extract_objects` but returns all
+  object files generated by this target. *(since 0.46.0)* The keyword argument
   `recursive` must be set to `true` to also return objects passed to
   the `object` argument of this target. By default only objects built
   for this target are returned to maintain backward compatibility with
   previous versions. The default will eventually be changed to `true`
   in a future version.

-- `extract_objects(source1, source2, ...)` takes as its arguments
+- `extract_objects(source1, source2, ...)`: takes as its arguments
   a number of source files as [`string`](#string-object) or
   [`files()`](#files) and returns an opaque value representing the
   object files generated for those source files. This is typically used
   some source files with custom flags. To use the object file(s) in
   another build target, use the `objects:` keyword argument.

-- `full_path()` returns a full path pointing to the result target file.
+- `full_path()`: returns a full path pointing to the result target file.
   NOTE: In most cases using the object itself will do the same job as
   this and will also allow Meson to setup inter-target dependencies
   correctly. Please file a bug if that doesn't work for you.

-- `private_dir_include()` returns a opaque value that works like
+- `private_dir_include()`: returns an opaque value that works like
   `include_directories` but points to the private directory of this
   target, usually only needed if an another target needs to access
   some generated internal headers of this target

-- `name()` *Since 0.54.0*, returns the target name.
+- `name()` *(since 0.54.0)*: returns the target name.

 ### `configuration` data object

@@ -2334,20 +2320,19 @@ configuration values to be used for generating configuration files.
 A more in-depth description can be found in the [the configuration
 wiki page](Configuration.md) It has three methods:

-- `get(varname, default_value)` returns the value of `varname`, if the
+- `get(varname, default_value)`: returns the value of `varname`, if the
   value has not been set returns `default_value` if it is defined
-  *(added 0.38.0)* and errors out if not
+  *(since 0.38.0)* and errors out if not

-- `get_unquoted(varname, default_value)` returns the value of `varname`
-  but without surrounding double quotes (`"`). If the value has not been
-  set returns `default_value` if it is defined and errors out if not.
-  Available since 0.44.0
+- `get_unquoted(varname, default_value)` *(since 0.44.0)*: returns the value
+  of `varname` but without surrounding double quotes (`"`). If the value has
+  not been set returns `default_value` if it is defined and errors out if not.
-- `has(varname)`, returns `true` if the specified variable is set
+- `has(varname)`: returns `true` if the specified variable is set

-- `merge_from(other)` takes as argument a different configuration data
-  object and copies all entries from that object to the current
-  object, available since 0.42.0
+- `merge_from(other)` *(since 0.42.0)*: takes as argument a different
+  configuration data object and copies all entries from that object to
+  the current object.

 - `set(varname, value)`, sets a variable to a given value

@@ -2369,20 +2354,20 @@ cause a syntax error.
 This object is returned by [`custom_target`](#custom_target) and
 contains a target with the following methods:

-- `full_path()` returns a full path pointing to the result target file
+- `full_path()`: returns a full path pointing to the result target file
   NOTE: In most cases using the object itself will do the same job as
   this and will also allow Meson to setup inter-target dependencies
   correctly. Please file a bug if that doesn't work for you.
-  *Since 0.54.0* it can be also called on indexes objects:
+  *(since 0.54.0)* It can also be called on indexed objects:
   `custom_targets[i].full_path()`.

-- `[index]` returns an opaque object that references this target, and
+- `[index]`: returns an opaque object that references this target, and
   can be used as a source in other targets. When it is used as such it
   will make that target depend on this custom target, but the only
   source added will be the one that corresponds to the index of the
   custom target's output argument.

-- `to_list()` *Since 0.54.0*, returns a list of opaque objects that references
+- `to_list()` *(since 0.54.0)*: returns a list of opaque objects that reference
   this target, and can be used as a source in other targets. This can
   be used to iterate outputs with `foreach` loop.

@@ -2391,48 +2376,48 @@ contains a target with the following methods:
 This object is returned by [`dependency()`](#dependency) and contains
 an external dependency with the following methods:

- - `found()` which returns whether the dependency was found
+ - `found()`: returns whether the dependency was found.

- - `name()` *(Added 0.48.0)* returns the name of the dependency that was
+ - `name()` *(since 0.48.0)*: returns the name of the dependency that was
   searched. Returns `internal` for dependencies created with
   `declare_dependency()`.

- - `get_pkgconfig_variable(varname)` *(Added 0.36.0)* will get the
+ - `get_pkgconfig_variable(varname)` *(since 0.36.0)*: gets the
   pkg-config variable specified, or, if invoked on a non pkg-config
-   dependency, error out. *(Added 0.44.0)* You can also redefine a
+   dependency, error out. *(since 0.44.0)* You can also redefine a
   variable by passing a list to the `define_variable` parameter that
   can affect the retrieved variable: `['prefix', '/'])`.
-   *(Added 0.45.0)* A warning is issued if the variable is not defined,
+   *(since 0.45.0)* A warning is issued if the variable is not defined,
   unless a `default` parameter is specified.

- - `get_configtool_variable(varname)` *(Added 0.44.0)* will get the
+ - `get_configtool_variable(varname)` *(since 0.44.0)*: gets the
   command line argument from the config tool (with `--` prepended), or,
   if invoked on a non config-tool dependency, error out.

- - `type_name()` which returns a string describing the type of the
+ - `type_name()`: returns a string describing the type of the
   dependency, the most common values are `internal` for deps created
   with `declare_dependency()` and `pkgconfig` for system dependencies
   obtained with Pkg-config.
- - `version()` is the version number as a string, for example `1.2.8`.
+ - `version()`: the version number as a string, for example `1.2.8`.
   `unknown` if the dependency provider doesn't support determining the
   version.

- - `include_type()` returns whether the value set by the `include_type` kwarg
+ - `include_type()`: returns the value set by the `include_type` kwarg.

- - `as_system(value)` returns a copy of the dependency object, which has changed
+ - `as_system(value)`: returns a copy of the dependency object, which has changed
   the value of `include_type` to `value`. The `value` argument is optional and
   defaults to `'preserve'`.

  - `partial_dependency(compile_args : false, link_args : false, links
-   : false, includes : false, sources : false)` *(Added 0.46.0)* returns
+   : false, includes : false, sources : false)` *(since 0.46.0)*: returns
   a new dependency object with the same name, version, found status,
   type name, and methods as the object that called it. This new
   object will only inherit other attributes from its parent as
   controlled by keyword arguments.

   If the parent has any dependencies, those will be applied to the new
-  partial dependency with the same rules. So , given:
+  partial dependency with the same rules. So, given:

   ```meson
   dep1 = declare_dependency(compile_args : '-Werror=foo', link_with : 'libfoo')
@@ -2457,14 +2442,14 @@ an external dependency with the following methods:

  - `get_variable(cmake : str, pkgconfig : str, configtool : str,
    internal: str, default_value : str, pkgconfig_define : [str, str])`
-   *(Added in 0.51.0)* A generic variable getter method, which replaces the
+   *(since 0.51.0)*: a generic variable getter method, which replaces the
   get_*type*_variable methods. This allows one to get the variable
   from a dependency without knowing specifically how that dependency
   was found. If default_value is set and the value cannot be gotten
   from the object then default_value is returned, if it is not set
   then an error is raised.

-  *New in 0.54.0, the `internal` keyword*
+  *(since 0.54.0)* The `internal` keyword was added.

 ### `disabler` object

@@ -2474,7 +2459,7 @@ statement (function call, logical op, etc) they will cause the
 statement evaluation to immediately short circuit to return a disabler
 object. A disabler object has one method:

- - `found()`, always returns `false`
+- `found()`: always returns `false`.

 ### `external program` object

@@ -2482,15 +2467,14 @@ This object is returned by [`find_program()`](#find_program) and
 contains an external (i.e. not built as part of this project) program
 and has the following methods:

-- `found()` which returns whether the executable was found
+- `found()`: returns whether the executable was found.

-- `path()` which returns a string pointing to the script or executable
+- `path()`: *(since 0.55.0)* **(deprecated)** use `full_path()` instead.
+  Returns a string pointing to the script or executable.
   **NOTE:** You should not need to use this method. Passing the object
-  itself should work in all cases. For example: `run_command(obj, arg1, arg2)`
-  *Since 0.55.0* this method has been deprecated in favor of `full_path()` for
-  consistency with other returned objects.
+  itself should work in all cases. For example: `run_command(obj, arg1, arg2)`.

-- `full_path()` *Since 0.55.0* which returns a string pointing to the script or
+- `full_path()` *(since 0.55.0)*: returns a string pointing to the script or
   executable **NOTE:** You should not need to use this method. Passing the object
   itself should work in all cases.
For example: `run_command(obj, arg1, arg2)`. @@ -2501,7 +2485,7 @@ detailed information about how environment variables should be set during tests. It should be passed as the `env` keyword argument to tests and other functions. It has the following methods. -- `append(varname, value1, value2, ...)` appends the given values to +- `append(varname, value1, value2, ...)`: appends the given values to the old value of the environment variable, e.g. `env.append('FOO', 'BAR', 'BAZ', separator : ';')` produces `BOB;BAR;BAZ` if `FOO` had the value `BOB` and plain `BAR;BAZ` if the value was not defined. If @@ -2509,10 +2493,10 @@ tests and other functions. It has the following methods. separator for the host operating system will be used, i.e. ';' for Windows and ':' for UNIX/POSIX systems. -- `prepend(varname, value1, value2, ...)` is the same as `append` - except that it writes to the beginning of the variable +- `prepend(varname, value1, value2, ...)`: same as `append` + except that it writes to the beginning of the variable. -- `set(varname, value1, value2)` sets the environment variable +- `set(varname, value1, value2)`: sets the environment variable specified in the first argument to the values in the second argument joined by the separator, e.g. `env.set('FOO', 'BAR'),` sets envvar `FOO` to value `BAR`. See `append()` above for how separators work. @@ -2526,27 +2510,27 @@ This object is returned by [`find_library()`](#find_library) and contains an external (i.e. not built as part of this project) library. This object has the following methods: - - `found()` which returns whether the library was found. +- `found()`: returns whether the library was found. - - `type_name()` *(added 0.48.0)* which returns a string describing - the type of the dependency, which will be `library` in this case. +- `type_name()` *(since 0.48.0)*: returns a string describing + the type of the dependency, which will be `library` in this case. - - `partial_dependency(compile_args : false, link_args : false, links - : false, includes : false, source : false)` *(Added 0.46.0)* returns - a new dependency object with the same name, version, found status, - type name, and methods as the object that called it. This new - object will only inherit other attributes from its parent as - controlled by keyword arguments. +- `partial_dependency(compile_args : false, link_args : false, links + : false, includes : false, source : false)` *(since 0.46.0)*: returns + a new dependency object with the same name, version, found status, + type name, and methods as the object that called it. This new + object will only inherit other attributes from its parent as + controlled by keyword arguments. ### Feature option object -The following methods are defined for all [`feature` options](Build-options.md#features): +*(since 0.47.0)* -- `enabled()` returns whether the feature was set to `'enabled'` -- `disabled()` returns whether the feature was set to `'disabled'` -- `auto()` returns whether the feature was set to `'auto'` +The following methods are defined for all [`feature` options](Build-options.md#features): -Feature options are available since 0.47.0. 
+- `enabled()`: returns whether the feature was set to `'enabled'` +- `disabled()`: returns whether the feature was set to `'disabled'` +- `auto()`: returns whether the feature was set to `'auto'` ### `generator` object @@ -2554,7 +2538,7 @@ This object is returned by [`generator()`](#generator) and contains a generator that is used to transform files from one type to another by an executable (e.g. `idl` files into source code and headers). -* `process(list_of_files, ...)` takes a list of files, causes them to +- `process(list_of_files, ...)`: takes a list of files, causes them to be processed and returns an object containing the result which can then, for example, be passed into a build target definition. The keyword argument `extra_args`, if specified, will be used to replace @@ -2572,10 +2556,10 @@ an executable (e.g. `idl` files into source code and headers). This object is returned by [`subproject()`](#subproject) and is an opaque object representing it. -- `found()` *(added 0.48.0)* which returns whether the subproject was +- `found()` *(since 0.48.0)*: returns whether the subproject was successfully setup -- `get_variable(name, fallback)` fetches the specified variable from +- `get_variable(name, fallback)`: fetches the specified variable from inside the subproject. This is useful to, for instance, get a [declared dependency](#declare_dependency) from the [subproject](Subprojects.md). @@ -2590,9 +2574,9 @@ This object encapsulates the result of trying to compile and run a sample piece of code with [`compiler.run()`](#compiler-object) or [`run_command()`](#run_command). It has the following methods: -- `compiled()` if true, the compilation succeeded, if false it did not +- `compiled()`: if true, the compilation succeeded, if false it did not and the other methods return unspecified data. This is only available for `compiler.run()` results. -- `returncode()` the return code of executing the compiled binary -- `stderr()` the standard error produced when the command was run -- `stdout()` the standard out produced when the command was run +- `returncode()`: the return code of executing the compiled binary +- `stderr()`: the standard error produced when the command was run +- `stdout()`: the standard out produced when the command was run -- cgit v1.1 From 83a973ca04cf53dd98ff487b4273155b82cf554a Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Tue, 30 Jun 2020 18:32:27 +0300 Subject: Replaced `ninja` with `meson` [skip ci] --- docs/markdown/Feature-autodetection.md | 2 +- docs/markdown/Localisation.md | 4 ++-- docs/markdown/Run-targets.md | 2 +- docs/markdown/Tutorial.md | 2 +- docs/markdown/Unit-tests.md | 2 +- docs/markdown/howtox.md | 6 +++--- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/markdown/Feature-autodetection.md b/docs/markdown/Feature-autodetection.md index a568313..4d366d9 100644 --- a/docs/markdown/Feature-autodetection.md +++ b/docs/markdown/Feature-autodetection.md @@ -28,7 +28,7 @@ the binaries `gcovr`, `lcov` and `genhtml`. If version 3.3 or higher of the first is found, targets called *coverage-text*, *coverage-xml* and *coverage-html* are generated. Alternatively, if the latter two are found, only the target *coverage-html* is generated. Coverage -reports can then be produced simply by calling e.g. `ninja +reports can then be produced simply by calling e.g. `meson compile coverage-xml`. As a convenience, a high-level *coverage* target is also generated which will produce all 3 coverage report types, if possible. 
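The coverage targets only have data to report once the project's tests have been built and run. As an illustrative sketch (project, file, and test names are hypothetical, and coverage is assumed to have been enabled with `-Db_coverage=true` at setup time), a minimal build definition they could be pointed at:

```meson
# Hypothetical minimal project; configure with
#   meson setup builddir -Db_coverage=true
# run the test suite, then build one of the coverage targets described above.
project('covdemo', 'c')

exe = executable('covdemo', 'main.c')
test('smoke', exe)
```
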
diff --git a/docs/markdown/Localisation.md b/docs/markdown/Localisation.md index ce9e3b6..ed63e13 100644 --- a/docs/markdown/Localisation.md +++ b/docs/markdown/Localisation.md @@ -48,7 +48,7 @@ Then we need to generate the main pot file. The potfile can have any name but is Run the following command from your build folder to generate the pot file. It is recommended to inspect it manually afterwards and fill in e.g. proper copyright and contact information. ```console -$ ninja intltest-pot +$ meson compile intltest-pot ``` ### generate .po files @@ -56,5 +56,5 @@ $ ninja intltest-pot For each language listed in the array above we need a corresponding `.po` file. Those can be generated by running the following command from your build folder. ```console -$ ninja intltest-update-po +$ meson compile intltest-update-po ``` diff --git a/docs/markdown/Run-targets.md b/docs/markdown/Run-targets.md index 38129a6..b584bf7 100644 --- a/docs/markdown/Run-targets.md +++ b/docs/markdown/Run-targets.md @@ -29,7 +29,7 @@ run_target('inspector', Run targets are not run by default. To run it run the following command. ```console -$ ninja inspector +$ meson compile inspector ``` All additional entries in `run_target`'s `command` array are passed unchanged to the inspector script, so you can do things like this: diff --git a/docs/markdown/Tutorial.md b/docs/markdown/Tutorial.md index be6888d..c5a4e6b 100644 --- a/docs/markdown/Tutorial.md +++ b/docs/markdown/Tutorial.md @@ -74,7 +74,7 @@ Now we are ready to build our code. ``` $ cd builddir -$ ninja +$ meson compile ``` Once that is done we can run the resulting binary. diff --git a/docs/markdown/Unit-tests.md b/docs/markdown/Unit-tests.md index 4f51d35..60fcad2 100644 --- a/docs/markdown/Unit-tests.md +++ b/docs/markdown/Unit-tests.md @@ -12,7 +12,7 @@ e = executable('prog', 'testprog.c') test('name of test', e) ``` -You can add as many tests as you want. They are run with the command `ninja +You can add as many tests as you want. They are run with the command `meson test`. Meson captures the output of all tests and writes it in the log file diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md index 5f9d136..c89f883 100644 --- a/docs/markdown/howtox.md +++ b/docs/markdown/howtox.md @@ -150,7 +150,7 @@ Then issue the following commands. ```console $ meson compile $ meson test -$ ninja coverage-html (or coverage-xml) +$ meson compile coverage-html (or coverage-xml) ``` The coverage report can be found in the meson-logs subdirectory. @@ -190,14 +190,14 @@ test failures. Install scan-build and configure your project. Then do this: ```console -$ ninja scan-build +$ meson compile scan-build ``` You can use the `SCANBUILD` environment variable to choose the scan-build executable. 
```console -$ SCANBUILD= ninja scan-build +$ SCANBUILD= meson compile scan-build ``` -- cgit v1.1 From b6981bd16eb0227173a85d4b26a4b060dab16998 Mon Sep 17 00:00:00 2001 From: TheQwertiest Date: Tue, 30 Jun 2020 23:56:08 +0300 Subject: Made Commands.md dynamically generated (#7346) --- docs/markdown/Commands.md | 658 -------------------------------------- docs/markdown_dynamic/Commands.md | 296 +++++++++++++++++ docs/meson.build | 32 +- run_unittests.py | 105 ++---- tools/copy_files.py | 55 ++++ tools/regenerate_docs.py | 150 +++++++++ 6 files changed, 548 insertions(+), 748 deletions(-) delete mode 100644 docs/markdown/Commands.md create mode 100644 docs/markdown_dynamic/Commands.md create mode 100644 tools/copy_files.py create mode 100755 tools/regenerate_docs.py diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md deleted file mode 100644 index e2a352a..0000000 --- a/docs/markdown/Commands.md +++ /dev/null @@ -1,658 +0,0 @@ -# Command-line commands - -There are two different ways of invoking Meson. First, you can run it directly -from the source tree with the command `/path/to/source/meson.py`. Meson may -also be installed in which case the command is simply `meson`. In this manual -we only use the latter format for simplicity. - -Meson is invoked using the following syntax: -`meson [COMMAND] [COMMAND_OPTIONS]` - -This section describes all available commands and some of their Optional arguments. -The most common workflow is to run [`setup`](#setup), followed by [`compile`](#compile), and then [`install`](#install). - -For the full list of all available options for a specific command use the following syntax: -`meson COMMAND --help` - -### configure - -``` -$ meson configure [-h] [--prefix PREFIX] [--bindir BINDIR] - [--datadir DATADIR] [--includedir INCLUDEDIR] - [--infodir INFODIR] [--libdir LIBDIR] - [--libexecdir LIBEXECDIR] [--localedir LOCALEDIR] - [--localstatedir LOCALSTATEDIR] [--mandir MANDIR] - [--sbindir SBINDIR] [--sharedstatedir SHAREDSTATEDIR] - [--sysconfdir SYSCONFDIR] - [--auto-features {enabled,disabled,auto}] - [--backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode}] - [--buildtype {plain,debug,debugoptimized,release,minsize,custom}] - [--debug] [--default-library {shared,static,both}] - [--errorlogs] [--install-umask INSTALL_UMASK] - [--layout {mirror,flat}] [--optimization {0,g,1,2,3,s}] - [--stdsplit] [--strip] [--unity {on,off,subprojects}] - [--unity-size UNITY_SIZE] [--warnlevel {0,1,2,3}] - [--werror] - [--wrap-mode {default,nofallback,nodownload,forcefallback}] - [--force-fallback-for FORCE_FALLBACK_FOR] - [--pkg-config-path PKG_CONFIG_PATH] - [--build.pkg-config-path BUILD.PKG_CONFIG_PATH] - [--cmake-prefix-path CMAKE_PREFIX_PATH] - [--build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH] - [-D option] [--clearcache] - [builddir] -``` - -Changes options of a configured meson project. - -``` -positional arguments: - builddir - -optional arguments: - -h, --help show this help message and exit - --prefix PREFIX Installation prefix. - --bindir BINDIR Executable directory. - --datadir DATADIR Data file directory. - --includedir INCLUDEDIR Header file directory. - --infodir INFODIR Info page directory. - --libdir LIBDIR Library directory. - --libexecdir LIBEXECDIR Library executable directory. - --localedir LOCALEDIR Locale data directory. - --localstatedir LOCALSTATEDIR Localstate data directory. - --mandir MANDIR Manual page directory. - --sbindir SBINDIR System executable directory. 
- --sharedstatedir SHAREDSTATEDIR Architecture-independent data directory. - --sysconfdir SYSCONFDIR Sysconf data directory. - --auto-features {enabled,disabled,auto} - Override value of all 'auto' features - (default: auto). - --backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode} - Backend to use (default: ninja). - --buildtype {plain,debug,debugoptimized,release,minsize,custom} - Build type to use (default: debug). - --debug Debug - --default-library {shared,static,both} - Default library type (default: shared). - --errorlogs Whether to print the logs from failing - tests - --install-umask INSTALL_UMASK Default umask to apply on permissions of - installed files (default: 022). - --layout {mirror,flat} Build directory layout (default: - mirror). - --optimization {0,g,1,2,3,s} Optimization level (default: 0). - --stdsplit Split stdout and stderr in test logs - --strip Strip targets on install - --unity {on,off,subprojects} Unity build (default: off). - --unity-size UNITY_SIZE Unity block size (default: (2, None, - 4)). - --warnlevel {0,1,2,3} Compiler warning level to use (default: - 1). - --werror Treat warnings as errors - --wrap-mode {default,nofallback,nodownload,forcefallback} - Wrap mode (default: default). - --force-fallback-for FORCE_FALLBACK_FOR - Force fallback for those subprojects - (default: []). - --pkg-config-path PKG_CONFIG_PATH List of additional paths for pkg-config - to search (default: []). (just for host - machine) - --build.pkg-config-path BUILD.PKG_CONFIG_PATH - List of additional paths for pkg-config - to search (default: []). (just for build - machine) - --cmake-prefix-path CMAKE_PREFIX_PATH - List of additional prefixes for cmake to - search (default: []). (just for host - machine) - --build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH - List of additional prefixes for cmake to - search (default: []). (just for build - machine) - -D option Set the value of an option, can be used - several times to set multiple options. - --clearcache Clear cached state (e.g. found - dependencies) -``` - -Most arguments are the same as in [`setup`](#setup). - -Note: reconfiguring project will not reset options to their default values (even if they were changed in `meson.build`). - -#### Examples: - -List all available options: -``` -meson configure builddir -``` - -Change value of a single option: -``` -meson configure builddir -Doption=new_value -``` - -### compile - -*(since 0.54.0)* - -``` -$ meson compile [-h] [--clean] [-C BUILDDIR] [-j JOBS] [-l LOAD_AVERAGE] - [--verbose] [--ninja-args NINJA_ARGS] [--vs-args VS_ARGS] - [TARGET [TARGET ...]] -``` - -Builds a default or a specified target of a configured meson project. - -``` -positional arguments: - TARGET Targets to build. Target has the - following format: [PATH_TO_TARGET/]TARGE - T_NAME[:TARGET_TYPE]. - -optional arguments: - -h, --help show this help message and exit - --clean Clean the build directory. - -C BUILDDIR The directory containing build files to - be built. - -j JOBS, --jobs JOBS The number of worker jobs to run (if - supported). If the value is less than 1 - the build program will guess. - -l LOAD_AVERAGE, --load-average LOAD_AVERAGE - The system load average to try to - maintain (if supported). - --verbose Show more verbose output. - --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied - only on `ninja` backend). - --vs-args VS_ARGS Arguments to pass to `msbuild` (applied - only on `vs` backend). -``` - -`--verbose` argument is available since 0.55.0. 
- -#### Targets - -*(since 0.55.0)* - -`TARGET` has the following syntax `[PATH/]NAME[:TYPE]`, where: -- `NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`). -- `PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`. -- `TYPE`: type of the target. Can be one of the following: 'executable', 'static_library', 'shared_library', 'shared_module', 'custom', 'run', 'jar'. - -`PATH` and/or `TYPE` can be ommited if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`. - -#### Backend specific arguments - -*(since 0.55.0)* - -`BACKEND-args` use the following syntax: - -If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command: - -``` -$ meson compile --ninja-args=-n,-d,explain -``` - -would add `-n`, `-d` and `explain` arguments to ninja invocation. - -If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this: - -``` -$ meson compile "--ninja-args=['a,b', 'c d']" -``` - -#### Examples: - -Build the project: -``` -meson compile -C builddir -``` - -Execute a dry run on ninja backend with additional debug info: -``` -meson compile --ninja-args=-n,-d,explain -``` - -Build three targets: two targets that have the same `foo` name, but different type, and a `bar` target: -``` -meson compile foo:shared_library foo:static_library bar -``` - -Produce a coverage html report (if available): -``` -meson compile coverage-html -``` - -### dist - -*(since 0.52.0)* - -``` -$ meson dist [-h] [-C WD] [--formats FORMATS] [--include-subprojects] - [--no-tests] -``` - -Generates a release archive from the current source tree. - -``` -optional arguments: - -h, --help show this help message and exit - -C WD directory to cd into before running - --formats FORMATS Comma separated list of archive types to create. - --include-subprojects Include source code of subprojects that have been used - for the build. - --no-tests Do not build and test generated packages. -``` - -See [notes about creating releases](Creating-releases.md) for more info. - -#### Examples: - -Create a release archive: -``` -meson dist -C builddir -``` - -### init - -*(since 0.45.0)* - -``` -$ meson init [-h] [-C WD] [-n NAME] [-e EXECUTABLE] [-d DEPS] - [-l {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust}] [-b] - [--builddir BUILDDIR] [-f] [--type {executable,library}] - [--version VERSION] - [sourcefile [sourcefile ...]] -``` - -Creates a basic set of build files based on a template. - -``` -positional arguments: - sourcefile source files. default: all recognized - files in current directory - -optional arguments: - -h, --help show this help message and exit - -C WD directory to cd into before running - -n NAME, --name NAME project name. default: name of current - directory - -e EXECUTABLE, --executable EXECUTABLE - executable name. default: project name - -d DEPS, --deps DEPS dependencies, comma-separated - -l {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust}, --language {c,cpp,cs,cuda,d,fortran,java,objc,objcpp,rust} - project language. default: autodetected - based on source files - -b, --build build after generation - --builddir BUILDDIR directory for build - -f, --force force overwrite of existing files and - directories. - --type {executable,library} project type. default: executable based - project - --version VERSION project version. 
default: 0.1 -``` - -#### Examples: - -Create a project in `sourcedir`: -``` -meson init -C sourcedir -``` - -### introspect - -``` -$ meson introspect [-h] [--ast] [--benchmarks] [--buildoptions] - [--buildsystem-files] [--dependencies] - [--scan-dependencies] [--installed] [--projectinfo] - [--targets] [--tests] - [--backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode}] - [-a] [-i] [-f] - [builddir] -``` - -Displays information about a configured meson project. - -``` -positional arguments: - builddir The build directory - -optional arguments: - -h, --help show this help message and exit - --ast Dump the AST of the meson file. - --benchmarks List all benchmarks. - --buildoptions List all build options. - --buildsystem-files List files that make up the build - system. - --dependencies List external dependencies. - --scan-dependencies Scan for dependencies used in the - meson.build file. - --installed List all installed files and - directories. - --projectinfo Information about projects. - --targets List top level targets. - --tests List all unit tests. - --backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode} - The backend to use for the - --buildoptions introspection. - -a, --all Print all available information. - -i, --indent Enable pretty printed JSON. - -f, --force-object-output Always use the new JSON format for - multiple entries (even for 0 and 1 - introspection commands) -``` - -#### Examples: - -Display basic information about a configured project in `builddir`: -``` -meson introspect builddir -``` - -### install - -*(since 0.47.0)* - -``` -$ meson install [-h] [-C WD] [--no-rebuild] [--only-changed] [--quiet] -``` - -Installs the project to the prefix specified in [`setup`](#setup). - -``` -optional arguments: - -h, --help show this help message and exit - -C WD directory to cd into before running - --no-rebuild Do not rebuild before installing. - --only-changed Only overwrite files that are older than the copied file. - --quiet Do not print every file that was installed. -``` - -See [the installation documentation](Installing.md) for more info. - -#### Examples: - -Install project to `prefix`: -``` -meson install -C builddir -``` - -Install project to `$DESTDIR/prefix`: -``` -DESTDIR=/path/to/staging/area meson install -C builddir -``` - -### rewrite - -*(since 0.50.0)* - -``` -$ meson rewrite [-h] [-s SRCDIR] [-V] [-S] - {target,kwargs,default-options,command} ... -``` - -Modifies the meson project. - -``` -optional arguments: - -h, --help show this help message and exit - -s SRCDIR, --sourcedir SRCDIR Path to source directory. - -V, --verbose Enable verbose output - -S, --skip-errors Skip errors instead of aborting - -Rewriter commands: - Rewrite command to execute - - {target,kwargs,default-options,command} - target Modify a target - kwargs Modify keyword arguments - default-options Modify the project default options - command Execute a JSON array of commands -``` - -See [the meson file rewriter documentation](Rewriter.md) for more info. 
- -### setup - -``` -$ meson setup [-h] [--prefix PREFIX] [--bindir BINDIR] [--datadir DATADIR] - [--includedir INCLUDEDIR] [--infodir INFODIR] - [--libdir LIBDIR] [--libexecdir LIBEXECDIR] - [--localedir LOCALEDIR] [--localstatedir LOCALSTATEDIR] - [--mandir MANDIR] [--sbindir SBINDIR] - [--sharedstatedir SHAREDSTATEDIR] [--sysconfdir SYSCONFDIR] - [--auto-features {enabled,disabled,auto}] - [--backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode}] - [--buildtype {plain,debug,debugoptimized,release,minsize,custom}] - [--debug] [--default-library {shared,static,both}] - [--errorlogs] [--install-umask INSTALL_UMASK] - [--layout {mirror,flat}] [--optimization {0,g,1,2,3,s}] - [--stdsplit] [--strip] [--unity {on,off,subprojects}] - [--unity-size UNITY_SIZE] [--warnlevel {0,1,2,3}] [--werror] - [--wrap-mode {default,nofallback,nodownload,forcefallback}] - [--force-fallback-for FORCE_FALLBACK_FOR] - [--pkg-config-path PKG_CONFIG_PATH] - [--build.pkg-config-path BUILD.PKG_CONFIG_PATH] - [--cmake-prefix-path CMAKE_PREFIX_PATH] - [--build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH] - [-D option] [--native-file NATIVE_FILE] - [--cross-file CROSS_FILE] [-v] [--fatal-meson-warnings] - [--reconfigure] [--wipe] - [builddir] [sourcedir] -``` - -Configures a build directory for the meson project. - -This is the default meson command (invoked if there was no COMMAND supplied). - -``` -positional arguments: - builddir - sourcedir - -optional arguments: - -h, --help show this help message and exit - --prefix PREFIX Installation prefix. - --bindir BINDIR Executable directory. - --datadir DATADIR Data file directory. - --includedir INCLUDEDIR Header file directory. - --infodir INFODIR Info page directory. - --libdir LIBDIR Library directory. - --libexecdir LIBEXECDIR Library executable directory. - --localedir LOCALEDIR Locale data directory. - --localstatedir LOCALSTATEDIR Localstate data directory. - --mandir MANDIR Manual page directory. - --sbindir SBINDIR System executable directory. - --sharedstatedir SHAREDSTATEDIR Architecture-independent data directory. - --sysconfdir SYSCONFDIR Sysconf data directory. - --auto-features {enabled,disabled,auto} - Override value of all 'auto' features - (default: auto). - --backend {ninja,vs,vs2010,vs2015,vs2017,vs2019,xcode} - Backend to use (default: ninja). - --buildtype {plain,debug,debugoptimized,release,minsize,custom} - Build type to use (default: debug). - --debug Debug - --default-library {shared,static,both} - Default library type (default: shared). - --errorlogs Whether to print the logs from failing - tests - --install-umask INSTALL_UMASK Default umask to apply on permissions of - installed files (default: 022). - --layout {mirror,flat} Build directory layout (default: - mirror). - --optimization {0,g,1,2,3,s} Optimization level (default: 0). - --stdsplit Split stdout and stderr in test logs - --strip Strip targets on install - --unity {on,off,subprojects} Unity build (default: off). - --unity-size UNITY_SIZE Unity block size (default: (2, None, - 4)). - --warnlevel {0,1,2,3} Compiler warning level to use (default: - 1). - --werror Treat warnings as errors - --wrap-mode {default,nofallback,nodownload,forcefallback} - Wrap mode (default: default). - --force-fallback-for FORCE_FALLBACK_FOR - Force fallback for those subprojects - (default: []). - --pkg-config-path PKG_CONFIG_PATH List of additional paths for pkg-config - to search (default: []). 
(just for host - machine) - --build.pkg-config-path BUILD.PKG_CONFIG_PATH - List of additional paths for pkg-config - to search (default: []). (just for build - machine) - --cmake-prefix-path CMAKE_PREFIX_PATH - List of additional prefixes for cmake to - search (default: []). (just for host - machine) - --build.cmake-prefix-path BUILD.CMAKE_PREFIX_PATH - List of additional prefixes for cmake to - search (default: []). (just for build - machine) - -D option Set the value of an option, can be used - several times to set multiple options. - --native-file NATIVE_FILE File containing overrides for native - compilation environment. - --cross-file CROSS_FILE File describing cross compilation - environment. - -v, --version show program's version number and exit - --fatal-meson-warnings Make all Meson warnings fatal - --reconfigure Set options and reconfigure the project. - Useful when new options have been added - to the project and the default value is - not working. - --wipe Wipe build directory and reconfigure - using previous command line options. - Useful when build directory got - corrupted, or when rebuilding with a - newer version of meson. -``` - -See [meson introduction page](Running-Meson.md#configuring-the-build-directory) for more info. - -#### Examples: - -Configures `builddir` with default values: -``` -meson setup builddir -``` - -### subprojects - -*(since 0.49.0)* - -``` -$ meson subprojects [-h] {update,checkout,download,foreach} ... -``` - -Manages subprojects of the meson project. - -``` -optional arguments: - -h, --help show this help message and exit - -Commands: - {update,checkout,download,foreach} - update Update all subprojects from wrap files - checkout Checkout a branch (git only) - download Ensure subprojects are fetched, even if - not in use. Already downloaded subprojects - are not modified. This can be used to pre- - fetch all subprojects and avoid downloads - during configure. - foreach Execute a command in each subproject - directory. -``` - -### test - -``` -$ meson test [-h] [--repeat REPEAT] [--no-rebuild] [--gdb] - [--gdb-path GDB_PATH] [--list] [--wrapper WRAPPER] [-C WD] - [--suite SUITE] [--no-suite SUITE] [--no-stdsplit] - [--print-errorlogs] [--benchmark] [--logbase LOGBASE] - [--num-processes NUM_PROCESSES] [-v] [-q] - [-t TIMEOUT_MULTIPLIER] [--setup SETUP] - [--test-args TEST_ARGS] - [args [args ...]] -``` - -Run tests for the configure meson project. - -``` -positional arguments: - args Optional list of tests to run - -optional arguments: - -h, --help show this help message and exit - --repeat REPEAT Number of times to run the tests. - --no-rebuild Do not rebuild before running tests. - --gdb Run test under gdb. - --gdb-path GDB_PATH Path to the gdb binary (default: gdb). - --list List available tests. - --wrapper WRAPPER wrapper to run tests with (e.g. - Valgrind) - -C WD directory to cd into before running - --suite SUITE Only run tests belonging to the given - suite. - --no-suite SUITE Do not run tests belonging to the given - suite. - --no-stdsplit Do not split stderr and stdout in test - logs. - --print-errorlogs Whether to print failing tests' logs. - --benchmark Run benchmarks instead of tests. - --logbase LOGBASE Base name for log file. - --num-processes NUM_PROCESSES How many parallel processes to use. - -v, --verbose Do not redirect stdout and stderr - -q, --quiet Produce less output to the terminal. 
- -t TIMEOUT_MULTIPLIER, --timeout-multiplier TIMEOUT_MULTIPLIER - Define a multiplier for test timeout, - for example when running tests in - particular conditions they might take - more time to execute. - --setup SETUP Which test setup to use. - --test-args TEST_ARGS Arguments to pass to the specified - test(s) or all tests -``` - -See [the unit test documentation](Unit-tests.md) for more info. - -#### Examples: - -Run tests for the project: -``` -meson test -C builddir -``` - -Run only `specific_test_1` and `specific_test_2`: -``` -meson test -C builddir specific_test_1 specific_test_2 -``` - -### wrap - -``` -$ meson wrap [-h] {list,search,install,update,info,status,promote} ... -``` - -An utility to manage WrapDB dependencies. - -``` -optional arguments: - -h, --help show this help message and exit - -Commands: - {list,search,install,update,info,status,promote} - list show all available projects - search search the db by name - install install the specified project - update update the project to its newest - available release - info show available versions of a project - status show installed and available versions of - your projects - promote bring a subsubproject up to the master - project -``` - -See [the WrapDB tool documentation](Using-wraptool.md) for more info. diff --git a/docs/markdown_dynamic/Commands.md b/docs/markdown_dynamic/Commands.md new file mode 100644 index 0000000..a35b4da --- /dev/null +++ b/docs/markdown_dynamic/Commands.md @@ -0,0 +1,296 @@ +# Command-line commands + +There are two different ways of invoking Meson. First, you can run it directly +from the source tree with the command `/path/to/source/meson.py`. Meson may +also be installed in which case the command is simply `meson`. In this manual +we only use the latter format for simplicity. + +Meson is invoked using the following syntax: +`meson [COMMAND] [COMMAND_OPTIONS]` + +This section describes all available commands and some of their Optional arguments. +The most common workflow is to run [`setup`](#setup), followed by [`compile`](#compile), and then [`install`](#install). + +For the full list of all available options for a specific command use the following syntax: +`meson COMMAND --help` + +### configure + +``` +{{ cmd_help['configure']['usage'] }} +``` + +Changes options of a configured meson project. + +``` +{{ cmd_help['configure']['arguments'] }} +``` + +Most arguments are the same as in [`setup`](#setup). + +Note: reconfiguring project will not reset options to their default values (even if they were changed in `meson.build`). + +#### Examples: + +List all available options: +``` +meson configure builddir +``` + +Change value of a single option: +``` +meson configure builddir -Doption=new_value +``` + +### compile + +*(since 0.54.0)* + +``` +{{ cmd_help['compile']['usage'] }} +``` + +Builds a default or a specified target of a configured meson project. + +``` +{{ cmd_help['compile']['arguments'] }} +``` + +`--verbose` argument is available since 0.55.0. + +#### Targets + +*(since 0.55.0)* + +`TARGET` has the following syntax `[PATH/]NAME[:TYPE]`, where: +- `NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`). +- `PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`. +- `TYPE`: type of the target. Can be one of the following: 'executable', 'static_library', 'shared_library', 'shared_module', 'custom', 'run', 'jar'. 
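To make the `NAME[:TYPE]` disambiguation above concrete, here is a hypothetical `meson.build` in which two targets share a name, so the plain name alone would be ambiguous and the type suffix (used again in the examples further below) becomes necessary:

```meson
# Hypothetical targets: 'foo' exists as both a shared and a static library,
# so `meson compile foo` is ambiguous; use e.g.
#   meson compile foo:shared_library foo:static_library bar
foo_shared = shared_library('foo', 'foo.c')
foo_static = static_library('foo', 'foo.c')
bar = executable('bar', 'bar.c')
```
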
+ +`PATH` and/or `TYPE` can be ommited if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`. + +#### Backend specific arguments + +*(since 0.55.0)* + +`BACKEND-args` use the following syntax: + +If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command: + +``` +$ meson compile --ninja-args=-n,-d,explain +``` + +would add `-n`, `-d` and `explain` arguments to ninja invocation. + +If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this: + +``` +$ meson compile "--ninja-args=['a,b', 'c d']" +``` + +#### Examples: + +Build the project: +``` +meson compile -C builddir +``` + +Execute a dry run on ninja backend with additional debug info: +``` +meson compile --ninja-args=-n,-d,explain +``` + +Build three targets: two targets that have the same `foo` name, but different type, and a `bar` target: +``` +meson compile foo:shared_library foo:static_library bar +``` + +Produce a coverage html report (if available): +``` +meson compile coverage-html +``` + +### dist + +*(since 0.52.0)* + +``` +{{ cmd_help['dist']['usage'] }} +``` + +Generates a release archive from the current source tree. + +``` +{{ cmd_help['dist']['arguments'] }} +``` + +See [notes about creating releases](Creating-releases.md) for more info. + +#### Examples: + +Create a release archive: +``` +meson dist -C builddir +``` + +### init + +*(since 0.45.0)* + +``` +{{ cmd_help['init']['usage'] }} +``` + +Creates a basic set of build files based on a template. + +``` +{{ cmd_help['init']['arguments'] }} +``` + +#### Examples: + +Create a project in `sourcedir`: +``` +meson init -C sourcedir +``` + +### introspect + +``` +{{ cmd_help['introspect']['usage'] }} +``` + +Displays information about a configured meson project. + +``` +{{ cmd_help['introspect']['arguments'] }} +``` + +#### Examples: + +Display basic information about a configured project in `builddir`: +``` +meson introspect builddir +``` + +### install + +*(since 0.47.0)* + +``` +{{ cmd_help['install']['usage'] }} +``` + +Installs the project to the prefix specified in [`setup`](#setup). + +``` +{{ cmd_help['install']['arguments'] }} +``` + +See [the installation documentation](Installing.md) for more info. + +#### Examples: + +Install project to `prefix`: +``` +meson install -C builddir +``` + +Install project to `$DESTDIR/prefix`: +``` +DESTDIR=/path/to/staging/area meson install -C builddir +``` + +### rewrite + +*(since 0.50.0)* + +``` +{{ cmd_help['rewrite']['usage'] }} +``` + +Modifies the meson project. + +``` +{{ cmd_help['rewrite']['arguments'] }} +``` + +See [the meson file rewriter documentation](Rewriter.md) for more info. + +### setup + +``` +{{ cmd_help['setup']['usage'] }} +``` + +Configures a build directory for the meson project. + +This is the default meson command (invoked if there was no COMMAND supplied). + +``` +{{ cmd_help['setup']['arguments'] }} +``` + +See [meson introduction page](Running-Meson.md#configuring-the-build-directory) for more info. + +#### Examples: + +Configures `builddir` with default values: +``` +meson setup builddir +``` + +### subprojects + +*(since 0.49.0)* + +``` +{{ cmd_help['subprojects']['usage'] }} +``` + +Manages subprojects of the meson project. + +``` +{{ cmd_help['subprojects']['arguments'] }} +``` + +### test + +``` +{{ cmd_help['test']['usage'] }} +``` + +Run tests for the configure meson project. 
+ +``` +{{ cmd_help['test']['arguments'] }} +``` + +See [the unit test documentation](Unit-tests.md) for more info. + +#### Examples: + +Run tests for the project: +``` +meson test -C builddir +``` + +Run only `specific_test_1` and `specific_test_2`: +``` +meson test -C builddir specific_test_1 specific_test_2 +``` + +### wrap + +``` +{{ cmd_help['wrap']['usage'] }} +``` + +An utility to manage WrapDB dependencies. + +``` +{{ cmd_help['wrap']['arguments'] }} +``` + +See [the WrapDB tool documentation](Using-wraptool.md) for more info. diff --git a/docs/meson.build b/docs/meson.build index 32aab07..c07a200 100644 --- a/docs/meson.build +++ b/docs/meson.build @@ -1,16 +1,40 @@ project('Meson documentation', version: '1.0') +cur_bdir = meson.current_build_dir() + +# Copy all files to build dir, since HotDoc uses relative paths +run_command( + files('../tools/copy_files.py'), + '-C', meson.current_source_dir(), + '--output-dir', cur_bdir, + 'markdown', 'theme', 'sitemap.txt', + check: true) + +# Only the script knows which files are being generated +docs_gen = custom_target( + 'gen_docs', + input: files('markdown/index.md'), + output: 'gen_docs.dummy', + command: [ + files('../tools/regenerate_docs.py'), + '--output-dir', join_paths(cur_bdir, 'markdown'), + '--dummy-output-file', '@OUTPUT@', + ], + build_by_default: true, + install: false) + hotdoc = import('hotdoc') documentation = hotdoc.generate_doc(meson.project_name(), project_version: meson.project_version(), - sitemap: 'sitemap.txt', + sitemap: join_paths(cur_bdir, 'sitemap.txt'), build_by_default: true, - index: 'markdown/index.md', + depends: docs_gen, + index: join_paths(cur_bdir, 'markdown/index.md'), install: false, extra_assets: ['images/'], - include_paths: ['markdown'], + include_paths: [join_paths(cur_bdir, 'markdown')], default_license: 'CC-BY-SAv4.0', - html_extra_theme: join_paths('theme', 'extra'), + html_extra_theme: join_paths(cur_bdir, 'theme', 'extra'), git_upload_repository: 'git@github.com:jpakkane/jpakkane.github.io.git', edit_on_github_repository: 'https://github.com/mesonbuild/meson/', syntax_highlighting_activate: true, diff --git a/run_unittests.py b/run_unittests.py index a02284c..c4978c2 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4783,81 +4783,16 @@ recommended as it is not supported on some platforms''') ''' Test that all listed meson commands are documented in Commands.md. 
''' - help_usage_start_pattern = re.compile(r'^usage:[\t ]*[\r\n]*', re.MULTILINE) - help_positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE) - help_options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE) - help_commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE) - - def get_next_start(iterators, end): - return next((i.start() for i in iterators if i), end) - - def parse_help(help): - help_len = len(help) - usage = help_usage_start_pattern.search(help) - positionals = help_positional_start_pattern.search(help) - options = help_options_start_pattern.search(help) - commands = help_commands_start_pattern.search(help) - - arguments_start = get_next_start([positionals, options, commands], None) - self.assertIsNotNone(arguments_start, 'Cmd command is missing argument list') - - return { - 'usage': help[usage.end():arguments_start], - 'arguments': help[arguments_start:help_len], - } - - md_code_pattern = re.compile(r'^```[\r\n]*', re.MULTILINE) - md_usage_pattern = re.compile(r'^\$ ', re.MULTILINE) - - def parse_section(text, section_start, section_end): - matches = [i - for i in md_code_pattern.finditer(text, pos=section_start, endpos=section_end)] - self.assertGreaterEqual(len(matches), 4, '.md command is missing usage description and/or argument list') - - usage = md_usage_pattern.search(text, pos=matches[0].end(), endpos=matches[1].start()) - - return { - 'usage': text[usage.end():matches[1].start()], - 'arguments': text[matches[2].end():matches[3].start()], - } - - def normalize_text(text): - # clean up formatting - out = re.sub(r'( {2,}|\t+)', r' ', text, flags=re.MULTILINE) # replace whitespace chars with a single space - out = re.sub(r'\r\n+', r'\r', out, flags=re.MULTILINE) # replace newlines with a single linux EOL - out = re.sub(r'(^ +| +$)', '', out, flags=re.MULTILINE) # strip lines - out = re.sub(r'(^\n)', '', out, flags=re.MULTILINE) # remove empty lines - return out - - def clean_dir_arguments(text): - # Remove platform specific defaults - args = [ - 'prefix', - 'bindir', - 'datadir', - 'includedir', - 'infodir', - 'libdir', - 'libexecdir', - 'localedir', - 'localstatedir', - 'mandir', - 'sbindir', - 'sharedstatedir', - 'sysconfdir' - ] - out = text - for a in args: - out = re.sub(r'(--' + a + r' .+?)[ |\n]\(default:.+?\)(\.)?', r'\1\2', out, flags=re.MULTILINE|re.DOTALL) - return out - ## Get command sections + doc_path = 'docs/markdown_dynamic/Commands.md' md = None - with open('docs/markdown/Commands.md', encoding='utf-8') as f: + with open(doc_path, encoding='utf-8') as f: md = f.read() self.assertIsNotNone(md) + ## Get command sections + section_pattern = re.compile(r'^### (.+)$', re.MULTILINE) md_command_section_matches = [i for i in section_pattern.finditer(md)] md_command_sections = dict() @@ -4872,26 +4807,24 @@ recommended as it is not supported on some platforms''') help_output = self._run(self.meson_command + ['--help']) help_commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(',')) - self.assertEqual(md_commands | {'help'}, help_commands) - - ## Validate command options + self.assertEqual(md_commands | {'help'}, help_commands, 'Doc file: `{}`'.format(doc_path)) - for command in md_commands: - print('Current command: {}'.format(command)) - - help_cmd_output = self._run(self.meson_command + [command, '--help'], override_envvars={'COLUMNS': '80'}) + ## Validate that each section has proper 
placeholders - parsed_help = parse_help(help_cmd_output) - parsed_section = parse_section(md, *md_command_sections[command]) + def get_data_pattern(command): + return re.compile( + r'^```[\r\n]' + r'{{ cmd_help\[\'' + command + r'\'\]\[\'usage\'\] }}[\r\n]' + r'^```[\r\n]' + r'.*?' + r'^```[\r\n]' + r'{{ cmd_help\[\'' + command + r'\'\]\[\'arguments\'\] }}[\r\n]' + r'^```', + flags = re.MULTILINE|re.DOTALL) - for p in [parsed_help, parsed_section]: - p['usage'] = normalize_text(p['usage']) - p['arguments'] = normalize_text(p['arguments']) - if command in ['setup', 'configure']: - parsed_help['arguments'] = clean_dir_arguments(parsed_help['arguments']) - - self.assertEqual(parsed_help['usage'], parsed_section['usage']) - self.assertEqual(parsed_help['arguments'], parsed_section['arguments']) + for command in md_commands: + m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1]) + self.assertIsNotNone(m, 'Command `{}` is missing placeholders for dynamic data. Doc file: `{}`'.format(command, doc_path)) def test_coverage(self): if mesonbuild.environment.detect_msys2_arch(): diff --git a/tools/copy_files.py b/tools/copy_files.py new file mode 100644 index 0000000..39eaa0a --- /dev/null +++ b/tools/copy_files.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python3 + + +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +''' +Copy files +''' + +import argparse +import shutil +import typing as T +from pathlib import Path + +PathLike = T.Union[Path,str] + +def copy_files(files: T.List[str], input_dir: PathLike, output_dir: PathLike) -> None: + if not input_dir: + raise ValueError(f'Input directory value is not set') + if not output_dir: + raise ValueError(f'Output directory value is not set') + + input_dir = Path(input_dir).resolve() + output_dir = Path(output_dir).resolve() + output_dir.mkdir(parents=True, exist_ok=True) + + for f in files: + if (input_dir/f).is_dir(): + shutil.copytree(input_dir/f, output_dir/f) + else: + shutil.copy2(input_dir/f, output_dir/f) + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Copy files') + parser.add_argument('files', metavar='FILE', nargs='*') + parser.add_argument('-C', dest='input_dir', required=True) + parser.add_argument('--output-dir', required=True) + + args = parser.parse_args() + + copy_files(files=args.files, + input_dir=args.input_dir, + output_dir=args.output_dir) diff --git a/tools/regenerate_docs.py b/tools/regenerate_docs.py new file mode 100755 index 0000000..d443570 --- /dev/null +++ b/tools/regenerate_docs.py @@ -0,0 +1,150 @@ +#!/usr/bin/env python3 + + +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +''' +Regenerate markdown docs by using `meson.py` from the root dir +''' + +import argparse +import jinja2 +import os +import re +import subprocess +import sys +import textwrap +import typing as T +from pathlib import Path + +PathLike = T.Union[Path,str] + +def _get_meson_output(root_dir: Path, args: T.List): + env = os.environ.copy() + env['COLUMNS'] = '80' + return subprocess.run([str(sys.executable), str(root_dir/'meson.py')] + args, check=True, capture_output=True, text=True, env=env).stdout.strip() + +def get_commands_data(root_dir: Path): + usage_start_pattern = re.compile(r'^usage: ', re.MULTILINE) + positional_start_pattern = re.compile(r'^positional arguments:[\t ]*[\r\n]+', re.MULTILINE) + options_start_pattern = re.compile(r'^optional arguments:[\t ]*[\r\n]+', re.MULTILINE) + commands_start_pattern = re.compile(r'^[A-Za-z ]*[Cc]ommands:[\t ]*[\r\n]+', re.MULTILINE) + + def get_next_start(iterators, end): + return next((i.start() for i in iterators if i), end) + + def normalize_text(text): + # clean up formatting + out = text + out = re.sub(r'\r\n', r'\r', out, flags=re.MULTILINE) # replace newlines with a linux EOL + out = re.sub(r'^ +$', '', out, flags=re.MULTILINE) # remove trailing whitespace + out = re.sub(r'(?:^\n+|\n+$)', '', out) # remove trailing empty lines + return out + + def parse_cmd(cmd): + cmd_len = len(cmd) + usage = usage_start_pattern.search(cmd) + positionals = positional_start_pattern.search(cmd) + options = options_start_pattern.search(cmd) + commands = commands_start_pattern.search(cmd) + + arguments_start = get_next_start([positionals, options, commands], None) + assert arguments_start + + # replace `usage:` with `$` and dedent + dedent_size = (usage.end() - usage.start()) - len('$ ') + usage_text = textwrap.dedent(f'{dedent_size * " "}$ {normalize_text(cmd[usage.end():arguments_start])}') + + return { + 'usage': usage_text, + 'arguments': normalize_text(cmd[arguments_start:cmd_len]), + } + + def clean_dir_arguments(text): + # Remove platform specific defaults + args = [ + 'prefix', + 'bindir', + 'datadir', + 'includedir', + 'infodir', + 'libdir', + 'libexecdir', + 'localedir', + 'localstatedir', + 'mandir', + 'sbindir', + 'sharedstatedir', + 'sysconfdir' + ] + out = text + for a in args: + out = re.sub(r'(--' + a + r' .+?)\s+\(default:.+?\)(\.)?', r'\1\2', out, flags=re.MULTILINE|re.DOTALL) + return out + + output = _get_meson_output(root_dir, ['--help']) + commands = set(c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', output, re.MULTILINE|re.DOTALL)[0].split(',')) + commands.remove('help') + + cmd_data = dict() + + for cmd in commands: + cmd_output = _get_meson_output(root_dir, [cmd, '--help']) + cmd_data[cmd] = parse_cmd(cmd_output) + if cmd in ['setup', 'configure']: + cmd_data[cmd]['arguments'] = clean_dir_arguments(cmd_data[cmd]['arguments']) + + return cmd_data + +def regenerate_commands(root_dir: Path, output_dir: Path) -> None: + with open(root_dir/'docs'/'markdown_dynamic'/'Commands.md') as f: + template = f.read() + + cmd_data = get_commands_data(root_dir) + + t = jinja2.Template(template, 
undefined=jinja2.StrictUndefined, keep_trailing_newline=True) + content = t.render(cmd_help=cmd_data) + + output_file = output_dir/'Commands.md' + with open(output_file, 'w') as f: + f.write(content) + + print(f'`{output_file}` was regenerated') + +def regenerate_docs(output_dir: PathLike, + dummy_output_file: T.Optional[PathLike]) -> None: + if not output_dir: + raise ValueError(f'Output directory value is not set') + + output_dir = Path(output_dir).resolve() + output_dir.mkdir(parents=True, exist_ok=True) + + root_dir = Path(__file__).resolve().parent.parent + + regenerate_commands(root_dir, output_dir) + + if dummy_output_file: + with open(output_dir/dummy_output_file, 'w') as f: + f.write('dummy file for custom_target output') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Generate meson docs') + parser.add_argument('--output-dir', required=True) + parser.add_argument('--dummy-output-file', type=str) + + args = parser.parse_args() + + regenerate_docs(output_dir=args.output_dir, + dummy_output_file=args.dummy_output_file) -- cgit v1.1 From 56c9e95b04b51def7443a514e5021fa7b70fe8c8 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Sat, 4 Apr 2020 17:39:39 -0400 Subject: Implicit dependency fallback when a subproject wrap or dir exists --- docs/markdown/Reference-manual.md | 4 +++- docs/markdown/snippets/implicit_fallback.md | 9 +++++++++ mesonbuild/interpreter.py | 8 ++++++++ run_unittests.py | 5 +++++ test cases/common/102 subproject subdir/meson.build | 4 ++++ .../102 subproject subdir/subprojects/sub_implicit/meson.build | 4 ++++ test cases/linuxlike/5 dependency versions/meson.build | 4 ++-- 7 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 docs/markdown/snippets/implicit_fallback.md create mode 100644 test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index dad8c12..293e41f 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -456,7 +456,9 @@ arguments: *(since 0.54.0)* `'subproj_dep'` argument can be omitted in the case the subproject used `meson.override_dependency('dependency_name', subproj_dep)`. In that case, the `fallback` keyword argument can be a single string instead - of a list of 2 strings. + of a list of 2 strings. *Since 0.55.0* the `fallback` keyword argument can be + omitted when there is a wrap file or a directory with the same `dependency_name`, + and subproject used `meson.override_dependency('dependency_name', subproj_dep)`. - `language` *(since 0.42.0)*: defines what language-specific dependency to find if it's available for multiple languages. - `method`: defines the way the dependency is detected, the default is diff --git a/docs/markdown/snippets/implicit_fallback.md b/docs/markdown/snippets/implicit_fallback.md new file mode 100644 index 0000000..87003d0 --- /dev/null +++ b/docs/markdown/snippets/implicit_fallback.md @@ -0,0 +1,9 @@ +## Implicit dependency fallback + +`dependency('foo')` now automatically fallback if the dependency is not found on +the system but a subproject wrap file or directory exists with the same name. + +That means that simply adding `subprojects/foo.wrap` is enough to add fallback +to any `dependency('foo')` call. It is however requires that the subproject call +`meson.override_dependency('foo', foo_dep)` to specify which dependency object +should be used for `foo`. 
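[Editor's note] To make the implicit-fallback behaviour described in the snippet above concrete, here is a minimal sketch. The `foo` project name, the `foo_dep` variable and the directory layout are hypothetical illustrations, not taken from this patch series:

```meson
# subprojects/foo/meson.build -- the subproject registers the dependency
# object it provides, so that dependency('foo') can pick it up implicitly.
project('foo', 'c', version : '1.0')

foo_dep = declare_dependency()
meson.override_dependency('foo', foo_dep)
```

```meson
# meson.build of the consuming project -- no fallback keyword argument is
# needed; the mere presence of subprojects/foo (or a subprojects/foo.wrap
# file) makes this call fall back to the subproject when 'foo' is not
# found on the system.
foo_dep = dependency('foo')
```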
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 487bdd6..053db12 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3549,6 +3549,14 @@ external dependencies (including libraries) must go to "dependencies".''') return self.notfound_dependency() has_fallback = 'fallback' in kwargs + if not has_fallback and name: + # Add an implicit fallback if we have a wrap file or a directory with the same name. + subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) + wrap_, directory = wrap.get_directory(subproject_dir_abs, name) + if wrap_ or os.path.exists(os.path.join(subproject_dir_abs, directory)): + kwargs['fallback'] = name + has_fallback = True + if 'default_options' in kwargs and not has_fallback: mlog.warning('The "default_options" keyworg argument does nothing without a "fallback" keyword argument.', location=self.current_node) diff --git a/run_unittests.py b/run_unittests.py index c4978c2..2f9fb7f 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4081,6 +4081,11 @@ recommended as it is not supported on some platforms''') 'version': '1.0' }, { + 'descriptive_name': 'sub_implicit', + 'name': 'sub_implicit', + 'version': '1.0', + }, + { 'descriptive_name': 'sub-novar', 'name': 'sub_novar', 'version': '1.0', diff --git a/test cases/common/102 subproject subdir/meson.build b/test cases/common/102 subproject subdir/meson.build index 8299a37..46a2bce 100644 --- a/test cases/common/102 subproject subdir/meson.build +++ b/test cases/common/102 subproject subdir/meson.build @@ -25,3 +25,7 @@ dependency('sub-novar', fallback : 'sub_novar') # Verify a subproject can force a dependency to be not-found d = dependency('sub-notfound', fallback : 'sub_novar', required : false) assert(not d.found(), 'Dependency should be not-found') + +# Verify that implicit fallback works because subprojects/sub_implicit directory exists +d = dependency('sub_implicit') +assert(d.found(), 'Should implicitly fallback') diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build new file mode 100644 index 0000000..613bd05 --- /dev/null +++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build @@ -0,0 +1,4 @@ +project('sub_implicit', 'c', version : '1.0') + +dep = declare_dependency() +meson.override_dependency('sub_implicit', dep) diff --git a/test cases/linuxlike/5 dependency versions/meson.build b/test cases/linuxlike/5 dependency versions/meson.build index 94f424d..164e679 100644 --- a/test cases/linuxlike/5 dependency versions/meson.build +++ b/test cases/linuxlike/5 dependency versions/meson.build @@ -31,10 +31,10 @@ dependency('somebrokenlib', version : '>=1.0', required : false) # Search for an external dependency that won't be found, but must later be # found via fallbacks -somelibnotfound = dependency('somelib', required : false) +somelibnotfound = dependency('somelib1', required : false) assert(somelibnotfound.found() == false, 'somelibnotfound was found?') # Find internal dependency without version -somelibver = dependency('somelib', +somelibver = dependency('somelib1', fallback : ['somelibnover', 'some_dep']) assert(somelibver.type_name() == 'internal', 'somelibver should be of type "internal", not ' + somelibver.type_name()) # Find an internal dependency again with the same name and a specific version -- cgit v1.1 From 2a7f72885ff0623a0a625efb5ffeca6299fc4cf7 Mon Sep 17 00:00:00 2001 From: 
Xavier Claessens Date: Sat, 4 Apr 2020 23:56:33 -0400 Subject: wrap: Add 'provide' section --- docs/markdown/Reference-manual.md | 4 +- docs/markdown/Wrap-dependency-system-manual.md | 32 ++++++++++++++- docs/markdown/snippets/implicit_fallback.md | 7 ++++ mesonbuild/interpreter.py | 18 ++++---- mesonbuild/wrap/wrap.py | 48 ++++++++++++++++++++-- .../common/102 subproject subdir/meson.build | 11 +++++ .../subprojects/sub_implicit.wrap | 5 +++ .../subprojects/sub_implicit/meson.build | 4 ++ .../subprojects/s2/subprojects/athing.wrap | 3 +- 9 files changed, 116 insertions(+), 16 deletions(-) create mode 100644 test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 293e41f..9bca74b 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -458,7 +458,9 @@ arguments: In that case, the `fallback` keyword argument can be a single string instead of a list of 2 strings. *Since 0.55.0* the `fallback` keyword argument can be omitted when there is a wrap file or a directory with the same `dependency_name`, - and subproject used `meson.override_dependency('dependency_name', subproj_dep)`. + and subproject registered the dependency using + `meson.override_dependency('dependency_name', subproj_dep)`, or when the wrap + file has `dependency_name` in its `[provide]` section. - `language` *(since 0.42.0)*: defines what language-specific dependency to find if it's available for multiple languages. - `method`: defines the way the dependency is detected, the default is diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index e59a6be..cb7c6d6 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -105,7 +105,7 @@ of downloading the file, even if `--wrap-mode` option is set to valid value (such as a git tag) for the VCS's `checkout` command, or (for git) `head` to track upstream's default branch. Required. -## Specific to wrap-git +### Specific to wrap-git - `depth` - shallowly clone the repository to X number of commits. Note that git always allow shallowly cloning branches, but in order to clone commit ids shallowly, the server must support @@ -138,6 +138,36 @@ put them somewhere where you can download them. Meson build patches are only supported for wrap-file mode. When using wrap-git, the repository must contain all Meson build definitions. +## `provide` section + +*Since *0.55.0* + +Wrap files can define the dependencies it provides in the `[provide]` section. +When a wrap file provides the dependency `foo` any call do `dependency('foo')` +will automatically fallback to that subproject even if no `fallback` keyword +argument is given. Each entry in the format `dependency_name = variable_name`, +where `dependency_name` usually match the corresponding pkg-config name and +`variable_name` is the name of a variable defined in the subproject that should +be returned for that dependency. In the case the subproject uses +`meson.override_dependency('foo', foo_dep)` the `variable_name` can be left empty +in the wrap file. + +For example `glib.wrap` provides `glib-2.0`, `gobject-2.0` and `gio-2.0`. 
A wrap +file for glib would look like: +```ini +[wrap-git] +url=https://gitlab.gnome.org/GNOME/glib.git +revision=glib-2-62 + +[provide] +glib-2.0=glib_dep +gobject-2.0=gobject_dep +gio-2.0=gio_dep +``` + +With such wrap file, `dependency('glib-2.0')` will automatically fallback to use +`glib.wrap` and return `glib_dep` variable from the subproject. + ## Using wrapped projects Wraps provide a convenient way of obtaining a project into your subproject directory. diff --git a/docs/markdown/snippets/implicit_fallback.md b/docs/markdown/snippets/implicit_fallback.md index 87003d0..bad1c71 100644 --- a/docs/markdown/snippets/implicit_fallback.md +++ b/docs/markdown/snippets/implicit_fallback.md @@ -7,3 +7,10 @@ That means that simply adding `subprojects/foo.wrap` is enough to add fallback to any `dependency('foo')` call. It is however requires that the subproject call `meson.override_dependency('foo', foo_dep)` to specify which dependency object should be used for `foo`. + +## Wrap file `provide` section + +Wrap files can define the dependencies it provides in the `[provide]` section. +When a wrap file provides the dependency `foo` any call do `dependency('foo')` +will automatically fallback to that subproject even if no `fallback` keyword +argument is given. See [Wrap documentation](Wrap-dependency-system-manual.md#provide_section). diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 053db12..7c55932 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2779,10 +2779,9 @@ external dependencies (including libraries) must go to "dependencies".''') self.subproject_dir, dirname)) return subproject - subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) - r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'), current_subproject=self.subproject) + r = self.environment.wrap_resolver try: - resolved = r.resolve(dirname, method) + resolved = r.resolve(dirname, method, self.subproject) except wrap.WrapException as e: subprojdir = os.path.join(self.subproject_dir, r.directory) if isinstance(e, wrap.WrapNotFoundException): @@ -2798,7 +2797,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise e subdir = os.path.join(self.subproject_dir, resolved) - subdir_abs = os.path.join(subproject_dir_abs, resolved) + subdir_abs = os.path.join(self.environment.get_source_dir(), subdir) os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True) self.global_args_frozen = True @@ -3062,6 +3061,10 @@ external dependencies (including libraries) must go to "dependencies".''') self.subproject_dir = spdirname self.build.subproject_dir = self.subproject_dir + if not self.is_subproject(): + wrap_mode = self.coredata.get_builtin_option('wrap_mode') + subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) + self.environment.wrap_resolver = wrap.Resolver(subproject_dir_abs, wrap_mode) self.build.projects[self.subproject] = proj_name mlog.log('Project name:', mlog.bold(proj_name)) @@ -3551,10 +3554,9 @@ external dependencies (including libraries) must go to "dependencies".''') has_fallback = 'fallback' in kwargs if not has_fallback and name: # Add an implicit fallback if we have a wrap file or a directory with the same name. 
- subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) - wrap_, directory = wrap.get_directory(subproject_dir_abs, name) - if wrap_ or os.path.exists(os.path.join(subproject_dir_abs, directory)): - kwargs['fallback'] = name + provider = self.environment.wrap_resolver.find_provider(name) + if provider: + kwargs['fallback'] = provider has_fallback = True if 'default_options' in kwargs and not has_fallback: diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 689fb4f..63ee349 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -111,6 +111,10 @@ class PackageDefinition: self.config.read(fname) except configparser.Error: raise WrapException('Failed to parse {}'.format(self.basename)) + self.parse_wrap_section() + self.parse_provide_section() + + def parse_wrap_section(self): if len(self.config.sections()) < 1: raise WrapException('Missing sections in {}'.format(self.basename)) self.wrap_section = self.config.sections()[0] @@ -120,6 +124,11 @@ class PackageDefinition: self.type = self.wrap_section[5:] self.values = dict(self.config[self.wrap_section]) + def parse_provide_section(self): + self.provide = {self.name: None} + if self.config.has_section('provide'): + self.provide.update(self.config['provide']) + def get(self, key: str) -> str: try: return self.values[key] @@ -145,17 +154,48 @@ def get_directory(subdir_root: str, packagename: str): return wrap, directory class Resolver: - def __init__(self, subdir_root: str, wrap_mode=WrapMode.default, current_subproject: str = ''): + def __init__(self, subdir_root: str, wrap_mode=WrapMode.default): self.wrap_mode = wrap_mode self.subdir_root = subdir_root - self.current_subproject = current_subproject self.cachedir = os.path.join(self.subdir_root, 'packagecache') self.filesdir = os.path.join(self.subdir_root, 'packagefiles') + self.wraps = {} # type: T.Dict[str, T.Tuple[T.Optional[PackageDefinition], T.Optional[str]]] + self.load_wraps() - def resolve(self, packagename: str, method: str) -> str: + def load_wraps(self): + if not os.path.isdir(self.subdir_root): + return + # Load wrap files upfront + for f in os.listdir(self.subdir_root): + if f.endswith('.wrap'): + packagename = f[:-5] + wrap, directory = get_directory(self.subdir_root, packagename) + for k in wrap.provide.keys(): + self.wraps[k] = (wrap, directory) + elif os.path.isdir(os.path.join(self.subdir_root, f)): + # Keep it in the case we have dirs with no corresponding wrap file. + self.wraps.setdefault(f, (None, f)) + + def find_provider(self, packagename: str): + # Return value is in the same format as fallback kwarg: + # ['subproject_name', 'variable_name'], or 'subproject_name'. + wrap, directory = self.wraps.get(packagename, (None, None)) + if wrap: + dep_var = wrap.provide[packagename] + if dep_var: + return [wrap.name, dep_var] + return wrap.name + return directory + + def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str: + self.current_subproject = current_subproject self.packagename = packagename - self.wrap, self.directory = get_directory(self.subdir_root, self.packagename) + self.wrap, self.directory = self.wraps.get(packagename, (None, self.packagename)) + if self.wrap and packagename != self.wrap.name: + m = 'subproject() must not be called by the name of a dependency it provides. Expecting {!r} but got {!r}.' 
+ raise WrapException(m.format(self.wrap.name, packagename)) self.dirname = os.path.join(self.subdir_root, self.directory) + meson_file = os.path.join(self.dirname, 'meson.build') cmake_file = os.path.join(self.dirname, 'CMakeLists.txt') diff --git a/test cases/common/102 subproject subdir/meson.build b/test cases/common/102 subproject subdir/meson.build index 46a2bce..bc202a3 100644 --- a/test cases/common/102 subproject subdir/meson.build +++ b/test cases/common/102 subproject subdir/meson.build @@ -29,3 +29,14 @@ assert(not d.found(), 'Dependency should be not-found') # Verify that implicit fallback works because subprojects/sub_implicit directory exists d = dependency('sub_implicit') assert(d.found(), 'Should implicitly fallback') + +# Verify that implicit fallback works because sub_implicit.wrap has +# `sub_implicit_provide1=` and the subproject overrides sub_implicit_provide1. +d = dependency('sub_implicit_provide1') +assert(d.found(), 'Should implicitly fallback') + +# Verify that implicit fallback works because sub_implicit.wrap has +# `sub_implicit_provide2=sub_implicit_provide2_dep` and does not override +# sub_implicit_provide2. +d = dependency('sub_implicit_provide2') +assert(d.found(), 'Should implicitly fallback') diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap new file mode 100644 index 0000000..c14fff0 --- /dev/null +++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap @@ -0,0 +1,5 @@ +[wrap-file] + +[provide] +sub_implicit_provide1= +sub_implicit_provide2=sub_implicit_provide2_dep diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build index 613bd05..64374d3 100644 --- a/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build +++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build @@ -2,3 +2,7 @@ project('sub_implicit', 'c', version : '1.0') dep = declare_dependency() meson.override_dependency('sub_implicit', dep) +meson.override_dependency('sub_implicit_provide1', dep) + +# This one is not overriden but the wrap file tells the variable name to use. +sub_implicit_provide2_dep = dep diff --git a/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap b/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap index 09ba4e8..11b2178 100644 --- a/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap +++ b/test cases/unit/12 promote/subprojects/s2/subprojects/athing.wrap @@ -1,2 +1 @@ -The contents of this wrap file are never evaluated so they -can be anything. +[wrap-file] -- cgit v1.1 From 71804e56eb3612eabc51887fe4d46961684a3ecc Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Mon, 13 Apr 2020 18:12:43 -0400 Subject: wrap: Add special 'dependency_names' key in [provide] section The value for that key must be a coma separated list of dependecy names provided by that subproject, when no variable name is needed because the subproject uses override_dependency(). 
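[Editor's note] A sketch of the subproject side that a `dependency_names` entry assumes; the `bar` and `baz` names below are hypothetical, not part of this patch:

```meson
# subprojects/<name>/meson.build -- every dependency listed under
#   [provide]
#   dependency_names = bar, baz
# in the wrap file is expected to be overridden here, so no variable
# name has to be recorded in the wrap file.
project('sketch', 'c', version : '1.0')

bar_dep = declare_dependency()
baz_dep = declare_dependency()
meson.override_dependency('bar', bar_dep)
meson.override_dependency('baz', baz_dep)
```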
--- docs/markdown/Wrap-dependency-system-manual.md | 22 ++++++++++++++++++---- mesonbuild/wrap/wrap.py | 14 +++++++++++++- .../common/102 subproject subdir/meson.build | 2 +- .../subprojects/sub_implicit.wrap | 4 ++-- 4 files changed, 34 insertions(+), 8 deletions(-) diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index cb7c6d6..b927944 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -145,12 +145,15 @@ wrap-git, the repository must contain all Meson build definitions. Wrap files can define the dependencies it provides in the `[provide]` section. When a wrap file provides the dependency `foo` any call do `dependency('foo')` will automatically fallback to that subproject even if no `fallback` keyword -argument is given. Each entry in the format `dependency_name = variable_name`, +argument is given. It is recommended for subprojects to call +`meson.override_dependency('foo', foo_dep)`, dependency name can then be added into +the special `dependency_names` entry which takes coma separated list of dependency +names. For backward compatibility with subprojects that does not call +`meson.override_dependency()`, the variable name can be provided in the wrap file +with entries in the format `dependency_name = variable_name`, where `dependency_name` usually match the corresponding pkg-config name and `variable_name` is the name of a variable defined in the subproject that should -be returned for that dependency. In the case the subproject uses -`meson.override_dependency('foo', foo_dep)` the `variable_name` can be left empty -in the wrap file. +be returned for that dependency. For example `glib.wrap` provides `glib-2.0`, `gobject-2.0` and `gio-2.0`. A wrap file for glib would look like: @@ -165,6 +168,17 @@ gobject-2.0=gobject_dep gio-2.0=gio_dep ``` +Alternatively, when using a recent enough version of glib that uses +`meson.override_dependency()`: +```ini +[wrap-git] +url=https://gitlab.gnome.org/GNOME/glib.git +revision=glib-2-62 + +[provide] +dependency_names = glib-2.0, gobject-2.0, gio-2.0 +``` + With such wrap file, `dependency('glib-2.0')` will automatically fallback to use `glib.wrap` and return `glib_dep` variable from the subproject. diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 63ee349..d645c2c 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -127,7 +127,19 @@ class PackageDefinition: def parse_provide_section(self): self.provide = {self.name: None} if self.config.has_section('provide'): - self.provide.update(self.config['provide']) + for k, v in self.config['provide'].items(): + if k == 'dependency_names': + # A coma separated list of dependency names that does not + # need a variable name + names = {n.strip(): None for n in v.split(',')} + self.provide.update(names) + continue + if not v: + m = ('Empty dependency variable name for {!r} in {}. 
' + 'If the subproject uses meson.override_dependency() ' + 'it can be added in the "dependency_names" special key.') + raise WrapException(m.format(k, self.basename)) + self.provide[k] = v def get(self, key: str) -> str: try: diff --git a/test cases/common/102 subproject subdir/meson.build b/test cases/common/102 subproject subdir/meson.build index bc202a3..6faff75 100644 --- a/test cases/common/102 subproject subdir/meson.build +++ b/test cases/common/102 subproject subdir/meson.build @@ -31,7 +31,7 @@ d = dependency('sub_implicit') assert(d.found(), 'Should implicitly fallback') # Verify that implicit fallback works because sub_implicit.wrap has -# `sub_implicit_provide1=` and the subproject overrides sub_implicit_provide1. +# `dependency_names=sub_implicit_provide1` and the subproject overrides sub_implicit_provide1. d = dependency('sub_implicit_provide1') assert(d.found(), 'Should implicitly fallback') diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap index c14fff0..e668a8d 100644 --- a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap +++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap @@ -1,5 +1,5 @@ [wrap-file] [provide] -sub_implicit_provide1= -sub_implicit_provide2=sub_implicit_provide2_dep +dependency_names = sub_implicit_provide1 +sub_implicit_provide2 = sub_implicit_provide2_dep -- cgit v1.1 From 288d1ae5a5de13c8844635023caf27378df4919b Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 15 Apr 2020 16:42:21 -0400 Subject: wrap: Do not fallback implicitly on optional dependency This fix the following common pattern, we don't want to implicitly fallback on the first line: foo_dep = dependency('foo', required: false) if not foo_dep.found() foo_dep = cc.find_library('foo', required : false) if not foo_dep.found() foo_dep = dependency('foo', fallback: 'foo') endif endif --- mesonbuild/interpreter.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 7c55932..89c9daa 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3552,8 +3552,11 @@ external dependencies (including libraries) must go to "dependencies".''') return self.notfound_dependency() has_fallback = 'fallback' in kwargs - if not has_fallback and name: - # Add an implicit fallback if we have a wrap file or a directory with the same name. + if not has_fallback and name and required: + # Add an implicit fallback if we have a wrap file or a directory with the same name, + # but only if this dependency is required. It is common to first check for a pkg-config, + # then fallback to use find_library() and only afterward check again the dependency + # with a fallback. provider = self.environment.wrap_resolver.find_provider(name) if provider: kwargs['fallback'] = provider -- cgit v1.1 From f08eed37cb69ba0d793c0f1d086eaef7f25c2ea3 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Mon, 20 Apr 2020 19:14:49 -0400 Subject: find_program: Fallback if a wrap file provide the program name We don't need the legacy variable name system as for dependency() fallbacks because meson.override_find_program() is largely used already, so we can just rely on it. 
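[Editor's note] A sketch of what this relies on, mirroring the `187 find override` test case added below; the `mytool` program name and the `mytool.py` script are hypothetical, and the corresponding wrap file would list `program_names = mytool` in its `[provide]` section:

```meson
# subprojects/tools/meson.build -- the subproject overrides the program
# name that its wrap file advertises under program_names.
project('tools')

prog = find_program('mytool.py')
meson.override_find_program('mytool', prog)
```

```meson
# main meson.build -- falls back to the 'tools' subproject when 'mytool'
# is not found on the system.
tool = find_program('mytool')
```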
--- docs/markdown/Wrap-dependency-system-manual.md | 13 ++++ docs/markdown/snippets/implicit_fallback.md | 5 ++ mesonbuild/interpreter.py | 83 +++++++++++++++------- mesonbuild/wrap/wrap.py | 14 ++++ test cases/common/187 find override/meson.build | 3 + .../common/187 find override/subprojects/sub.wrap | 5 ++ .../187 find override/subprojects/sub/meson.build | 4 ++ 7 files changed, 102 insertions(+), 25 deletions(-) create mode 100644 test cases/common/187 find override/subprojects/sub.wrap create mode 100644 test cases/common/187 find override/subprojects/sub/meson.build diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index b927944..dd8595b 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -71,6 +71,7 @@ revision = head - `directory` - name of the subproject root directory, defaults to the name of the wrap. Since *0.55.0* those can be used in all wrap types, they were previously reserved to `wrap-file`: + - `patch_url` - download url to retrieve an optional overlay archive - `patch_fallback_url` - fallback URL to be used when download from `patch_url` fails *Since: 0.55.0* - `patch_filename` - filename of the downloaded overlay archive @@ -182,6 +183,18 @@ dependency_names = glib-2.0, gobject-2.0, gio-2.0 With such wrap file, `dependency('glib-2.0')` will automatically fallback to use `glib.wrap` and return `glib_dep` variable from the subproject. +Programs can also be provided by wrap files, with the `program_names` key: +```ini +[wrap-git] +... + +[provide] +program_names = myprog, otherprog +``` + +With such wrap file, `find_program('myprog')` will automatically fallback to use +the subproject, assuming it uses `meson.override_find_program('myprog')`. + ## Using wrapped projects Wraps provide a convenient way of obtaining a project into your subproject directory. diff --git a/docs/markdown/snippets/implicit_fallback.md b/docs/markdown/snippets/implicit_fallback.md index bad1c71..3d5a833 100644 --- a/docs/markdown/snippets/implicit_fallback.md +++ b/docs/markdown/snippets/implicit_fallback.md @@ -14,3 +14,8 @@ Wrap files can define the dependencies it provides in the `[provide]` section. When a wrap file provides the dependency `foo` any call do `dependency('foo')` will automatically fallback to that subproject even if no `fallback` keyword argument is given. See [Wrap documentation](Wrap-dependency-system-manual.md#provide_section). + +## `find_program()` fallback + +When a program cannot be found on the system but a wrap file has its name in the +`[provide]` section, that subproject will be used as fallback. diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 89c9daa..e616d85 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3251,7 +3251,7 @@ external dependencies (including libraries) must go to "dependencies".''') return success - def program_from_file_for(self, for_machine, prognames, silent): + def program_from_file_for(self, for_machine, prognames): for p in unholder(prognames): if isinstance(p, mesonlib.File): continue # Always points to a local (i.e. self generated) file. 
@@ -3290,15 +3290,13 @@ external dependencies (including libraries) must go to "dependencies".''') if progobj.found(): return progobj - def program_from_overrides(self, command_names, silent=False): + def program_from_overrides(self, command_names, extra_info): for name in command_names: if not isinstance(name, str): continue if name in self.build.find_overrides: exe = self.build.find_overrides[name] - if not silent: - mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'), - '(overridden: %s)' % exe.description()) + extra_info.append(mlog.blue('(overriden)')) return ExternalProgramHolder(exe, self.subproject, self.backend) return None @@ -3316,40 +3314,75 @@ external dependencies (including libraries) must go to "dependencies".''') % name) self.build.find_overrides[name] = exe + def notfound_program(self, args): + return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject) + # TODO update modules to always pass `for_machine`. It is bad-form to assume # the host machine. def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST, required=True, silent=True, wanted='', search_dirs=None): - if not isinstance(args, list): - args = [args] + args = mesonlib.listify(args) - progobj = self.program_from_overrides(args, silent=silent) - if progobj is None: - progobj = self.program_from_file_for(for_machine, args, silent=silent) - if progobj is None: - progobj = self.program_from_system(args, search_dirs, silent=silent) - if progobj is None and args[0].endswith('python3'): - prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True) - progobj = ExternalProgramHolder(prog, self.subproject) - if required and (progobj is None or not progobj.found()): - raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args)) + extra_info = [] + progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info) if progobj is None: - return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject) - # Only store successful lookups - self.store_name_lookups(args) + progobj = self.notfound_program(args) + + if not progobj.found(): + mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO')) + if required: + m = 'Program {!r} not found' + raise InterpreterException(m.format(progobj.get_name())) + return progobj + if wanted: version = progobj.get_version(self) is_found, not_found, found = mesonlib.version_compare_many(version, wanted) if not is_found: mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'), - 'found {!r} but need:'.format(version), - ', '.join(["'{}'".format(e) for e in not_found])) + 'found', mlog.normal_cyan(version), 'but need:', + mlog.bold(', '.join(["'{}'".format(e) for e in not_found]))) if required: m = 'Invalid version of program, need {!r} {!r} found {!r}.' 
- raise InvalidArguments(m.format(progobj.get_name(), not_found, version)) - return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject) + raise InterpreterException(m.format(progobj.get_name(), not_found, version)) + return self.notfound_program(args) + extra_info.insert(0, mlog.normal_cyan(version)) + + # Only store successful lookups + self.store_name_lookups(args) + mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.green('YES'), *extra_info) return progobj + def program_lookup(self, args, for_machine, required, search_dirs, extra_info): + progobj = self.program_from_overrides(args, extra_info) + if progobj: + return progobj + + fallback = None + wrap_mode = self.coredata.get_builtin_option('wrap_mode') + if wrap_mode != WrapMode.nofallback: + fallback = self.environment.wrap_resolver.find_program_provider(args) + if fallback and wrap_mode == WrapMode.forcefallback: + return self.find_program_fallback(fallback, args, required, extra_info) + + progobj = self.program_from_file_for(for_machine, args) + if progobj is None: + progobj = self.program_from_system(args, search_dirs, silent=True) + if progobj is None and args[0].endswith('python3'): + prog = dependencies.ExternalProgram('python3', mesonlib.python_command, silent=True) + progobj = ExternalProgramHolder(prog, self.subproject) if prog.found() else None + if progobj is None and fallback and required: + progobj = self.find_program_fallback(fallback, args, required, extra_info) + + return progobj + + def find_program_fallback(self, fallback, args, required, extra_info): + mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program', + mlog.bold(' '.join(args))) + sp_kwargs = { 'required': required } + self.do_subproject(fallback, 'meson', sp_kwargs) + return self.program_from_overrides(args, extra_info) + @FeatureNewKwargs('find_program', '0.53.0', ['dirs']) @FeatureNewKwargs('find_program', '0.52.0', ['version']) @FeatureNewKwargs('find_program', '0.49.0', ['disabler']) @@ -3362,7 +3395,7 @@ external dependencies (including libraries) must go to "dependencies".''') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled') - return ExternalProgramHolder(dependencies.NonExistingExternalProgram(' '.join(args)), self.subproject) + return self.notfound_program(args) search_dirs = extract_search_dirs(kwargs) wanted = mesonlib.stringlistify(kwargs.get('version', [])) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index d645c2c..19e2175 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -126,6 +126,7 @@ class PackageDefinition: def parse_provide_section(self): self.provide = {self.name: None} + self.provide_programs = [] if self.config.has_section('provide'): for k, v in self.config['provide'].items(): if k == 'dependency_names': @@ -134,6 +135,11 @@ class PackageDefinition: names = {n.strip(): None for n in v.split(',')} self.provide.update(names) continue + if k == 'program_names': + # A coma separated list of program names + names = {n.strip(): None for n in v.split(',')} + self.provide_programs += names + continue if not v: m = ('Empty dependency variable name for {!r} in {}. 
' 'If the subproject uses meson.override_dependency() ' @@ -199,6 +205,14 @@ class Resolver: return wrap.name return directory + def find_program_provider(self, names: T.List[str]): + wraps = [i[0] for i in self.wraps.values()] + for name in names: + for wrap in wraps: + if wrap and name in wrap.provide_programs: + return wrap.name + return None + def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str: self.current_subproject = current_subproject self.packagename = packagename diff --git a/test cases/common/187 find override/meson.build b/test cases/common/187 find override/meson.build index 3b8af80..b277459 100644 --- a/test cases/common/187 find override/meson.build +++ b/test cases/common/187 find override/meson.build @@ -10,3 +10,6 @@ if not gencodegen.found() endif subdir('otherdir') + +tool = find_program('sometool') +assert(tool.found()) diff --git a/test cases/common/187 find override/subprojects/sub.wrap b/test cases/common/187 find override/subprojects/sub.wrap new file mode 100644 index 0000000..17aa332 --- /dev/null +++ b/test cases/common/187 find override/subprojects/sub.wrap @@ -0,0 +1,5 @@ +[wrap-file] +directory = sub + +[provide] +program_names = sometool diff --git a/test cases/common/187 find override/subprojects/sub/meson.build b/test cases/common/187 find override/subprojects/sub/meson.build new file mode 100644 index 0000000..640f270 --- /dev/null +++ b/test cases/common/187 find override/subprojects/sub/meson.build @@ -0,0 +1,4 @@ +project('tools') + +exe = find_program('gencodegen') +meson.override_find_program('sometool', exe) -- cgit v1.1 From fba796cf1312715b8b97dcb80a4b5c200332b2e8 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Fri, 29 May 2020 11:36:06 -0400 Subject: Fix typo: coma->comma --- docs/markdown/Wrap-dependency-system-manual.md | 2 +- mesonbuild/wrap/wrap.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index dd8595b..c96c73a 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -148,7 +148,7 @@ When a wrap file provides the dependency `foo` any call do `dependency('foo')` will automatically fallback to that subproject even if no `fallback` keyword argument is given. It is recommended for subprojects to call `meson.override_dependency('foo', foo_dep)`, dependency name can then be added into -the special `dependency_names` entry which takes coma separated list of dependency +the special `dependency_names` entry which takes comma separated list of dependency names. 
For backward compatibility with subprojects that does not call `meson.override_dependency()`, the variable name can be provided in the wrap file with entries in the format `dependency_name = variable_name`, diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 19e2175..1a6399d 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -130,13 +130,13 @@ class PackageDefinition: if self.config.has_section('provide'): for k, v in self.config['provide'].items(): if k == 'dependency_names': - # A coma separated list of dependency names that does not + # A comma separated list of dependency names that does not # need a variable name names = {n.strip(): None for n in v.split(',')} self.provide.update(names) continue if k == 'program_names': - # A coma separated list of program names + # A comma separated list of program names names = {n.strip(): None for n in v.split(',')} self.provide_programs += names continue -- cgit v1.1 From 13316f99feaa9831146f6456ce11916042a871cb Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Thu, 4 Jun 2020 14:35:18 -0400 Subject: wrap: Refactor to split wraps dictionary into 3 separate dicts It makes the code cleaner to have 3 separate dictionaries for packagename, dependency and programs. --- mesonbuild/interpreter.py | 2 +- mesonbuild/mdist.py | 2 +- mesonbuild/wrap/wrap.py | 107 ++++++++++++++++++++++++---------------------- 3 files changed, 59 insertions(+), 52 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index e616d85..3e64a67 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3590,7 +3590,7 @@ external dependencies (including libraries) must go to "dependencies".''') # but only if this dependency is required. It is common to first check for a pkg-config, # then fallback to use find_library() and only afterward check again the dependency # with a fallback. 
- provider = self.environment.wrap_resolver.find_provider(name) + provider = self.environment.wrap_resolver.find_dep_provider(name) if provider: kwargs['fallback'] = provider has_fallback = True diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py index 5ab0ad4..9d94ace 100644 --- a/mesonbuild/mdist.py +++ b/mesonbuild/mdist.py @@ -259,7 +259,7 @@ def run(options): if options.include_subprojects: subproject_dir = os.path.join(src_root, b.subproject_dir) for sub in b.subprojects: - _, directory = wrap.get_directory(subproject_dir, sub) + directory = wrap.get_directory(subproject_dir, sub) subprojects.append(os.path.join(b.subproject_dir, directory)) extra_meson_args.append('-Dwrap_mode=nodownload') diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 1a6399d..536b8ae 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -103,9 +103,22 @@ class WrapNotFoundException(WrapException): class PackageDefinition: def __init__(self, fname: str): - self.filename = fname + self.type = None + self.values = {} # type: T.Dict[str, str] + self.provided_deps = {} # type: T.Dict[str, T.Optional[str]] + self.provided_programs = [] # type: T.List[str] self.basename = os.path.basename(fname) - self.name = self.basename[:-5] + self.name = self.basename + if self.name.endswith('.wrap'): + self.name = self.name[:-5] + self.provided_deps[self.name] = None + if fname.endswith('.wrap'): + self.parse_wrap(fname) + self.directory = self.values.get('directory', self.name) + if os.path.dirname(self.directory): + raise WrapException('Directory key must be a name and not a path') + + def parse_wrap(self, fname: str): try: self.config = configparser.ConfigParser(interpolation=None) self.config.read(fname) @@ -125,27 +138,25 @@ class PackageDefinition: self.values = dict(self.config[self.wrap_section]) def parse_provide_section(self): - self.provide = {self.name: None} - self.provide_programs = [] if self.config.has_section('provide'): for k, v in self.config['provide'].items(): if k == 'dependency_names': # A comma separated list of dependency names that does not # need a variable name names = {n.strip(): None for n in v.split(',')} - self.provide.update(names) + self.provided_deps.update(names) continue if k == 'program_names': # A comma separated list of program names - names = {n.strip(): None for n in v.split(',')} - self.provide_programs += names + names = [n.strip() for n in v.split(',')] + self.provided_programs += names continue if not v: m = ('Empty dependency variable name for {!r} in {}. ' 'If the subproject uses meson.override_dependency() ' 'it can be added in the "dependency_names" special key.') raise WrapException(m.format(k, self.basename)) - self.provide[k] = v + self.provided_deps[k] = v def get(self, key: str) -> str: try: @@ -154,22 +165,12 @@ class PackageDefinition: m = 'Missing key {!r} in {}' raise WrapException(m.format(key, self.basename)) -def load_wrap(subdir_root: str, packagename: str) -> PackageDefinition: +def get_directory(subdir_root: str, packagename: str) -> str: fname = os.path.join(subdir_root, packagename + '.wrap') if os.path.isfile(fname): - return PackageDefinition(fname) - return None - -def get_directory(subdir_root: str, packagename: str): - directory = packagename - # We always have to load the wrap file, if it exists, because it could - # override the default directory name. 
- wrap = load_wrap(subdir_root, packagename) - if wrap and 'directory' in wrap.values: - directory = wrap.get('directory') - if os.path.dirname(directory): - raise WrapException('Directory key must be a name and not a path') - return wrap, directory + wrap = PackageDefinition(fname) + return wrap.directory + return packagename class Resolver: def __init__(self, subdir_root: str, wrap_mode=WrapMode.default): @@ -177,49 +178,60 @@ class Resolver: self.subdir_root = subdir_root self.cachedir = os.path.join(self.subdir_root, 'packagecache') self.filesdir = os.path.join(self.subdir_root, 'packagefiles') - self.wraps = {} # type: T.Dict[str, T.Tuple[T.Optional[PackageDefinition], T.Optional[str]]] + self.wraps = {} # type: T.Dict[str, PackageDefinition] + self.provided_deps = {} # type: T.Dict[str, PackageDefinition] + self.provided_programs = {} # type: T.Dict[str, PackageDefinition] self.load_wraps() def load_wraps(self): if not os.path.isdir(self.subdir_root): return - # Load wrap files upfront for f in os.listdir(self.subdir_root): - if f.endswith('.wrap'): - packagename = f[:-5] - wrap, directory = get_directory(self.subdir_root, packagename) - for k in wrap.provide.keys(): - self.wraps[k] = (wrap, directory) - elif os.path.isdir(os.path.join(self.subdir_root, f)): - # Keep it in the case we have dirs with no corresponding wrap file. - self.wraps.setdefault(f, (None, f)) - - def find_provider(self, packagename: str): + fname = os.path.join(self.subdir_root, f) + # Ignore not .wrap files, and reserved directories. + if (os.path.isfile(fname) and not fname.endswith('.wrap')) or \ + f in ['packagecache', 'packagefiles']: + continue + wrap = PackageDefinition(fname) + # We could have added a dummy package definition for the directory, + # replace it now with the proper wrap. This happens if we already + # downloaded the subproject into 'foo-1.0' directory and we now found + # 'foo.wrap' file. + if wrap.directory in self.wraps: + del self.wraps[wrap.directory] + self.wraps[wrap.name] = wrap + for k in wrap.provided_deps.keys(): + self.provided_deps[k] = wrap + for k in wrap.provided_programs: + self.provided_programs[k] = wrap + + def find_dep_provider(self, packagename: str): # Return value is in the same format as fallback kwarg: # ['subproject_name', 'variable_name'], or 'subproject_name'. - wrap, directory = self.wraps.get(packagename, (None, None)) + wrap = self.provided_deps.get(packagename) if wrap: - dep_var = wrap.provide[packagename] + dep_var = wrap.provided_deps.get(packagename) if dep_var: return [wrap.name, dep_var] return wrap.name - return directory + return None def find_program_provider(self, names: T.List[str]): - wraps = [i[0] for i in self.wraps.values()] for name in names: - for wrap in wraps: - if wrap and name in wrap.provide_programs: - return wrap.name + wrap = self.provided_programs.get(name) + if wrap: + return wrap.name return None def resolve(self, packagename: str, method: str, current_subproject: str = '') -> str: self.current_subproject = current_subproject self.packagename = packagename - self.wrap, self.directory = self.wraps.get(packagename, (None, self.packagename)) - if self.wrap and packagename != self.wrap.name: - m = 'subproject() must not be called by the name of a dependency it provides. Expecting {!r} but got {!r}.' 
- raise WrapException(m.format(self.wrap.name, packagename)) + self.directory = packagename + self.wrap = self.wraps.get(packagename) + if not self.wrap: + m = 'Subproject directory not found and {}.wrap file not found' + raise WrapNotFoundException(m.format(self.packagename)) + self.directory = self.wrap.directory self.dirname = os.path.join(self.subdir_root, self.directory) meson_file = os.path.join(self.dirname, 'meson.build') @@ -241,11 +253,6 @@ class Resolver: if not os.path.isdir(self.dirname): raise WrapException('Path already exists but is not a directory') else: - # A wrap file is required to download - if not self.wrap: - m = 'Subproject directory not found and {}.wrap file not found' - raise WrapNotFoundException(m.format(self.packagename)) - if self.wrap.type == 'file': self.get_file() else: -- cgit v1.1 From 41aefd2145bf287c9c07139a7334845f8c03374c Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Fri, 12 Jun 2020 11:45:26 -0400 Subject: wrap: Abort if multiple wraps provide the same dep or program --- mesonbuild/wrap/wrap.py | 34 +++++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 536b8ae..e0c7a04 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -186,23 +186,35 @@ class Resolver: def load_wraps(self): if not os.path.isdir(self.subdir_root): return - for f in os.listdir(self.subdir_root): - fname = os.path.join(self.subdir_root, f) - # Ignore not .wrap files, and reserved directories. - if (os.path.isfile(fname) and not fname.endswith('.wrap')) or \ - f in ['packagecache', 'packagefiles']: + root, dirs, files = next(os.walk(self.subdir_root)) + for i in files: + if not i.endswith('.wrap'): continue + fname = os.path.join(self.subdir_root, i) wrap = PackageDefinition(fname) - # We could have added a dummy package definition for the directory, - # replace it now with the proper wrap. This happens if we already - # downloaded the subproject into 'foo-1.0' directory and we now found - # 'foo.wrap' file. - if wrap.directory in self.wraps: - del self.wraps[wrap.directory] self.wraps[wrap.name] = wrap + if wrap.directory in dirs: + dirs.remove(wrap.directory) + # Add dummy package definition for directories not associated with a wrap file. 
+ for i in dirs: + if i in ['packagecache', 'packagefiles']: + continue + fname = os.path.join(self.subdir_root, i) + wrap = PackageDefinition(fname) + self.wraps[wrap.name] = wrap + + for wrap in self.wraps.values(): for k in wrap.provided_deps.keys(): + if k in self.provided_deps: + prev_wrap = self.provided_deps[k] + m = 'Multiple wrap files provide {!r} dependency: {} and {}' + raise WrapException(m.format(k, wrap.basename, prev_wrap.basename)) self.provided_deps[k] = wrap for k in wrap.provided_programs: + if k in self.provided_programs: + prev_wrap = self.provided_programs[k] + m = 'Multiple wrap files provide {!r} program: {} and {}' + raise WrapException(m.format(k, wrap.basename, prev_wrap.basename)) self.provided_programs[k] = wrap def find_dep_provider(self, packagename: str): -- cgit v1.1 From 95c3fee47d75ef493865b3ffbcea7989ef246287 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Fri, 12 Jun 2020 11:57:25 -0400 Subject: wrap: Update doc to give dependency_names example first --- docs/markdown/Wrap-dependency-system-manual.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index c96c73a..0ff304d 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -156,28 +156,28 @@ where `dependency_name` usually match the corresponding pkg-config name and `variable_name` is the name of a variable defined in the subproject that should be returned for that dependency. -For example `glib.wrap` provides `glib-2.0`, `gobject-2.0` and `gio-2.0`. A wrap -file for glib would look like: +For example when using a recent enough version of glib that uses +`meson.override_dependency()` to override `glib-2.0`, `gobject-2.0` and `gio-2.0`, +a wrap file would look like: ```ini [wrap-git] url=https://gitlab.gnome.org/GNOME/glib.git revision=glib-2-62 [provide] -glib-2.0=glib_dep -gobject-2.0=gobject_dep -gio-2.0=gio_dep +dependency_names = glib-2.0, gobject-2.0, gio-2.0 ``` -Alternatively, when using a recent enough version of glib that uses -`meson.override_dependency()`: +With older version of glib dependency variable names need to be specified: ```ini [wrap-git] url=https://gitlab.gnome.org/GNOME/glib.git revision=glib-2-62 [provide] -dependency_names = glib-2.0, gobject-2.0, gio-2.0 +glib-2.0=glib_dep +gobject-2.0=gobject_dep +gio-2.0=gio_dep ``` With such wrap file, `dependency('glib-2.0')` will automatically fallback to use -- cgit v1.1 From f7a07ee91a77f68e27b1cf60f5ffcd3296f15b7b Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 23 Jun 2020 21:34:54 -0400 Subject: interpreter: Already configured fallback should be used for optional dep --- mesonbuild/interpreter.py | 8 +++++--- test cases/common/102 subproject subdir/meson.build | 7 +++++++ .../common/102 subproject subdir/subprojects/sub_implicit.wrap | 1 + .../102 subproject subdir/subprojects/sub_implicit/meson.build | 3 +++ 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 3e64a67..7896f51 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3585,13 +3585,15 @@ external dependencies (including libraries) must go to "dependencies".''') return self.notfound_dependency() has_fallback = 'fallback' in kwargs - if not has_fallback and name and required: + if not has_fallback and name: # Add an implicit fallback if we have a wrap file or a directory with 
the same name, # but only if this dependency is required. It is common to first check for a pkg-config, # then fallback to use find_library() and only afterward check again the dependency - # with a fallback. + # with a fallback. If the fallback has already been configured then we have to use it + # even if the dependency is not required. provider = self.environment.wrap_resolver.find_dep_provider(name) - if provider: + dirname = mesonlib.listify(provider)[0] + if provider and (required or dirname in self.subprojects): kwargs['fallback'] = provider has_fallback = True diff --git a/test cases/common/102 subproject subdir/meson.build b/test cases/common/102 subproject subdir/meson.build index 6faff75..93093bf 100644 --- a/test cases/common/102 subproject subdir/meson.build +++ b/test cases/common/102 subproject subdir/meson.build @@ -40,3 +40,10 @@ assert(d.found(), 'Should implicitly fallback') # sub_implicit_provide2. d = dependency('sub_implicit_provide2') assert(d.found(), 'Should implicitly fallback') + +# sub_implicit.wrap provides glib-2.0 and we already configured that subproject, +# so we must not return the system dependency here. Using glib-2.0 here because +# some CI runners have it installed. +d = dependency('glib-2.0', required : false) +assert(d.found()) +assert(d.type_name() == 'internal') diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap index e668a8d..6f2dab6 100644 --- a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap +++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap @@ -1,5 +1,6 @@ [wrap-file] [provide] +glib-2.0 = glib_dep dependency_names = sub_implicit_provide1 sub_implicit_provide2 = sub_implicit_provide2_dep diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build index 64374d3..24609ae 100644 --- a/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build +++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit/meson.build @@ -6,3 +6,6 @@ meson.override_dependency('sub_implicit_provide1', dep) # This one is not overriden but the wrap file tells the variable name to use. sub_implicit_provide2_dep = dep + +# This one is not overriden but the wrap file tells the variable name to use. 
+glib_dep = dep -- cgit v1.1 From 7c90639078650d9457db21a024d437cbb90452c1 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 23 Jun 2020 22:03:43 -0400 Subject: interpreter: Don't abort if dep isn't required and sub didn't override --- mesonbuild/interpreter.py | 8 ++++++-- test cases/common/102 subproject subdir/meson.build | 7 +++++++ .../common/102 subproject subdir/subprojects/sub_implicit.wrap | 2 +- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 7896f51..12d6cde 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3489,8 +3489,12 @@ external dependencies (including libraries) must go to "dependencies".''') raise DependencyException(m.format(display_name)) return DependencyHolder(cached_dep, self.subproject) else: - m = 'Subproject {} did not override dependency {}' - raise DependencyException(m.format(subproj_path, display_name)) + if required: + m = 'Subproject {} did not override dependency {}' + raise DependencyException(m.format(subproj_path, display_name)) + mlog.log('Dependency', mlog.bold(display_name), 'from subproject', + mlog.bold(subproj_path), 'found:', mlog.red('NO')) + return self.notfound_dependency() if subproject.found(): self.verify_fallback_consistency(dirname, varname, cached_dep) dep = self.subprojects[dirname].get_variable_method([varname], {}) diff --git a/test cases/common/102 subproject subdir/meson.build b/test cases/common/102 subproject subdir/meson.build index 93093bf..a891ca9 100644 --- a/test cases/common/102 subproject subdir/meson.build +++ b/test cases/common/102 subproject subdir/meson.build @@ -47,3 +47,10 @@ assert(d.found(), 'Should implicitly fallback') d = dependency('glib-2.0', required : false) assert(d.found()) assert(d.type_name() == 'internal') + +# sub_implicit.wrap provides gobject-2.0 and we already configured that subproject, +# so we must not return the system dependency here. But since the subproject did +# not override that dependency and its not required, not-found should be returned. +# Using gobject-2.0 here because some CI runners have it installed. +d = dependency('gobject-2.0', required : false) +assert(not d.found()) diff --git a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap index 6f2dab6..a809c43 100644 --- a/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap +++ b/test cases/common/102 subproject subdir/subprojects/sub_implicit.wrap @@ -2,5 +2,5 @@ [provide] glib-2.0 = glib_dep -dependency_names = sub_implicit_provide1 +dependency_names = sub_implicit_provide1, gobject-2.0 sub_implicit_provide2 = sub_implicit_provide2_dep -- cgit v1.1 From 576493982da325a739f04e5455ef0643b49d94f1 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Mon, 29 Jun 2020 15:18:14 -0400 Subject: wrap: Add failing unit test for fallback consistency Dependency 'foo' is overriden with 'foo_dep' so using fallback variable name 'bar_dep' should abort. 
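The rule this failing test exercises can be summarised with a small, self-contained sketch. The function and dictionaries below are hypothetical stand-ins, not Meson internals: once a subproject has overridden `foo` with one dependency object, a wrap file whose `[provide]` entry points at a different variable must make configuration fail rather than be used silently.

```python
# Hedged illustration of the consistency rule; names are hypothetical,
# not Meson's actual verify_fallback_consistency() implementation.
def verify_fallback_consistency(dep_name, wrap_variable, overrides, variables):
    overridden = overrides.get(dep_name)      # object from meson.override_dependency()
    fallback = variables.get(wrap_variable)   # object the wrap file's [provide] points at
    if overridden is not None and fallback is not overridden:
        raise RuntimeError(
            'Inconsistency: subproject has overridden {!r} with another '
            'variable than {!r}'.format(dep_name, wrap_variable))

foo_dep, bar_dep = object(), object()
overrides = {'foo': foo_dep}
variables = {'foo_dep': foo_dep, 'bar_dep': bar_dep}
verify_fallback_consistency('foo', 'foo_dep', overrides, variables)    # consistent, passes
# verify_fallback_consistency('foo', 'bar_dep', overrides, variables)  # would raise
```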
--- test cases/failing/106 fallback consistency/meson.build | 3 +++ test cases/failing/106 fallback consistency/subprojects/foo.wrap | 6 ++++++ .../failing/106 fallback consistency/subprojects/foo/meson.build | 6 ++++++ test cases/failing/106 fallback consistency/test.json | 7 +++++++ 4 files changed, 22 insertions(+) create mode 100644 test cases/failing/106 fallback consistency/meson.build create mode 100644 test cases/failing/106 fallback consistency/subprojects/foo.wrap create mode 100644 test cases/failing/106 fallback consistency/subprojects/foo/meson.build create mode 100644 test cases/failing/106 fallback consistency/test.json diff --git a/test cases/failing/106 fallback consistency/meson.build b/test cases/failing/106 fallback consistency/meson.build new file mode 100644 index 0000000..1b007f5 --- /dev/null +++ b/test cases/failing/106 fallback consistency/meson.build @@ -0,0 +1,3 @@ +project('fallback consistency') + +dependency('foo') diff --git a/test cases/failing/106 fallback consistency/subprojects/foo.wrap b/test cases/failing/106 fallback consistency/subprojects/foo.wrap new file mode 100644 index 0000000..28055d9 --- /dev/null +++ b/test cases/failing/106 fallback consistency/subprojects/foo.wrap @@ -0,0 +1,6 @@ +[wrap-file] +source_url = http://host.invalid/foo.tar.gz +source_filename = foo.tar.gz + +[provide] +foo = bar_dep diff --git a/test cases/failing/106 fallback consistency/subprojects/foo/meson.build b/test cases/failing/106 fallback consistency/subprojects/foo/meson.build new file mode 100644 index 0000000..fb58a4a --- /dev/null +++ b/test cases/failing/106 fallback consistency/subprojects/foo/meson.build @@ -0,0 +1,6 @@ +project('sub') + +foo_dep = declare_dependency() +meson.override_dependency('foo', foo_dep) + +bar_dep = declare_dependency() diff --git a/test cases/failing/106 fallback consistency/test.json b/test cases/failing/106 fallback consistency/test.json new file mode 100644 index 0000000..af1a429 --- /dev/null +++ b/test cases/failing/106 fallback consistency/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "test cases/failing/106 fallback consistency/meson.build:3:0: ERROR: Inconsistency: Subproject has overridden the dependency with another variable than 'bar_dep'" + } + ] +} -- cgit v1.1 From 14cc2efcfef9a404498b3532ffa8130cc092f6f6 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Wed, 1 Jul 2020 18:12:44 +0300 Subject: Store website build script. 
[skip ci] --- tools/build_website.py | 51 ++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100755 tools/build_website.py diff --git a/tools/build_website.py b/tools/build_website.py new file mode 100755 index 0000000..5486b69 --- /dev/null +++ b/tools/build_website.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 + +import os, sys, subprocess, shutil + +assert(os.getcwd() == '/home/jpakkane') + +from glob import glob + +def purge(fname): + if not os.path.exists(fname): + return + if os.path.isdir(fname): + shutil.rmtree(fname) + os.unlink(fname) + +def update(): + webdir = 'mesonweb' + repodir = 'mesonwebbuild' + docdir = os.path.join(repodir, 'docs') + builddir = os.path.join(docdir, 'builddir') + htmldir = os.path.join(builddir, 'Meson documentation-doc/html') +# subprocess.check_call(['git', 'pull'], cwd=webdir) + subprocess.check_call(['git', 'fetch', '-a'], cwd=repodir) + subprocess.check_call(['git', 'reset', '--hard', 'origin/master'], + cwd=repodir) + if os.path.isdir(htmldir): + shutil.rmtree(htmldir) + if os.path.isdir(builddir): + shutil.rmtree(builddir) + env = os.environ.copy() + env['PATH'] = env['PATH'] + ':/home/jpakkane/.local/bin' + subprocess.check_call(['../meson.py', '.', 'builddir'], cwd=docdir, env=env) + subprocess.check_call(['ninja'], cwd=builddir) + old_files = glob(os.path.join(webdir, '*')) + for f in old_files: + base = f[len(webdir)+1:] + if base == 'CNAME' or base == 'favicon.png': + continue + subprocess.check_call(['git', 'rm', '-rf', base], cwd=webdir) + assert(os.path.isdir(webdir)) + new_entries = glob(os.path.join(htmldir, '*')) + for e in new_entries: + shutil.move(e, webdir) + subprocess.check_call('git add *', shell=True, cwd=webdir) + subprocess.check_call(['git', 'commit', '-a', '-m', 'Bleep. Bloop. I am a bot.'], + cwd=webdir) + subprocess.check_call(['git', 'push'], cwd=webdir) + shutil.rmtree(builddir) + +if __name__ == '__main__': + update() -- cgit v1.1 From a296ee44a1d6bbf72c87bbb6f1063272e0cc24bf Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Thu, 2 Jul 2020 18:44:50 +0300 Subject: Skip doc test when docs are not available. --- run_unittests.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index 2f9fb7f..5fd790c 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4789,6 +4789,9 @@ recommended as it is not supported on some platforms''') Test that all listed meson commands are documented in Commands.md. ''' + # The docs directory is not in release tarballs. + if not os.path.isdir('docs'): + raise unittest.SkipTest('Doc directory does not exist.') doc_path = 'docs/markdown_dynamic/Commands.md' md = None -- cgit v1.1 From 85686e21d78a587de64dc31b5f50a513ac4b585e Mon Sep 17 00:00:00 2001 From: Marvin Scholz Date: Thu, 2 Jul 2020 11:21:57 +0200 Subject: environment: Correctly detect arm64 as aarch64 On some systems aarch64 is reported as arm64. Due to mesons mangling of everything that starts with arm, it would end up being detected as arm (which implies 32 bit) which is incorrect. 
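The reasoning above boils down to an ordering problem in CPU-family normalization. A minimal sketch, using a hypothetical helper rather than Meson's actual detect_cpu_family(), shows why the exact `arm64` match has to run before the `startswith('arm')` catch-all:

```python
# Minimal sketch, not Meson's actual detect_cpu_family(): an exact 'arm64'
# match must run before the startswith('arm') catch-all, otherwise a 64-bit
# CPU gets classified as 32-bit 'arm'.
def normalize_cpu_family(trial: str) -> str:
    trial = trial.lower()
    if trial == 'arm64':                         # some systems report aarch64 as arm64
        return 'aarch64'
    if trial.startswith('arm') or trial.startswith('earm'):
        return 'arm'                             # 32-bit ARM families
    if trial.startswith(('powerpc64', 'ppc64')):
        return 'ppc64'
    return trial

assert normalize_cpu_family('arm64') == 'aarch64'
assert normalize_cpu_family('armv7l') == 'arm'
assert normalize_cpu_family('aarch64') == 'aarch64'
```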
--- mesonbuild/environment.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index d1cbfe7..8083aa8 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -341,6 +341,8 @@ def detect_cpu_family(compilers: CompilersDict) -> str: trial = 'x86' elif trial == 'bepc': trial = 'x86' + elif trial == 'arm64': + trial = 'aarch64' elif trial.startswith('arm') or trial.startswith('earm'): trial = 'arm' elif trial.startswith(('powerpc64', 'ppc64')): -- cgit v1.1 From f8bec97084756f1f21e470fffa6c1b5546a43681 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Wed, 1 Jul 2020 18:18:16 +0300 Subject: Bump version number for RC1. --- man/meson.1 | 2 +- mesonbuild/coredata.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/man/meson.1 b/man/meson.1 index eb3fec2..2054afa 100644 --- a/man/meson.1 +++ b/man/meson.1 @@ -1,4 +1,4 @@ -.TH MESON "1" "March 2020" "meson 0.54.0" "User Commands" +.TH MESON "1" "July 2020" "meson 0.55.0" "User Commands" .SH NAME meson - a high productivity build system .SH DESCRIPTION diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 329c333..6ef04bc 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -38,7 +38,7 @@ if T.TYPE_CHECKING: OptionDictType = T.Dict[str, 'UserOption[T.Any]'] -version = '0.54.999' +version = '0.55.0.rc1' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode'] default_yielding = False -- cgit v1.1 From 92075f5ef3c5df3b5e6ec4ae5189e58acb2e7c3f Mon Sep 17 00:00:00 2001 From: Alexander Neumann Date: Sat, 23 May 2020 19:35:47 +0200 Subject: give user control of option skip_sanity_check --- mesonbuild/interpreter.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 12d6cde..aad77b0 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3196,13 +3196,13 @@ external dependencies (including libraries) must go to "dependencies".''') return success def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool: - if for_machine != MachineChoice.HOST: - return False - if not self.environment.is_cross_build(): - return False should = self.environment.properties.host.get('skip_sanity_check', False) if not isinstance(should, bool): raise InterpreterException('Option skip_sanity_check must be a boolean.') + if for_machine != MachineChoice.HOST and not should: + return False + if not self.environment.is_cross_build() and not should: + return False return should def add_languages_for(self, args, required, for_machine: MachineChoice): -- cgit v1.1 From f66c8c35dab22f547601a48217ee8a53be28e858 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Thu, 2 Jul 2020 16:39:10 -0400 Subject: qt: Fix has_tools() when required=False Improve logs by making it clear when the program is found but has wrong version. 
--- mesonbuild/dependencies/ui.py | 27 +++++++++++++---------- mesonbuild/interpreter.py | 8 +++++-- mesonbuild/modules/qt.py | 51 +++++++++++++------------------------------ 3 files changed, 36 insertions(+), 50 deletions(-) diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index 3bba3dc..95dfe2b 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -229,11 +229,14 @@ class QtBaseDependency(ExternalDependency): bins = ['moc', 'uic', 'rcc', 'lrelease'] found = {b: NonExistingExternalProgram(name='{}-{}'.format(b, self.name)) for b in bins} + wanted = '== {}'.format(self.version) def gen_bins(): for b in bins: if self.bindir: yield os.path.join(self.bindir, b), b, False + # prefer the -qt of the tool to the plain one, as we + # don't know what the unsuffixed one points to without calling it. yield '{}-{}'.format(b, self.name), b, False yield b, b, self.required if b != 'lrelease' else False @@ -241,12 +244,6 @@ class QtBaseDependency(ExternalDependency): if found[name].found(): continue - # prefer the -qt of the tool to the plain one, as we - # don't know what the unsuffixed one points to without calling it. - p = interp_obj.find_program_impl([b], silent=True, required=required).held_object - if not p.found(): - continue - if name == 'lrelease': arg = ['-version'] elif mesonlib.version_compare(self.version, '>= 5'): @@ -255,12 +252,18 @@ class QtBaseDependency(ExternalDependency): arg = ['-v'] # Ensure that the version of qt and each tool are the same - _, out, err = mesonlib.Popen_safe(p.get_command() + arg) - if b.startswith('lrelease') or not self.version.startswith('4'): - care = out - else: - care = err - if mesonlib.version_compare(self.version, '== {}'.format(care.split(' ')[-1])): + def get_version(p): + _, out, err = mesonlib.Popen_safe(p.get_command() + arg) + if b.startswith('lrelease') or not self.version.startswith('4'): + care = out + else: + care = err + return care.split(' ')[-1].replace(')', '') + + p = interp_obj.find_program_impl([b], required=required, + version_func=get_version, + wanted=wanted).held_object + if p.found(): found[name] = p return tuple([found[b] for b in bins]) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index aad77b0..633615d 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3320,7 +3320,8 @@ external dependencies (including libraries) must go to "dependencies".''') # TODO update modules to always pass `for_machine`. It is bad-form to assume # the host machine. def find_program_impl(self, args, for_machine: MachineChoice = MachineChoice.HOST, - required=True, silent=True, wanted='', search_dirs=None): + required=True, silent=True, wanted='', search_dirs=None, + version_func=None): args = mesonlib.listify(args) extra_info = [] @@ -3336,7 +3337,10 @@ external dependencies (including libraries) must go to "dependencies".''') return progobj if wanted: - version = progobj.get_version(self) + if version_func: + version = version_func(progobj) + else: + version = progobj.get_version(self) is_found, not_found, found = mesonlib.version_compare_many(version, wanted) if not is_found: mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'), diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py index c7da530..c2b1e01 100644 --- a/mesonbuild/modules/qt.py +++ b/mesonbuild/modules/qt.py @@ -15,8 +15,8 @@ import os from .. import mlog from .. 
import build -from ..mesonlib import MesonException, Popen_safe, extract_as_list, File, unholder -from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency +from ..mesonlib import MesonException, extract_as_list, File, unholder +from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency, NonExistingExternalProgram import xml.etree.ElementTree as ET from . import ModuleReturnValue, get_include_args, ExtensionModule from ..interpreterbase import noPosargs, permittedKwargs, FeatureNew, FeatureNewKwargs @@ -36,43 +36,22 @@ class QtBaseModule(ExtensionModule): self.snippets.add('has_tools') self.qt_version = qt_version - def _detect_tools(self, env, method): + def _detect_tools(self, env, method, required=True): if self.tools_detected: return + self.tools_detected = True mlog.log('Detecting Qt{version} tools'.format(version=self.qt_version)) - # FIXME: We currently require QtX to exist while importing the module. - # We should make it gracefully degrade and not create any targets if - # the import is marked as 'optional' (not implemented yet) - kwargs = {'required': 'true', 'modules': 'Core', 'silent': 'true', 'method': method} + kwargs = {'required': required, 'modules': 'Core', 'method': method} qt = _QT_DEPS_LUT[self.qt_version](env, kwargs) - # Get all tools and then make sure that they are the right version - self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter) - # Moc, uic and rcc write their version strings to stderr. - # Moc and rcc return a non-zero result when doing so. - # What kind of an idiot thought that was a good idea? - for compiler, compiler_name in ((self.moc, "Moc"), (self.uic, "Uic"), (self.rcc, "Rcc"), (self.lrelease, "lrelease")): - if compiler.found(): - # Workaround since there is no easy way to know which tool/version support which flag - for flag in ['-v', '-version']: - p, stdout, stderr = Popen_safe(compiler.get_command() + [flag])[0:3] - if p.returncode == 0: - break - stdout = stdout.strip() - stderr = stderr.strip() - if 'Qt {}'.format(self.qt_version) in stderr: - compiler_ver = stderr - elif 'version {}.'.format(self.qt_version) in stderr: - compiler_ver = stderr - elif ' {}.'.format(self.qt_version) in stdout: - compiler_ver = stdout - else: - raise MesonException('{name} preprocessor is not for Qt {version}. 
Output:\n{stdo}\n{stderr}'.format( - name=compiler_name, version=self.qt_version, stdo=stdout, stderr=stderr)) - mlog.log(' {}:'.format(compiler_name.lower()), mlog.green('YES'), '({path}, {version})'.format( - path=compiler.get_path(), version=compiler_ver.split()[-1])) - else: - mlog.log(' {}:'.format(compiler_name.lower()), mlog.red('NO')) - self.tools_detected = True + if qt.found(): + # Get all tools and then make sure that they are the right version + self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter) + else: + suffix = '-qt{}'.format(self.qt_version) + self.moc = NonExistingExternalProgram(name='moc' + suffix) + self.uic = NonExistingExternalProgram(name='uic' + suffix) + self.rcc = NonExistingExternalProgram(name='rcc' + suffix) + self.lrelease = NonExistingExternalProgram(name='lrelease' + suffix) def parse_qrc(self, state, rcc_file): if type(rcc_file) is str: @@ -128,7 +107,7 @@ class QtBaseModule(ExtensionModule): if disabled: mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled') return False - self._detect_tools(state.environment, method) + self._detect_tools(state.environment, method, required=False) for tool in (self.moc, self.uic, self.rcc, self.lrelease): if not tool.found(): if required: -- cgit v1.1 From b510d644feb8e0268236f0871a96c1ae86de5cab Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Thu, 2 Jul 2020 22:58:13 -0400 Subject: find_program: Do not use fallback when before parsing project() Mesa is doing `project(... find_program() ...)` so environment.wrap_resolver is not defined yet. --- mesonbuild/environment.py | 1 + mesonbuild/interpreter.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 8083aa8..bf09a88 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -643,6 +643,7 @@ class Environment: self.clang_static_linker = ['llvm-ar'] self.default_cmake = ['cmake'] self.default_pkgconfig = ['pkg-config'] + self.wrap_resolver = None def create_new_coredata(self, options): # WARNING: Don't use any values from coredata in __init__. It gets diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 633615d..11dac38 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3364,7 +3364,7 @@ external dependencies (including libraries) must go to "dependencies".''') fallback = None wrap_mode = self.coredata.get_builtin_option('wrap_mode') - if wrap_mode != WrapMode.nofallback: + if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver: fallback = self.environment.wrap_resolver.find_program_provider(args) if fallback and wrap_mode == WrapMode.forcefallback: return self.find_program_fallback(fallback, args, required, extra_info) -- cgit v1.1 From e1094df77e16835861b160ea5d9ed35756a678f8 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Fri, 3 Jul 2020 14:00:04 -0400 Subject: wrap: Add back filename member in PackageDefinition It is still used by msubprojects.py and cause issues when updating wrapdb. 
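Why the attribute matters can be seen from a tiny, illustrative consumer. The code below is a hypothetical sketch, not the actual msubprojects.py: anything that still reads `wrap.filename` fails with AttributeError once the attribute is dropped, so restoring it keeps such callers working.

```python
# Illustrative only; a reduced stand-in, not Meson's real classes or the
# actual msubprojects.py code.
class PackageDefinition:
    def __init__(self, fname: str):
        self.filename = fname            # the restored attribute: on-disk path of the wrap
        self.name = fname[:-5] if fname.endswith('.wrap') else fname

def update_wrap_in_place(wrap: PackageDefinition, new_contents: str) -> None:
    # An updater needs the on-disk path to rewrite the wrap file in place;
    # without wrap.filename this line raises AttributeError.
    with open(wrap.filename, 'w', encoding='utf-8') as f:
        f.write(new_contents)
```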
--- mesonbuild/wrap/wrap.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index e0c7a04..54daaf3 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -103,6 +103,7 @@ class WrapNotFoundException(WrapException): class PackageDefinition: def __init__(self, fname: str): + self.filename = fname self.type = None self.values = {} # type: T.Dict[str, str] self.provided_deps = {} # type: T.Dict[str, T.Optional[str]] -- cgit v1.1 From 99e96133c83aa61ab39c3d35c3248ca05fdb7ada Mon Sep 17 00:00:00 2001 From: Thibault Saunier Date: Sat, 4 Jul 2020 07:03:34 -0400 Subject: doc: Minor typo fix in legal.md [skip ci] --- docs/markdown/legal.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/legal.md b/docs/markdown/legal.md index bd86ff4..a14b7b9 100644 --- a/docs/markdown/legal.md +++ b/docs/markdown/legal.md @@ -12,7 +12,7 @@ specific permission. It is not licensed under the same terms as the rest of the project. If you are a third party and want to use the Meson logo, you must -first must obtain written permission from Jussi Pakkanen. +first obtain written permission from Jussi Pakkanen. ## Website licensing -- cgit v1.1 From 40319c963447f7632753e694f606dc6d41a9f168 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Wed, 1 Jul 2020 06:52:56 +0530 Subject: Don't make unactionable warnings fatal Some warnings are out of the user's control, such as the RCC QT bug, or the GNU windres bug, or our informational warning about auto-disabling of options when -Db_bitcode is enabled. Such warnings should not be fatal when --fatal-meson-warnings is passed because there's no action that the user can take to fix it. The only purpose it serves is to prevent people who use those features from using --fatal-meson-warnings. --- mesonbuild/coredata.py | 4 ++-- mesonbuild/mlog.py | 4 ++-- mesonbuild/modules/qt4.py | 2 +- mesonbuild/modules/qt5.py | 2 +- mesonbuild/modules/windows.py | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 6ef04bc..bd7378c 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -862,8 +862,8 @@ class CoreData: def emit_base_options_warnings(self, enabled_opts: list): if 'b_bitcode' in enabled_opts: - mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.') - mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.') + mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False) + mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False) class CmdLineFileParser(configparser.ConfigParser): def __init__(self): diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index 7b8aec7..1e5a105 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -250,7 +250,7 @@ def get_error_location_string(fname: str, lineno: str) -> str: return '{}:{}:'.format(fname, lineno) def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator], - once: bool = False, **kwargs: T.Any) -> None: + once: bool = False, fatal: bool = True, **kwargs: T.Any) -> None: from .mesonlib import MesonException, relpath # The typing requirements here are non-obvious. 
Lists are invariant, @@ -283,7 +283,7 @@ def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator], global log_warnings_counter log_warnings_counter += 1 - if log_fatal_warnings: + if log_fatal_warnings and fatal: raise MesonException("Fatal warnings enabled, aborting") def error(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None: diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py index 112e3e4..81a1055 100644 --- a/mesonbuild/modules/qt4.py +++ b/mesonbuild/modules/qt4.py @@ -24,5 +24,5 @@ class Qt4Module(QtBaseModule): def initialize(*args, **kwargs): mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:', - mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460')) + mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) return Qt4Module(*args, **kwargs) diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py index 96a7964..244a217 100644 --- a/mesonbuild/modules/qt5.py +++ b/mesonbuild/modules/qt5.py @@ -24,5 +24,5 @@ class Qt5Module(QtBaseModule): def initialize(*args, **kwargs): mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:', - mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460')) + mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) return Qt5Module(*args, **kwargs) diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index b3e4983..6050705 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -107,7 +107,7 @@ class WindowsModule(ExtensionModule): 'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933' for arg in extra_args: if ' ' in arg: - mlog.warning(m.format(arg)) + mlog.warning(m.format(arg), fatal=False) res_targets = [] -- cgit v1.1 From 1993fb602e447f8b6123f6dad3e0e9ca34bf6970 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Sat, 4 Jul 2020 12:39:24 -0400 Subject: doc: Improve documentation of [provide] section in wrap files Explicitly document the behaviour of dependency('foo-1.0', required: false). --- docs/markdown/Wrap-dependency-system-manual.md | 67 ++++++++++++++++++-------- docs/markdown/snippets/implicit_fallback.md | 2 +- 2 files changed, 49 insertions(+), 20 deletions(-) diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index 0ff304d..54ef811 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -136,25 +136,60 @@ thousands of lines of code. Once you have a working build definition, just zip up the Meson build files (and others you have changed) and put them somewhere where you can download them. -Meson build patches are only supported for wrap-file mode. When using -wrap-git, the repository must contain all Meson build definitions. +Prior to *0.55.0* Meson build patches were only supported for wrap-file mode. +When using wrap-git, the repository must contain all Meson build definitions. +Since *0.55.0* Meson build patches are supported for any wrap modes, including +wrap-git. ## `provide` section *Since *0.55.0* Wrap files can define the dependencies it provides in the `[provide]` section. -When a wrap file provides the dependency `foo` any call do `dependency('foo')` -will automatically fallback to that subproject even if no `fallback` keyword -argument is given. 
It is recommended for subprojects to call -`meson.override_dependency('foo', foo_dep)`, dependency name can then be added into -the special `dependency_names` entry which takes comma separated list of dependency -names. For backward compatibility with subprojects that does not call -`meson.override_dependency()`, the variable name can be provided in the wrap file -with entries in the format `dependency_name = variable_name`, -where `dependency_name` usually match the corresponding pkg-config name and -`variable_name` is the name of a variable defined in the subproject that should -be returned for that dependency. + +``` +[provide] +dependency_names = foo-1.0 +``` + +When a wrap file provides the dependency `foo-1.0`, as above, any call to +`dependency('foo-1.0')` will automatically fallback to that subproject even if +no `fallback` keyword argument is given. A wrap file named `foo.wrap` implicitly +provides the dependency name `foo` even when the `[provide]` section is missing. + +Optional dependencies, like `dependency('foo-1.0', required: get_option('foo_opt'))` +where `foo_opt` is a feature option set to `auto`, will not fallback to the +subproject defined in the wrap file, for 2 reasons: +- It allows for looking the dependency in other ways first, for example using + `cc.find_library('foo')`, and only fallback if that fails: +``` +# this won't use fallback defined in foo.wrap +foo_dep = dependency('foo-1.0', required: false) +if not foo_dep.found() + foo_dep = cc.find_library('foo', has_headers: 'foo.h', required: false) + if not foo_dep.found() + # This will use the fallback + foo_dep = dependency('foo-1.0') + # or + foo_dep = dependency('foo-1.0', required: false, fallback: 'foo') + endif +endif +``` +- Sometimes not-found dependency is preferable to a fallback when the feature is + not explicitly requested by the user. In that case + `dependency('foo-1.0', required: get_option('foo_opt'))` will only fallback + when the user sets `foo_opt` to `enabled` instead of `auto`. + +If it is desired to fallback for an optional dependency, the `fallback` keyword +argument must be passed explicitly. For example +`dependency('foo-1.0', required: get_option('foo_opt'), fallback: 'foo')` will +use the fallback even when `foo_opt` is set to `auto`. + +This mechanism assumes the subproject calls `meson.override_dependency('foo-1.0', foo_dep)` +so Meson knows which dependency object should be used as fallback. Since that +method was introduced in version *0.54.0*, as a transitional aid for projects +that do not yet make use of it the variable name can be provided in the wrap file +with entries in the format `foo-1.0 = foo_dep`. For example when using a recent enough version of glib that uses `meson.override_dependency()` to override `glib-2.0`, `gobject-2.0` and `gio-2.0`, @@ -180,14 +215,8 @@ gobject-2.0=gobject_dep gio-2.0=gio_dep ``` -With such wrap file, `dependency('glib-2.0')` will automatically fallback to use -`glib.wrap` and return `glib_dep` variable from the subproject. - Programs can also be provided by wrap files, with the `program_names` key: ```ini -[wrap-git] -... - [provide] program_names = myprog, otherprog ``` diff --git a/docs/markdown/snippets/implicit_fallback.md b/docs/markdown/snippets/implicit_fallback.md index 3d5a833..a6cbaf0 100644 --- a/docs/markdown/snippets/implicit_fallback.md +++ b/docs/markdown/snippets/implicit_fallback.md @@ -11,7 +11,7 @@ should be used for `foo`. 
## Wrap file `provide` section Wrap files can define the dependencies it provides in the `[provide]` section. -When a wrap file provides the dependency `foo` any call do `dependency('foo')` +When `foo.wrap` provides the dependency `foo-1.0` any call do `dependency('foo-1.0')` will automatically fallback to that subproject even if no `fallback` keyword argument is given. See [Wrap documentation](Wrap-dependency-system-manual.md#provide_section). -- cgit v1.1 From 7ab0090d2afd8e7301d1ee5d60dd63148ffff607 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 5 Jul 2020 23:52:03 +0300 Subject: Update version number for rc 2. --- mesonbuild/coredata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index bd7378c..55ab234 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -38,7 +38,7 @@ if T.TYPE_CHECKING: OptionDictType = T.Dict[str, 'UserOption[T.Any]'] -version = '0.55.0.rc1' +version = '0.55.0.rc2' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode'] default_yielding = False -- cgit v1.1 From e801e0435ea69b64443dd16ef5a13e93ea13546c Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Sun, 28 Jun 2020 12:57:08 -0700 Subject: symbolextractor: use try/finally in solaris_syms when wrapping gnu_syms As suggested by dcbaker in https://github.com/mesonbuild/meson/pull/7370#pullrequestreview-436872661 Signed-off-by: Alan Coopersmith --- mesonbuild/scripts/symbolextractor.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index 5240275..f4084be 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -124,9 +124,11 @@ def gnu_syms(libfilename: str, outfilename: str): def solaris_syms(libfilename: str, outfilename: str): # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump origpath = os.environ['PATH'] - os.environ['PATH'] = '/usr/gnu/bin:' + origpath - gnu_syms(libfilename, outfilename) - os.environ['PATH'] = origpath + try: + os.environ['PATH'] = '/usr/gnu/bin:' + origpath + gnu_syms(libfilename, outfilename) + finally: + os.environ['PATH'] = origpath def osx_syms(libfilename: str, outfilename: str): # Get the name of the library -- cgit v1.1 From 5b82bb8689b3cb598289f734de21e07a64fa6898 Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Tue, 7 Jul 2020 19:02:08 -0700 Subject: SolarisDynamicLinker: Check if linker supports -z type=pie As suggested by dcbaker in https://github.com/mesonbuild/meson/pull/7370#discussion_r445145889 Signed-off-by: Alan Coopersmith --- mesonbuild/linkers.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 4264e7d..c6c677c 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -1100,7 +1100,13 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): def get_pie_args(self) -> T.List[str]: # Available in Solaris 11.2 and later - return ['-z', 'type=pie'] + pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp')) + for line in (stdo + stde).split('\n'): + if '-z type' in line: + if 'pie' in line: + return ['-z', 'type=pie'] + break + return [] def get_asneeded_args(self) -> T.List[str]: return self._apply_prefix(['-z', 'ignore']) -- cgit v1.1 From 20d405ec18845a18694e4985701dd89523d6dc73 Mon Sep 17 00:00:00 2001 From: Alan Coopersmith Date: Tue, 7 Jul 2020 19:13:36 -0700 Subject: 
SolarisDynamicLinker: report ld's version instead of gcc's Signed-off-by: Alan Coopersmith --- mesonbuild/environment.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index bf09a88..00922c1 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -930,9 +930,15 @@ class Environment: cls = GnuBFDDynamicLinker linker = cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) elif 'Solaris' in e or 'Solaris' in o: + for line in (o+e).split('\n'): + if 'ld: Software Generation Utilities' in line: + v = line.split(':')[2].lstrip() + break + else: + v = 'unknown version' linker = SolarisDynamicLinker( compiler, for_machine, comp_class.LINKER_PREFIX, override, - version=search_version(e)) + version=v) else: raise EnvironmentException('Unable to determine dynamic linker') return linker -- cgit v1.1 From f1edb83bb6a7941a3215bf0c3025e454510946e4 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Sun, 5 Jul 2020 14:56:58 -0400 Subject: doc: Small tweaks to fallback documentation [skip ci] --- docs/markdown/Reference-manual.md | 2 ++ docs/markdown/Wrap-dependency-system-manual.md | 6 ++++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 9bca74b..966d408 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -461,6 +461,8 @@ arguments: and subproject registered the dependency using `meson.override_dependency('dependency_name', subproj_dep)`, or when the wrap file has `dependency_name` in its `[provide]` section. + See [Wrap documentation](Wrap-dependency-system-manual.md#provide-section) + for more details. - `language` *(since 0.42.0)*: defines what language-specific dependency to find if it's available for multiple languages. - `method`: defines the way the dependency is detected, the default is diff --git a/docs/markdown/Wrap-dependency-system-manual.md b/docs/markdown/Wrap-dependency-system-manual.md index 54ef811..8e6282e 100644 --- a/docs/markdown/Wrap-dependency-system-manual.md +++ b/docs/markdown/Wrap-dependency-system-manual.md @@ -147,7 +147,7 @@ wrap-git. Wrap files can define the dependencies it provides in the `[provide]` section. -``` +```ini [provide] dependency_names = foo-1.0 ``` @@ -162,7 +162,8 @@ where `foo_opt` is a feature option set to `auto`, will not fallback to the subproject defined in the wrap file, for 2 reasons: - It allows for looking the dependency in other ways first, for example using `cc.find_library('foo')`, and only fallback if that fails: -``` + +```meson # this won't use fallback defined in foo.wrap foo_dep = dependency('foo-1.0', required: false) if not foo_dep.found() @@ -175,6 +176,7 @@ if not foo_dep.found() endif endif ``` + - Sometimes not-found dependency is preferable to a fallback when the feature is not explicitly requested by the user. In that case `dependency('foo-1.0', required: get_option('foo_opt'))` will only fallback -- cgit v1.1 From 4c9d57200acc8f46456653393f444dfd503f33e1 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sat, 11 Jul 2020 16:00:44 +0300 Subject: Decrease version requirement to work on Xenial. 
--- test cases/java/3 args/meson.build | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test cases/java/3 args/meson.build b/test cases/java/3 args/meson.build index db9a35c..451e42d 100644 --- a/test cases/java/3 args/meson.build +++ b/test cases/java/3 args/meson.build @@ -1,9 +1,9 @@ project('simplejava', 'java') -add_project_arguments('-target', '1.8', language : 'java') +add_project_arguments('-target', '1.7', language : 'java') javaprog = jar('myprog', 'com/mesonbuild/Simple.java', main_class : 'com.mesonbuild.Simple', - java_args : ['-source', '1.8']) + java_args : ['-source', '1.7']) test('mytest', javaprog) -- cgit v1.1 From fa929b47148cfb19877a25ab128686ca4ae0208f Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sat, 11 Jul 2020 21:00:45 +0300 Subject: Check that hg is actually working. --- run_unittests.py | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/run_unittests.py b/run_unittests.py index 5fd790c..820b705 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -2834,9 +2834,25 @@ class AllPlatformTests(BasePlatformTests): # fails sometimes. pass - def test_dist_hg(self): + def has_working_hg(self): if not shutil.which('hg'): - raise unittest.SkipTest('Mercurial not found') + return False + try: + # This check should not be necessary, but + # CI under macOS passes the above test even + # though Mercurial is not installed. + if subprocess.call(['hg', '--version'], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) != 0: + return False + return True + except FileNotFoundError: + return False + + + def test_dist_hg(self): + if not self.has_working_hg(): + raise unittest.SkipTest('Mercurial not found or broken.') if self.backend is not Backend.ninja: raise unittest.SkipTest('Dist is only supported with Ninja') -- cgit v1.1 From d769b425f0b56779bad571fbf7145c8d2f7fecc9 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Fri, 26 Jun 2020 10:24:09 -0400 Subject: refactor: name .mypy.ini to reduce visual clutter MyPy looks to .mypy.ini, reducing Meson top-level visual clutter https://mypy.readthedocs.io/en/stable/config_file.html --- .mypy.ini | 5 +++++ mypy.ini | 5 ----- 2 files changed, 5 insertions(+), 5 deletions(-) create mode 100644 .mypy.ini delete mode 100644 mypy.ini diff --git a/.mypy.ini b/.mypy.ini new file mode 100644 index 0000000..b8dad03 --- /dev/null +++ b/.mypy.ini @@ -0,0 +1,5 @@ +[mypy] +strict_optional = False +show_error_context = False +show_column_numbers = True +ignore_missing_imports = True diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index b8dad03..0000000 --- a/mypy.ini +++ /dev/null @@ -1,5 +0,0 @@ -[mypy] -strict_optional = False -show_error_context = False -show_column_numbers = True -ignore_missing_imports = True -- cgit v1.1 From ab72f52743518915c7602f6d3631b9cb8080e0e2 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 12 Jul 2020 09:01:55 -0400 Subject: setuptools is not stdlib, but distutils is --- test cases/python/5 modules kwarg/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test cases/python/5 modules kwarg/meson.build b/test cases/python/5 modules kwarg/meson.build index 3c9d54f..9751ada 100644 --- a/test cases/python/5 modules kwarg/meson.build +++ b/test cases/python/5 modules kwarg/meson.build @@ -1,7 +1,7 @@ project('python kwarg') py = import('python') -prog_python = py.find_installation('python3', modules : ['setuptools']) +prog_python = py.find_installation('python3', modules : ['distutils']) 
assert(prog_python.found() == true, 'python not found when should be') prog_python = py.find_installation('python3', modules : ['thisbetternotexistmod'], required : false) assert(prog_python.found() == false, 'python not found but reported as found') -- cgit v1.1 From 173d1624cb82f213d1e4fd738874c4de49f9bf47 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 12 Jul 2020 09:04:59 -0400 Subject: c lang not needed for this test --- test cases/python/1 basic/meson.build | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test cases/python/1 basic/meson.build b/test cases/python/1 basic/meson.build index 9c3af10..bd9a65c 100644 --- a/test cases/python/1 basic/meson.build +++ b/test cases/python/1 basic/meson.build @@ -1,4 +1,4 @@ -project('python sample', 'c') +project('python sample') py_mod = import('python') py = py_mod.find_installation('python3') @@ -12,6 +12,7 @@ py_purelib = py.get_path('purelib') if not py_purelib.endswith('site-packages') error('Python3 purelib path seems invalid? ' + py_purelib) endif +message('Python purelib path:', py_purelib) # could be 'lib64' or 'Lib' on some systems py_platlib = py.get_path('platlib') -- cgit v1.1 From 4f1a240bc2c2029f281330c13273bfc876219c45 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 12 Jul 2020 09:08:04 -0400 Subject: raise SystemExit() generally preferred to sys.exit(1) --- test cases/python/1 basic/prog.py | 3 +-- test cases/python/1 basic/subdir/subprog.py | 3 +-- test cases/python/2 extmodule/blaster.py | 7 ++----- test cases/python/3 cython/cytest.py | 10 +++------- test cases/python/4 custom target depends extmodule/blaster.py | 6 ++---- 5 files changed, 9 insertions(+), 20 deletions(-) diff --git a/test cases/python/1 basic/prog.py b/test cases/python/1 basic/prog.py index 9d95aea..720fdb1 100755 --- a/test cases/python/1 basic/prog.py +++ b/test cases/python/1 basic/prog.py @@ -1,9 +1,8 @@ #!/usr/bin/env python3 from gluon import gluonator -import sys print('Running mainprog from root dir.') if gluonator.gluoninate() != 42: - sys.exit(1) + raise ValueError("!= 42") diff --git a/test cases/python/1 basic/subdir/subprog.py b/test cases/python/1 basic/subdir/subprog.py index 08652f0..54178e5 100755 --- a/test cases/python/1 basic/subdir/subprog.py +++ b/test cases/python/1 basic/subdir/subprog.py @@ -4,9 +4,8 @@ # point to source root. 
from gluon import gluonator -import sys print('Running mainprog from subdir.') if gluonator.gluoninate() != 42: - sys.exit(1) + raise ValueError("!= 42") diff --git a/test cases/python/2 extmodule/blaster.py b/test cases/python/2 extmodule/blaster.py index 7e1eae6..1f01876 100755 --- a/test cases/python/2 extmodule/blaster.py +++ b/test cases/python/2 extmodule/blaster.py @@ -1,14 +1,11 @@ #!/usr/bin/env python3 import tachyon -import sys result = tachyon.phaserize('shoot') if not isinstance(result, int): - print('Returned result not an integer.') - sys.exit(1) + raise SystemExit('Returned result not an integer.') if result != 1: - print('Returned result {} is not 1.'.format(result)) - sys.exit(1) + raise SystemExit('Returned result {} is not 1.'.format(result)) diff --git a/test cases/python/3 cython/cytest.py b/test cases/python/3 cython/cytest.py index 43443dc..c08ffee 100755 --- a/test cases/python/3 cython/cytest.py +++ b/test cases/python/3 cython/cytest.py @@ -1,23 +1,19 @@ #!/usr/bin/env python3 from storer import Storer -import sys s = Storer() if s.get_value() != 0: - print('Initial value incorrect.') - sys.exit(1) + raise SystemExit('Initial value incorrect.') s.set_value(42) if s.get_value() != 42: - print('Setting value failed.') - sys.exit(1) + raise SystemExit('Setting value failed.') try: s.set_value('not a number') - print('Using wrong argument type did not fail.') - sys.exit(1) + raise SystemExit('Using wrong argument type did not fail.') except TypeError: pass diff --git a/test cases/python/4 custom target depends extmodule/blaster.py b/test cases/python/4 custom target depends extmodule/blaster.py index 6106f6b..09039cb 100644 --- a/test cases/python/4 custom target depends extmodule/blaster.py +++ b/test cases/python/4 custom target depends extmodule/blaster.py @@ -24,9 +24,7 @@ if options.output: f.write('success') if not isinstance(result, int): - print('Returned result not an integer.') - sys.exit(1) + raise SystemExit('Returned result not an integer.') if result != 1: - print('Returned result {} is not 1.'.format(result)) - sys.exit(1) + raise SystemExit('Returned result {} is not 1.'.format(result)) -- cgit v1.1 From 895de87b9069d1fca51c3550fe7d113a43f9e586 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 12 Jul 2020 09:12:16 -0400 Subject: some python test cases don't care about backend, so run them in any case --- run_project_tests.py | 2 +- test cases/python/2 extmodule/meson.build | 4 +++ test cases/python/3 cython/meson.build | 35 +++++++++++++--------- .../4 custom target depends extmodule/meson.build | 4 +++ 4 files changed, 30 insertions(+), 15 deletions(-) diff --git a/run_project_tests.py b/run_project_tests.py index c368253..5445e01 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -939,7 +939,7 @@ def detect_tests_to_run(only: T.List[str], use_tmp: bool) -> T.List[T.Tuple[str, # CUDA tests on Windows: use Ninja backend: python run_project_tests.py --only cuda --backend ninja TestCategory('cuda', 'cuda', backend not in (Backend.ninja, Backend.xcode) or not shutil.which('nvcc')), TestCategory('python3', 'python3', backend is not Backend.ninja), - TestCategory('python', 'python', backend is not Backend.ninja), + TestCategory('python', 'python'), TestCategory('fpga', 'fpga', shutil.which('yosys') is None), TestCategory('frameworks', 'frameworks'), TestCategory('nasm', 'nasm'), diff --git a/test cases/python/2 extmodule/meson.build b/test cases/python/2 extmodule/meson.build index b4eb960..54c50b4 100644 --- a/test cases/python/2 
extmodule/meson.build +++ b/test cases/python/2 extmodule/meson.build @@ -3,6 +3,10 @@ project('Python extension module', 'c', # Because Windows Python ships only with optimized libs, # we must build this project the same way. +if meson.backend() != 'ninja' + error('MESON_SKIP_TEST: Ninja backend required') +endif + py_mod = import('python') py = py_mod.find_installation() py_dep = py.dependency() diff --git a/test cases/python/3 cython/meson.build b/test cases/python/3 cython/meson.build index 194920b..18c05d4 100644 --- a/test cases/python/3 cython/meson.build +++ b/test cases/python/3 cython/meson.build @@ -1,20 +1,27 @@ project('cython', 'c', default_options : ['warning_level=3']) -cython = find_program('cython3', required : false) -py3_dep = dependency('python3', required : false) +if meson.backend() != 'ninja' + error('MESON_SKIP_TEST: Ninja backend required') +endif -if cython.found() and py3_dep.found() - py_mod = import('python') - py3 = py_mod.find_installation() - py3_dep = py3.dependency() - subdir('libdir') +cython = find_program('cython', required : false) +if not cython.found() + error('MESON_SKIP_TEST: Cython3 not found.') +endif - test('cython tester', - py3, - args : files('cytest.py'), - env : ['PYTHONPATH=' + pydir] - ) -else - error('MESON_SKIP_TEST: Cython3 or Python3 libraries not found, skipping test.') +py3_dep = dependency('python3', required : false) +if not py3_dep.found() + error('MESON_SKIP_TEST: Python library not found.') endif + +py_mod = import('python') +py3 = py_mod.find_installation() +py3_dep = py3.dependency() +subdir('libdir') + +test('cython tester', + py3, + args : files('cytest.py'), + env : ['PYTHONPATH=' + pydir] +) diff --git a/test cases/python/4 custom target depends extmodule/meson.build b/test cases/python/4 custom target depends extmodule/meson.build index 3835377..ccccdaf 100644 --- a/test cases/python/4 custom target depends extmodule/meson.build +++ b/test cases/python/4 custom target depends extmodule/meson.build @@ -3,6 +3,10 @@ project('Python extension module', 'c', # Because Windows Python ships only with optimized libs, # we must build this project the same way. 
+if meson.backend() != 'ninja' + error('MESON_SKIP_TEST: Ninja backend required') +endif + py_mod = import('python') py3 = py_mod.find_installation() py3_dep = py3.dependency(required : false) -- cgit v1.1 From 7851495064521b179b2fd02fdc73e3d55da4ae86 Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 12 Jul 2020 09:13:00 -0400 Subject: remove redundant syntax --- run_project_tests.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/run_project_tests.py b/run_project_tests.py index 5445e01..793c844 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -361,11 +361,10 @@ def _run_ci_include(args: T.List[str]) -> str: if not args: return 'At least one parameter required' try: - file_path = Path(args[0]) - data = file_path.open(errors='ignore', encoding='utf-8').read() + data = Path(args[0]).read_text(errors='ignore', encoding='utf-8') return 'Included file {}:\n{}\n'.format(args[0], data) except Exception: - return 'Failed to open {} ({})'.format(args[0]) + return 'Failed to open {}'.format(args[0]) ci_commands = { 'ci_include': _run_ci_include -- cgit v1.1 From c9d8d4628e4ae4409d09f255cc18517cdd5f1fbe Mon Sep 17 00:00:00 2001 From: Michael Hirsch Date: Sun, 12 Jul 2020 09:53:17 -0400 Subject: simplify/correct test logic before this, tests were being skipped on Ubuntu 20.04 with Anaconda Python Now, all 5 tests success --- test cases/python/2 extmodule/meson.build | 27 +++++++++-------- test cases/python/3 cython/meson.build | 7 ++--- .../4 custom target depends extmodule/meson.build | 35 +++++++++++----------- 3 files changed, 36 insertions(+), 33 deletions(-) diff --git a/test cases/python/2 extmodule/meson.build b/test cases/python/2 extmodule/meson.build index 54c50b4..18d70c8 100644 --- a/test cases/python/2 extmodule/meson.build +++ b/test cases/python/2 extmodule/meson.build @@ -7,26 +7,29 @@ if meson.backend() != 'ninja' error('MESON_SKIP_TEST: Ninja backend required') endif + py_mod = import('python') py = py_mod.find_installation() -py_dep = py.dependency() +py_dep = py.dependency(required: false) -if py_dep.found() - subdir('ext') +if not py_dep.found() + error('MESON_SKIP_TEST: Python libraries not found.') +endif - test('extmod', - py, - args : files('blaster.py'), - env : ['PYTHONPATH=' + pypathdir]) +subdir('ext') - # Check we can apply a version constraint - dependency('python3', version: '>=@0@'.format(py_dep.version())) +test('extmod', + py, + args : files('blaster.py'), + env : ['PYTHONPATH=' + pypathdir]) -else - error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.') -endif py3_pkg_dep = dependency('python3', method: 'pkg-config', required : false) if py3_pkg_dep.found() python_lib_dir = py3_pkg_dep.get_pkgconfig_variable('libdir') + + # Check we can apply a version constraint + dependency('python3', version: '>=@0@'.format(py_dep.version())) +else + message('Skipped python3 pkg-config test') endif diff --git a/test cases/python/3 cython/meson.build b/test cases/python/3 cython/meson.build index 18c05d4..5fc07a8 100644 --- a/test cases/python/3 cython/meson.build +++ b/test cases/python/3 cython/meson.build @@ -10,14 +10,13 @@ if not cython.found() error('MESON_SKIP_TEST: Cython3 not found.') endif -py3_dep = dependency('python3', required : false) +py_mod = import('python') +py3 = py_mod.find_installation() +py3_dep = py3.dependency(required: false) if not py3_dep.found() error('MESON_SKIP_TEST: Python library not found.') endif -py_mod = import('python') -py3 = py_mod.find_installation() -py3_dep = py3.dependency() 
subdir('libdir') test('cython tester', diff --git a/test cases/python/4 custom target depends extmodule/meson.build b/test cases/python/4 custom target depends extmodule/meson.build index ccccdaf..d8a62ed 100644 --- a/test cases/python/4 custom target depends extmodule/meson.build +++ b/test cases/python/4 custom target depends extmodule/meson.build @@ -12,6 +12,10 @@ py3 = py_mod.find_installation() py3_dep = py3.dependency(required : false) cc = meson.get_compiler('c') +if not py3_dep.found() + error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.') +endif + # Copy to the builddir so that blaster.py can find the built tachyon module # FIXME: We should automatically detect this case and append the correct paths # to PYTHONLIBDIR @@ -24,21 +28,18 @@ import os, sys with open(sys.argv[1], 'rb') as f: assert(f.read() == b'success') ''' -if py3_dep.found() - message('Detected Python version: ' + py3_dep.version()) - if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1') - error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.') - endif - subdir('ext') - - out_txt = custom_target('tachyon flux', - input : blaster_py, - output : 'out.txt', - command : [py3, '@INPUT@', '-o', '@OUTPUT@'], - depends : pylib, - build_by_default: true) - - test('flux', py3, args : ['-c', check_exists, out_txt]) -else - error('MESON_SKIP_TEST: Python3 libraries not found, skipping test.') + +message('Detected Python version: ' + py3_dep.version()) +if py3_dep.version().version_compare('>=3.8') and cc.get_id() == 'msvc' and cc.version().version_compare('<=19.00.24215.1') + error('MESON_SKIP_TEST: Python modules do not work with Python 3.8 and VS2015 or earlier.') endif +subdir('ext') + +out_txt = custom_target('tachyon flux', + input : blaster_py, + output : 'out.txt', + command : [py3, '@INPUT@', '-o', '@OUTPUT@'], + depends : pylib, + build_by_default: true) + +test('flux', py3, args : ['-c', check_exists, out_txt]) -- cgit v1.1 From d0c68dc11507a47b9b85de508e023d9590d60565 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 12 Jul 2020 17:25:51 +0300 Subject: Updated everything for release 0.55.0. 
--- docs/markdown/Release-notes-for-0.55.0.md | 307 +++++++++++++++++++++ docs/markdown/snippets/add_compile_backend_arg.md | 26 -- .../snippets/add_foo_script_type_additions.md | 24 -- docs/markdown/snippets/add_meson_compile_target.md | 19 -- docs/markdown/snippets/can_run_host_binaries.md | 5 - docs/markdown/snippets/clang_coverage.md | 4 - docs/markdown/snippets/cmake.md | 17 -- .../markdown/snippets/config_tool_no_cross_path.md | 7 - docs/markdown/snippets/d-lang_n_debug.md | 4 - docs/markdown/snippets/dist_not_tests.md | 5 - .../snippets/exe_wrapper_for_cross_built_tests.md | 9 - docs/markdown/snippets/find_program.md | 20 -- docs/markdown/snippets/force_fallback_for.md | 10 - docs/markdown/snippets/gir_fatal_warnings.md | 5 - docs/markdown/snippets/gtest_protocol.md | 6 - docs/markdown/snippets/implicit_fallback.md | 21 -- docs/markdown/snippets/introspect.md | 4 - docs/markdown/snippets/junit_result_generation.md | 4 - docs/markdown/snippets/keyval_kobject.md | 6 - .../markdown/snippets/link_language_all_targets.md | 8 - docs/markdown/snippets/machine_file_constants.md | 20 -- docs/markdown/snippets/options_string_concat.md | 14 - docs/markdown/snippets/response-files.md | 7 - docs/markdown/snippets/rpath_behavior.md | 7 - docs/markdown/snippets/wrap_fallback.md | 4 - docs/markdown/snippets/wrap_patch.md | 19 -- docs/sitemap.txt | 1 + mesonbuild/coredata.py | 2 +- 28 files changed, 309 insertions(+), 276 deletions(-) create mode 100644 docs/markdown/Release-notes-for-0.55.0.md delete mode 100644 docs/markdown/snippets/add_compile_backend_arg.md delete mode 100644 docs/markdown/snippets/add_foo_script_type_additions.md delete mode 100644 docs/markdown/snippets/add_meson_compile_target.md delete mode 100644 docs/markdown/snippets/can_run_host_binaries.md delete mode 100644 docs/markdown/snippets/clang_coverage.md delete mode 100644 docs/markdown/snippets/cmake.md delete mode 100644 docs/markdown/snippets/config_tool_no_cross_path.md delete mode 100644 docs/markdown/snippets/d-lang_n_debug.md delete mode 100644 docs/markdown/snippets/dist_not_tests.md delete mode 100644 docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md delete mode 100644 docs/markdown/snippets/find_program.md delete mode 100644 docs/markdown/snippets/force_fallback_for.md delete mode 100644 docs/markdown/snippets/gir_fatal_warnings.md delete mode 100644 docs/markdown/snippets/gtest_protocol.md delete mode 100644 docs/markdown/snippets/implicit_fallback.md delete mode 100644 docs/markdown/snippets/introspect.md delete mode 100644 docs/markdown/snippets/junit_result_generation.md delete mode 100644 docs/markdown/snippets/keyval_kobject.md delete mode 100644 docs/markdown/snippets/link_language_all_targets.md delete mode 100644 docs/markdown/snippets/machine_file_constants.md delete mode 100644 docs/markdown/snippets/options_string_concat.md delete mode 100644 docs/markdown/snippets/response-files.md delete mode 100644 docs/markdown/snippets/rpath_behavior.md delete mode 100644 docs/markdown/snippets/wrap_fallback.md delete mode 100644 docs/markdown/snippets/wrap_patch.md diff --git a/docs/markdown/Release-notes-for-0.55.0.md b/docs/markdown/Release-notes-for-0.55.0.md new file mode 100644 index 0000000..534c452 --- /dev/null +++ b/docs/markdown/Release-notes-for-0.55.0.md @@ -0,0 +1,307 @@ +--- +title: Release 0.55.0 +short-description: Release notes for 0.55.0 +... 
+
+# New features
+
+## rpath removal now more careful
+
+On Linux-like systems, meson adds rpath entries to allow running apps
+in the build tree, and then removes those build-time-only
+rpath entries when installing. Rpath entries may also come
+in via LDFLAGS and via .pc files. Meson used to remove those
+latter rpath entries by accident, but is now more careful.
+
+## Added ability to specify targets in `meson compile`
+
+It's now possible to specify targets in `meson compile`, which will result in building only the requested targets.
+
+Usage: `meson compile [TARGET [TARGET...]]`
+`TARGET` has the following syntax: `[PATH/]NAME[:TYPE]`.
+`NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`).
+`PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`.
+`TYPE`: type of the target (e.g. `shared_library`, `executable`, etc.)
+
+`PATH` and/or `TYPE` can be omitted if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`.
+
+For example, targets from the following code:
+```meson
+shared_library('foo', ...)
+static_library('foo', ...)
+executable('bar', ...)
+```
+can be invoked with `meson compile foo:shared_library foo:static_library bar`.
+
+## Test protocol for gtest
+
+Due to the popularity of Gtest (google test) among C and C++ developers, meson
+now supports a special protocol for gtest. With this protocol meson injects
+arguments to gtests to output JUnit, reads that JUnit, and adds the output to
+the JUnit it generates.
+
+## meson.add_*_script methods accept new types
+
+All three (`add_install_script`, `add_dist_script`, and
+`add_postconf_script`) now accept ExternalPrograms (as returned by
+`find_program`), Files, and the output of `configure_file`. The dist and
+postconf methods cannot accept other types because of when they are run.
+While dist could, in theory, take other dependencies, it would require more
+extensive changes, particularly to the backend.
+
+```meson
+meson.add_install_script(find_program('foo'), files('bar'))
+meson.add_dist_script(find_program('foo'), files('bar'))
+meson.add_postconf_script(find_program('foo'), files('bar'))
+```
+
+The install script variant is also able to accept custom_targets,
+custom_target indexes, and build targets (executables, libraries), and can
+use built executables as the script to run.
+
+```meson
+installer = executable('installer', ...)
+meson.add_install_script(installer, ...)
+meson.add_install_script('foo.py', installer)
+```
+
+## Machine file constants
+
+Native and cross files now support string and list concatenation using the `+`
+operator, and joining paths using the `/` operator.
+Entries defined in the `[constants]` section can be used in any other section.
+An entry defined in any other section can be used only within that same section and only
+after it has been defined.
+
+```ini
+[constants]
+toolchain = '/toolchain'
+common_flags = ['--sysroot=' + toolchain + '/sysroot']
+
+[properties]
+c_args = common_flags + ['-DSOMETHING']
+cpp_args = c_args + ['-DSOMETHING_ELSE']
+
+[binaries]
+c = toolchain + '/gcc'
+```
+
+## Configure CMake subprojects with meson.subproject_options
+
+Meson now supports passing configuration options to CMake and overriding
+certain build details extracted from the CMake subproject.
+
+The new CMake configuration options object is very similar to the
+[configuration data object](Reference-manual.md#configuration-data-object)
+returned by [`configuration_data`](Reference-manual.md#configuration_data). It
+is generated by the `subproject_options` function.
+
+All configuration options have to be set *before* the subproject is configured
+and must be passed to the `subproject` method via the `options` key. Altering
+the configuration object won't have any effect on previous `cmake.subproject`
+calls.
+
+**Note:** The `cmake_options` kwarg for the `subproject` function is now
+deprecated since it is replaced by the new `options` system.
+
+## find_program: Fixes when the program has been overridden by executable
+
+When a program has been overridden by an executable, the returned object of
+find_program() had some issues:
+
+```meson
+# In a subproject:
+exe = executable('foo', ...)
+meson.override_find_program('foo', exe)
+
+# In main project:
+# The version check was crashing meson.
+prog = find_program('foo', version : '>=1.0')
+
+# This was crashing meson.
+message(prog.path())
+
+# New method to be consistent with built objects.
+message(prog.full_path())
+```
+
+## Response files enabled on Linux, reined in on Windows
+
+Meson used to always use response files on Windows,
+but never on Linux.
+
+It now strikes a happier balance, using them on both platforms,
+but only when needed to avoid command line length limits.
+
+## `unstable-kconfig` module renamed to `unstable-keyval`
+
+The `unstable-kconfig` module is now renamed to `unstable-keyval`.
+We expect this module to become stable once it has some usage experience,
+specifically in the next or the following release.
+
+
+## Fatal warnings in `gnome.generate_gir()`
+
+`gnome.generate_gir()` now has a `fatal_warnings` keyword argument to abort when
+a warning is produced. This is useful, for example, in a CI environment where it's
+important to catch potential issues.
+
+## b_ndebug support for D language compilers
+
+D Language compilers will now set -release/--release/-frelease (depending on
+the compiler) when the b_ndebug flag is set.
+
+## Meson test now produces JUnit xml from results
+
+Meson will now generate a JUnit-compatible XML file from test results. It
+will be in the meson-logs directory and is called testlog.junit.xml.
+
+## Config tool based dependencies no longer search PATH for cross compiling
+
+Before 0.55.0, config tool based dependencies (llvm-config, cups-config, etc.)
+would search the system $PATH if they weren't defined in the cross file. This has
+been a source of bugs and has been deprecated. It is now removed; config tool
+binaries must be specified in the cross file or the dependency will not
+be found.
+
+## Rename has_exe_wrapper -> can_run_host_binaries
+
+The old name was confusing as it didn't really match the behavior of the
+function. The old name remains as an alias (the behavior hasn't changed), but
+is now deprecated.
+
+## String concatenation in meson_options.txt
+
+It is now possible to use string concatenation (with the `+` operator) in the
+meson_options.txt file. This allows splitting long option descriptions.
+
+```meson
+option(
+  'testoption',
+  type : 'string',
+  value : 'optval',
+  description : 'An option with a very long description' +
+                'that does something in a specific context'
+)
+```
+
+## Wrap fallback URL
+
+Wrap files can now define `source_fallback_url` and `patch_fallback_url` to be
+used in case the main server is temporarily down.
+
+## Clang coverage support
+
+llvm-cov is now used to generate coverage information when clang is used as
+the compiler.
+
+## Local wrap source and patch files
+
+It is now possible to use the `patch_filename` and `source_filename` values in a
+`.wrap` file without `*_url` to specify a local source / patch file. All local
+files must be located in the `subprojects/packagefiles` directory. The `*_hash`
+entries are optional with this setup.
+
+## Local wrap patch directory
+
+Wrap files can now specify `patch_directory` instead of `patch_filename` in the
+case where the overlay files are local. Every file in that directory, and in its subdirectories,
+will be copied to the subproject directory. This can be used, for example, to add
+`meson.build` files to a project not using the Meson build system upstream.
+The patch directory must be placed in the `subprojects/packagefiles` directory.
+
+## Patch on all wrap types
+
+`patch_*` keys are not limited to `wrap-file` any more; they can be specified for
+all wrap types.
+
+## link_language argument added to all targets
+
+Previously the `link_language` argument was only supposed to be allowed in
+executables, because the linker used needs to be the linker for the language
+that implements the main function. Unfortunately it didn't work in that case,
+and, even worse, if it had been implemented properly it would have worked for
+*all* targets. In 0.55.0 this restriction has been removed, and the bug fixed.
+It is now valid for `executable` and all derivatives of `library`.
+
+## meson dist --no-tests
+
+`meson dist` has a new option `--no-tests` to skip the build and tests of generated
+packages. It can be used to avoid wasting time, for example in CI that
+already does its own testing.
+
+## Force fallback for
+
+A newly-added `--force-fallback-for` command line option can now be used to
+force fallback for specific subprojects.
+
+Example:
+
+```
+meson build --force-fallback-for=foo,bar
+```
+
+## Implicit dependency fallback
+
+`dependency('foo')` now automatically falls back if the dependency is not found on
+the system but a subproject wrap file or directory exists with the same name.
+
+That means that simply adding `subprojects/foo.wrap` is enough to add a fallback
+to any `dependency('foo')` call. It does, however, require that the subproject call
+`meson.override_dependency('foo', foo_dep)` to specify which dependency object
+should be used for `foo`.
+
+## Wrap file `provide` section
+
+Wrap files can define the dependencies they provide in the `[provide]` section.
+When `foo.wrap` provides the dependency `foo-1.0`, any call to `dependency('foo-1.0')`
+will automatically fall back to that subproject even if no `fallback` keyword
+argument is given. See [Wrap documentation](Wrap-dependency-system-manual.md#provide_section).
+
+## `find_program()` fallback
+
+When a program cannot be found on the system but a wrap file has its name in the
+`[provide]` section, that subproject will be used as a fallback.
+
+## Test scripts are given the exe wrapper if needed
+
+Meson will now set the `MESON_EXE_WRAPPER` as the properly wrapped and joined
+representation. For Unix-like OSes this means Python's shlex.join; on Windows
+an implementation that attempts to properly quote Windows arguments is used.
+This allows wrapper scripts to run test binaries, instead of just skipping them.
+
+For example, if the wrapper is `['emulator', '--script']`, it will be passed
+as `MESON_EXE_WRAPPER="emulator --script"`.
+ +## Added ability to specify backend arguments in `meson compile` + +It's now possible to specify backend specific arguments in `meson compile`. + +Usage: `meson compile [--vs-args=args] [--ninja-args=args]` + +``` + --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied only on `ninja` backend). + --vs-args VS_ARGS Arguments to pass to `msbuild` (applied only on `vs` backend). +``` + +These arguments use the following syntax: + +If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command: + +``` +$ meson compile --ninja-args=-n,-d,explain +``` + +would add `-n`, `-d` and `explain` arguments to ninja invocation. + +If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this: + +``` +$ meson compile "--ninja-args=['a,b', 'c d']" +``` + +## Introspection API changes + +dumping the AST (--ast): **new in 0.55.0** +- prints the AST of a meson.build as JSON + diff --git a/docs/markdown/snippets/add_compile_backend_arg.md b/docs/markdown/snippets/add_compile_backend_arg.md deleted file mode 100644 index 76e2abb..0000000 --- a/docs/markdown/snippets/add_compile_backend_arg.md +++ /dev/null @@ -1,26 +0,0 @@ -## Added ability to specify backend arguments in `meson compile` - -It's now possible to specify backend specific arguments in `meson compile`. - -Usage: `meson compile [--vs-args=args] [--ninja-args=args]` - -``` - --ninja-args NINJA_ARGS Arguments to pass to `ninja` (applied only on `ninja` backend). - --vs-args VS_ARGS Arguments to pass to `msbuild` (applied only on `vs` backend). -``` - -These arguments use the following syntax: - -If you only pass a single string, then it is considered to have all values separated by commas. Thus invoking the following command: - -``` -$ meson compile --ninja-args=-n,-d,explain -``` - -would add `-n`, `-d` and `explain` arguments to ninja invocation. - -If you need to have commas or spaces in your string values, then you need to pass the value with proper shell quoting like this: - -``` -$ meson compile "--ninja-args=['a,b', 'c d']" -``` diff --git a/docs/markdown/snippets/add_foo_script_type_additions.md b/docs/markdown/snippets/add_foo_script_type_additions.md deleted file mode 100644 index 88a88b2..0000000 --- a/docs/markdown/snippets/add_foo_script_type_additions.md +++ /dev/null @@ -1,24 +0,0 @@ -## meson.add_*_script methods accept new types - -All three (`add_install_script`, `add_dist_script`, and -`add_postconf_script`) now accept ExternalPrograms (as returned by -`find_program`), Files, and the output of `configure_file`. The dist and -postconf methods cannot accept other types because of when they are run. -While dist could, in theory, take other dependencies, it would require more -extensive changes, particularly to the backend. - -```meson -meson.add_install_script(find_program('foo'), files('bar')) -meson.add_dist_script(find_program('foo'), files('bar')) -meson.add_postconf_script(find_program('foo'), files('bar')) -``` - -The install script variant is also able to accept custom_targets, -custom_target indexes, and build targets (executables, libraries), and can -use built executables a the script to run - -```meson -installer = executable('installer', ...) -meson.add_install_script(installer, ...) 
-meson.add_install_script('foo.py', installer) -``` diff --git a/docs/markdown/snippets/add_meson_compile_target.md b/docs/markdown/snippets/add_meson_compile_target.md deleted file mode 100644 index d75862f..0000000 --- a/docs/markdown/snippets/add_meson_compile_target.md +++ /dev/null @@ -1,19 +0,0 @@ -## Added ability to specify targets in `meson compile` - -It's now possible to specify targets in `meson compile`, which will result in building only the requested targets. - -Usage: `meson compile [TARGET [TARGET...]]` -`TARGET` has the following syntax: `[PATH/]NAME[:TYPE]`. -`NAME`: name of the target from `meson.build` (e.g. `foo` from `executable('foo', ...)`). -`PATH`: path to the target relative to the root `meson.build` file. Note: relative path for a target specified in the root `meson.build` is `./`. -`TYPE`: type of the target (e.g. `shared_library`, `executable` and etc) - -`PATH` and/or `TYPE` can be ommited if the resulting `TARGET` can be used to uniquely identify the target in `meson.build`. - -For example targets from the following code: -```meson -shared_library('foo', ...) -static_library('foo', ...) -executable('bar', ...) -``` -can be invoked with `meson compile foo:shared_library foo:static_library bar`. diff --git a/docs/markdown/snippets/can_run_host_binaries.md b/docs/markdown/snippets/can_run_host_binaries.md deleted file mode 100644 index 0108184..0000000 --- a/docs/markdown/snippets/can_run_host_binaries.md +++ /dev/null @@ -1,5 +0,0 @@ -## Rename has_exe_wrapper -> can_run_host_binaries - -The old name was confusing as it didn't really match the behavior of the -function. The old name remains as an alias (the behavior hasn't changed), but -is now deprecated. diff --git a/docs/markdown/snippets/clang_coverage.md b/docs/markdown/snippets/clang_coverage.md deleted file mode 100644 index 733a3d9..0000000 --- a/docs/markdown/snippets/clang_coverage.md +++ /dev/null @@ -1,4 +0,0 @@ -## Clang coverage support - -llvm-cov is now used to generate coverage information when clang is used as -the compiler. \ No newline at end of file diff --git a/docs/markdown/snippets/cmake.md b/docs/markdown/snippets/cmake.md deleted file mode 100644 index 16da78e..0000000 --- a/docs/markdown/snippets/cmake.md +++ /dev/null @@ -1,17 +0,0 @@ -## Configure CMake subprojects with meson.subproject_options - -Meson now supports passing configuration options to CMake and overriding -certain build details extracted from the CMake subproject. - -The new CMake configuration options object is very similar to the -[configuration data object](Reference-manual.md#configuration-data-object) object -returned by [`configuration_data`](Reference-manual.md#configuration_data). It -is generated by the `subproject_options` function - -All configuration options have to be set *before* the subproject is configured -and must be passed to the `subproject` method via the `options` key. Altering -the configuration object won't have any effect on previous `cmake.subproject` -calls. - -**Note:** The `cmake_options` kwarg for the `subproject` function is now -deprecated since it is replaced by the new `options` system. 
diff --git a/docs/markdown/snippets/config_tool_no_cross_path.md b/docs/markdown/snippets/config_tool_no_cross_path.md deleted file mode 100644 index cec22e4..0000000 --- a/docs/markdown/snippets/config_tool_no_cross_path.md +++ /dev/null @@ -1,7 +0,0 @@ -## Config tool based dependencies no longer search PATH for cross compiling - -Before 0.55.0 config tool based dependencies (llvm-config, cups-config, etc), -would search system $PATH if they weren't defined in the cross file. This has -been a source of bugs and has been deprecated. It is now removed, config tool -binaries must be specified in the cross file now or the dependency will not -be found. diff --git a/docs/markdown/snippets/d-lang_n_debug.md b/docs/markdown/snippets/d-lang_n_debug.md deleted file mode 100644 index 59f09e4..0000000 --- a/docs/markdown/snippets/d-lang_n_debug.md +++ /dev/null @@ -1,4 +0,0 @@ -## b_ndebug support for D language compilers - -D Language compilers will now set -release/--release/-frelease (depending on -the compiler) when the b_ndebug flag is set. diff --git a/docs/markdown/snippets/dist_not_tests.md b/docs/markdown/snippets/dist_not_tests.md deleted file mode 100644 index f9c971e..0000000 --- a/docs/markdown/snippets/dist_not_tests.md +++ /dev/null @@ -1,5 +0,0 @@ -## meson dist --no-tests - -`meson dist` has a new option `--no-tests` to skip build and tests of generated -packages. It can be used to not waste time for example when done in CI that -already does its own testing. diff --git a/docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md b/docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md deleted file mode 100644 index ebdd8a7..0000000 --- a/docs/markdown/snippets/exe_wrapper_for_cross_built_tests.md +++ /dev/null @@ -1,9 +0,0 @@ -## Test scripts are given the exe wrapper if needed - -Meson will now set the `MESON_EXE_WRAPPER` as the properly wrapped and joined -representation. For Unix-like OSes this means python's shelx.join, on Windows -an implementation that attempts to properly quote windows argument is used. -This allow wrapper scripts to run test binaries, instead of just skipping. - -for example, if the wrapper is `['emulator', '--script']`, it will be passed -as `MESON_EXE_WRAPPER="emulator --script"`. diff --git a/docs/markdown/snippets/find_program.md b/docs/markdown/snippets/find_program.md deleted file mode 100644 index d0bb64d..0000000 --- a/docs/markdown/snippets/find_program.md +++ /dev/null @@ -1,20 +0,0 @@ -## find_program: Fixes when the program has been overridden by executable - -When a program has been overridden by an executable, the returned object of -find_program() had some issues: - -```meson -# In a subproject: -exe = executable('foo', ...) -meson.override_find_program('foo', exe) - -# In main project: -# The version check was crashing meson. -prog = find_program('foo', version : '>=1.0') - -# This was crashing meson. -message(prog.path()) - -# New method to be consistent with built objects. -message(prog.full_path()) -``` diff --git a/docs/markdown/snippets/force_fallback_for.md b/docs/markdown/snippets/force_fallback_for.md deleted file mode 100644 index b6af209..0000000 --- a/docs/markdown/snippets/force_fallback_for.md +++ /dev/null @@ -1,10 +0,0 @@ -## Force fallback for - -A newly-added `--force-fallback-for` command line option can now be used to -force fallback for specific subprojects. 
- -Example: - -``` -meson build --force-fallback-for=foo,bar -``` diff --git a/docs/markdown/snippets/gir_fatal_warnings.md b/docs/markdown/snippets/gir_fatal_warnings.md deleted file mode 100644 index 951e98e..0000000 --- a/docs/markdown/snippets/gir_fatal_warnings.md +++ /dev/null @@ -1,5 +0,0 @@ -## Fatal warnings in `gnome.generate_gir()` - -`gnome.generate_gir()` now has `fatal_warnings` keyword argument to abort when -a warning is produced. This is useful for example in CI environment where it's -important to catch potential issues. diff --git a/docs/markdown/snippets/gtest_protocol.md b/docs/markdown/snippets/gtest_protocol.md deleted file mode 100644 index 14f3af9..0000000 --- a/docs/markdown/snippets/gtest_protocol.md +++ /dev/null @@ -1,6 +0,0 @@ -## Test protocol for gtest - -Due to the popularity of Gtest (google test) among C and C++ developers meson -now supports a special protocol for gtest. With this protocol meson injects -arguments to gtests to output JUnit, reads that JUnit, and adds the output to -the JUnit it generates. diff --git a/docs/markdown/snippets/implicit_fallback.md b/docs/markdown/snippets/implicit_fallback.md deleted file mode 100644 index a6cbaf0..0000000 --- a/docs/markdown/snippets/implicit_fallback.md +++ /dev/null @@ -1,21 +0,0 @@ -## Implicit dependency fallback - -`dependency('foo')` now automatically fallback if the dependency is not found on -the system but a subproject wrap file or directory exists with the same name. - -That means that simply adding `subprojects/foo.wrap` is enough to add fallback -to any `dependency('foo')` call. It is however requires that the subproject call -`meson.override_dependency('foo', foo_dep)` to specify which dependency object -should be used for `foo`. - -## Wrap file `provide` section - -Wrap files can define the dependencies it provides in the `[provide]` section. -When `foo.wrap` provides the dependency `foo-1.0` any call do `dependency('foo-1.0')` -will automatically fallback to that subproject even if no `fallback` keyword -argument is given. See [Wrap documentation](Wrap-dependency-system-manual.md#provide_section). - -## `find_program()` fallback - -When a program cannot be found on the system but a wrap file has its name in the -`[provide]` section, that subproject will be used as fallback. diff --git a/docs/markdown/snippets/introspect.md b/docs/markdown/snippets/introspect.md deleted file mode 100644 index 8eab486..0000000 --- a/docs/markdown/snippets/introspect.md +++ /dev/null @@ -1,4 +0,0 @@ -## Introspection API changes - -dumping the AST (--ast): **new in 0.55.0** -- prints the AST of a meson.build as JSON diff --git a/docs/markdown/snippets/junit_result_generation.md b/docs/markdown/snippets/junit_result_generation.md deleted file mode 100644 index fbe910b..0000000 --- a/docs/markdown/snippets/junit_result_generation.md +++ /dev/null @@ -1,4 +0,0 @@ -## Meson test now produces JUnit xml from results - -Meson will now generate a JUnit compatible XML file from test results. it -will be in the meson-logs directory and is called testlog.junit.xml. diff --git a/docs/markdown/snippets/keyval_kobject.md b/docs/markdown/snippets/keyval_kobject.md deleted file mode 100644 index 4add23c..0000000 --- a/docs/markdown/snippets/keyval_kobject.md +++ /dev/null @@ -1,6 +0,0 @@ -## `unstable-kconfig` module renamed to `unstable-keyval` - -The `unstable-kconfig` module is now renamed to `unstable-keyval`. 
-We expect this module to become stable once it has some usage experience, -specifically in the next or the following release - diff --git a/docs/markdown/snippets/link_language_all_targets.md b/docs/markdown/snippets/link_language_all_targets.md deleted file mode 100644 index 9019d50..0000000 --- a/docs/markdown/snippets/link_language_all_targets.md +++ /dev/null @@ -1,8 +0,0 @@ -## link_language argument added to all targets - -Previously the `link_language` argument was only supposed to be allowed in -executables, because the linker used needs to be the linker for the language -that implements the main function. Unfortunately it didn't work in that case, -and, even worse, if it had been implemented properly it would have worked for -*all* targets. In 0.55.0 this restriction has been removed, and the bug fixed. -It now is valid for `executable` and all derivative of `library`. diff --git a/docs/markdown/snippets/machine_file_constants.md b/docs/markdown/snippets/machine_file_constants.md deleted file mode 100644 index 84b0848..0000000 --- a/docs/markdown/snippets/machine_file_constants.md +++ /dev/null @@ -1,20 +0,0 @@ -## Machine file constants - -Native and cross files now support string and list concatenation using the `+` -operator, and joining paths using the `/` operator. -Entries defined in the `[constants]` section can be used in any other section. -An entry defined in any other section can be used only within that same section and only -after it has been defined. - -```ini -[constants] -toolchain = '/toolchain' -common_flags = ['--sysroot=' + toolchain + '/sysroot'] - -[properties] -c_args = common_flags + ['-DSOMETHING'] -cpp_args = c_args + ['-DSOMETHING_ELSE'] - -[binaries] -c = toolchain + '/gcc' -``` diff --git a/docs/markdown/snippets/options_string_concat.md b/docs/markdown/snippets/options_string_concat.md deleted file mode 100644 index 0fbf0f4..0000000 --- a/docs/markdown/snippets/options_string_concat.md +++ /dev/null @@ -1,14 +0,0 @@ -## String concatenation in meson_options.txt - -It is now possible to use string concatenation (with the `+` opperator) in the -meson_options.txt file. This allows splitting long option descriptions. - -```meson -option( - 'testoption', - type : 'string', - value : 'optval', - description : 'An option with a very long description' + - 'that does something in a specific context' -) -``` diff --git a/docs/markdown/snippets/response-files.md b/docs/markdown/snippets/response-files.md deleted file mode 100644 index 624b664..0000000 --- a/docs/markdown/snippets/response-files.md +++ /dev/null @@ -1,7 +0,0 @@ -## Response files enabled on Linux, reined in on Windows - -Meson used to always use response files on Windows, -but never on Linux. - -It now strikes a happier balance, using them on both platforms, -but only when needed to avoid command line length limits. diff --git a/docs/markdown/snippets/rpath_behavior.md b/docs/markdown/snippets/rpath_behavior.md deleted file mode 100644 index c46f0c2..0000000 --- a/docs/markdown/snippets/rpath_behavior.md +++ /dev/null @@ -1,7 +0,0 @@ -## rpath removal now more careful - -On Linux-like systems, meson adds rpath entries to allow running apps -in the build tree, and then removes those build-time-only -rpath entries when installing. Rpath entries may also come -in via LDFLAGS and via .pc files. Meson used to remove those -latter rpath entries by accident, but is now more careful. 
diff --git a/docs/markdown/snippets/wrap_fallback.md b/docs/markdown/snippets/wrap_fallback.md deleted file mode 100644 index 9b71143..0000000 --- a/docs/markdown/snippets/wrap_fallback.md +++ /dev/null @@ -1,4 +0,0 @@ -## Wrap fallback URL - -Wrap files can now define `source_fallback_url` and `patch_fallback_url` to be -used in case the main server is temporaly down. diff --git a/docs/markdown/snippets/wrap_patch.md b/docs/markdown/snippets/wrap_patch.md deleted file mode 100644 index ae66bbd..0000000 --- a/docs/markdown/snippets/wrap_patch.md +++ /dev/null @@ -1,19 +0,0 @@ -## Local wrap source and patch files - -It is now possible to use the `patch_filename` and `source_filename` value in a -`.wrap` file without `*_url` to specify a local source / patch file. All local -files must be located in the `subprojects/packagefiles` directory. The `*_hash` -entries are optional with this setup. - -## Local wrap patch directory - -Wrap files can now specify `patch_directory` instead of `patch_filename` in the -case overlay files are local. Every files in that directory, and subdirectories, -will be copied to the subproject directory. This can be used for example to add -`meson.build` files to a project not using Meson build system upstream. -The patch directory must be placed in `subprojects/packagefiles` directory. - -## Patch on all wrap types - -`patch_*` keys are not limited to `wrap-file` any more, they can be specified for -all wrap types. diff --git a/docs/sitemap.txt b/docs/sitemap.txt index be1d908..ac74870 100644 --- a/docs/sitemap.txt +++ b/docs/sitemap.txt @@ -79,6 +79,7 @@ index.md Shipping-prebuilt-binaries-as-wraps.md fallback-wraptool.md Release-notes.md + Release-notes-for-0.55.0.md Release-notes-for-0.54.0.md Release-notes-for-0.53.0.md Release-notes-for-0.52.0.md diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 55ab234..8402bec 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -38,7 +38,7 @@ if T.TYPE_CHECKING: OptionDictType = T.Dict[str, 'UserOption[T.Any]'] -version = '0.55.0.rc2' +version = '0.55.0' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode'] default_yielding = False -- cgit v1.1 From cd8bdef77640947aa0c16a0c5e8adb6b5ff80930 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 12 Jul 2020 18:30:25 +0300 Subject: Bump version number for new development. [skip ci] --- mesonbuild/coredata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 8402bec..26f33f1 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -38,7 +38,7 @@ if T.TYPE_CHECKING: OptionDictType = T.Dict[str, 'UserOption[T.Any]'] -version = '0.55.0' +version = '0.55.999' backendlist = ['ninja', 'vs', 'vs2010', 'vs2015', 'vs2017', 'vs2019', 'xcode'] default_yielding = False -- cgit v1.1 From c57616884f02dfd661bca5766acd409582dac83b Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 12 Jul 2020 18:32:54 +0300 Subject: MSI creation fixes. [skip ci] --- msi/createmsi.py | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/msi/createmsi.py b/msi/createmsi.py index 76cb520..67020a5 100644 --- a/msi/createmsi.py +++ b/msi/createmsi.py @@ -13,10 +13,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + ''' - This script is for generating MSI packages - for Windows users. 
+This script is for generating MSI packages +for Windows users. ''' + import subprocess import shutil import uuid @@ -78,12 +80,8 @@ class PackageGenerator: self.bytesize = 32 if '32' in platform.architecture()[0] else 64 self.final_output = 'meson-{}-{}.msi'.format(self.version, self.bytesize) self.staging_dirs = ['dist', 'dist2'] - if self.bytesize == 64: - self.progfile_dir = 'ProgramFiles64Folder' - redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC142_CRT_x64.msm' - else: - self.progfile_dir = 'ProgramFilesFolder' - redist_glob = 'C:\\Program Files*\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\*\\MergeModules\\Microsoft_VC142_CRT_x86.msm' + self.progfile_dir = 'ProgramFiles64Folder' + redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\v*\\MergeModules\\Microsoft_VC142_CRT_x64.msm' trials = glob(redist_glob) if len(trials) != 1: sys.exit('Could not find unique MSM setup:' + '\n'.join(trials)) -- cgit v1.1 From 21da2c90408746b8151a5e414930d6c637c78a57 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Sat, 11 Jul 2020 23:51:56 +0530 Subject: Fix native builds on Windows ARM64 machines I made the mistake of always selecting the debug CRT for compiler checks on Windows 4 years ago: https://github.com/mesonbuild/meson/pull/543 https://github.com/mesonbuild/meson/pull/614 The idea was to always build the tests with debugging enabled so that the compiler doesn't optimize the tests away. But we stopped doing that a while ago, and also the debug CRT has no relation to that. We should select the CRT in the same way that we do for building targets: based on the options. On Windows ARM64, the debug CRT for ARM64 isn't always available, and the release CRT is available only after installing the runtime package. Without this, we will always try to pick the debug CRT even when --buildtype=debugoptimized or release. 
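
As a rough, standalone illustration of the selection rule described above (a hand-written sketch, not the code in this patch; the option values mirror Meson's `b_vscrt` choices, but treat the exact flag mapping as an assumption):

```python
# Sketch: derive the MSVC CRT flag from the user's options instead of
# hard-coding the debug CRT (/MDd) for every compiler check.
def crt_compile_arg(vscrt: str, buildtype: str) -> list:
    """Map a b_vscrt value plus the buildtype to an MSVC CRT flag."""
    table = {'md': ['/MD'], 'mdd': ['/MDd'], 'mt': ['/MT'], 'mtd': ['/MTd']}
    if vscrt == 'from_buildtype':
        # Only pure debug builds get the debug CRT; debugoptimized and
        # release builds use the release CRT, which is what is actually
        # present on a stock Windows ARM64 machine.
        return ['/MDd'] if buildtype == 'debug' else ['/MD']
    return table.get(vscrt, [])

if __name__ == '__main__':
    print(crt_compile_arg('from_buildtype', 'release'))  # ['/MD']
    print(crt_compile_arg('from_buildtype', 'debug'))    # ['/MDd']
```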
--- mesonbuild/compilers/compilers.py | 3 --- mesonbuild/compilers/cuda.py | 3 --- mesonbuild/compilers/mixins/clike.py | 12 ++++++++++-- mesonbuild/compilers/mixins/islinker.py | 3 --- mesonbuild/linkers.py | 13 ------------- 5 files changed, 10 insertions(+), 24 deletions(-) diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 8ecb972..0de59a4 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -854,9 +854,6 @@ class Compiler(metaclass=abc.ABCMeta): def bitcode_args(self) -> T.List[str]: return self.linker.bitcode_args() - def get_linker_debug_crt_args(self) -> T.List[str]: - return self.linker.get_debug_crt_args() - def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: return self.linker.get_buildtype_args(buildtype) diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py index 4e89f5d..934ad12 100644 --- a/mesonbuild/compilers/cuda.py +++ b/mesonbuild/compilers/cuda.py @@ -263,9 +263,6 @@ class CudaCompiler(Compiler): def get_depfile_suffix(self): return 'd' - def get_linker_debug_crt_args(self) -> T.List[str]: - return self._cook_link_args(self.host_compiler.get_linker_debug_crt_args()) - def get_buildtype_linker_args(self, buildtype): return self._cook_link_args(self.host_compiler.get_buildtype_linker_args(buildtype)) diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 47e97d2..95b9592 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -366,9 +366,17 @@ class CLikeCompiler: def _get_basic_compiler_args(self, env, mode: str): cargs, largs = [], [] - # Select a CRT if needed since we're linking if mode == 'link': - cargs += self.get_linker_debug_crt_args() + # Sometimes we need to manually select the CRT to use with MSVC. + # One example is when trying to do a compiler check that involves + # linking with static libraries since MSVC won't select a CRT for + # us in that case and will error out asking us to pick one. 
+ try: + crt_val = env.coredata.base_options['b_vscrt'].value + buildtype = env.coredata.base_options['buildtype'].value + cargs += self.get_crt_compile_args(crt_val, buildtype) + except (KeyError, AttributeError): + pass # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env sys_args = env.coredata.get_external_args(self.for_machine, self.language) diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py index bf1d339..a9967d6 100644 --- a/mesonbuild/compilers/mixins/islinker.py +++ b/mesonbuild/compilers/mixins/islinker.py @@ -110,9 +110,6 @@ class BasicLinkerIsCompilerMixin: install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: return ([], set()) - def get_linker_debug_crt_args(self) -> T.List[str]: - return [] - def get_asneeded_args(self) -> T.List[str]: return [] diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index 4264e7d..fe1441e 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -451,9 +451,6 @@ class DynamicLinker(LinkerEnvVarsMixin, metaclass=abc.ABCMeta): def bitcode_args(self) -> T.List[str]: raise mesonlib.MesonException('This linker does not support bitcode bundles') - def get_debug_crt_args(self) -> T.List[str]: - return [] - def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: @@ -998,16 +995,6 @@ class VisualStudioLikeLinkerMixin: def invoked_by_compiler(self) -> bool: return not self.direct - def get_debug_crt_args(self) -> T.List[str]: - """Arguments needed to select a debug crt for the linker. - - Sometimes we need to manually select the CRT (C runtime) to use with - MSVC. One example is when trying to link with static libraries since - MSVC won't auto-select a CRT for us in that case and will error out - asking us to select one. - """ - return self._apply_prefix('/MDd') - def get_output_args(self, outputname: str) -> T.List[str]: return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname]) -- cgit v1.1 From a672ffa9f89bfe4be712da10cbec9e25136f48ca Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Sun, 12 Jul 2020 16:14:22 +0530 Subject: ci: Increase timeout for vs jobs to 120 vc2017x64vs and vc2019x64vs sometimes timeout because they exceed the 60 min default limit. --- azure-pipelines.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 85fedab..1e12f14 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -13,6 +13,7 @@ variables: jobs: - job: vs2017 + timeoutInMinutes: 120 pool: vmImage: VS2017-Win2016 @@ -41,6 +42,7 @@ jobs: - template: ci/azure-steps.yml - job: vs2019 + timeoutInMinutes: 120 pool: vmImage: windows-2019 -- cgit v1.1 From 848fcb6a537d58cf5d49ac9d3d10b4444bb420ad Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Sat, 11 Jul 2020 23:50:12 +0530 Subject: Enable target override_options for llvm-ir This was missed somehow when the feature was added a few years ago. 
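
The idea, as a minimal hand-written sketch (not Meson's actual classes): a target's `override_options` are layered on top of the global base options, and the LLVM IR compile path now consults that per-target view instead of the global one.

```python
# Sketch of per-target option layering: an override wins over the
# project-wide value, everything else falls through to the global setting.
class BaseOptionsProxy:
    def __init__(self, global_options: dict, overrides: dict) -> None:
        self._global = global_options
        self._overrides = overrides

    def value(self, name: str):
        return self._overrides.get(name, self._global.get(name))

if __name__ == '__main__':
    global_opts = {'b_ndebug': 'false', 'b_lto': 'false'}
    target_view = BaseOptionsProxy(global_opts, {'b_ndebug': 'true'})
    print(target_view.value('b_ndebug'))  # 'true'  (per-target override)
    print(target_view.value('b_lto'))     # 'false' (global value)
```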
--- mesonbuild/backend/ninjabackend.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 62bda1a..065da00 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -2155,11 +2155,11 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) return linker.get_link_debugfile_args(outname) def generate_llvm_ir_compile(self, target, src): + base_proxy = self.get_base_options_for_target(target) compiler = get_compiler_for_source(target.compilers.values(), src) commands = compiler.compiler_args() # Compiler args for compiling this target - commands += compilers.get_base_compile_args(self.environment.coredata.base_options, - compiler) + commands += compilers.get_base_compile_args(base_proxy, compiler) if isinstance(src, File): if src.is_built: src_filename = os.path.join(src.subdir, src.fname) -- cgit v1.1 From f2dad788e782a9e7b5abb8a036370f6d8e29f7fc Mon Sep 17 00:00:00 2001 From: Eli Schwartz Date: Thu, 25 Jun 2020 11:32:12 -0400 Subject: qt module: rcc supports depfiles now, given a recent enough version of Qt5 Add depfile support to generated targets for Qt >= 5.14. Move warning into the module init itself, to check if the version is too old before issuing. Also tweak the wording itself, to advise upgrading to a suitable version of Qt5 instead of advising to wait for a Qt bug to be fixed. --- mesonbuild/modules/qt.py | 11 ++++++++++- mesonbuild/modules/qt4.py | 3 --- mesonbuild/modules/qt5.py | 3 --- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py index c2b1e01..c810df6 100644 --- a/mesonbuild/modules/qt.py +++ b/mesonbuild/modules/qt.py @@ -15,7 +15,7 @@ import os from .. import mlog from .. import build -from ..mesonlib import MesonException, extract_as_list, File, unholder +from ..mesonlib import MesonException, extract_as_list, File, unholder, version_compare from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency, NonExistingExternalProgram import xml.etree.ElementTree as ET from . 
import ModuleReturnValue, get_include_args, ExtensionModule @@ -30,6 +30,7 @@ _QT_DEPS_LUT = { class QtBaseModule(ExtensionModule): tools_detected = False + rcc_supports_depfiles = False def __init__(self, interpreter, qt_version=5): ExtensionModule.__init__(self, interpreter) @@ -46,6 +47,11 @@ class QtBaseModule(ExtensionModule): if qt.found(): # Get all tools and then make sure that they are the right version self.moc, self.uic, self.rcc, self.lrelease = qt.compilers_detect(self.interpreter) + if version_compare(qt.version, '>=5.14.0'): + self.rcc_supports_depfiles = True + else: + mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:', + mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) else: suffix = '-qt{}'.format(self.qt_version) self.moc = NonExistingExternalProgram(name='moc' + suffix) @@ -156,6 +162,9 @@ class QtBaseModule(ExtensionModule): 'output': name + '.cpp', 'command': [self.rcc, '-name', '@BASENAME@', '-o', '@OUTPUT@', rcc_extra_arguments, '@INPUT@'], 'depend_files': qrc_deps} + if self.rcc_supports_depfiles: + rcc_kwargs['depfile'] = name + '.d' + rcc_kwargs['command'] += ['--depfile', '@DEPFILE@'] res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) sources.append(res_target) if ui_files: diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py index 81a1055..e85a150 100644 --- a/mesonbuild/modules/qt4.py +++ b/mesonbuild/modules/qt4.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .. import mlog from .qt import QtBaseModule @@ -23,6 +22,4 @@ class Qt4Module(QtBaseModule): def initialize(*args, **kwargs): - mlog.warning('rcc dependencies will not work properly until this upstream issue is fixed:', - mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) return Qt4Module(*args, **kwargs) diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py index 244a217..873c2db 100644 --- a/mesonbuild/modules/qt5.py +++ b/mesonbuild/modules/qt5.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .. import mlog from .qt import QtBaseModule @@ -23,6 +22,4 @@ class Qt5Module(QtBaseModule): def initialize(*args, **kwargs): - mlog.warning('rcc dependencies will not work reliably until this upstream issue is fixed:', - mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) return Qt5Module(*args, **kwargs) -- cgit v1.1 From 825ba92b98347cfe470fad605da467e59d699efb Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 14 Jul 2020 10:48:04 -0400 Subject: doc: Add warning about --force-fallback-for [skip ci] --- docs/markdown/Subprojects.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/docs/markdown/Subprojects.md b/docs/markdown/Subprojects.md index 9c54d69..02a83e6 100644 --- a/docs/markdown/Subprojects.md +++ b/docs/markdown/Subprojects.md @@ -236,6 +236,28 @@ the following command-line options: but you only want to build against the library sources for a few of them. + **Warning**: This could lead to mixing system and subproject version of the + same library in the same process. Take this case as example: + - Libraries `glib-2.0` and `gstreamer-1.0` are installed on your system. + - `gstreamer-1.0` depends on `glib-2.0`, pkg-config file `gstreamer-1.0.pc` + has `Requires: glib-2.0`. 
+  - In your application build definition you do:
+    ```meson
+    executable('app', ...,
+      dependencies: [
+        dependency('glib-2.0', fallback: 'glib'),
+        dependency('gstreamer-1.0', fallback: 'gstreamer')],
+    )
+    ```
+  - You configure with `--force-fallback-for=glib`.
+    This results in linking to two different versions of the `glib-2.0` library,
+    because `dependency('glib-2.0', fallback: 'glib')` will return the
+    subproject dependency, but `dependency('gstreamer-1.0', fallback: 'gstreamer')`
+    will not fall back and will return the system dependency, which includes the
+    `glib-2.0` library. To avoid that situation, every dependency that itself depends on
+    `glib-2.0` must also be forced to fall back, in this case with
+    `--force-fallback-for=glib,gstreamer`.
+
 ## Download subprojects
 
 *Since 0.49.0*
-- 
cgit v1.1

From 4b728293cd111da3d524a23fb02b457f39e5c406 Mon Sep 17 00:00:00 2001
From: Nathan Lanza
Date: Tue, 14 Jul 2020 13:48:11 -0400
Subject: Replace `dynamic linker` with `linker` [skip ci]

This header erroneously referred to the dynamic linker while the paragraph
talks about the "link editor." Change the title to account for the difference.

---
 docs/markdown/howtox.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/markdown/howtox.md b/docs/markdown/howtox.md
index c89f883..0d1a2a2 100644
--- a/docs/markdown/howtox.md
+++ b/docs/markdown/howtox.md
@@ -25,7 +25,7 @@ for the host platform in cross builds can only be specified with a cross file.
 There is a table of all environment variables supported
 [Here](Reference-tables.md#compiler-and-linker-selection-variables)
 
-## Set dynamic linker
+## Set linker
 
 *New in 0.53.0*
-- 
cgit v1.1

From 2353d67c25629da049b192d7fddb3e7851edb8df Mon Sep 17 00:00:00 2001
From: Xavier Claessens
Date: Tue, 14 Jul 2020 11:57:59 -0400
Subject: wrap: Raise MesonException when git command fails

This avoids printing a long backtrace by default; the user already has the
output of the git command printed for debugging purposes, since we don't
redirect stdout/stderr.
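
The pattern, as a small self-contained sketch (the real change is in mesonbuild/wrap/wrap.py below; `WrapException` is redefined here only to keep the example runnable):

```python
import subprocess

class WrapException(Exception):
    pass

def verbose_git(cmd, workingdir, check=False):
    # stdout/stderr are not redirected, so the user already sees git's own
    # error output; a failed command is reported as a short WrapException
    # instead of an unhandled CalledProcessError with a long backtrace.
    try:
        return subprocess.run(['git'] + cmd, cwd=workingdir,
                              check=check).returncode == 0
    except subprocess.CalledProcessError:
        raise WrapException('Git command failed')
```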
--- mesonbuild/wrap/wrap.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 54daaf3..aba220e 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -61,7 +61,10 @@ def quiet_git(cmd: T.List[str], workingdir: str) -> T.Tuple[bool, str]: def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool: if not GIT: return False - return git(cmd, workingdir, check=check).returncode == 0 + try: + return git(cmd, workingdir, check=check).returncode == 0 + except subprocess.CalledProcessError: + raise WrapException('Git command failed') def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult: """ raises WrapException if not whitelisted subdomain """ -- cgit v1.1 From 9e0f78476dc2839c702f02fab8f89e2abdb7c44f Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 16 Jul 2020 14:29:00 +0200 Subject: deps: Do not deepcopy internal libraries (fixes #7457) --- mesonbuild/dependencies/base.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 23701da..1bb71f7 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -252,6 +252,16 @@ class InternalDependency(Dependency): self.ext_deps = ext_deps self.variables = variables + def __deepcopy__(self, memo: dict) -> 'InternalDependency': + result = self.__class__.__new__(self.__class__) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k in ['libraries', 'whole_libraries']: + setattr(result, k, copy.copy(v)) + else: + setattr(result, k, copy.deepcopy(v, memo)) + return result + def get_pkgconfig_variable(self, variable_name, kwargs): raise DependencyException('Method "get_pkgconfig_variable()" is ' 'invalid for an internal dependency') -- cgit v1.1 From 62f3aa1dadfe006174eabfd65bfa089c7331b0dc Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 16 Jul 2020 14:30:07 +0200 Subject: Make virtualenv inside the project work --- .gitignore | 1 + run_project_tests.py | 18 +++++++++--------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/.gitignore b/.gitignore index f87f562..fea337e 100644 --- a/.gitignore +++ b/.gitignore @@ -30,3 +30,4 @@ packagecache /docs/hotdoc-private* *.pyc +/*venv* diff --git a/run_project_tests.py b/run_project_tests.py index 793c844..9487a04 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -1115,16 +1115,15 @@ def check_format(): '.build', '.md', } + skip_dirs = { + '.dub', # external deps are here + '.pytest_cache', + 'meson-logs', 'meson-private', + '.eggs', '_cache', # e.g. .mypy_cache + 'venv', # virtualenvs have DOS line endings + } for (root, _, filenames) in os.walk('.'): - if '.dub' in root: # external deps are here - continue - if '.pytest_cache' in root: - continue - if 'meson-logs' in root or 'meson-private' in root: - continue - if '__CMake_build' in root: - continue - if '.eggs' in root or '_cache' in root: # e.g. 
.mypy_cache + if any([x in root for x in skip_dirs]): continue for fname in filenames: file = Path(fname) @@ -1248,6 +1247,7 @@ if __name__ == '__main__': options.extra_args += ['--cross-file', options.cross_file] print('Meson build system', meson_version, 'Project Tests') + print('Using python', sys.version.split('\n')[0]) setup_commands(options.backend) detect_system_compiler(options) print_tool_versions() -- cgit v1.1 From 019ed04331695bb6f5c5fff70dfced34c4ba9012 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 16 Jul 2020 20:29:34 +0200 Subject: mdata: Generate mesondata.py from */data folders --- mesonbuild/mesondata.py | 374 ++++++++++++++++++++++++++++++++++++++++++++++++ tools/gen_data.py | 139 ++++++++++++++++++ 2 files changed, 513 insertions(+) create mode 100644 mesonbuild/mesondata.py create mode 100755 tools/gen_data.py diff --git a/mesonbuild/mesondata.py b/mesonbuild/mesondata.py new file mode 100644 index 0000000..1f223c2 --- /dev/null +++ b/mesonbuild/mesondata.py @@ -0,0 +1,374 @@ +# Copyright 2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +#### +#### WARNING: This is an automatically generated file! Do not edit! +#### Generated by tools/gen_data.py +#### + + +from pathlib import Path +import typing as T + +if T.TYPE_CHECKING: + from .environment import Environment + +###################### +# BEGIN Data section # +###################### + +file_0_data_CMakeListsLLVM_txt = '''\ +cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION} ) + +set(PACKAGE_FOUND FALSE) + +while(TRUE) + find_package(LLVM REQUIRED CONFIG QUIET) + + # ARCHS has to be set via the CMD interface + if(LLVM_FOUND OR "${ARCHS}" STREQUAL "") + break() + endif() + + list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE) + list(REMOVE_AT ARCHS 0) +endwhile() + +if(LLVM_FOUND) + set(PACKAGE_FOUND TRUE) + + foreach(mod IN LISTS LLVM_MESON_MODULES) + # Reset variables + set(out_mods) + set(real_mods) + + # Generate a lower and upper case version + string(TOLOWER "${mod}" mod_L) + string(TOUPPER "${mod}" mod_U) + + # Get the mapped components + llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U}) + list(SORT out_mods) + list(REMOVE_DUPLICATES out_mods) + + # Make sure that the modules exist + foreach(i IN LISTS out_mods) + if(TARGET ${i}) + list(APPEND real_mods ${i}) + endif() + endforeach() + + # Set the output variables + set(MESON_LLVM_TARGETS_${mod} ${real_mods}) + foreach(i IN LISTS real_mods) + set(MESON_TARGET_TO_LLVM_${i} ${mod}) + endforeach() + endforeach() + + # Check the following variables: + # LLVM_PACKAGE_VERSION + # LLVM_VERSION + # LLVM_VERSION_STRING + if(NOT DEFINED PACKAGE_VERSION) + if(DEFINED LLVM_PACKAGE_VERSION) + set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}") + elseif(DEFINED LLVM_VERSION) + set(PACKAGE_VERSION "${LLVM_VERSION}") + elseif(DEFINED LLVM_VERSION_STRING) + set(PACKAGE_VERSION "${LLVM_VERSION_STRING}") + endif() + endif() + + # Check the following variables: + # LLVM_LIBRARIES + # 
LLVM_LIBS + set(libs) + if(DEFINED LLVM_LIBRARIES) + set(libs LLVM_LIBRARIES) + elseif(DEFINED LLVM_LIBS) + set(libs LLVM_LIBS) + endif() + + # Check the following variables: + # LLVM_INCLUDE_DIRS + # LLVM_INCLUDES + # LLVM_INCLUDE_DIR + set(includes) + if(DEFINED LLVM_INCLUDE_DIRS) + set(includes LLVM_INCLUDE_DIRS) + elseif(DEFINED LLVM_INCLUDES) + set(includes LLVM_INCLUDES) + elseif(DEFINED LLVM_INCLUDE_DIR) + set(includes LLVM_INCLUDE_DIR) + endif() + + # Check the following variables: + # LLVM_DEFINITIONS + set(definitions) + if(DEFINED LLVM_DEFINITIONS) + set(definitions LLVM_DEFINITIONS) + endif() + + set(PACKAGE_INCLUDE_DIRS "${${includes}}") + set(PACKAGE_DEFINITIONS "${${definitions}}") + set(PACKAGE_LIBRARIES "${${libs}}") +endif() +''' + +file_1_data_CMakePathInfo_txt = '''\ +cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION}) + +set(TMP_PATHS_LIST) +list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH}) +list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH}) +list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH}) +list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH}) +list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH}) + +set(LIB_ARCH_LIST) +if(CMAKE_LIBRARY_ARCHITECTURE_REGEX) + file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* ) + foreach(dir ${implicit_dirs}) + if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}") + list(APPEND LIB_ARCH_LIST "${dir}") + endif() + endforeach() +endif() + +# "Export" these variables: +set(MESON_ARCH_LIST ${LIB_ARCH_LIST}) +set(MESON_PATHS_LIST ${TMP_PATHS_LIST}) +set(MESON_CMAKE_ROOT ${CMAKE_ROOT}) +set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT}) +set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH}) + +message(STATUS ${TMP_PATHS_LIST}) +''' + +file_2_data_CMakeLists_txt = '''\ +# fail noisily if attempt to use this file without setting: +# cmake_minimum_required(VERSION ${CMAKE_VERSION}) +# project(... LANGUAGES ...) 
+ +cmake_policy(SET CMP0000 NEW) + +set(PACKAGE_FOUND FALSE) +set(_packageName "${NAME}") +string(TOUPPER "${_packageName}" PACKAGE_NAME) + +while(TRUE) + find_package("${NAME}" QUIET COMPONENTS ${COMPS}) + + # ARCHS has to be set via the CMD interface + if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "") + break() + endif() + + list(GET ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE) + list(REMOVE_AT ARCHS 0) +endwhile() + +if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND) + set(PACKAGE_FOUND TRUE) + + # Check the following variables: + # FOO_VERSION + # Foo_VERSION + # FOO_VERSION_STRING + # Foo_VERSION_STRING + if(NOT DEFINED PACKAGE_VERSION) + if(DEFINED ${_packageName}_VERSION) + set(PACKAGE_VERSION "${${_packageName}_VERSION}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}") + elseif(DEFINED ${_packageName}_VERSION_STRING) + set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}") + elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING) + set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}") + endif() + endif() + + # Check the following variables: + # FOO_LIBRARIES + # Foo_LIBRARIES + # FOO_LIBS + # Foo_LIBS + set(libs) + if(DEFINED ${_packageName}_LIBRARIES) + set(libs ${_packageName}_LIBRARIES) + elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES) + set(libs ${PACKAGE_NAME}_LIBRARIES) + elseif(DEFINED ${_packageName}_LIBS) + set(libs ${_packageName}_LIBS) + elseif(DEFINED ${PACKAGE_NAME}_LIBS) + set(libs ${PACKAGE_NAME}_LIBS) + endif() + + # Check the following variables: + # FOO_INCLUDE_DIRS + # Foo_INCLUDE_DIRS + # FOO_INCLUDES + # Foo_INCLUDES + # FOO_INCLUDE_DIR + # Foo_INCLUDE_DIR + set(includes) + if(DEFINED ${_packageName}_INCLUDE_DIRS) + set(includes ${_packageName}_INCLUDE_DIRS) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS) + set(includes ${PACKAGE_NAME}_INCLUDE_DIRS) + elseif(DEFINED ${_packageName}_INCLUDES) + set(includes ${_packageName}_INCLUDES) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDES) + set(includes ${PACKAGE_NAME}_INCLUDES) + elseif(DEFINED ${_packageName}_INCLUDE_DIR) + set(includes ${_packageName}_INCLUDE_DIR) + elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR) + set(includes ${PACKAGE_NAME}_INCLUDE_DIR) + endif() + + # Check the following variables: + # FOO_DEFINITIONS + # Foo_DEFINITIONS + set(definitions) + if(DEFINED ${_packageName}_DEFINITIONS) + set(definitions ${_packageName}_DEFINITIONS) + elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS) + set(definitions ${PACKAGE_NAME}_DEFINITIONS) + endif() + + set(PACKAGE_INCLUDE_DIRS "${${includes}}") + set(PACKAGE_DEFINITIONS "${${definitions}}") + set(PACKAGE_LIBRARIES "${${libs}}") +endif() +''' + +file_3_data_preload_cmake = '''\ +if(MESON_PS_LOADED) + return() +endif() + +set(MESON_PS_LOADED ON) + +# Dummy macros that have a special meaning in the meson code +macro(meson_ps_execute_delayed_calls) +endmacro() + +macro(meson_ps_reload_vars) +endmacro() + +# Helper macro to inspect the current CMake state +macro(meson_ps_inspect_vars) + set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}") + set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}") + meson_ps_execute_delayed_calls() +endmacro() + + +# Override some system functions with custom code and forward the args +# to the original function +macro(add_custom_command) + meson_ps_inspect_vars() + _add_custom_command(${ARGV}) +endmacro() + +macro(add_custom_target) + meson_ps_inspect_vars() + _add_custom_target(${ARGV}) +endmacro() + +macro(set_property) + meson_ps_inspect_vars() + 
_set_property(${ARGV}) +endmacro() + +function(set_source_files_properties) + set(FILES) + set(I 0) + set(PROPERTIES OFF) + + while(I LESS ARGC) + if(NOT PROPERTIES) + if("${ARGV${I}}" STREQUAL "PROPERTIES") + set(PROPERTIES ON) + else() + list(APPEND FILES "${ARGV${I}}") + endif() + + math(EXPR I "${I} + 1") + else() + set(ID_IDX ${I}) + math(EXPR PROP_IDX "${ID_IDX} + 1") + + set(ID "${ARGV${ID_IDX}}") + set(PROP "${ARGV${PROP_IDX}}") + + set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}") + math(EXPR I "${I} + 2") + endif() + endwhile() +endfunction() + +set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property) +meson_ps_reload_vars() +''' + + +#################### +# END Data section # +#################### + +class DataFile: + def __init__(self, path: Path, sha256sum: str, data: str) -> None: + self.path = path + self.sha256sum = sha256sum + self.data = data + + def write_once(self, path: Path) -> None: + if not path.exists(): + path.write_text(self.data) + + def write_to_private(self, env: 'Environment') -> Path: + out_file = Path(env.scratch_dir) / 'data' / self.path.name + out_file.parent.mkdir(exist_ok=True) + self.write_once(out_file) + return out_file + + +mesondata = { + 'dependencies/data/CMakeListsLLVM.txt': DataFile( + Path('dependencies/data/CMakeListsLLVM.txt'), + '412cec3315597041a978d018cdaca282dcd47693793540da88ae2f80d0cbd7cd', + file_0_data_CMakeListsLLVM_txt, + ), + 'dependencies/data/CMakePathInfo.txt': DataFile( + Path('dependencies/data/CMakePathInfo.txt'), + '90da8b443982d9c87139b7dc84228eb58cab4315764949637208f25e2bda7db2', + file_1_data_CMakePathInfo_txt, + ), + 'dependencies/data/CMakeLists.txt': DataFile( + Path('dependencies/data/CMakeLists.txt'), + '71a2d58381f912bbfb1c8709884d34d721f682edf2fca001e1f582f0bffd0da7', + file_2_data_CMakeLists_txt, + ), + 'cmake/data/preload.cmake': DataFile( + Path('cmake/data/preload.cmake'), + '064d047b18a5c919ad016b838bed50c5d40aebe9e53da0e70eff9d52a2c1ca1f', + file_3_data_preload_cmake, + ), +} diff --git a/tools/gen_data.py b/tools/gen_data.py new file mode 100755 index 0000000..2cc05a4 --- /dev/null +++ b/tools/gen_data.py @@ -0,0 +1,139 @@ +#!/usr/bin/env python3 + +# Copyright 2020 Daniel Mensinger + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys +import hashlib +import textwrap +import re +from pathlib import Path +from datetime import datetime +import typing as T + +class DataFile: + file_counter = 0 + + def __init__(self, path: Path, root: Path): + self.path = path + self.id = self.path.relative_to(root) + self.data_str = f'file_{DataFile.file_counter}_data_' + re.sub('[^a-zA-Z0-9]', '_', self.path.name) + DataFile.file_counter += 1 + + b = self.path.read_bytes() + self.data = b.decode() + self.sha256sum = hashlib.sha256(b).hexdigest() + + def __repr__(self) -> str: + return f'<{type(self).__name__}: [{self.sha256sum}] {self.id}>' + +def main() -> int: + root_dir = Path(__file__).resolve().parents[1] + mesonbuild_dir = root_dir / 'mesonbuild' + out_file = mesonbuild_dir / 'mesondata.py' + + data_dirs = mesonbuild_dir.glob('**/data') + + data_files: T.List[DataFile] = [] + + for d in data_dirs: + for p in d.iterdir(): + data_files += [DataFile(p, mesonbuild_dir)] + + print(f'Found {len(data_files)} data files') + + # Generate the data script + data = '' + + data += textwrap.dedent(f'''\ + # Copyright {datetime.today().year} The Meson development team + + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + + # http://www.apache.org/licenses/LICENSE-2.0 + + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + + + #### + #### WARNING: This is an automatically generated file! Do not edit! 
+ #### Generated by {Path(__file__).resolve().relative_to(root_dir)} + #### + + + from pathlib import Path + import typing as T + + if T.TYPE_CHECKING: + from .environment import Environment + + ###################### + # BEGIN Data section # + ###################### + + ''') + + for i in data_files: + data += f"{i.data_str} = '''\\\n{i.data}'''\n\n" + + data += textwrap.dedent(f''' + #################### + # END Data section # + #################### + + class DataFile: + def __init__(self, path: Path, sha256sum: str, data: str) -> None: + self.path = path + self.sha256sum = sha256sum + self.data = data + + def write_once(self, path: Path) -> None: + if not path.exists(): + path.write_text(self.data) + + def write_to_private(self, env: 'Environment') -> Path: + out_file = Path(env.scratch_dir) / 'data' / self.path.name + out_file.parent.mkdir(exist_ok=True) + self.write_once(out_file) + return out_file + + + mesondata = {{ + ''') + + for i in data_files: + data += textwrap.indent(textwrap.dedent(f"""\ + '{i.id}': DataFile( + Path('{i.id}'), + '{i.sha256sum}', + {i.data_str}, + ), + """), ' ') + + data += textwrap.dedent('''\ + } + ''') + + print(f'Updating {out_file}') + out_file.write_text(data) + return 0 + +if __name__ == '__main__': + sys.exit(main()) -- cgit v1.1 From 05ddd6543d4c4fc33b4c64f26291e73f49733f71 Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 16 Jul 2020 20:33:57 +0200 Subject: mdata: remove setuptools and use mesondata instead --- mesonbuild/cmake/interpreter.py | 5 ++--- mesonbuild/dependencies/base.py | 6 ++---- msi/createmsi.py | 4 ---- setup.py | 5 ----- 4 files changed, 4 insertions(+), 16 deletions(-) diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py index 0516947..f404109 100644 --- a/mesonbuild/cmake/interpreter.py +++ b/mesonbuild/cmake/interpreter.py @@ -15,8 +15,6 @@ # This class contains the basic functionality needed to run any interpreter # or an interpreter-based tool. -import pkg_resources - from .common import CMakeException, CMakeTarget, TargetOptions from .client import CMakeClient, RequestCMakeInputs, RequestConfigure, RequestCompute, RequestCodeModel from .fileapi import CMakeFileAPI @@ -25,6 +23,7 @@ from .traceparser import CMakeTraceParser, CMakeGeneratorTarget from .. import mlog, mesonlib from ..environment import Environment from ..mesonlib import MachineChoice, OrderedSet, version_compare +from ..mesondata import mesondata from ..compilers.compilers import lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header from enum import Enum from functools import lru_cache @@ -814,7 +813,7 @@ class CMakeInterpreter: raise CMakeException('Unable to find CMake') self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, permissive=True) - preload_file = pkg_resources.resource_filename('mesonbuild', 'cmake/data/preload.cmake') + preload_file = mesondata['cmake/data/preload.cmake'].write_to_private(self.env) # Prefere CMAKE_PROJECT_INCLUDE over CMAKE_TOOLCHAIN_FILE if possible, # since CMAKE_PROJECT_INCLUDE was actually designed for code injection. diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index 23701da..4c9c9fe 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -29,8 +29,6 @@ import typing as T from enum import Enum from pathlib import Path, PurePath -import pkg_resources - from .. import mlog from .. 
import mesonlib from ..compilers import clib_langs @@ -40,6 +38,7 @@ from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException from ..mesonlib import MachineChoice, MesonException, OrderedSet, PerMachine from ..mesonlib import Popen_safe, version_compare_many, version_compare, listify, stringlistify, extract_as_list, split_args from ..mesonlib import Version, LibType +from ..mesondata import mesondata if T.TYPE_CHECKING: from ..compilers.compilers import CompilerType # noqa: F401 @@ -1512,8 +1511,7 @@ class CMakeDependency(ExternalDependency): build_dir = self._get_build_dir() # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt - # Per the warning in pkg_resources, this is *not* a path and os.path and Pathlib are *not* safe to use here. - cmake_txt = pkg_resources.resource_string('mesonbuild', 'dependencies/data/' + cmake_file).decode() + cmake_txt = mesondata['dependencies/data/' + cmake_file].data # In general, some Fortran CMake find_package() also require C language enabled, # even if nothing from C is directly used. An easy Fortran example that fails diff --git a/msi/createmsi.py b/msi/createmsi.py index 67020a5..eca4493 100644 --- a/msi/createmsi.py +++ b/msi/createmsi.py @@ -151,8 +151,6 @@ class PackageGenerator: if os.path.exists(sdir): shutil.rmtree(sdir) main_stage, ninja_stage = self.staging_dirs - dep_data_dir = 'mesonbuild/dependencies/data' - cmake_data_dir = 'mesonbuild/cmake/data' modules = self.get_all_modules_from_dir('mesonbuild/modules') modules += self.get_all_modules_from_dir('mesonbuild/scripts') modules += self.get_more_modules() @@ -174,8 +172,6 @@ class PackageGenerator: pyinst_cmd += ['meson.py'] subprocess.check_call(pyinst_cmd) shutil.move(pyinstaller_tmpdir + '/meson', main_stage) - shutil.copytree(dep_data_dir, main_stage + '/mesonbuild/dependencies/data') - shutil.copytree(cmake_data_dir, main_stage + '/mesonbuild/cmake/data') if not os.path.exists(os.path.join(main_stage, 'meson.exe')): sys.exit('Meson exe missing from staging dir.') os.mkdir(ninja_stage) diff --git a/setup.py b/setup.py index 1f95be7..145f19c 100644 --- a/setup.py +++ b/setup.py @@ -37,10 +37,6 @@ packages = ['mesonbuild', 'mesonbuild.scripts', 'mesonbuild.templates', 'mesonbuild.wrap'] -package_data = { - 'mesonbuild.dependencies': ['data/CMakeLists.txt', 'data/CMakeListsLLVM.txt', 'data/CMakePathInfo.txt'], - 'mesonbuild.cmake': ['data/run_ctgt.py', 'data/preload.cmake'], -} data_files = [] if sys.platform != 'win32': # Only useful on UNIX-like systems @@ -51,6 +47,5 @@ if __name__ == '__main__': setup(name='meson', version=version, packages=packages, - package_data=package_data, entry_points=entries, data_files=data_files,) -- cgit v1.1 From 393d6e133d9abd584a2fc414971628e84ea48b7c Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Thu, 16 Jul 2020 20:34:15 +0200 Subject: mdata: Add test to ensure mesondata.py is up-to-date --- run_unittests.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/run_unittests.py b/run_unittests.py index 820b705..2c03a3e 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -1485,6 +1485,38 @@ class DataTests(unittest.TestCase): astint = AstInterpreter('.', '', '') self.assertEqual(set(interp.funcs.keys()), set(astint.funcs.keys())) + def test_mesondata_is_up_to_date(self): + from mesonbuild.mesondata import mesondata + err_msg = textwrap.dedent(''' + + ########################################################### + ### mesonbuild.mesondata is not up-to-date ### + ### Please 
regenerate it by running tools/gen_data.py ### + ########################################################### + + ''') + + root_dir = Path(__file__).resolve().parent + mesonbuild_dir = root_dir / 'mesonbuild' + + data_dirs = mesonbuild_dir.glob('**/data') + data_files = [] # type: T.List[T.Tuple(str, str)] + + for i in data_dirs: + for p in i.iterdir(): + data_files += [(p.relative_to(mesonbuild_dir).as_posix(), hashlib.sha256(p.read_bytes()).hexdigest())] + + from pprint import pprint + current_files = set(mesondata.keys()) + scanned_files = set([x[0] for x in data_files]) + + self.assertSetEqual(current_files, scanned_files, err_msg + 'Data files were added or removed\n') + errors = [] + for i in data_files: + if mesondata[i[0]].sha256sum != i[1]: + errors += [i[0]] + + self.assertListEqual(errors, [], err_msg + 'Files were changed') class BasePlatformTests(unittest.TestCase): prefix = '/usr' -- cgit v1.1 From 59910c437a81b94c72e3cbdfc2c3612fae576d6e Mon Sep 17 00:00:00 2001 From: Daniel Mensinger Date: Fri, 17 Jul 2020 18:17:47 +0200 Subject: Fix setuptools 49 test. (fixes #7452) --- run_meson_command_tests.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index 6ed3d8f..7bc6185 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -129,6 +129,10 @@ class CommandTests(unittest.TestCase): os.environ['PYTHONPATH'] = os.path.join(str(pylibdir), '') os.environ['PATH'] = str(bindir) + os.pathsep + os.environ['PATH'] self._run(python_command + ['setup.py', 'install', '--prefix', str(prefix)]) + # Fix importlib-metadata by appending all dirs in pylibdir + PYTHONPATHS = [pylibdir] + [x for x in pylibdir.iterdir()] + PYTHONPATHS = [os.path.join(str(x), '') for x in PYTHONPATHS] + os.environ['PYTHONPATH'] = os.pathsep.join(PYTHONPATHS) # Check that all the files were installed correctly self.assertTrue(bindir.is_dir()) self.assertTrue(pylibdir.is_dir()) -- cgit v1.1 From 43129a11edab304c98e94af4d894f5f872617fa5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Storsj=C3=B6?= Date: Fri, 17 Jul 2020 09:20:57 +0300 Subject: Fix a comment about cross compilation with clang-cl [skip ci] This comment was added in 0fbd09609f4, where the case of a forward slash was added for the case when cross compiling. --- mesonbuild/backend/ninjabackend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 065da00..8c66fd0 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -459,7 +459,7 @@ int dummy; # different locales have different messages with a different # number of colons. Match up to the the drive name 'd:\'. # When used in cross compilation, the path separator is a - # backslash rather than a forward slash so handle both. + # forward slash rather than a backslash so handle both. 
matchre = re.compile(rb"^(.*\s)([a-zA-Z]:\\|\/).*stdio.h$") def detect_prefix(out): -- cgit v1.1 From 063b74ebba51133d5a79b86c984165c6709ad9d8 Mon Sep 17 00:00:00 2001 From: Elliot Haisley <35050275+Apache-HB@users.noreply.github.com> Date: Sat, 18 Jul 2020 15:34:24 -0400 Subject: fix msvc not recognising b_ndebug fixes #7404 --- mesonbuild/compilers/cpp.py | 2 +- test cases/windows/17 msvc ndebug/main.cpp | 9 +++++++++ test cases/windows/17 msvc ndebug/meson.build | 7 +++++++ 3 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 test cases/windows/17 msvc ndebug/main.cpp create mode 100644 test cases/windows/17 msvc ndebug/meson.build diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index 478a68c..f5b0c05 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -577,7 +577,7 @@ class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixi is_cross: bool, info: 'MachineInfo', exe_wrap, target, **kwargs): CPPCompiler.__init__(self, exelist, version, for_machine, is_cross, info, exe_wrap, **kwargs) MSVCCompiler.__init__(self, target) - self.base_options = ['b_pch', 'b_vscrt'] # FIXME add lto, pgo and the like + self.base_options = ['b_pch', 'b_vscrt', 'b_ndebug'] # FIXME add lto, pgo and the like self.id = 'msvc' def get_options(self): diff --git a/test cases/windows/17 msvc ndebug/main.cpp b/test cases/windows/17 msvc ndebug/main.cpp new file mode 100644 index 0000000..d647d71 --- /dev/null +++ b/test cases/windows/17 msvc ndebug/main.cpp @@ -0,0 +1,9 @@ +int main() { +#ifdef NDEBUG + // NDEBUG is defined + return 0; +#else + // NDEBUG is not defined + return 1; +#endif +} \ No newline at end of file diff --git a/test cases/windows/17 msvc ndebug/meson.build b/test cases/windows/17 msvc ndebug/meson.build new file mode 100644 index 0000000..78eaf89 --- /dev/null +++ b/test cases/windows/17 msvc ndebug/meson.build @@ -0,0 +1,7 @@ +project('msvc_ndebug', 'cpp', + default_options : [ 'b_ndebug=true' ] +) + +exe = executable('exe', 'main.cpp') + +test('ndebug', exe) -- cgit v1.1 From 998c78fb0a404f34512414d93b08cf45166be357 Mon Sep 17 00:00:00 2001 From: James Hilliard Date: Thu, 16 Jul 2020 04:24:17 -0600 Subject: coredata: add missing EnvironmentException import Fixes: NameError: name 'EnvironmentException' is not defined Signed-off-by: James Hilliard --- mesonbuild/coredata.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 26f33f1..af43b31 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -19,8 +19,9 @@ from itertools import chain from pathlib import PurePath from collections import OrderedDict, defaultdict from .mesonlib import ( - MesonException, MachineChoice, PerMachine, OrderedSet, - default_libdir, default_libexecdir, default_prefix, split_args + MesonException, EnvironmentException, MachineChoice, PerMachine, + OrderedSet, default_libdir, default_libexecdir, default_prefix, + split_args ) from .envconfig import get_env_var_pair from .wrap import WrapMode -- cgit v1.1 From aa0d75deaee925b9ceb3d98ef8f5de0167587c71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Andr=C3=A9=20Lureau?= Date: Mon, 13 Jul 2020 12:52:39 +0400 Subject: Stabilize keyval module We have experimented with the module for about a year in a qemu branch (https://wiki.qemu.org/Features/Meson), and we would like to start moving the build system to meson. For that, keyval should have the stability guarantees. 
Cc: Paolo Bonzini --- docs/markdown/Keyval-module.md | 4 +- docs/markdown/snippets/keyval.md | 7 +++ mesonbuild/modules/keyval.py | 71 ++++++++++++++++++++++ mesonbuild/modules/unstable_keyval.py | 71 ---------------------- .../222 source set realistic example/meson.build | 2 +- test cases/keyval/1 basic/meson.build | 2 +- test cases/keyval/2 subdir/dir/meson.build | 2 +- .../keyval/3 load_config files/dir/meson.build | 2 +- .../keyval/4 load_config builddir/meson.build | 2 +- 9 files changed, 85 insertions(+), 78 deletions(-) create mode 100644 docs/markdown/snippets/keyval.md create mode 100644 mesonbuild/modules/keyval.py delete mode 100644 mesonbuild/modules/unstable_keyval.py diff --git a/docs/markdown/Keyval-module.md b/docs/markdown/Keyval-module.md index 643265e..afc48fa 100644 --- a/docs/markdown/Keyval-module.md +++ b/docs/markdown/Keyval-module.md @@ -1,5 +1,5 @@ --- -short-description: Unstable keyval module +short-description: Keyval module authors: - name: Mark Schulte, Paolo Bonzini years: [2017, 2019] @@ -23,7 +23,7 @@ chosen the configuration options), output a ".config" file. The module may be imported as follows: ``` meson -keyval = import('unstable-keyval') +keyval = import('keyval') ``` The following functions will then be available as methods on the object diff --git a/docs/markdown/snippets/keyval.md b/docs/markdown/snippets/keyval.md new file mode 100644 index 0000000..895de9b --- /dev/null +++ b/docs/markdown/snippets/keyval.md @@ -0,0 +1,7 @@ +## `unstable-keyval` is now stable `keyval` + +The `unstable-keyval` has been renamed to `keyval` and now promises stability +guarantees. + +Meson will print a warning when you load an `unstable-` module that has been +stabilised (so `unstable-keyval` is still accepted for example). diff --git a/mesonbuild/modules/keyval.py b/mesonbuild/modules/keyval.py new file mode 100644 index 0000000..3da2992 --- /dev/null +++ b/mesonbuild/modules/keyval.py @@ -0,0 +1,71 @@ +# Copyright 2017, 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import ExtensionModule + +from .. 
import mesonlib +from ..mesonlib import typeslistify +from ..interpreterbase import FeatureNew, noKwargs +from ..interpreter import InvalidCode + +import os + +class KeyvalModule(ExtensionModule): + + @FeatureNew('Keyval Module', '0.55.0') + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.snippets.add('load') + + def _load_file(self, path_to_config): + result = dict() + try: + with open(path_to_config) as f: + for line in f: + if '#' in line: + comment_idx = line.index('#') + line = line[:comment_idx] + line = line.strip() + try: + name, val = line.split('=', 1) + except ValueError: + continue + result[name.strip()] = val.strip() + except IOError as e: + raise mesonlib.MesonException('Failed to load {}: {}'.format(path_to_config, e)) + + return result + + @noKwargs + def load(self, interpreter, state, args, kwargs): + sources = typeslistify(args, (str, mesonlib.File)) + if len(sources) != 1: + raise InvalidCode('load takes only one file input.') + + s = sources[0] + is_built = False + if isinstance(s, mesonlib.File): + is_built = is_built or s.is_built + s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir) + else: + s = os.path.join(interpreter.environment.source_dir, s) + + if s not in interpreter.build_def_files and not is_built: + interpreter.build_def_files.append(s) + + return self._load_file(s) + + +def initialize(*args, **kwargs): + return KeyvalModule(*args, **kwargs) diff --git a/mesonbuild/modules/unstable_keyval.py b/mesonbuild/modules/unstable_keyval.py deleted file mode 100644 index 3da2992..0000000 --- a/mesonbuild/modules/unstable_keyval.py +++ /dev/null @@ -1,71 +0,0 @@ -# Copyright 2017, 2019 The Meson development team - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# http://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from . import ExtensionModule - -from .. 
import mesonlib -from ..mesonlib import typeslistify -from ..interpreterbase import FeatureNew, noKwargs -from ..interpreter import InvalidCode - -import os - -class KeyvalModule(ExtensionModule): - - @FeatureNew('Keyval Module', '0.55.0') - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.snippets.add('load') - - def _load_file(self, path_to_config): - result = dict() - try: - with open(path_to_config) as f: - for line in f: - if '#' in line: - comment_idx = line.index('#') - line = line[:comment_idx] - line = line.strip() - try: - name, val = line.split('=', 1) - except ValueError: - continue - result[name.strip()] = val.strip() - except IOError as e: - raise mesonlib.MesonException('Failed to load {}: {}'.format(path_to_config, e)) - - return result - - @noKwargs - def load(self, interpreter, state, args, kwargs): - sources = typeslistify(args, (str, mesonlib.File)) - if len(sources) != 1: - raise InvalidCode('load takes only one file input.') - - s = sources[0] - is_built = False - if isinstance(s, mesonlib.File): - is_built = is_built or s.is_built - s = s.absolute_path(interpreter.environment.source_dir, interpreter.environment.build_dir) - else: - s = os.path.join(interpreter.environment.source_dir, s) - - if s not in interpreter.build_def_files and not is_built: - interpreter.build_def_files.append(s) - - return self._load_file(s) - - -def initialize(*args, **kwargs): - return KeyvalModule(*args, **kwargs) diff --git a/test cases/common/222 source set realistic example/meson.build b/test cases/common/222 source set realistic example/meson.build index 106b81d..d986b99 100644 --- a/test cases/common/222 source set realistic example/meson.build +++ b/test cases/common/222 source set realistic example/meson.build @@ -9,7 +9,7 @@ if cppid == 'pgi' endif ss = import('sourceset') -keyval = import('unstable-keyval') +keyval = import('keyval') zlib = declare_dependency(compile_args: '-DZLIB=1') another = declare_dependency(compile_args: '-DANOTHER=1') diff --git a/test cases/keyval/1 basic/meson.build b/test cases/keyval/1 basic/meson.build index fc7ddb3..c3e4466 100644 --- a/test cases/keyval/1 basic/meson.build +++ b/test cases/keyval/1 basic/meson.build @@ -1,6 +1,6 @@ project('keyval basic test') -k = import('unstable-keyval') +k = import('keyval') conf = k.load('.config') if not conf.has_key('CONFIG_VAL1') diff --git a/test cases/keyval/2 subdir/dir/meson.build b/test cases/keyval/2 subdir/dir/meson.build index dc1b478..291ad93 100644 --- a/test cases/keyval/2 subdir/dir/meson.build +++ b/test cases/keyval/2 subdir/dir/meson.build @@ -1,5 +1,5 @@ -k = import('unstable-keyval') +k = import('keyval') conf = k.load(meson.source_root() / '.config') diff --git a/test cases/keyval/3 load_config files/dir/meson.build b/test cases/keyval/3 load_config files/dir/meson.build index 43fba13..adc5289 100644 --- a/test cases/keyval/3 load_config files/dir/meson.build +++ b/test cases/keyval/3 load_config files/dir/meson.build @@ -1,5 +1,5 @@ -k = import('unstable-keyval') +k = import('keyval') conf = k.load(files('config')) diff --git a/test cases/keyval/4 load_config builddir/meson.build b/test cases/keyval/4 load_config builddir/meson.build index 1bb0285..6bd83db 100644 --- a/test cases/keyval/4 load_config builddir/meson.build +++ b/test cases/keyval/4 load_config builddir/meson.build @@ -1,6 +1,6 @@ project('keyval builddir test') -k = import('unstable-keyval') +k = import('keyval') out_conf = configure_file(input: 'config', output: 'out-config', copy: true) conf = 
k.load(out_conf) -- cgit v1.1 From 1c945511eb50134974192a34e06bac7f3ce7a74b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Marc-Andr=C3=A9=20Lureau?= Date: Wed, 15 Jul 2020 10:45:58 +0400 Subject: Print a warning when importing a stabilized module --- mesonbuild/interpreter.py | 24 +++++++++++++++++------- test cases/keyval/1 basic/meson.build | 2 ++ test cases/keyval/1 basic/test.json | 7 +++++++ 3 files changed, 26 insertions(+), 7 deletions(-) create mode 100644 test cases/keyval/1 basic/test.json diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 11dac38..24c2cc8 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2559,6 +2559,15 @@ class Interpreter(InterpreterBase): except InvalidArguments: pass + def import_module(self, modname): + if modname in self.modules: + return + try: + module = importlib.import_module('mesonbuild.modules.' + modname) + except ImportError: + raise InvalidArguments('Module "%s" does not exist' % (modname, )) + self.modules[modname] = module.initialize(self) + @stringArgs @noKwargs def func_import(self, node, args, kwargs): @@ -2567,14 +2576,15 @@ class Interpreter(InterpreterBase): modname = args[0] if modname.startswith('unstable-'): plainname = modname.split('-', 1)[1] - mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node) - modname = 'unstable_' + plainname - if modname not in self.modules: try: - module = importlib.import_module('mesonbuild.modules.' + modname) - except ImportError: - raise InvalidArguments('Module "%s" does not exist' % (modname, )) - self.modules[modname] = module.initialize(self) + # check if stable module exists + self.import_module(plainname) + mlog.warning('Module %s is now stable, please use the %s module instead.' % (modname, plainname)) + modname = plainname + except InvalidArguments: + mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node) + modname = 'unstable_' + plainname + self.import_module(modname) return ModuleHolder(modname, self.modules[modname], self) @stringArgs diff --git a/test cases/keyval/1 basic/meson.build b/test cases/keyval/1 basic/meson.build index c3e4466..4207b8e 100644 --- a/test cases/keyval/1 basic/meson.build +++ b/test cases/keyval/1 basic/meson.build @@ -14,3 +14,5 @@ endif if conf.get('CONFIG_VAL_VAL').to_int() != 4 error('Expected CONFIG_VAL_VAL to be 4') endif + +k = import('unstable-keyval') diff --git a/test cases/keyval/1 basic/test.json b/test cases/keyval/1 basic/test.json new file mode 100644 index 0000000..dbdc5af --- /dev/null +++ b/test cases/keyval/1 basic/test.json @@ -0,0 +1,7 @@ +{ + "stdout": [ + { + "line": "WARNING: Module unstable-keyval is now stable, please use the keyval module instead." 
+ } + ] +} -- cgit v1.1 From 4974fa39ac68b5bec8c61e630ae7cf268a086b87 Mon Sep 17 00:00:00 2001 From: wb9688 <46277131+wb9688@users.noreply.github.com> Date: Sun, 19 Jul 2020 21:41:36 +0200 Subject: Fix dirs typo in Vala docs [skip ci] Co-authored-by: wb9688 --- docs/markdown/Vala.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Vala.md b/docs/markdown/Vala.md index cbb58a9..0a29847 100644 --- a/docs/markdown/Vala.md +++ b/docs/markdown/Vala.md @@ -237,7 +237,7 @@ dependencies = [ dependency('glib-2.0'), dependency('gobject-2.0'), meson.get_compiler('c').find_library('foo'), - meson.get_compiler('vala').find_library('foo', dir: vapi_dir), + meson.get_compiler('vala').find_library('foo', dirs: vapi_dir), ] sources = files('app.vala') -- cgit v1.1 From 804a71e8f2b7c1011c91bd016df435fc952677a0 Mon Sep 17 00:00:00 2001 From: Jussi Pakkanen Date: Sun, 19 Jul 2020 18:44:18 +0300 Subject: Do not add rpaths for static libs. Closes #5191. --- mesonbuild/build.py | 3 ++- run_unittests.py | 7 +++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index d7f3b66..a06979c 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -819,7 +819,8 @@ class BuildTarget(Target): def get_link_dep_subdirs(self): result = OrderedSet() for i in self.link_targets: - result.add(i.get_subdir()) + if not isinstance(i, StaticLibrary): + result.add(i.get_subdir()) result.update(i.get_link_dep_subdirs()) return result diff --git a/run_unittests.py b/run_unittests.py index 820b705..aba77d5 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -7030,6 +7030,13 @@ c = ['{0}'] windows_proof_rmtree(os.path.join(testdir, 'subprojects', 'foo')) os.unlink(wrap_filename) + def test_no_rpath_for_static(self): + testdir = os.path.join(self.common_test_dir, '5 linkstatic') + self.init(testdir) + self.build() + build_rpath = get_rpath(os.path.join(self.builddir, 'prog')) + self.assertIsNone(build_rpath) + class BaseLinuxCrossTests(BasePlatformTests): # Don't pass --libdir when cross-compiling. We have tests that -- cgit v1.1 From 8f106a2b9a7824075e55d3f044f2c0c5dd3ee700 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?St=C3=A9phane=20Cerveau?= Date: Thu, 9 Jul 2020 12:34:34 +0200 Subject: string: add substring method This method aims to offer a simple way to 'substring' an existing string with start and end values. --- docs/markdown/Reference-manual.md | 3 +++ docs/markdown/Syntax.md | 20 ++++++++++++++++++++ mesonbuild/interpreterbase.py | 14 ++++++++++++++ test cases/common/38 string operations/meson.build | 15 +++++++++++++++ 4 files changed, 52 insertions(+) diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 966d408..080fe3e 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -1977,6 +1977,9 @@ are immutable, all operations return their results as a new string. - `startswith(string)`: returns true if string starts with the string specified as the argument +- `substring(start,end)` *(since 0.56.0)*: returns a substring specified from start to end. + Both `start` and `end` arguments are optional, so, for example, `'foobar'.substring()` will return `'foobar'`. + - `strip()`: removes whitespace at the beginning and end of the string. *(since 0.43.0)* Optionally can take one positional string argument, and all characters in that string will be stripped. 
diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md index 7cb39e9..bbe3dbb 100644 --- a/docs/markdown/Syntax.md +++ b/docs/markdown/Syntax.md @@ -220,6 +220,26 @@ is_x86 = target.startswith('x86') # boolean value 'true' is_bsd = target.to_lower().endswith('bsd') # boolean value 'true' ``` +#### .substring() + +Since 0.56.0, you can extract a substring from a string. + +```meson +# Similar to the Python str[start:end] syntax +target = 'x86_FreeBSD' +platform = target.substring(0, 3) # prefix string value 'x86' +system = target.substring(4) # suffix string value 'FreeBSD' +``` + +The method accepts negative values where negative `start` is relative to the end of +string `len(string) - start` as well as negative `end`. + +```meson +string = 'foobar' +target.substring(-5, -3) # => 'oo' +target.substring(1, -1) # => 'ooba' +``` + #### .split(), .join() ```meson diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 634f4f2..9f35601 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -1026,6 +1026,20 @@ The result of this is undefined and will become a hard error in a future Meson r if not isinstance(cmpr, str): raise InterpreterException('Version_compare() argument must be a string.') return mesonlib.version_compare(obj, cmpr) + elif method_name == 'substring': + if len(posargs) > 2: + raise InterpreterException('substring() takes maximum two arguments.') + start = 0 + end = len(obj) + if len (posargs) > 0: + if not isinstance(posargs[0], int): + raise InterpreterException('substring() argument must be an int') + start = posargs[0] + if len (posargs) > 1: + if not isinstance(posargs[1], int): + raise InterpreterException('substring() argument must be an int') + end = posargs[1] + return obj[start:end] raise InterpreterException('Unknown method "%s" for a string.' 
% method_name) def format_string(self, templ: str, args: T.List[TYPE_nvar]) -> str: diff --git a/test cases/common/38 string operations/meson.build b/test cases/common/38 string operations/meson.build index 6596142..8a06a82 100644 --- a/test cases/common/38 string operations/meson.build +++ b/test cases/common/38 string operations/meson.build @@ -101,3 +101,18 @@ assert('\\\\n' == bs_bs_n, 'Four backslash broken before n') assert('\\\\\n' == bs_bs_nl, 'Five backslash broken before n') assert('\\\\' == bs_bs, 'Double-backslash broken') assert('\\' == bs, 'Backslash broken') + +mysubstring='foobarbaz' +assert(mysubstring.substring() == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(0) == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(1) == 'oobarbaz', 'substring is broken') +assert(mysubstring.substring(-5) == 'arbaz', 'substring is broken') +assert(mysubstring.substring(1, 4) == 'oob', 'substring is broken') +assert(mysubstring.substring(1,-5) == 'oob', 'substring is broken') +assert(mysubstring.substring(1, 0) == '', 'substring is broken') +assert(mysubstring.substring(0, 100) == 'foobarbaz', 'substring is broken') +assert(mysubstring.substring(-1, -5) == '', 'substring is broken') +assert(mysubstring.substring(10, -25) == '', 'substring is broken') +assert(mysubstring.substring(-4, 2) == '', 'substring is broken') +assert(mysubstring.substring(10, 9) == '', 'substring is broken') +assert(mysubstring.substring(8, 10) == 'z', 'substring is broken') -- cgit v1.1 From 7f1e9b7492862d5a99064abc6d1a4ce903ba6fd9 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Thu, 9 Jul 2020 09:26:39 -0400 Subject: summary: Wrap lines when printing lists When a list_sep is provided (e.g. ', ') all items are printed on the same line, which gets ugly on very long lists (e.g. list of plugins enabled). 
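The fix below uses a greedy line fill: keep appending items until the next one would overflow the terminal width, then continue on a new line indented to the value column. A rough standalone sketch of that strategy (illustrative names, not the actual `Summary` code):

```python
import shutil

def wrap_list(items, indent, list_sep=', '):
    # Greedy fill: start a new output line whenever adding the next item
    # (plus its separator) would exceed the terminal width.
    max_len = shutil.get_terminal_size().columns
    lines, line, line_len = [], [], indent
    for item in items:
        item_len = len(item) + len(list_sep)
        if line and line_len + item_len > max_len:
            lines.append(list_sep.join(line) + list_sep.rstrip())
            line, line_len = [], indent
        line.append(item)
        line_len += item_len
    lines.append(list_sep.join(line))
    return ('\n' + ' ' * indent).join(lines)

print(wrap_list(['alpha', 'alphacolor', 'apetag', 'audiofx', 'audioparsers'], indent=23))
```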
--- mesonbuild/interpreter.py | 34 +++++++++++++++++++++++++++++----- run_unittests.py | 4 ++++ test cases/unit/73 summary/meson.build | 1 + 3 files changed, 34 insertions(+), 5 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 24c2cc8..df81569 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1844,10 +1844,18 @@ class Summary: if bool_yn and isinstance(i, bool): formatted_values.append(mlog.green('YES') if i else mlog.red('NO')) else: - formatted_values.append(i) + formatted_values.append(str(i)) self.sections[section][k] = (formatted_values, list_sep) self.max_key_len = max(self.max_key_len, len(k)) + def text_len(self, v): + if isinstance(v, str): + return len(v) + elif isinstance(v, mlog.AnsiDecorator): + return len(v.text) + else: + raise RuntimeError('Expecting only strings or AnsiDecorator') + def dump(self): mlog.log(self.project_name, mlog.normal_cyan(self.project_version)) for section, values in self.sections.items(): @@ -1859,12 +1867,28 @@ class Summary: indent = self.max_key_len - len(k) + 3 end = ' ' if v else '' mlog.log(' ' * indent, k + ':', end=end) - if list_sep is None: - indent = self.max_key_len + 6 - list_sep = '\n' + ' ' * indent - mlog.log(*v, sep=list_sep) + indent = self.max_key_len + 6 + self.dump_value(v, list_sep, indent) mlog.log('') # newline + def dump_value(self, arr, list_sep, indent): + lines_sep = '\n' + ' ' * indent + if list_sep is None: + mlog.log(*arr, sep=lines_sep) + return + max_len = shutil.get_terminal_size().columns + line = [] + line_len = indent + lines_sep = list_sep.rstrip() + lines_sep + for v in arr: + v_len = self.text_len(v) + len(list_sep) + if line and line_len + v_len > max_len: + mlog.log(*line, sep=list_sep, end=lines_sep) + line_len = indent + line = [] + line.append(v) + line_len += v_len + mlog.log(*line, sep=list_sep) class MesonMain(InterpreterObject): def __init__(self, build, interpreter): diff --git a/run_unittests.py b/run_unittests.py index aba77d5..ff6bb9c 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -4635,6 +4635,10 @@ recommended as it is not supported on some platforms''') no: NO coma list: a, b, c + Plugins + long coma list: alpha, alphacolor, apetag, audiofx, audioparsers, auparse, + autodetect, avi + Subprojects sub: YES sub2: NO Problem encountered: This subproject failed diff --git a/test cases/unit/73 summary/meson.build b/test cases/unit/73 summary/meson.build index df4540d..1bc05ca 100644 --- a/test cases/unit/73 summary/meson.build +++ b/test cases/unit/73 summary/meson.build @@ -13,3 +13,4 @@ summary('A number', 1, section: 'Configuration') summary('yes', true, bool_yn : true, section: 'Configuration') summary('no', false, bool_yn : true, section: 'Configuration') summary('coma list', ['a', 'b', 'c'], list_sep: ', ', section: 'Configuration') +summary('long coma list', ['alpha', 'alphacolor', 'apetag', 'audiofx', 'audioparsers', 'auparse', 'autodetect', 'avi'], list_sep: ', ', section: 'Plugins') -- cgit v1.1 From a7a6a4833f5eb2d6c2270945ec762fdae0bb0786 Mon Sep 17 00:00:00 2001 From: cmcneish <46431947+cmcneish@users.noreply.github.com> Date: Tue, 21 Jul 2020 07:40:29 -0700 Subject: Add boost_root support to properties files (#7210) * Add boost_root support to properties files This commit implements `boost_root`, `boost_includedir`, and `boost_librarydir` variable support to native and cross properties files. The search order is currently environment variables, then these variables, and finally a platform-dependent search. 
* Add preliminary boost_root / boost_includedir tests Each test contains a fake "version.hpp", as that's how boost detection is currently being done. We look for this file relative to the root directory, which probably shouldn't be allowed (it previously was for BOOST_LIBRARYDIR but not for BOOST_ROOT). It also cannot help with breakage detection in libraries, however it looks like this wasn't getting tested beforehand. I've given the two unique version numbers that shouldn't be present in any stock version of boost (001 and 002). * Add return type to detect_split_root * Return empty list when nothing found in BOOST_ROOT, rather than None * Update boost_root tests * Create nativefile.ini based on location of run_project_tests.py * Add fake libraries to ensure boost_librarydir is being used * Require all search paths for boost to be absolute * Redo boost search ordering To better match things like pkg-config, we now look through native/cross files, then environment variables, then system locations for boost installations. Path detection does not fall back from one method to the next for properties or environment variables--if boost_root, boost_librarydir, or boost_includedir is specified, they must be sufficient to find boost. Likewise for BOOST_ROOT and friends. pkg-config detection is still optional falling back to system-wide detection, for Conan. (Also, fix a typo in test 33's nativefile) * Correct return type for detect_roots * Correct boost dependency search order in documentation * Print debug information for boost library finding, to resolve CI issues * Handle native/cross file templates in a more consistent way All tests can now create a `nativefile.ini.in` if they need to use some parameter that the testing framework knows about but they can't. * Pass str--rather than PosixPath--to os.path.exists, for Python35 * Look for boost minor versions, rather than boost patch versions in test cases * Drop fake dylib versions of boost_regex * Prefer get_env_var to use of os.environ * Correct error reporting for relative BOOST_ROOT paths * Bump version this appears in. Also, change "properties file" to "machine file" as that appears to be the more common language. 
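The precedence the commit settles on is: machine-file properties first, then the `BOOST_*` environment variables, then the platform-dependent system search, where either of the first two disables the system fallback. A small sketch of that decision follows; the dictionaries merely stand in for the parsed machine file and the process environment:

```python
def boost_search_source(props, env):
    # Machine-file properties win over environment variables,
    # which win over the platform-dependent system search.
    if any(props.get(k) for k in ('boost_includedir', 'boost_librarydir', 'boost_root')):
        return 'machine file'
    if any(env.get(k) for k in ('BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR', 'BOOST_ROOT', 'BOOSTROOT')):
        return 'environment'
    return 'system'

assert boost_search_source({'boost_root': '/opt/boost'}, {'BOOST_ROOT': '/usr'}) == 'machine file'
assert boost_search_source({}, {'BOOST_ROOT': '/opt/boost'}) == 'environment'
assert boost_search_source({}, {}) == 'system'
```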
--- docs/markdown/Dependencies.md | 8 +- mesonbuild/dependencies/boost.py | 148 +++++++++++++++------ run_project_tests.py | 28 +++- .../32 boost root/boost/include/boost/version.hpp | 3 + .../boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib | 0 .../boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib | 0 .../boost/lib/libboost_regex.so.0.1.0 | 0 test cases/frameworks/32 boost root/meson.build | 6 + .../frameworks/32 boost root/nativefile.ini.in | 2 + .../boost/extra-dir/include/boost/version.hpp | 3 + .../boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib | 0 .../boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib | 0 .../boost/lib/libboost_regex.so.0.2.0 | 0 .../frameworks/33 boost split root/meson.build | 6 + .../33 boost split root/nativefile.ini.in | 3 + 15 files changed, 163 insertions(+), 44 deletions(-) create mode 100644 test cases/frameworks/32 boost root/boost/include/boost/version.hpp create mode 100644 test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib create mode 100644 test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib create mode 100644 test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0 create mode 100644 test cases/frameworks/32 boost root/meson.build create mode 100644 test cases/frameworks/32 boost root/nativefile.ini.in create mode 100644 test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp create mode 100644 test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib create mode 100644 test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib create mode 100644 test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0 create mode 100644 test cases/frameworks/33 boost split root/meson.build create mode 100644 test cases/frameworks/33 boost split root/nativefile.ini.in diff --git a/docs/markdown/Dependencies.md b/docs/markdown/Dependencies.md index a8f6d8a..b89a0aa 100644 --- a/docs/markdown/Dependencies.md +++ b/docs/markdown/Dependencies.md @@ -288,8 +288,12 @@ You can call `dependency` multiple times with different modules and use those to link against your targets. If your boost headers or libraries are in non-standard locations you -can set the BOOST_ROOT, BOOST_INCLUDEDIR, and/or BOOST_LIBRARYDIR -environment variables. +can set the `BOOST_ROOT`, or the `BOOST_INCLUDEDIR` and `BOOST_LIBRARYDIR` +environment variables. *(added in 0.56.0)* You can also set these +parameters as `boost_root`, `boost_include`, and `boost_librarydir` in your +native or cross machine file. Note that machine file variables are +preferred to environment variables, and that specifying any of these +disables system-wide search for boost. You can set the argument `threading` to `single` to use boost libraries that have been compiled for single-threaded use instead. diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 907c0c2..3ad534e 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -20,6 +20,7 @@ from pathlib import Path from .. import mlog from .. 
import mesonlib +from ..envconfig import get_env_var from ..environment import Environment from .base import DependencyException, ExternalDependency, PkgConfigDependency @@ -163,8 +164,8 @@ class BoostLibraryFile(): if not tags: return - # Without any tags mt is assumed, however, an absents of mt in the name - # with tags present indicates that the lib was build without mt support + # Without any tags mt is assumed, however, an absence of mt in the name + # with tags present indicates that the lib was built without mt support self.mt = False for i in tags: if i == 'mt': @@ -367,36 +368,27 @@ class BoostDependency(ExternalDependency): self.arch = environment.machines[self.for_machine].cpu_family self.arch = boost_arch_map.get(self.arch, None) - # Prefere BOOST_INCLUDEDIR and BOOST_LIBRARYDIR if preset - boost_manual_env = [x in os.environ for x in ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR']] - if all(boost_manual_env): - inc_dir = Path(os.environ['BOOST_INCLUDEDIR']) - lib_dir = Path(os.environ['BOOST_LIBRARYDIR']) - mlog.debug('Trying to find boost with:') - mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir)) - mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir)) - - boost_inc_dir = None - for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']: - if j.is_file(): - boost_inc_dir = self._include_dir_from_version_header(j) - break - if not boost_inc_dir: - self.is_found = False - return + # First, look for paths specified in a machine file + props = self.env.properties[self.for_machine] + boost_property_env = [props.get('boost_includedir'), props.get('boost_librarydir'), props.get('boost_root')] + if any(boost_property_env): + self.detect_boost_machine_file(props) + return - self.is_found = self.run_check([boost_inc_dir], [lib_dir]) + # Next, look for paths in the environment + boost_manual_env_list = ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR', 'BOOST_ROOT', 'BOOSTROOT'] + boost_manual_env = [get_env_var(self.for_machine, self.env.is_cross_build, x) for x in boost_manual_env_list] + if any(boost_manual_env): + self.detect_boost_env() return - elif any(boost_manual_env): - mlog.warning('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.') - # A) Detect potential boost root directories (uses also BOOST_ROOT env var) - roots = self.detect_roots() - roots = list(mesonlib.OrderedSet(roots)) + # Finally, look for paths from .pc files and from searching the filesystem + self.detect_roots() - # B) Foreach candidate + def check_and_set_roots(self, roots) -> None: + roots = list(mesonlib.OrderedSet(roots)) for j in roots: - # 1. Look for the boost headers (boost/version.pp) + # 1. 
Look for the boost headers (boost/version.hpp) mlog.debug('Checking potential boost root {}'.format(j.as_posix())) inc_dirs = self.detect_inc_dirs(j) inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions @@ -411,6 +403,74 @@ class BoostDependency(ExternalDependency): self.boost_root = j break + def detect_boost_machine_file(self, props) -> None: + incdir = props.get('boost_includedir') + libdir = props.get('boost_librarydir') + + if incdir and libdir: + inc_dir = Path(props['boost_includedir']) + lib_dir = Path(props['boost_librarydir']) + + if not inc_dir.is_absolute() or not lib_dir.is_absolute(): + raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute') + + mlog.debug('Trying to find boost with:') + mlog.debug(' - boost_includedir = {}'.format(inc_dir)) + mlog.debug(' - boost_librarydir = {}'.format(lib_dir)) + + return self.detect_split_root(inc_dir, lib_dir) + + elif incdir or libdir: + raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)') + + rootdir = props.get('boost_root') + # It shouldn't be possible to get here without something in boost_root + assert(rootdir) + + raw_paths = mesonlib.stringlistify(rootdir) + paths = [Path(x) for x in raw_paths] + if paths and any([not x.is_absolute() for x in paths]): + raise DependencyException('boost_root path given in machine file must be absolute') + + self.check_and_set_roots(paths) + + def detect_boost_env(self): + boost_includedir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_INCLUDEDIR') + boost_librarydir = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_LIBRARYDIR') + + boost_manual_env = [boost_includedir, boost_librarydir] + if all(boost_manual_env): + inc_dir = Path(boost_includedir) + lib_dir = Path(boost_librarydir) + + if not inc_dir.is_absolute() or not lib_dir.is_absolute(): + raise DependencyException('Paths given in BOOST_INCLUDEDIR and BOOST_LIBRARYDIR must be absolute') + + mlog.debug('Trying to find boost with:') + mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir)) + mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir)) + + return self.detect_split_root(inc_dir, lib_dir) + + elif any(boost_manual_env): + raise DependencyException('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). 
Ignoring.') + + boost_root = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOST_ROOT') + boostroot = get_env_var(self.for_machine, self.env.is_cross_build, 'BOOSTROOT') + + # It shouldn't be possible to get here without something in BOOST_ROOT or BOOSTROOT + assert(boost_root or boostroot) + + for path, name in [(boost_root, 'BOOST_ROOT'), (boostroot, 'BOOSTROOT')]: + if path: + raw_paths = path.split(os.pathsep) + paths = [Path(x) for x in raw_paths] + if paths and any([not x.is_absolute() for x in paths]): + raise DependencyException('Paths in {} must be absolute'.format(name)) + break + + self.check_and_set_roots(paths) + def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool: mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs])) mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs])) @@ -560,6 +620,12 @@ class BoostDependency(ExternalDependency): except (KeyError, IndexError, AttributeError): pass + # mlog.debug(' - static: {}'.format(self.static)) + # mlog.debug(' - not explicit static: {}'.format(not self.explicit_static)) + # mlog.debug(' - mt: {}'.format(self.multithreading)) + # mlog.debug(' - version: {}'.format(lib_vers)) + # mlog.debug(' - arch: {}'.format(self.arch)) + # mlog.debug(' - vscrt: {}'.format(vscrt)) libs = [x for x in libs if x.static == self.static or not self.explicit_static] libs = [x for x in libs if x.mt == self.multithreading] libs = [x for x in libs if x.version_matches(lib_vers)] @@ -592,20 +658,22 @@ class BoostDependency(ExternalDependency): libs += [BoostLibraryFile(i)] return [x for x in libs if x.is_boost()] # Filter out no boost libraries - def detect_roots(self) -> T.List[Path]: - roots = [] # type: T.List[Path] + def detect_split_root(self, inc_dir, lib_dir) -> None: + boost_inc_dir = None + for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']: + if j.is_file(): + boost_inc_dir = self._include_dir_from_version_header(j) + break + if not boost_inc_dir: + self.is_found = False + return - # Add roots from the environment - for i in ['BOOST_ROOT', 'BOOSTROOT']: - if i in os.environ: - raw_paths = os.environ[i].split(os.pathsep) - paths = [Path(x) for x in raw_paths] - if paths and any([not x.is_absolute() for x in paths]): - raise DependencyException('Paths in {} must be absolute'.format(i)) - roots += paths - return roots # Do not add system paths if BOOST_ROOT is present + self.is_found = self.run_check([boost_inc_dir], [lib_dir]) + + def detect_roots(self) -> None: + roots = [] # type: T.List[Path] - # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarely + # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily # allows BoostDependency to find boost from Conan. 
See #5438 try: boost_pc = PkgConfigDependency('boost', self.env, {'required': False}) @@ -660,7 +728,7 @@ class BoostDependency(ExternalDependency): tmp = [x.resolve() for x in tmp] roots += tmp - return roots + self.check_and_set_roots(roots) def log_details(self) -> str: res = '' diff --git a/run_project_tests.py b/run_project_tests.py index 793c844..0f0bc4e 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -41,6 +41,7 @@ from mesonbuild import compilers from mesonbuild import mesonlib from mesonbuild import mlog from mesonbuild import mtest +from mesonbuild.build import ConfigurationData from mesonbuild.mesonlib import MachineChoice, Popen_safe from mesonbuild.coredata import backendlist, version as meson_version @@ -475,6 +476,28 @@ def create_deterministic_builddir(test: TestDef, use_tmpdir: bool) -> str: os.mkdir(abs_pathname) return abs_pathname +def format_parameter_file(file_basename: str, test: TestDef, test_build_dir: str) -> Path: + confdata = ConfigurationData() + confdata.values = {'MESON_TEST_ROOT': (str(test.path.absolute()), 'base directory of current test')} + + template = test.path / (file_basename + '.in') + destination = Path(test_build_dir) / file_basename + mesonlib.do_conf_file(str(template), str(destination), confdata, 'meson') + + return destination + +def detect_parameter_files(test: TestDef, test_build_dir: str) -> (Path, Path): + nativefile = test.path / 'nativefile.ini' + crossfile = test.path / 'crossfile.ini' + + if os.path.exists(str(test.path / 'nativefile.ini.in')): + nativefile = format_parameter_file('nativefile.ini', test, test_build_dir) + + if os.path.exists(str(test.path / 'crossfile.ini.in')): + crossfile = format_parameter_file('crossfile.ini', test, test_build_dir) + + return nativefile, crossfile + def run_test(test: TestDef, extra_args, compiler, backend, flags, commands, should_fail, use_tmp: bool): if test.skip: return None @@ -497,8 +520,9 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, extra_args, if 'libdir' not in test.do_not_set_opts: gen_args += ['--libdir', 'lib'] gen_args += [test.path.as_posix(), test_build_dir] + flags + extra_args - nativefile = test.path / 'nativefile.ini' - crossfile = test.path / 'crossfile.ini' + + nativefile, crossfile = detect_parameter_files(test, test_build_dir) + if nativefile.exists(): gen_args.extend(['--native-file', nativefile.as_posix()]) if crossfile.exists(): diff --git a/test cases/frameworks/32 boost root/boost/include/boost/version.hpp b/test cases/frameworks/32 boost root/boost/include/boost/version.hpp new file mode 100644 index 0000000..65e4fab --- /dev/null +++ b/test cases/frameworks/32 boost root/boost/include/boost/version.hpp @@ -0,0 +1,3 @@ +#define BOOST_VERSION 100 + +#error This is not a real version of boost diff --git a/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x32-0_1.lib new file mode 100644 index 0000000..e69de29 diff --git a/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib b/test cases/frameworks/32 boost root/boost/lib/boost_regex-vc142-mt-gd-x64-0_1.lib new file mode 100644 index 0000000..e69de29 diff --git a/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0 b/test cases/frameworks/32 boost root/boost/lib/libboost_regex.so.0.1.0 new file mode 100644 index 0000000..e69de29 diff --git a/test cases/frameworks/32 boost root/meson.build b/test cases/frameworks/32 boost 
root/meson.build new file mode 100644 index 0000000..50d2f0d --- /dev/null +++ b/test cases/frameworks/32 boost root/meson.build @@ -0,0 +1,6 @@ +project('boosttest', 'cpp') + +dep = dependency('boost', modules : 'regex', required: false) + +assert(dep.found(), 'expected to find a fake version of boost') +assert(dep.version() == '0.1.0', 'expected to find version 0.1.0') diff --git a/test cases/frameworks/32 boost root/nativefile.ini.in b/test cases/frameworks/32 boost root/nativefile.ini.in new file mode 100644 index 0000000..54510d7 --- /dev/null +++ b/test cases/frameworks/32 boost root/nativefile.ini.in @@ -0,0 +1,2 @@ +[properties] +boost_root = '@MESON_TEST_ROOT@/boost' diff --git a/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp b/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp new file mode 100644 index 0000000..3ba19ee --- /dev/null +++ b/test cases/frameworks/33 boost split root/boost/extra-dir/include/boost/version.hpp @@ -0,0 +1,3 @@ +#define BOOST_VERSION 200 + +#error This is not a real version of boost diff --git a/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x32-0_2.lib new file mode 100644 index 0000000..e69de29 diff --git a/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib b/test cases/frameworks/33 boost split root/boost/lib/boost_regex-vc142-mt-gd-x64-0_2.lib new file mode 100644 index 0000000..e69de29 diff --git a/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0 b/test cases/frameworks/33 boost split root/boost/lib/libboost_regex.so.0.2.0 new file mode 100644 index 0000000..e69de29 diff --git a/test cases/frameworks/33 boost split root/meson.build b/test cases/frameworks/33 boost split root/meson.build new file mode 100644 index 0000000..a2353bb --- /dev/null +++ b/test cases/frameworks/33 boost split root/meson.build @@ -0,0 +1,6 @@ +project('boosttest', 'cpp') + +dep = dependency('boost', modules : 'regex', required: false) + +assert(dep.found(), 'expected to find a fake version of boost') +assert(dep.version() == '0.2.0', 'expected to find version 0.2.0') diff --git a/test cases/frameworks/33 boost split root/nativefile.ini.in b/test cases/frameworks/33 boost split root/nativefile.ini.in new file mode 100644 index 0000000..7bd5ac2 --- /dev/null +++ b/test cases/frameworks/33 boost split root/nativefile.ini.in @@ -0,0 +1,3 @@ +[properties] +boost_includedir = '@MESON_TEST_ROOT@/boost/extra-dir/include' +boost_librarydir = '@MESON_TEST_ROOT@/boost/lib' -- cgit v1.1 From 84928888e1d47d9e013c38a0b260d5042e4d2392 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Mon, 20 Jul 2020 19:41:19 +0530 Subject: vim: Syntax-highlight continue and break statements --- data/syntax-highlighting/vim/syntax/meson.vim | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/data/syntax-highlighting/vim/syntax/meson.vim b/data/syntax-highlighting/vim/syntax/meson.vim index d0d15d9..1100113 100644 --- a/data/syntax-highlighting/vim/syntax/meson.vim +++ b/data/syntax-highlighting/vim/syntax/meson.vim @@ -32,8 +32,9 @@ set cpo&vim " http://mesonbuild.com/Syntax.html syn keyword mesonConditional elif else if endif -syn keyword mesonRepeat foreach endforeach +syn keyword mesonRepeat foreach endforeach syn keyword mesonOperator and not or in +syn keyword mesonStatement continue break syn match mesonComment "#.*$" 
contains=mesonTodo,@Spell syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained -- cgit v1.1 From 07e62fb42b9e5012e7b2964cdbf42b9e5f4e497b Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Mon, 20 Jul 2020 19:48:43 +0530 Subject: Don't spam about usage of rspfiles to stdout Put it in the log file if necessary. Users don't need to know this. It's very spammy. --- mesonbuild/backend/ninjabackend.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 8c66fd0..3b4a4bd 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -348,7 +348,7 @@ class NinjaBuildElement: use_rspfile = self._should_use_rspfile() if use_rspfile: rulename = self.rulename + '_RSP' - mlog.log("Command line for building %s is long, using a response file" % self.outfilenames) + mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames) else: rulename = self.rulename line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins) -- cgit v1.1 From 6bf61b2a384ab42c679097ea749f8c5235f1e9f8 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 22 Jul 2020 10:42:20 -0400 Subject: visualstudio: warning_level 0 should not map to /W1 In every other compilers level 0 maps to no argument at all. --- mesonbuild/compilers/mixins/visualstudio.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py index 4dfd8b4..93101b5 100644 --- a/mesonbuild/compilers/mixins/visualstudio.py +++ b/mesonbuild/compilers/mixins/visualstudio.py @@ -114,7 +114,7 @@ class VisualStudioLikeCompiler(metaclass=abc.ABCMeta): # See: https://ninja-build.org/manual.html#_deps always_args = ['/nologo', '/showIncludes'] warn_args = { - '0': ['/W1'], + '0': [], '1': ['/W2'], '2': ['/W3'], '3': ['/W4'], -- cgit v1.1 From c016401f95bce461a9acefc86bb75884684d9a5f Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Wed, 8 Jul 2020 15:22:06 -0400 Subject: coredata: Make warning_level per subproject builtin option --- docs/markdown/Builtin-options.md | 61 ++++++++++++++-------- docs/markdown/snippets/per_subproject.md | 4 ++ mesonbuild/coredata.py | 2 +- .../common/230 persubproject options/meson.build | 4 +- .../subprojects/sub1/foo.c | 3 ++ .../subprojects/sub1/meson.build | 4 +- 6 files changed, 54 insertions(+), 24 deletions(-) create mode 100644 docs/markdown/snippets/per_subproject.md diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index e7101d5..6234ecf 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -59,27 +59,27 @@ Options that are labeled "per machine" in the table are set per machine. See the [specifying options per machine](#Specifying-options-per-machine) section for details. -| Option | Default value | Description | Is per machine | -| ------ | ------------- | ----------- | -------------- | -| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | -| backend {ninja, vs,
vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no | -| buildtype {plain, debug,
debugoptimized, release, minsize, custom} | debug | Build type to use | no | -| debug | true | Debug | no | -| default_library {shared, static, both} | shared | Default library type | no | -| errorlogs | true | Whether to print the logs from failing tests. | no | -| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | -| layout {mirror,flat} | mirror | Build directory layout | no | -| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no | -| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes | -| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes | -| stdsplit | true | Split stdout and stderr in test logs | no | -| strip | false | Strip targets on install | no | -| unity {on, off, subprojects} | off | Unity build | no | -| unity_size {>=2} | 4 | Unity file block size | no | -| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | -| werror | false | Treat warnings as errors | no | -| wrap_mode {default, nofallback,
nodownload, forcefallback} | default | Wrap mode to use | no | -| force_fallback_for | [] | Force fallback for those dependencies | no | +| Option | Default value | Description | Is per machine | Is per subproject | +| ------ | ------------- | ----------- | -------------- | ----------------- | +| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | no | +| backend {ninja, vs,
vs2010, vs2015, vs2017, vs2019, xcode} | ninja | Backend to use | no | no | +| buildtype {plain, debug,
debugoptimized, release, minsize, custom} | debug | Build type to use | no | no | +| debug | true | Debug | no | no | +| default_library {shared, static, both} | shared | Default library type | no | yes | +| errorlogs | true | Whether to print the logs from failing tests. | no | no | +| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | no | +| layout {mirror,flat} | mirror | Build directory layout | no | no | +| optimization {0, g, 1, 2, 3, s} | 0 | Optimization level | no | no | +| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes | no | +| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes | no | +| stdsplit | true | Split stdout and stderr in test logs | no | no | +| strip | false | Strip targets on install | no | no | +| unity {on, off, subprojects} | off | Unity build | no | no | +| unity_size {>=2} | 4 | Unity file block size | no | no | +| warning_level {0, 1, 2, 3} | 1 | Set the warning level. From 0 = none to 3 = highest | no | yes | +| werror | false | Treat warnings as errors | no | yes | +| wrap_mode {default, nofallback,
nodownload, forcefallback} | default | Wrap mode to use | no | no | +| force_fallback_for | [] | Force fallback for those dependencies | no | no | For setting optimization levels and toggling debug, you can either set the @@ -215,3 +215,22 @@ the command line, as there was no `build.` prefix. Similarly named fields in the `[properties]` section of the cross file would effect cross compilers, but the code paths were fairly different allowing differences in behavior to crop out. + +## Specifying options per subproject + +Since *0.54.0* `default_library` and `werror` built-in options can be defined +per subproject. This is useful for example when building shared libraries in the +main project, but static link a subproject, or when the main project must build +with no warnings but some subprojects cannot. + +Most of the time this would be used either by the parent project by setting +subproject's default_options (e.g. `subproject('foo', default_options: 'default_library=static')`), +or by the user using the command line `-Dfoo:default_library=static`. + +The value is overriden in this order: +- Value from parent project +- Value from subproject's default_options if set +- Value from subproject() default_options if set +- Value from command line if set + +Since 0.56.0 `warning_level` can also be defined per subproject. diff --git a/docs/markdown/snippets/per_subproject.md b/docs/markdown/snippets/per_subproject.md new file mode 100644 index 0000000..6de6068 --- /dev/null +++ b/docs/markdown/snippets/per_subproject.md @@ -0,0 +1,4 @@ +## Per subproject `warning_level` option + +`warning_level` can now be defined per subproject, in the same way as +`default_library` and `werror`. diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index af43b31..e2a6954 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -1182,7 +1182,7 @@ builtin_options = OrderedDict([ ('strip', BuiltinOption(UserBooleanOption, 'Strip targets on install', False)), ('unity', BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])), ('unity_size', BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))), - ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'])), + ('warning_level', BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3'], yielding=False)), ('werror', BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)), ('wrap_mode', BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])), ('force_fallback_for', BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])), diff --git a/test cases/common/230 persubproject options/meson.build b/test cases/common/230 persubproject options/meson.build index 20dff90..f76a70c 100644 --- a/test cases/common/230 persubproject options/meson.build +++ b/test cases/common/230 persubproject options/meson.build @@ -1,9 +1,11 @@ project('persubproject options', 'c', default_options : ['default_library=both', - 'werror=true']) + 'werror=true', + 'warning_level=3']) assert(get_option('default_library') == 'both', 'Parent default_library should be "both"') assert(get_option('werror')) +assert(get_option('warning_level') == '3') # Check it build both by calling a method only both_libraries target implement lib = library('lib1', 'foo.c') diff --git a/test cases/common/230 persubproject options/subprojects/sub1/foo.c 
b/test cases/common/230 persubproject options/subprojects/sub1/foo.c index 63e4de6..82ad2c2 100644 --- a/test cases/common/230 persubproject options/subprojects/sub1/foo.c +++ b/test cases/common/230 persubproject options/subprojects/sub1/foo.c @@ -1,5 +1,8 @@ int foo(void); int foo(void) { + /* This is built with -Werror, it would error if warning_level=3 was inherited + * from main project and not overridden by this subproject's default_options. */ + int x; return 0; } diff --git a/test cases/common/230 persubproject options/subprojects/sub1/meson.build b/test cases/common/230 persubproject options/subprojects/sub1/meson.build index 7afc934..4e4bc1b 100644 --- a/test cases/common/230 persubproject options/subprojects/sub1/meson.build +++ b/test cases/common/230 persubproject options/subprojects/sub1/meson.build @@ -1,6 +1,8 @@ -project('sub1', 'c') +project('sub1', 'c', + default_options : ['warning_level=0']) assert(get_option('default_library') == 'both', 'Should inherit parent project default_library') +assert(get_option('warning_level') == '0') # Check it build both by calling a method only both_libraries target implement lib = library('lib1', 'foo.c') -- cgit v1.1 From 6e7d548b11ac4ae940c3e08e16d3118ac3a40420 Mon Sep 17 00:00:00 2001 From: Nirbheek Chauhan Date: Mon, 20 Jul 2020 23:28:01 +0530 Subject: compilers: Cache gnu-mixins has_arguments() checks want_output gives us the output file. We don't need the file. We just need the compiler stderr, which is always stored. --- mesonbuild/compilers/mixins/gnu.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py index 3526a91..83f7047 100644 --- a/mesonbuild/compilers/mixins/gnu.py +++ b/mesonbuild/compilers/mixins/gnu.py @@ -362,7 +362,7 @@ class GnuCompiler(GnuLikeCompiler): # For some compiler command line arguments, the GNU compilers will # emit a warning on stderr indicating that an option is valid for a # another language, but still complete with exit_success - with self._build_wrapper(code, env, args, None, mode, disable_cache=False, want_output=True) as p: + with self._build_wrapper(code, env, args, None, mode) as p: result = p.returncode == 0 if self.language in {'cpp', 'objcpp'} and 'is valid for C/ObjC' in p.stde: result = False -- cgit v1.1 From 903d5dd8a7dc1d6f8bef79e66d6ebc07ca2956a8 Mon Sep 17 00:00:00 2001 From: Marcel Hollerbach Date: Thu, 16 Jul 2020 09:53:18 +0200 Subject: ninjabackend: check if target has compiler attribute otherwise we are getting errors like: Traceback (most recent call last): File "/usr/local/lib/python3.6/dist-packages/mesonbuild/mesonmain.py", line 131, in run return options.run_func(options) File "/usr/local/lib/python3.6/dist-packages/mesonbuild/msetup.py", line 245, in run app.generate() File "/usr/local/lib/python3.6/dist-packages/mesonbuild/msetup.py", line 159, in generate self._generate(env) File "/usr/local/lib/python3.6/dist-packages/mesonbuild/msetup.py", line 215, in _generate intr.backend.generate() File "/usr/local/lib/python3.6/dist-packages/mesonbuild/backend/ninjabackend.py", line 518, in generate self.generate_coverage_rules() File "/usr/local/lib/python3.6/dist-packages/mesonbuild/backend/ninjabackend.py", line 991, in generate_coverage_rules self.generate_coverage_command(e, []) File "/usr/local/lib/python3.6/dist-packages/mesonbuild/backend/ninjabackend.py", line 975, in generate_coverage_command for compiler in target.compilers.values(): AttributeError: 'RunTarget' object has no attribute 
'compilers' This extends the 109 generatecode test case to also define a test, so coverage can really detect something. --- mesonbuild/backend/ninjabackend.py | 2 ++ run_unittests.py | 20 +++++++++++++++++++- test cases/common/109 generatorcustom/meson.build | 4 +++- 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 3b4a4bd..32fb8db 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -972,6 +972,8 @@ int dummy; targets = self.build.get_targets().values() use_llvm_cov = False for target in targets: + if not hasattr(target, 'compilers'): + continue for compiler in target.compilers.values(): if compiler.get_id() == 'clang' and not compiler.info.is_darwin(): use_llvm_cov = True diff --git a/run_unittests.py b/run_unittests.py index fac0f50..b5294b9 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -1272,7 +1272,6 @@ class InternalTests(unittest.TestCase): self.assertFalse(errors) - @unittest.skipIf(is_tarball(), 'Skipping because this is a tarball release') class DataTests(unittest.TestCase): @@ -4905,6 +4904,25 @@ recommended as it is not supported on some platforms''') self.run_tests() self.run_target('coverage') + def test_coverage_complex(self): + if mesonbuild.environment.detect_msys2_arch(): + raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') + gcovr_exe, gcovr_new_rootdir = mesonbuild.environment.detect_gcovr() + if not gcovr_exe: + raise unittest.SkipTest('gcovr not found, or too old') + testdir = os.path.join(self.common_test_dir, '109 generatorcustom') + env = get_fake_env(testdir, self.builddir, self.prefix) + cc = env.detect_c_compiler(MachineChoice.HOST) + if cc.get_id() == 'clang': + if not mesonbuild.environment.detect_llvm_cov(): + raise unittest.SkipTest('llvm-cov not found') + if cc.get_id() == 'msvc': + raise unittest.SkipTest('Test only applies to non-MSVC compilers') + self.init(testdir, extra_args=['-Db_coverage=true']) + self.build() + self.run_tests() + self.run_target('coverage') + def test_coverage_html(self): if mesonbuild.environment.detect_msys2_arch(): raise unittest.SkipTest('Skipped due to problems with coverage on MSYS2') diff --git a/test cases/common/109 generatorcustom/meson.build b/test cases/common/109 generatorcustom/meson.build index 17d27e5..b3f50bb 100644 --- a/test cases/common/109 generatorcustom/meson.build +++ b/test cases/common/109 generatorcustom/meson.build @@ -14,5 +14,7 @@ allinone = custom_target('alltogether', output : 'alltogether.h', command : [catter, '@INPUT@', '@OUTPUT@']) -executable('proggie', 'main.c', allinone) +proggie = executable('proggie', 'main.c', allinone) + +test('proggie', proggie) -- cgit v1.1 From bc2338efd33dce9f07ecc2942e090f56006596e8 Mon Sep 17 00:00:00 2001 From: "Matthew D. Scholefield" Date: Tue, 28 Jul 2020 12:47:20 -0500 Subject: Describe syntax of `meson configure [skip ci] This makes it a lot easier for people to quickly figure out how to customize parameter. --- docs/markdown/Builtin-options.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md index 6234ecf..de801ab 100644 --- a/docs/markdown/Builtin-options.md +++ b/docs/markdown/Builtin-options.md @@ -17,7 +17,7 @@ by setting them inside `default_options` of `project()` in your `meson.build`. For legacy reasons `--warnlevel` is the cli argument for the `warning_level` option. 
-They can also be edited after setup using `meson configure`. +They can also be edited after setup using `meson configure -Doption=value`. Installation options are all relative to the prefix, except: -- cgit v1.1 From 3dea817a597e4d2f10bfe3c1e006a0983555ad15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Zbigniew=20J=C4=99drzejewski-Szmek?= Date: Fri, 24 Jul 2020 15:52:06 +0200 Subject: Only emit warning about "native:" on projects with minimum required version 'native:' keyword was only added in 0.54. For projects declaring meson_version >= 0.54, warn, because those projects can and should set the keyword. For older projects declaring support for older versions, don't warn and use the default implicitly. Fixes https://github.com/mesonbuild/meson/issues/6849. --- mesonbuild/interpreter.py | 7 +++++-- test cases/warning/2 languages missing native/meson.build | 3 ++- test cases/warning/2 languages missing native/test.json | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index df81569..45813c1 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -3122,8 +3122,11 @@ external dependencies (including libraries) must go to "dependencies".''') return self.add_languages(args, required, self.machine_from_native_kwarg(kwargs)) else: # absent 'native' means 'both' for backwards compatibility - mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.', - location=self.current_node) + tv = FeatureNew.get_target_version(self.subproject) + if FeatureNew.check_version(tv, '0.54.0'): + mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.', + location=self.current_node) + success = self.add_languages(args, False, MachineChoice.BUILD) success &= self.add_languages(args, required, MachineChoice.HOST) return success diff --git a/test cases/warning/2 languages missing native/meson.build b/test cases/warning/2 languages missing native/meson.build index f4aa956..e204715 100644 --- a/test cases/warning/2 languages missing native/meson.build +++ b/test cases/warning/2 languages missing native/meson.build @@ -1,2 +1,3 @@ -project('languages missing native') +project('languages missing native', + meson_version : '>= 0.54') add_languages('c') diff --git a/test cases/warning/2 languages missing native/test.json b/test cases/warning/2 languages missing native/test.json index 36da0a7..f929654 100644 --- a/test cases/warning/2 languages missing native/test.json +++ b/test cases/warning/2 languages missing native/test.json @@ -1,7 +1,7 @@ { "stdout": [ { - "line": "test cases/warning/2 languages missing native/meson.build:2: WARNING: add_languages is missing native:, assuming languages are wanted for both host and build." + "line": "test cases/warning/2 languages missing native/meson.build:3: WARNING: add_languages is missing native:, assuming languages are wanted for both host and build." } ] } -- cgit v1.1 From 1123f4f311e1ac89b341dea2cb5922f29dc502e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Zbigniew=20J=C4=99drzejewski-Szmek?= Date: Fri, 24 Jul 2020 15:23:05 +0200 Subject: Avoid warning about multiple outputs when building private directory name Fixup for b4b1a2c5a145c1459fc4563a289e164e23bd6a02. A warning would be printed for any rule with multiple outputs, for example: WARNING: custom_target 'coredump.conf.5' has more than one output! Using the first one. WARNING: custom_target 'dnssec-trust-anchors.d.5' has more than one output! 
Using the first one. WARNING: custom_target 'halt.8' has more than one output! Using the first one. Fixes https://github.com/systemd/systemd/issues/16461. --- mesonbuild/backend/backends.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index cfd3a39..effd222 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -183,9 +183,9 @@ class Backend: self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), self.environment.get_build_dir()) - def get_target_filename(self, t): + def get_target_filename(self, t, *, warn_multi_output: bool = True): if isinstance(t, build.CustomTarget): - if len(t.get_outputs()) != 1: + if warn_multi_output and len(t.get_outputs()) != 1: mlog.warning('custom_target {!r} has more than one output! ' 'Using the first one.'.format(t.name)) filename = t.get_outputs()[0] @@ -261,7 +261,7 @@ class Backend: return self.build_to_src def get_target_private_dir(self, target): - return os.path.join(self.get_target_filename(target) + '.p') + return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p') def get_target_private_dir_abs(self, target): return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) -- cgit v1.1 From 7ef51abfc565b147273097201f8eb286b1688736 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Zbigniew=20J=C4=99drzejewski-Szmek?= Date: Fri, 24 Jul 2020 16:29:09 +0200 Subject: Fix typo --- mesonbuild/interpreterbase.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 9f35601..822167c 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -330,7 +330,7 @@ class FeatureDeprecated(FeatureCheckBase): @staticmethod def check_version(target_version: str, feature_version: str) -> bool: - # For deprecatoin checks we need to return the inverse of FeatureNew checks + # For deprecation checks we need to return the inverse of FeatureNew checks return not mesonlib.version_compare_condition_with_min(target_version, feature_version) @staticmethod -- cgit v1.1 From 1ce4258c219fe08b6d6eaa6aa944f27d91d054cb Mon Sep 17 00:00:00 2001 From: James Hilliard Date: Sat, 18 Jul 2020 17:01:33 -0600 Subject: backends: fix rpath match pattern Since -Wl,-rpath= is not the only valid rpath ldflags syntax we need to try and match all valid rpath ldflags. In addition we should prevent -Wl,--just-symbols from being used to set rpath due to inconsistent compiler support. 
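To make the accepted spellings concrete, here is a minimal standalone sketch of the matching described above — an illustration only, not the patch code itself; the function name `rpath_dirs` and the sample flags are made up, and the real change additionally uses `Path(...).is_dir()` to decide whether a `-Wl,-R` or `--just-symbols` argument actually names a directory:

```python
import re

# The flag spellings discussed in this commit (illustrative patterns):
RPATH_RE = re.compile(r'-Wl,-rpath[=,]([^,]+)')              # -Wl,-rpath=DIR and -Wl,-rpath,DIR
RUNPATH_RE = re.compile(r'-Wl,-R,?([^,]+)')                  # Solaris-style -Wl,-RDIR and -Wl,-R,DIR
JUST_SYMBOLS_RE = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')

def rpath_dirs(ldflags):
    """Collect rpath directories from a list of raw linker flags."""
    dirs = []
    for arg in ldflags:
        if JUST_SYMBOLS_RE.match(arg):
            # --just-symbols takes an object or library, not a search path,
            # so it must not be used to set an rpath.
            raise ValueError('--just-symbols cannot be used to set an rpath: ' + arg)
        m = RPATH_RE.match(arg) or RUNPATH_RE.match(arg)
        if m:
            # A single flag may carry several ':'-separated directories.
            dirs.extend(m.group(1).split(':'))
    return dirs

print(rpath_dirs(['-Wl,-rpath=/opt/yonder/lib64', '-Wl,-R,/usr/local/lib']))
# -> ['/opt/yonder/lib64', '/usr/local/lib']
```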
Signed-off-by: James Hilliard --- mesonbuild/backend/backends.py | 30 ++++++++++++++++++++++++++++-- run_unittests.py | 39 +++++++++++++++++++++++++++------------ 2 files changed, 55 insertions(+), 14 deletions(-) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index cfd3a39..e053f67 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -14,6 +14,7 @@ from collections import OrderedDict from functools import lru_cache +from pathlib import Path import enum import json import os @@ -455,10 +456,35 @@ class Backend: args.extend(self.environment.coredata.get_external_link_args(target.for_machine, lang)) except Exception: pass + # Match rpath formats: + # -Wl,-rpath= + # -Wl,-rpath, + rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)') + # Match solaris style compat runpath formats: + # -Wl,-R + # -Wl,-R, + runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)') + # Match symbols formats: + # -Wl,--just-symbols= + # -Wl,--just-symbols, + symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)') for arg in args: - if arg.startswith('-Wl,-rpath='): - for dir in arg.replace('-Wl,-rpath=','').split(':'): + rpath_match = rpath_regex.match(arg) + if rpath_match: + for dir in rpath_match.group(1).split(':'): dirs.add(dir) + runpath_match = runpath_regex.match(arg) + if runpath_match: + for dir in runpath_match.group(1).split(':'): + # The symbols arg is an rpath if the path is a directory + if Path(dir).is_dir(): + dirs.add(dir) + symbols_match = symbols_regex.match(arg) + if symbols_match: + for dir in symbols_match.group(1).split(':'): + # Prevent usage of --just-symbols to specify rpath + if Path(dir).is_dir(): + raise MesonException('Invalid arg for --just-symbols, {} is a directory.'.format(dir)) return dirs def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True): diff --git a/run_unittests.py b/run_unittests.py index b5294b9..73131c7 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -6473,19 +6473,34 @@ class LinuxlikeTests(BasePlatformTests): self.init(yonder_dir) self.build() self.install(use_destdir=False) - self.new_builddir() - # Build an app that uses that installed library. - # Supply the rpath to the installed library via LDFLAGS - # (as systems like buildroot and guix are wont to do) - # and verify install preserves that rpath. - env = {'LDFLAGS': '-Wl,-rpath=' + yonder_libdir, - 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')} - self.init(testdir, override_envvars=env) - self.build() - self.install(use_destdir=False) - got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified')) - self.assertEqual(got_rpath, yonder_libdir) + # Since rpath has multiple valid formats we need to + # test that they are all properly used. + rpath_formats = [ + ('-Wl,-rpath=', False), + ('-Wl,-rpath,', False), + ('-Wl,--just-symbols=', True), + ('-Wl,--just-symbols,', True), + ('-Wl,-R', False), + ('-Wl,-R,', False) + ] + for rpath_format, exception in rpath_formats: + # Build an app that uses that installed library. + # Supply the rpath to the installed library via LDFLAGS + # (as systems like buildroot and guix are wont to do) + # and verify install preserves that rpath. 
+ self.new_builddir() + env = {'LDFLAGS': rpath_format + yonder_libdir, + 'PKG_CONFIG_PATH': os.path.join(yonder_libdir, 'pkgconfig')} + if exception: + with self.assertRaises(subprocess.CalledProcessError): + self.init(testdir, override_envvars=env) + break + self.init(testdir, override_envvars=env) + self.build() + self.install(use_destdir=False) + got_rpath = get_rpath(os.path.join(yonder_prefix, 'bin/rpathified')) + self.assertEqual(got_rpath, yonder_libdir, rpath_format) @skip_if_not_base_option('b_sanitize') def test_pch_with_address_sanitizer(self): -- cgit v1.1 From 8da1b29899e795d87f0a4e51adb532119af48727 Mon Sep 17 00:00:00 2001 From: Camilo Celis Guzman Date: Tue, 14 Jul 2020 15:09:41 +0200 Subject: mtest: report failures and overall summary *after* test(s) logs --- mesonbuild/mtest.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 4aafe62..0d81692 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -794,6 +794,7 @@ class TestHarness: def __init__(self, options: argparse.Namespace): self.options = options self.collected_logs = [] # type: T.List[str] + self.collected_failures = [] # type: T.List[str] self.fail_count = 0 self.expectedfail_count = 0 self.unexpectedpass_count = 0 @@ -908,6 +909,7 @@ class TestHarness: if not self.options.quiet or result.res not in ok_statuses: if result.res not in ok_statuses and mlog.colorize_console: if result.res in bad_statuses: + self.collected_failures.append(result_str) decorator = mlog.red elif result.res is TestResult.SKIP: decorator = mlog.yellow @@ -928,7 +930,11 @@ class TestHarness: self.junit.log(name, result) def print_summary(self) -> None: - msg = textwrap.dedent(''' + # Prepend a list of failures + msg = '' if len(self.collected_failures) < 1 else "\nSummary of Failures:\n\n" + msg += '\n'.join(self.collected_failures) + msg += textwrap.dedent(''' + Ok: {:<4} Expected Fail: {:<4} Fail: {:<4} @@ -1128,8 +1134,8 @@ class TestHarness: break self.drain_futures(futures) - self.print_summary() self.print_collected_logs() + self.print_summary() if self.logfilename: print('Full log written to {}'.format(self.logfilename)) -- cgit v1.1 From 40c197d524181713968ac5a1f052671744915c62 Mon Sep 17 00:00:00 2001 From: Xavier Claessens Date: Tue, 21 Jul 2020 23:05:17 -0400 Subject: pkgconfig: Fix various corner cases See unit tests for the exact scenarios this PR fixes. 
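The corner cases fixed here all come down to how the generated `Libs:`, `Libs.private:` and `Requires:` lists are de-duplicated: entries are removed in priority order (public before private) against an exclude set that is pre-seeded with `link_whole` targets. A minimal sketch of that pattern follows — simplified, with made-up flag strings; the real helper also refuses to de-duplicate bare flags it cannot safely identify and tracks build targets by their id rather than by object identity:

```python
def dedup(items, exclude):
    """Drop entries already emitted or excluded, preserving the original order."""
    result = []
    for item in items:
        if item in exclude:
            continue
        exclude.add(item)
        result.append(item)
    return result

exclude = set()                                   # pre-seeded with link_whole targets in the real code
pub_libs = dedup(['-lsimple2', '-lz', '-lsimple2'], exclude)
priv_libs = dedup(['-lz', '-lprivate'], exclude)  # '-lz' is already public, so it is dropped here
print(pub_libs, priv_libs)                        # ['-lsimple2', '-lz'] ['-lprivate']
```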
--- mesonbuild/build.py | 13 +--- mesonbuild/modules/pkgconfig.py | 91 ++++++++++++++++++++------ run_unittests.py | 13 ++++ test cases/common/47 pkgconfig-gen/meson.build | 36 ++++++++++ test cases/common/47 pkgconfig-gen/simple5.c | 6 ++ test cases/common/47 pkgconfig-gen/test.json | 6 +- 6 files changed, 135 insertions(+), 30 deletions(-) create mode 100644 test cases/common/47 pkgconfig-gen/simple5.c diff --git a/mesonbuild/build.py b/mesonbuild/build.py index a06979c..5e6db73 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -1011,23 +1011,16 @@ This will become a hard error in a future Meson release.''') def get_extra_args(self, language): return self.extra_args.get(language, []) - def get_dependencies(self, exclude=None, for_pkgconfig=False): + def get_dependencies(self, exclude=None): transitive_deps = [] if exclude is None: exclude = [] for t in itertools.chain(self.link_targets, self.link_whole_targets): if t in transitive_deps or t in exclude: continue - # When generating `Libs:` and `Libs.private:` lists in pkg-config - # files we don't want to include static libraries that we link_whole - # or are uninstalled (they're implicitly promoted to link_whole). - # But we still need to include their transitive dependencies, - # a static library we link_whole would itself link to a shared - # library or an installed static library. - if not for_pkgconfig or (not t.is_internal() and t not in self.link_whole_targets): - transitive_deps.append(t) + transitive_deps.append(t) if isinstance(t, StaticLibrary): - transitive_deps += t.get_dependencies(transitive_deps + exclude, for_pkgconfig) + transitive_deps += t.get_dependencies(transitive_deps + exclude) return transitive_deps def get_source_subdir(self): diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index b7a12ff..1cb7698 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -36,6 +36,7 @@ class DependenciesHelper: self.priv_reqs = [] self.cflags = [] self.version_reqs = {} + self.link_whole_targets = [] def add_pub_libs(self, libs): libs, reqs, cflags = self._process_libs(libs, True) @@ -130,10 +131,7 @@ class DependenciesHelper: if obj.found(): processed_libs += obj.get_link_args() processed_cflags += obj.get_compile_args() - if public: - self.add_pub_libs(obj.libraries) - else: - self.add_priv_libs(obj.libraries) + self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public) elif isinstance(obj, dependencies.Dependency): if obj.found(): processed_libs += obj.get_link_args() @@ -148,12 +146,13 @@ class DependenciesHelper: processed_libs.append(obj) elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)): processed_libs.append(obj) - if isinstance(obj, build.StaticLibrary) and public: - self.add_pub_libs(obj.get_dependencies(for_pkgconfig=True)) - self.add_pub_libs(obj.get_external_deps()) - else: - self.add_priv_libs(obj.get_dependencies(for_pkgconfig=True)) - self.add_priv_libs(obj.get_external_deps()) + # If there is a static library in `Libs:` all its deps must be + # public too, otherwise the generated pc file will never be + # usable without --static. 
+ self._add_lib_dependencies(obj.link_targets, + obj.link_whole_targets, + obj.external_deps, + isinstance(obj, build.StaticLibrary) and public) elif isinstance(obj, str): processed_libs.append(obj) else: @@ -161,6 +160,31 @@ class DependenciesHelper: return processed_libs, processed_reqs, processed_cflags + def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public): + add_libs = self.add_pub_libs if public else self.add_priv_libs + # Recursively add all linked libraries + for t in link_targets: + # Internal libraries (uninstalled static library) will be promoted + # to link_whole, treat them as such here. + if t.is_internal(): + self._add_link_whole(t, public) + else: + add_libs([t]) + for t in link_whole_targets: + self._add_link_whole(t, public) + # And finally its external dependencies + add_libs(external_deps) + + def _add_link_whole(self, t, public): + # Don't include static libraries that we link_whole. But we still need to + # include their dependencies: a static library we link_whole + # could itself link to a shared library or an installed static library. + # Keep track of link_whole_targets so we can remove them from our + # lists in case a library is link_with and link_whole at the same time. + # See remove_dups() below. + self.link_whole_targets.append(t) + self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public) + def add_version_reqs(self, name, version_reqs): if version_reqs: if name not in self.version_reqs: @@ -196,6 +220,32 @@ class DependenciesHelper: return ', '.join(result) def remove_dups(self): + # Set of ids that have already been handled and should not be added any more + exclude = set() + + # We can't just check if 'x' is excluded because we could have copies of + # the same SharedLibrary object for example. + def _ids(x): + if hasattr(x, 'generated_pc'): + yield x.generated_pc + if isinstance(x, build.Target): + yield x.get_id() + yield x + + # Exclude 'x' in all its forms and return if it was already excluded + def _add_exclude(x): + was_excluded = False + for i in _ids(x): + if i in exclude: + was_excluded = True + else: + exclude.add(i) + return was_excluded + + # link_whole targets are already part of other targets, exclude them all. + for t in self.link_whole_targets: + _add_exclude(t) + def _fn(xs, libs=False): # Remove duplicates whilst preserving original order result = [] @@ -206,19 +256,21 @@ class DependenciesHelper: cannot_dedup = libs and isinstance(x, str) and \ not x.startswith(('-l', '-L')) and \ x not in known_flags - if x not in result or cannot_dedup: - result.append(x) + if not cannot_dedup and _add_exclude(x): + continue + result.append(x) return result - self.pub_libs = _fn(self.pub_libs, True) + + # Handle lists in priority order: public items can be excluded from + # private and Requires can excluded from Libs. self.pub_reqs = _fn(self.pub_reqs) - self.priv_libs = _fn(self.priv_libs, True) + self.pub_libs = _fn(self.pub_libs, True) self.priv_reqs = _fn(self.priv_reqs) + self.priv_libs = _fn(self.priv_libs, True) + # Reset exclude list just in case some values can be both cflags and libs. 
+ exclude = set() self.cflags = _fn(self.cflags) - # Remove from private libs/reqs if they are in public already - self.priv_libs = [i for i in self.priv_libs if i not in self.pub_libs] - self.priv_reqs = [i for i in self.priv_reqs if i not in self.pub_reqs] - class PkgConfigModule(ExtensionModule): def _get_lname(self, l, msg, pcfile): @@ -267,7 +319,6 @@ class PkgConfigModule(ExtensionModule): def generate_pkgconfig_file(self, state, deps, subdirs, name, description, url, version, pcfile, conflicts, variables, uninstalled=False, dataonly=False): - deps.remove_dups() coredata = state.environment.get_coredata() if uninstalled: outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled') @@ -460,6 +511,8 @@ class PkgConfigModule(ExtensionModule): if compiler: deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None)) + deps.remove_dups() + def parse_variable_list(stringlist): reserved = ['prefix', 'libdir', 'includedir'] variables = [] diff --git a/run_unittests.py b/run_unittests.py index 73131c7..2b0e4e1 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -5836,6 +5836,19 @@ class LinuxlikeTests(BasePlatformTests): out = self._run(cmd + ['--libs'], override_envvars=env).strip().split() self.assertEqual(out, ['-llibmain2', '-llibinternal']) + # See common/47 pkgconfig-gen/meson.build for description of the case this test + with open(os.path.join(privatedir1, 'simple2.pc')) as f: + content = f.read() + self.assertIn('Libs: -L${libdir} -lsimple2 -lz -lsimple1', content) + + with open(os.path.join(privatedir1, 'simple3.pc')) as f: + content = f.read() + self.assertEqual(1, content.count('-lsimple3')) + + with open(os.path.join(privatedir1, 'simple5.pc')) as f: + content = f.read() + self.assertNotIn('-lstat2', content) + def test_pkgconfig_uninstalled(self): testdir = os.path.join(self.common_test_dir, '47 pkgconfig-gen') self.init(testdir) diff --git a/test cases/common/47 pkgconfig-gen/meson.build b/test cases/common/47 pkgconfig-gen/meson.build index eb2afe4..8c16cd5 100644 --- a/test cases/common/47 pkgconfig-gen/meson.build +++ b/test cases/common/47 pkgconfig-gen/meson.build @@ -1,5 +1,12 @@ project('pkgconfig-gen', 'c') +# Some CI runners does not have zlib, just skip them as we need some common +# external dependency. +cc = meson.get_compiler('c') +if not cc.find_library('z', required: false).found() + error('MESON_SKIP_TEST: zlib missing') +endif + # First check we have pkg-config >= 0.29 pkgconfig = find_program('pkg-config', required: false) if not pkgconfig.found() @@ -59,3 +66,32 @@ pkgg.generate( version : libver, dataonly: true ) + +# Regression test for 2 cases: +# - link_whole from InternalDependency used to be ignored, but we should still +# recurse to add libraries they link to. In this case it must add `-lsimple1` +# in generated pc file. +# - dependencies from InternalDependency used to be ignored. In this it must add +# `-lz` in generated pc file. +simple1 = shared_library('simple1', 'simple.c') +stat1 = static_library('stat1', 'simple.c', link_with: simple1) +dep = declare_dependency(link_whole: stat1, dependencies: cc.find_library('z')) +simple2 = library('simple2', 'simple.c') +pkgg.generate(simple2, libraries: dep) + +# Regression test: as_system() does a deepcopy() of the InternalDependency object +# which caused `-lsimple3` to be duplicated because generator used to compare +# Target instances instead of their id. 
+simple3 = shared_library('simple3', 'simple.c') +dep1 = declare_dependency(link_with: simple3) +dep2 = dep1.as_system() +pkgg.generate(libraries: [dep1, dep2], + name: 'simple3', + description: 'desc') + +# Regression test: stat2 is both link_with and link_whole, it should not appear +# in generated pc file. +stat2 = static_library('stat2', 'simple.c', install: true) +simple4 = library('simple4', 'simple.c', link_with: stat2) +simple5 = library('simple5', 'simple5.c', link_with: simple4, link_whole: stat2) +pkgg.generate(simple5) diff --git a/test cases/common/47 pkgconfig-gen/simple5.c b/test cases/common/47 pkgconfig-gen/simple5.c new file mode 100644 index 0000000..9f924bd --- /dev/null +++ b/test cases/common/47 pkgconfig-gen/simple5.c @@ -0,0 +1,6 @@ +int simple5(void); + +int simple5(void) +{ + return 0; +} diff --git a/test cases/common/47 pkgconfig-gen/test.json b/test cases/common/47 pkgconfig-gen/test.json index 1c6a452..702e7fe 100644 --- a/test cases/common/47 pkgconfig-gen/test.json +++ b/test cases/common/47 pkgconfig-gen/test.json @@ -1,9 +1,13 @@ { "installed": [ {"type": "file", "file": "usr/include/simple.h"}, + {"type": "file", "file": "usr/lib/libstat2.a"}, {"type": "file", "file": "usr/lib/pkgconfig/simple.pc"}, {"type": "file", "file": "usr/lib/pkgconfig/libfoo.pc"}, {"type": "file", "file": "usr/lib/pkgconfig/libhello.pc"}, - {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"} + {"type": "file", "file": "usr/lib/pkgconfig/libhello_nolib.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple2.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple3.pc"}, + {"type": "file", "file": "usr/lib/pkgconfig/simple5.pc"} ] } -- cgit v1.1 From cc201a539674babf46f726859587afb5ed6a6867 Mon Sep 17 00:00:00 2001 From: Mark Hindley Date: Sun, 21 Jun 2020 14:13:52 +0100 Subject: Support armel/armhf builds on native arm64 hosts. --- mesonbuild/environment.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 00922c1..a82c8f8 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -451,7 +451,8 @@ def machine_info_can_run(machine_info: MachineInfo): true_build_cpu_family = detect_cpu_family({}) return \ (machine_info.cpu_family == true_build_cpu_family) or \ - ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) + ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \ + ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm')) def search_version(text: str) -> str: # Usually of the type 4.1.4 but compiler output may contain -- cgit v1.1 From a6164ca5a81224b7ed672401a47260f498f06e44 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 6 Feb 2020 09:10:01 -0800 Subject: Allow setting project options from cross or native files This allows adding a `[project options]` section to a cross or native file that contains the options defined for a project in it's meson_option.txt file. 
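As a rough illustration of how these new sections can be consumed, the sketch below mirrors the `load_user_options` helper added in this commit, but uses `configparser` for brevity — an assumption for the example only, since Meson parses machine files with its own parser (which also handles the `constants` section and machine-file layering):

```python
import configparser

def load_user_options(path):
    """Map subproject name ('' for the top-level project) to its option values."""
    config = configparser.ConfigParser()
    config.read(path)
    user_options = {}
    for section in config.sections():
        if not section.endswith('project options'):
            continue
        # '[zlib:project options]' targets the zlib subproject,
        # a plain '[project options]' targets the main project.
        project = section.split(':')[0] if ':' in section else ''
        user_options[project] = dict(config[section])
    return user_options

# For a machine file containing
#   [project options]
#   build-tests = true
#   [zlib:project options]
#   build-tests = false
# this returns {'': {'build-tests': 'true'}, 'zlib': {'build-tests': 'false'}}
```

Values set this way have the lowest precedence; as the documentation added below notes, they are overwritten by `-D` options given on the command line.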
--- docs/markdown/Machine-files.md | 20 ++++++ .../snippets/project_options_in_machine_files.md | 37 ++++++++++ mesonbuild/coredata.py | 10 ++- mesonbuild/environment.py | 19 +++++ run_unittests.py | 83 +++++++++++++++++++++- .../unit/75 user options for subproject/.gitignore | 1 + .../75 user options for subproject/meson.build | 3 + 7 files changed, 171 insertions(+), 2 deletions(-) create mode 100644 docs/markdown/snippets/project_options_in_machine_files.md create mode 100644 test cases/unit/75 user options for subproject/.gitignore create mode 100644 test cases/unit/75 user options for subproject/meson.build diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index 9011f79..26af44a 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -12,6 +12,7 @@ The following sections are allowed: - binaries - paths - properties +- project options ### constants @@ -166,6 +167,25 @@ section may contain random key value pairs accessed using the The properties section can contain any variable you like, and is accessed via `meson.get_external_property`, or `meson.get_cross_property`. +### Project specific options + +*New in 0.54.0* + +Being able to set project specific options in a native or cross files can be +done using the `[project options]` section of the specific file (if doing a +cross build the options from the native file will be ignored) + +For setting options in supbprojects use the `:project options` +section instead. + +```ini +[project options] +build-tests = true + +[zlib:project options] +build-tests = false +``` + ## Loading multiple machine files Native files allow layering (cross files can be layered since meson 0.52.0). diff --git a/docs/markdown/snippets/project_options_in_machine_files.md b/docs/markdown/snippets/project_options_in_machine_files.md new file mode 100644 index 0000000..78b129a --- /dev/null +++ b/docs/markdown/snippets/project_options_in_machine_files.md @@ -0,0 +1,37 @@ +## Project options can be set in native or cross files + +A new set of sections has been added to the cross and native files, `[project +options]` and `[:project options]`, where `subproject_name` +is the name of a subproject. Any options that are allowed in the project can +be set from this section. They have the lowest precedent, and will be +overwritten by command line arguments. + + +```meson +option('foo', type : 'string', value : 'foo') +``` + +```ini +[project options] +foo = 'other val' +``` + +```console +meson build --native-file my.ini +``` + +Will result in the option foo having the value `other val`, + +```console +meson build --native-file my.ini -Dfoo='different val' +``` + +Will result in the option foo having the value `different val`, + + +Subproject options are assigned like this: + +```ini +[zlib:project options] +foo = 'some val' +``` diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index e2a6954..49104a7 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -377,6 +377,7 @@ class CoreData: host_cache = DependencyCache(self.builtins_per_machine, MachineChoice.BUILD) self.deps = PerMachine(build_cache, host_cache) # type: PerMachine[DependencyCache] self.compiler_check_cache = OrderedDict() + # Only to print a warning if it changes between Meson invocations. 
self.config_files = self.__load_config_files(options, scratch_dir, 'native') self.builtin_options_libdir_cross_fixup() @@ -734,7 +735,7 @@ class CoreData: if not self.is_cross_build(): self.copy_build_options_from_regular_ones() - def set_default_options(self, default_options, subproject, env): + def set_default_options(self, default_options: T.Mapping[str, str], subproject: str, env: 'Environment') -> None: # Warn if the user is using two different ways of setting build-type # options that override each other if 'buildtype' in env.cmd_line_options and \ @@ -755,6 +756,13 @@ class CoreData: k = subproject + ':' + k cmd_line_options[k] = v + # load the values for user options out of the appropriate machine file, + # then overload the command line + for k, v in env.user_options.get(subproject, {}).items(): + if subproject: + k = '{}:{}'.format(subproject, k) + cmd_line_options[k] = v + # Override project default_options using conf files (cross or native) for k, v in env.paths.host: if v is not None: diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index a82c8f8..c872aee 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -553,6 +553,9 @@ class Environment: # architecture, just the build and host architectures paths = PerMachineDefaultable() + # We only need one of these as project options are not per machine + user_options = {} + ## Setup build machine defaults # Will be fully initialized later using compilers later. @@ -565,12 +568,26 @@ class Environment: ## Read in native file(s) to override build machine configuration + def load_user_options(): + for section in config.keys(): + if section.endswith('project options'): + if ':' in section: + project = section.split(':')[0] + else: + project = '' + user_options[project] = config.get(section, {}) + if self.coredata.config_files is not None: config = coredata.parse_machine_files(self.coredata.config_files) binaries.build = BinaryTable(config.get('binaries', {})) paths.build = Directories(**config.get('paths', {})) properties.build = Properties(config.get('properties', {})) + # Don't run this if there are any cross files, we don't want to use + # the native values if we're doing a cross build + if not self.coredata.cross_files: + load_user_options() + ## Read in cross file(s) to override host machine configuration if self.coredata.cross_files: @@ -582,6 +599,7 @@ class Environment: if 'target_machine' in config: machines.target = MachineInfo.from_literal(config['target_machine']) paths.host = Directories(**config.get('paths', {})) + load_user_options() ## "freeze" now initialized configuration, and "save" to the class. 
@@ -589,6 +607,7 @@ class Environment: self.binaries = binaries.default_missing() self.properties = properties.default_missing() self.paths = paths.default_missing() + self.user_options = user_options exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper') if exe_wrapper is not None: diff --git a/run_unittests.py b/run_unittests.py index 2b0e4e1..9d96ce0 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -7672,7 +7672,10 @@ class NativeFileTests(BasePlatformTests): for section, entries in values.items(): f.write('[{}]\n'.format(section)) for k, v in entries.items(): - f.write("{}='{}'\n".format(k, v)) + if isinstance(v, bool): + f.write("{}={}\n".format(k, v)) + else: + f.write("{}='{}'\n".format(k, v)) return filename def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs): @@ -7996,6 +7999,54 @@ class NativeFileTests(BasePlatformTests): self.init(testcase, extra_args=['--native-file', config]) self.build() + def test_user_options(self): + testcase = os.path.join(self.common_test_dir, '43 options') + for opt, value in [('testoption', 'some other val'), ('other_one', True), + ('combo_opt', 'one'), ('array_opt', ['two']), + ('integer_opt', 0)]: + config = self.helper_create_native_file({'project options': {opt: value}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_user_options_command_line_overrides(self): + testcase = os.path.join(self.common_test_dir, '43 options') + config = self.helper_create_native_file({'project options': {'other_one': True}}) + self.init(testcase, extra_args=['--native-file', config, '-Dother_one=false']) + + def test_user_options_subproject(self): + testcase = os.path.join(self.unit_test_dir, '75 user options for subproject') + + s = os.path.join(testcase, 'subprojects') + if not os.path.exists(s): + os.mkdir(s) + s = os.path.join(s, 'sub') + if not os.path.exists(s): + sub = os.path.join(self.common_test_dir, '43 options') + shutil.copytree(sub, s) + + for opt, value in [('testoption', 'some other val'), ('other_one', True), + ('combo_opt', 'one'), ('array_opt', ['two']), + ('integer_opt', 0)]: + config = self.helper_create_native_file({'project options': {'sub:{}'.format(opt): value}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + def test_option_bool(self): + # Bools are allowed to be unquoted + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({'built-in options': {'werror': True}}) + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + # Test that no-per subproject options are inherited from the parent + if 'werror' in each['name']: + self.assertEqual(each['value'], True) + break + else: + self.fail('Did not find werror in build options?') + class CrossFileTests(BasePlatformTests): @@ -8005,6 +8056,11 @@ class CrossFileTests(BasePlatformTests): This is mainly aimed to testing overrides from cross files. 
""" + def setUp(self): + super().setUp() + self.current_config = 0 + self.current_wrapper = 0 + def _cross_file_generator(self, *, needs_exe_wrapper: bool = False, exe_wrapper: T.Optional[T.List[str]] = None) -> str: if is_windows(): @@ -8133,6 +8189,21 @@ class CrossFileTests(BasePlatformTests): self.init(testdir, extra_args=['--cross-file=' + name], inprocess=True) self.wipe() + def helper_create_cross_file(self, values): + """Create a config file as a temporary file. + + values should be a nested dictionary structure of {section: {key: + value}} + """ + filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config)) + self.current_config += 1 + with open(filename, 'wt') as f: + for section, entries in values.items(): + f.write('[{}]\n'.format(section)) + for k, v in entries.items(): + f.write("{}='{}'\n".format(k, v)) + return filename + def test_cross_file_dirs(self): testcase = os.path.join(self.unit_test_dir, '60 native file override') self.init(testcase, default_args=False, @@ -8189,6 +8260,16 @@ class CrossFileTests(BasePlatformTests): '-Ddef_sharedstatedir=sharedstatebar', '-Ddef_sysconfdir=sysconfbar']) + def test_user_options(self): + # This is just a touch test for cross file, since the implementation + # shares code after loading from the files + testcase = os.path.join(self.common_test_dir, '43 options') + config = self.helper_create_cross_file({'project options': {'testoption': 'some other value'}}) + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + + class TAPParserTests(unittest.TestCase): def assert_test(self, events, **kwargs): if 'explanation' not in kwargs: diff --git a/test cases/unit/75 user options for subproject/.gitignore b/test cases/unit/75 user options for subproject/.gitignore new file mode 100644 index 0000000..4976afc --- /dev/null +++ b/test cases/unit/75 user options for subproject/.gitignore @@ -0,0 +1 @@ +subprojects/* diff --git a/test cases/unit/75 user options for subproject/meson.build b/test cases/unit/75 user options for subproject/meson.build new file mode 100644 index 0000000..0bc395b --- /dev/null +++ b/test cases/unit/75 user options for subproject/meson.build @@ -0,0 +1,3 @@ +project('user option for subproject') + +p = subproject('sub') -- cgit v1.1 From 54fb61627851a0fe765d31955629ff5d7be2d064 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 3 Jun 2020 11:19:36 -0700 Subject: docs/Machine-files: remove duplicate Properties section --- docs/markdown/Machine-files.md | 7 ------- 1 file changed, 7 deletions(-) diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index 26af44a..ae0219b 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -158,13 +158,6 @@ command line will override any options in the native file. For example, passing In addition to special data that may be specified in cross files, this section may contain random key value pairs accessed using the -`meson.get_external_property()` - -## Properties - -*New for native files in 0.54.0* - -The properties section can contain any variable you like, and is accessed via `meson.get_external_property`, or `meson.get_cross_property`. 
### Project specific options -- cgit v1.1 From 46a45570212fbbe559cb42f1732021ee037b660c Mon Sep 17 00:00:00 2001 From: Alexis Jeandet Date: Fri, 31 Jul 2020 19:17:35 +0200 Subject: Doc: Added clarification on introspection files gen order [skip ci] Signed-off-by: Alexis Jeandet --- docs/markdown/IDE-integration.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/markdown/IDE-integration.md b/docs/markdown/IDE-integration.md index 2cc4f4f..ee51b64 100644 --- a/docs/markdown/IDE-integration.md +++ b/docs/markdown/IDE-integration.md @@ -25,7 +25,8 @@ With this command meson will configure the project and also generate introspection information that is stored in `intro-*.json` files in the `meson-info` directory. The introspection dump will be automatically updated when meson is (re)configured, or the build options change. Thus, an IDE can -watch for changes in this directory to know when something changed. +watch for changes in this directory to know when something changed. Note that +`meson-info.json` guaranteed to be the last file written. The `meson-info` directory should contain the following files: -- cgit v1.1 From af763e093a8172536d96e24901b82edd2e5b2dc9 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 10 Jun 2020 10:28:21 -0700 Subject: mconf/mintro: use authoritative list of options from coredata This splits the directory options and non-directory options into two dicts, and then merges them later to maintain API. --- mesonbuild/coredata.py | 29 +++++++++++++++++------------ mesonbuild/mconf.py | 14 +------------- mesonbuild/mintro.py | 14 +------------- 3 files changed, 19 insertions(+), 38 deletions(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 49104a7..3330ae5 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -1158,23 +1158,25 @@ class BuiltinOption(T.Generic[_T, _U]): cmdline_name = self.argparse_name_to_arg(prefix + name) parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs) + # Update `docs/markdown/Builtin-options.md` after changing the options below -builtin_options = OrderedDict([ - # Directories - ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())), - ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')), - ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')), - ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')), - ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')), - ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())), - ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())), - ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 'share/locale')), +BUILTIN_DIR_OPTIONS = OrderedDict([ + ('prefix', BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())), + ('bindir', BuiltinOption(UserStringOption, 'Executable directory', 'bin')), + ('datadir', BuiltinOption(UserStringOption, 'Data file directory', 'share')), + ('includedir', BuiltinOption(UserStringOption, 'Header file directory', 'include')), + ('infodir', BuiltinOption(UserStringOption, 'Info page directory', 'share/info')), + ('libdir', BuiltinOption(UserStringOption, 'Library directory', default_libdir())), + ('libexecdir', BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())), + ('localedir', BuiltinOption(UserStringOption, 'Locale data directory', 
'share/locale')), ('localstatedir', BuiltinOption(UserStringOption, 'Localstate data directory', 'var')), ('mandir', BuiltinOption(UserStringOption, 'Manual page directory', 'share/man')), ('sbindir', BuiltinOption(UserStringOption, 'System executable directory', 'sbin')), ('sharedstatedir', BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')), ('sysconfdir', BuiltinOption(UserStringOption, 'Sysconf data directory', 'etc')), - # Core options +]) # type: OptionDictType + +BUILTIN_CORE_OPTIONS = OrderedDict([ ('auto_features', BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')), ('backend', BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist)), ('buildtype', BuiltinOption(UserComboOption, 'Build type to use', 'debug', @@ -1194,7 +1196,9 @@ builtin_options = OrderedDict([ ('werror', BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)), ('wrap_mode', BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback'])), ('force_fallback_for', BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])), -]) +]) # type: OptionDictType + +builtin_options = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items())) builtin_options_per_machine = OrderedDict([ ('pkg_config_path', BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])), @@ -1230,3 +1234,4 @@ forbidden_target_names = {'clean': None, 'dist': None, 'distcheck': None, } + diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 2e03cab..f070355 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -184,19 +184,7 @@ class Conf: if not self.default_values_only: print(' Build dir ', self.build_dir) - dir_option_names = ['bindir', - 'datadir', - 'includedir', - 'infodir', - 'libdir', - 'libexecdir', - 'localedir', - 'localstatedir', - 'mandir', - 'prefix', - 'sbindir', - 'sharedstatedir', - 'sysconfdir'] + dir_option_names = list(coredata.BUILTIN_DIR_OPTIONS) test_option_names = ['errorlogs', 'stdsplit'] core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names] diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index cccedaa..0049bbd 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -200,19 +200,7 @@ def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Di def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]: optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]] - dir_option_names = ['bindir', - 'datadir', - 'includedir', - 'infodir', - 'libdir', - 'libexecdir', - 'localedir', - 'localstatedir', - 'mandir', - 'prefix', - 'sbindir', - 'sharedstatedir', - 'sysconfdir'] + dir_option_names = list(cdata.BUILTIN_DIR_OPTIONS) test_option_names = ['errorlogs', 'stdsplit'] core_option_names = [k for k in coredata.builtins if k not in dir_option_names + test_option_names] -- cgit v1.1 From bbba6a7f365f8b7dc7f2d4c3ce7f3e5f4c05fc5e Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 6 Feb 2020 12:18:10 -0800 Subject: Allow setting built-in options from cross/native files This is like the project options, but for meson builtin options. The only real differences here have to do with the differences between meson builtin options and project options. 
Some meson options can be set on a per-machine basis (build.pkg_config_path vs pkg_config_path) others can be set on a per-subproject basis, but should inherit the parent setting. --- cross/armcc.txt | 2 +- cross/armclang-linux.txt | 5 +- cross/armclang.txt | 2 +- cross/c2000.txt | 6 +- cross/ccrx.txt | 2 +- cross/iphone.txt | 6 +- cross/tvos.txt | 7 +- cross/ubuntu-armhf.txt | 6 +- cross/wasm.txt | 3 +- cross/xc16.txt | 2 + docs/markdown/Machine-files.md | 56 +++++++- .../snippets/project_options_in_machine_files.md | 17 ++- mesonbuild/coredata.py | 49 +++++-- mesonbuild/environment.py | 28 +++- mesonbuild/interpreter.py | 1 + run_unittests.py | 153 ++++++++++++++++++++- 16 files changed, 307 insertions(+), 38 deletions(-) diff --git a/cross/armcc.txt b/cross/armcc.txt index c884ffa..ae65c9e 100644 --- a/cross/armcc.txt +++ b/cross/armcc.txt @@ -7,7 +7,7 @@ cpp = 'armcc' ar = 'armar' strip = 'armar' -[properties] +[built-in options] # The '--cpu' option with the appropriate target type should be mentioned # to cross compile c/c++ code with armcc,. c_args = ['--cpu=Cortex-M0plus'] diff --git a/cross/armclang-linux.txt b/cross/armclang-linux.txt index 6df78d6..10f6fa4 100644 --- a/cross/armclang-linux.txt +++ b/cross/armclang-linux.txt @@ -12,7 +12,7 @@ # Armcc is only available in toolchain version 5. # Armclang is only available in toolchain version 6. # Start shell with /opt/arm/developmentstudio-2019.0/bin/suite_exec zsh -# Now the compilers will work. +# Now the compilers will work. [binaries] # we could set exe_wrapper = qemu-arm-static but to test the case @@ -24,8 +24,7 @@ ar = '/opt/arm/developmentstudio-2019.0/sw/ARMCompiler6.12/bin/armar' #strip = '/usr/arm-linux-gnueabihf/bin/strip' #pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' -[properties] - +[built-in options] c_args = ['--target=aarch64-arm-none-eabi'] [host_machine] diff --git a/cross/armclang.txt b/cross/armclang.txt index 955b7ef..6146e0d 100644 --- a/cross/armclang.txt +++ b/cross/armclang.txt @@ -7,7 +7,7 @@ cpp = 'armclang' ar = 'armar' strip = 'armar' -[properties] +[built-in options] # The '--target', '-mcpu' options with the appropriate values should be mentioned # to cross compile c/c++ code with armclang. c_args = ['--target=arm-arm-none-eabi', '-mcpu=cortex-m0plus'] diff --git a/cross/c2000.txt b/cross/c2000.txt index e624f25..61c0310 100644 --- a/cross/c2000.txt +++ b/cross/c2000.txt @@ -12,8 +12,7 @@ cpu_family = 'c2000' cpu = 'c28x' endian = 'little' -[properties] -needs_exe_wrapper = true +[built-in options] c_args = [ '-v28', '-ml', @@ -24,3 +23,6 @@ c_link_args = [ '\f28004x_flash.cmd'] cpp_args = [] cpp_link_args = [] + +[properties] +needs_exe_wrapper = true diff --git a/cross/ccrx.txt b/cross/ccrx.txt index 097ec06..f1b536c 100644 --- a/cross/ccrx.txt +++ b/cross/ccrx.txt @@ -7,7 +7,7 @@ cpp = 'ccrx' ar = 'rlink' strip = 'rlink' -[properties] +[built-in options] # The '--cpu' option with the appropriate target type should be mentioned # to cross compile c/c++ code with ccrx,. 
c_args = ['-cpu=rx600'] diff --git a/cross/iphone.txt b/cross/iphone.txt index e714da5..9659407 100644 --- a/cross/iphone.txt +++ b/cross/iphone.txt @@ -8,14 +8,14 @@ cpp = 'clang++' ar = 'ar' strip = 'strip' -[properties] -root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer' - +[built-in options] c_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] cpp_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] c_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] cpp_link_args = ['-arch', 'armv7', '-miphoneos-version-min=8.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS8.4.sdk'] +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer' has_function_printf = true has_function_hfkerhisadf = false diff --git a/cross/tvos.txt b/cross/tvos.txt index dd6d5c1..833f04b 100644 --- a/cross/tvos.txt +++ b/cross/tvos.txt @@ -8,14 +8,15 @@ cpp = 'clang++' ar = 'ar' strip = 'strip' -[properties] -root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer' - +[built-in options] c_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] cpp_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] c_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] cpp_link_args = ['-arch', 'arm64', '-mtvos-version-min=12.0', '-isysroot', '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer/SDKs/AppleTVOS.sdk'] +[properties] +root = '/Applications/Xcode.app/Contents/Developer/Platforms/AppleTVOS.platform/Developer' + has_function_printf = true has_function_hfkerhisadf = false diff --git a/cross/ubuntu-armhf.txt b/cross/ubuntu-armhf.txt index 4600c22..69e0c86 100644 --- a/cross/ubuntu-armhf.txt +++ b/cross/ubuntu-armhf.txt @@ -9,12 +9,14 @@ strip = '/usr/arm-linux-gnueabihf/bin/strip' pkgconfig = '/usr/bin/arm-linux-gnueabihf-pkg-config' ld = '/usr/bin/arm-linux/gnueabihf-ld' -[properties] -root = '/usr/arm-linux-gnueabihf' +[built-in options] # Used in unit test '140 get define' c_args = ['-DMESON_TEST_ISSUE_1665=1'] cpp_args = '-DMESON_TEST_ISSUE_1665=1' +[properties] +root = '/usr/arm-linux-gnueabihf' + has_function_printf = true has_function_hfkerhisadf = false diff --git a/cross/wasm.txt b/cross/wasm.txt index a43636f..f2d0cd7 100644 --- a/cross/wasm.txt +++ b/cross/wasm.txt @@ -3,8 +3,7 @@ c = '/home/jpakkane/emsdk/fastcomp/emscripten/emcc' cpp = '/home/jpakkane/emsdk/fastcomp/emscripten/em++' ar = '/home/jpakkane/emsdk/fastcomp/emscripten/emar' -[properties] - +[built-in options] c_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] c_link_args = ['-s','EXPORT_ALL=1'] cpp_args = ['-s', 'WASM=1', '-s', 'EXPORT_ALL=1'] diff --git a/cross/xc16.txt b/cross/xc16.txt index 1e67362..c66889d 100644 --- a/cross/xc16.txt 
+++ b/cross/xc16.txt @@ -14,6 +14,8 @@ endian = 'little' [properties] needs_exe_wrapper = true + +[built-in options] c_args = [ '-c', '-mcpu=33EP64MC203', diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index ae0219b..e3de808 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -13,6 +13,7 @@ The following sections are allowed: - paths - properties - project options +- built-in options ### constants @@ -158,17 +159,23 @@ command line will override any options in the native file. For example, passing In addition to special data that may be specified in cross files, this section may contain random key value pairs accessed using the -`meson.get_external_property`, or `meson.get_cross_property`. +`meson.get_external_property()`, or `meson.get_cross_property()`. + +*Changed in 0.55.0* putting `_args` and `_link_args` in the +properties section has been deprecated, and should be put in the built-in +options section. ### Project specific options -*New in 0.54.0* +*New in 0.55.0* + +Path options are not allowed, those must be set in the `[paths]` section. -Being able to set project specific options in a native or cross files can be +Being able to set project specific options in a cross or native file can be done using the `[project options]` section of the specific file (if doing a cross build the options from the native file will be ignored) -For setting options in supbprojects use the `:project options` +For setting options in subprojects use the `[:project options]` section instead. ```ini @@ -179,6 +186,47 @@ build-tests = true build-tests = false ``` + +### Meson built-in options + +Meson built-in options can be set the same way: + +```ini +[built-in options] +c_std = 'c99' +``` + +You can set some meson built-in options on a per-subproject basis, such as +`default_library` and `werror`. The order of precedence is: +1) Command line +2) Machine file +3) Build system definitions + +```ini +[zlib:built-in options] +default_library = 'static' +werror = false +``` + +Options set on a per-subproject basis will inherit the +option from the parent if the parent has a setting but the subproject +doesn't, even when there is a default set meson language. + +```ini +[built-in options] +default_library = 'static' +``` + +will make subprojects use default_library as static. + +Some options can be set on a per-machine basis (in other words, the value of +the build machine can be different than the host machine in a cross compile). +In these cases the values from both a cross file and a native file are used. + +An incomplete list of options is: +- pkg_config_path +- cmake_prefix_path + ## Loading multiple machine files Native files allow layering (cross files can be layered since meson 0.52.0). 
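To make the per-machine behaviour documented above concrete (the file names and paths are only an assumed illustration, echoing the `test_builtin_options_per_machine` unit test added later in this patch), a cross build could pass two machine files, each with its own `[built-in options]` section:

```ini
# native.ini (hypothetical) - build machine; in a cross build these values
# surface as the 'build.'-prefixed options, e.g. build.pkg_config_path
[built-in options]
pkg_config_path = '/native/path'
c_std = 'c99'
```

```ini
# cross.ini (hypothetical) - host machine
[built-in options]
pkg_config_path = '/cross/path'
c_std = 'c11'
```

With `meson build --cross-file cross.ini --native-file native.ini`, the host options come from the cross file, while the native file contributes only the per-machine (`build.`-prefixed) values.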
diff --git a/docs/markdown/snippets/project_options_in_machine_files.md b/docs/markdown/snippets/project_options_in_machine_files.md index 78b129a..8dab951 100644 --- a/docs/markdown/snippets/project_options_in_machine_files.md +++ b/docs/markdown/snippets/project_options_in_machine_files.md @@ -1,4 +1,4 @@ -## Project options can be set in native or cross files +## Project and built-in options can be set in native or cross files A new set of sections has been added to the cross and native files, `[project options]` and `[:project options]`, where `subproject_name` @@ -35,3 +35,18 @@ Subproject options are assigned like this: [zlib:project options] foo = 'some val' ``` + +Additionally meson level options can be set in the same way, using the +`[built-in options]` section. + +```ini +[built-in options] +c_std = 'c99' +``` + +These options can also be set on a per-subproject basis, although only +`default_library` and `werror` can currently be set: +```ini +[zlib:built-in options] +default_library = 'static' +``` diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 3330ae5..0cac029 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -361,15 +361,15 @@ class CoreData: self.install_guid = str(uuid.uuid4()).upper() self.target_guids = {} self.version = version - self.builtins = {} # : OptionDictType + self.builtins = {} # type: OptionDictType self.builtins_per_machine = PerMachine({}, {}) - self.backend_options = {} # : OptionDictType - self.user_options = {} # : OptionDictType + self.backend_options = {} # type: OptionDictType + self.user_options = {} # type: OptionDictType self.compiler_options = PerMachine( defaultdict(dict), defaultdict(dict), ) # : PerMachine[T.defaultdict[str, OptionDictType]] - self.base_options = {} # : OptionDictType + self.base_options = {} # type: OptionDictType self.cross_files = self.__load_config_files(options, scratch_dir, 'cross') self.compilers = PerMachine(OrderedDict(), OrderedDict()) @@ -743,19 +743,28 @@ class CoreData: mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. ' 'Using both is redundant since they override each other. ' 'See: https://mesonbuild.com/Builtin-options.html#build-type-options') + cmd_line_options = OrderedDict() - # Set project default_options as if they were passed to the cmdline. + # Set default options as if they were passed to the command line. # Subprojects can only define default for user options and not yielding # builtin option. from . 
import optinterpreter - for k, v in default_options.items(): + for k, v in chain(default_options.items(), env.meson_options.host.get('', {}).items()): if subproject: if (k not in builtin_options or builtin_options[k].yielding) \ and optinterpreter.is_invalid_name(k, log=False): continue - k = subproject + ':' + k cmd_line_options[k] = v + # IF the subproject options comes from a machine file, then we need to + # set the option as subproject:option + if subproject: + for k, v in env.meson_options.host.get(subproject, {}).items(): + if (k not in builtin_options or builtin_options[k].yielding) \ + and optinterpreter.is_invalid_name(k, log=False): + continue + cmd_line_options['{}:{}'.format(subproject, k)] = v + # load the values for user options out of the appropriate machine file, # then overload the command line for k, v in env.user_options.get(subproject, {}).items(): @@ -768,8 +777,32 @@ class CoreData: if v is not None: cmd_line_options[k] = v + from .compilers import all_languages + # Report that [properties]c_args + for lang in all_languages: + for args in ['{}_args'.format(lang), '{}_link_args'.format(lang)]: + msg = ('{} in the [properties] section of the machine file is deprecated, ' + 'use the [built-in options] section.') + if args in env.properties.host or args in env.properties.build: + mlog.deprecation(msg.format(args)) + + # Currently we don't support any options that are both per-subproject + # and per-machine, but when we do this will need to account for that. + # For cross builds we need to get the build specifc options + if env.meson_options.host != env.meson_options.build and subproject in env.meson_options.build: + for k in builtin_options_per_machine.keys(): + if k in env.meson_options.build[subproject]: + cmd_line_options['build.{}'.format(k)] = env.meson_options.build[subproject][k] + + # compiler options are always per-machine + for lang in all_languages: + prefix = '{}_'.format(lang) + for k in env.meson_options.build[subproject]: + if k.startswith(prefix): + cmd_line_options['build.{}'.format(k)] = env.meson_options.build[subproject][k] + # Override all the above defaults using the command-line arguments - # actually passed to us + # actually passed to use cmd_line_options.update(env.cmd_line_options) env.cmd_line_options = cmd_line_options diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index c872aee..dc674fd 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -556,6 +556,9 @@ class Environment: # We only need one of these as project options are not per machine user_options = {} + # meson builtin options, as passed through cross or native files + meson_options = PerMachineDefaultable() + ## Setup build machine defaults # Will be fully initialized later using compilers later. 
@@ -568,14 +571,15 @@ class Environment: ## Read in native file(s) to override build machine configuration - def load_user_options(): + def load_options(tag: str, store: T.Dict[str, T.Any]) -> None: for section in config.keys(): - if section.endswith('project options'): + if section.endswith(tag): if ':' in section: project = section.split(':')[0] else: project = '' - user_options[project] = config.get(section, {}) + store[project] = config.get(section, {}) + if self.coredata.config_files is not None: config = coredata.parse_machine_files(self.coredata.config_files) @@ -586,7 +590,9 @@ class Environment: # Don't run this if there are any cross files, we don't want to use # the native values if we're doing a cross build if not self.coredata.cross_files: - load_user_options() + load_options('project options', user_options) + meson_options.build = {} + load_options('built-in options', meson_options.build) ## Read in cross file(s) to override host machine configuration @@ -599,7 +605,9 @@ class Environment: if 'target_machine' in config: machines.target = MachineInfo.from_literal(config['target_machine']) paths.host = Directories(**config.get('paths', {})) - load_user_options() + load_options('project options', user_options) + meson_options.host = {} + load_options('built-in options', meson_options.host) ## "freeze" now initialized configuration, and "save" to the class. @@ -608,6 +616,16 @@ class Environment: self.properties = properties.default_missing() self.paths = paths.default_missing() self.user_options = user_options + self.meson_options = meson_options.default_missing() + + # Ensure that no paths are passed via built-in options: + if '' in self.meson_options.host: + for each in coredata.BUILTIN_DIR_OPTIONS.keys(): + # These are not per-subdirectory and probably never will be + if each in self.meson_options.host['']: + raise EnvironmentException( + 'Invalid entry {} in [built-in options] section. 
' + 'Use the [paths] section instead.'.format(each)) exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper') if exe_wrapper is not None: diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 45813c1..317793d 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2791,6 +2791,7 @@ external dependencies (including libraries) must go to "dependencies".''') default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) default_options = coredata.create_options_dict(default_options) + if dirname == '': raise InterpreterException('Subproject dir name must not be empty.') if dirname[0] == '.': diff --git a/run_unittests.py b/run_unittests.py index 9d96ce0..bce0bda 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -7672,8 +7672,10 @@ class NativeFileTests(BasePlatformTests): for section, entries in values.items(): f.write('[{}]\n'.format(section)) for k, v in entries.items(): - if isinstance(v, bool): + if isinstance(v, (bool, int, float)): f.write("{}={}\n".format(k, v)) + elif isinstance(v, list): + f.write("{}=[{}]\n".format(k, ', '.join(["'{}'".format(w) for w in v]))) else: f.write("{}='{}'\n".format(k, v)) return filename @@ -8047,6 +8049,108 @@ class NativeFileTests(BasePlatformTests): else: self.fail('Did not find werror in build options?') + def test_option_integer(self): + # Bools are allowed to be unquoted + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({'built-in options': {'unity_size': 100}}) + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + # Test that no-per subproject options are inherited from the parent + if 'unity_size' in each['name']: + self.assertEqual(each['value'], 100) + break + else: + self.fail('Did not find unity_size in build options?') + + def test_builtin_options(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_native_file({'built-in options': {'cpp_std': 'c++14'}}) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++14') + break + else: + self.fail('Did not find werror in build options?') + + def test_builtin_options_subprojects(self): + testcase = os.path.join(self.common_test_dir, '102 subproject subdir') + config = self.helper_create_native_file({'built-in options': {'default_library': 'both', 'c_args': ['-Dfoo']}, 'sub:built-in options': {'default_library': 'static'}}) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + # Test that no-per subproject options are inherited from the parent + if 'c_args' in each['name']: + # This path will be hit twice, once for build and once for host, + self.assertEqual(each['value'], ['-Dfoo']) + found += 1 + elif each['name'] == 'default_library': + self.assertEqual(each['value'], 'both') + found += 1 + elif each['name'] == 'sub:default_library': + self.assertEqual(each['value'], 'static') + found += 1 + self.assertEqual(found, 4, 'Did not find all three sections') + + def test_builtin_options_subprojects_overrides_buildfiles(self): + # If the buildfile says subproject(... 
default_library: shared), ensure that's overwritten + testcase = os.path.join(self.common_test_dir, '230 persubproject options') + config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}}) + + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertIn(cm.exception.stdout, 'Parent should override default_library') + + def test_builtin_options_subprojects_inherits_parent_override(self): + # If the buildfile says subproject(... default_library: shared), ensure that's overwritten + testcase = os.path.join(self.common_test_dir, '230 persubproject options') + config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}}) + + with self.assertRaises(subprocess.CalledProcessError) as cm: + self.init(testcase, extra_args=['--native-file', config]) + self.assertIn(cm.exception.stdout, 'Parent should override default_library') + + def test_builtin_options_compiler_properties(self): + # the properties section can have lang_args, and those need to be + # overwritten by the built-in options + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'c_args': ['-DFOO']}, + 'properties': {'c_args': ['-DBAR']}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'c_args': + self.assertEqual(each['value'], ['-DFOO']) + break + else: + self.fail('Did not find c_args in build options?') + + def test_builtin_options_compiler_properties_legacy(self): + # The legacy placement in properties is still valid if a 'built-in + # options' setting is present, but doesn't have the lang_args + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'default_library': 'static'}, + 'properties': {'c_args': ['-DBAR']}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'c_args': + self.assertEqual(each['value'], ['-DBAR']) + break + else: + self.fail('Did not find c_args in build options?') + class CrossFileTests(BasePlatformTests): @@ -8266,9 +8370,54 @@ class CrossFileTests(BasePlatformTests): testcase = os.path.join(self.common_test_dir, '43 options') config = self.helper_create_cross_file({'project options': {'testoption': 'some other value'}}) with self.assertRaises(subprocess.CalledProcessError) as cm: - self.init(testcase, extra_args=['--native-file', config]) + self.init(testcase, extra_args=['--cross-file', config]) self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') + def test_builtin_options(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_cross_file({'built-in options': {'cpp_std': 'c++14'}}) + + self.init(testcase, extra_args=['--cross-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++14') + break + else: + self.fail('No c++ standard set?') + + def test_builtin_options_per_machine(self): + """Test options that are allowed to be set on a per-machine basis. + + Such options could be passed twice, once for the build machine, and + once for the host machine. 
I've picked pkg-config path, but any would + do that can be set for both. + """ + testcase = os.path.join(self.common_test_dir, '2 cpp') + cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross/path', 'cpp_std': 'c++17'}}) + native = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native/path', 'cpp_std': 'c++14'}}) + + self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native]) + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/cross/path']) + found += 1 + elif each['name'] == 'cpp_std': + self.assertEqual(each['value'], 'c++17') + found += 1 + elif each['name'] == 'build.pkg_config_path': + self.assertEqual(each['value'], ['/native/path']) + found += 1 + elif each['name'] == 'build.cpp_std': + self.assertEqual(each['value'], 'c++14') + found += 1 + + if found == 4: + break + self.assertEqual(found, 4, 'Did not find all sections.') + class TAPParserTests(unittest.TestCase): def assert_test(self, events, **kwargs): -- cgit v1.1 From 17c8193615e5fafbbfcf99b1d028f5da36d79cc4 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 16 Jun 2020 11:11:15 -0700 Subject: machine-files: give better error messages about using integers --- mesonbuild/coredata.py | 12 +++++++++--- test cases/failing/106 number in combo/meson.build | 1 + test cases/failing/106 number in combo/nativefile.ini | 2 ++ test cases/failing/106 number in combo/test.json | 5 +++++ test cases/failing/107 bool in combo/meson.build | 1 + test cases/failing/107 bool in combo/meson_options.txt | 5 +++++ test cases/failing/107 bool in combo/nativefile.ini | 2 ++ test cases/failing/107 bool in combo/test.json | 5 +++++ 8 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 test cases/failing/106 number in combo/meson.build create mode 100644 test cases/failing/106 number in combo/nativefile.ini create mode 100644 test cases/failing/106 number in combo/test.json create mode 100644 test cases/failing/107 bool in combo/meson.build create mode 100644 test cases/failing/107 bool in combo/meson_options.txt create mode 100644 test cases/failing/107 bool in combo/nativefile.ini create mode 100644 test cases/failing/107 bool in combo/test.json diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 0cac029..b7efe30 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -161,10 +161,16 @@ class UserComboOption(UserOption[str]): def validate_value(self, value): if value not in self.choices: + if isinstance(value, bool): + _type = 'boolean' + elif isinstance(value, (int, float)): + _type = 'number' + else: + _type = 'string' optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices]) - raise MesonException('Value "{}" for combo option "{}" is not one of the choices.' - ' Possible choices are: {}.'.format( - value, self.description, optionsstring)) + raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.' 
+ ' Possible choices are (as string): {}.'.format( + value, _type, self.description, optionsstring)) return value class UserArrayOption(UserOption[T.List[str]]): diff --git a/test cases/failing/106 number in combo/meson.build b/test cases/failing/106 number in combo/meson.build new file mode 100644 index 0000000..1a647df --- /dev/null +++ b/test cases/failing/106 number in combo/meson.build @@ -0,0 +1 @@ +project('number in combo') diff --git a/test cases/failing/106 number in combo/nativefile.ini b/test cases/failing/106 number in combo/nativefile.ini new file mode 100644 index 0000000..55f10fc --- /dev/null +++ b/test cases/failing/106 number in combo/nativefile.ini @@ -0,0 +1,2 @@ +[built-in options] +optimization = 1 diff --git a/test cases/failing/106 number in combo/test.json b/test cases/failing/106 number in combo/test.json new file mode 100644 index 0000000..a32c358 --- /dev/null +++ b/test cases/failing/106 number in combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/106 number in combo/meson.build:1:0: ERROR: Value \"1\" (of type \"number\") for combo option \"Optimization level\" is not one of the choices. Possible choices are (as string): \"0\", \"g\", \"1\", \"2\", \"3\", \"s\"." } + ] +} diff --git a/test cases/failing/107 bool in combo/meson.build b/test cases/failing/107 bool in combo/meson.build new file mode 100644 index 0000000..c5efd67 --- /dev/null +++ b/test cases/failing/107 bool in combo/meson.build @@ -0,0 +1 @@ +project('bool in combo') diff --git a/test cases/failing/107 bool in combo/meson_options.txt b/test cases/failing/107 bool in combo/meson_options.txt new file mode 100644 index 0000000..0c8f5de --- /dev/null +++ b/test cases/failing/107 bool in combo/meson_options.txt @@ -0,0 +1,5 @@ +option( + 'opt', + type : 'combo', + choices : ['true', 'false'] +) diff --git a/test cases/failing/107 bool in combo/nativefile.ini b/test cases/failing/107 bool in combo/nativefile.ini new file mode 100644 index 0000000..b423957 --- /dev/null +++ b/test cases/failing/107 bool in combo/nativefile.ini @@ -0,0 +1,2 @@ +[project options] +opt = true diff --git a/test cases/failing/107 bool in combo/test.json b/test cases/failing/107 bool in combo/test.json new file mode 100644 index 0000000..37218e8 --- /dev/null +++ b/test cases/failing/107 bool in combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/107 bool in combo/meson.build:1:0: ERROR: Value \"True\" (of type \"boolean\") for combo option \"opt\" is not one of the choices. Possible choices are (as string): \"true\", \"false\"." } + ] +} -- cgit v1.1 From 1ca17dc853ece6225a46b46330e436d46d74bc4b Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Wed, 10 Jun 2020 13:02:30 -0700 Subject: docs/machine-files: Add a section on data types This attempts to clarify the usage of strings and arrays, as well as document the boolean type that has been exposed via the project and built-in options --- docs/markdown/Machine-files.md | 48 +++++++++++++++++++++++++++++++++++------- 1 file changed, 40 insertions(+), 8 deletions(-) diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index e3de808..9affdca 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -5,6 +5,37 @@ documentation on the common values used by both, for the specific values of one or the other see the [cross compilation](Cross-compilation.md) and [native environments](Native-environments.md). 
+## Data Types + +There are four basic data types in a machine file: +- strings +- arrays +- booleans +- integers + +A string is specified single quoted: +```ini +[section] +option1 = 'false' +option2 = '2' +``` + +An array is enclosed in square brackets, and must consist of strings or booleans +```ini +[section] +option = ['value'] +``` + +A boolean must be either `true` or `false`, and unquoted. +```ini +option = false +``` + +An integer must be either an unquoted numeric constant; +```ini +option = 42 +``` + ## Sections The following sections are allowed: @@ -90,14 +121,16 @@ a = 'Hello' ### Binaries The binaries section contains a list of binaries. These can be used -internally by meson, or by the `find_program` function: +internally by meson, or by the `find_program` function. + +These values must be either strings or an array of strings Compilers and linkers are defined here using `` and `_ld`. `_ld` is special because it is compiler specific. For compilers like gcc and clang which are used to invoke the linker this is a value to pass to their "choose the linker" argument (-fuse-ld= in this case). For compilers like MSVC and Clang-Cl, this is the path to a linker for meson to invoke, -such as `link.exe` or `lld-link.exe`. Support for ls is *new in 0.53.0* +such as `link.exe` or `lld-link.exe`. Support for `ld` is *new in 0.53.0* *changed in 0.53.1* the `ld` variable was replaced by `_ld`, because it *regressed a large number of projects. in 0.53.0 the `ld` variable was used @@ -115,8 +148,8 @@ llvm-config = '/usr/lib/llvm8/bin/llvm-config' Cross example: ```ini -c = '/usr/bin/i586-mingw32msvc-gcc' -cpp = '/usr/bin/i586-mingw32msvc-g++' +c = ['ccache', '/usr/bin/i586-mingw32msvc-gcc'] +cpp = ['ccache', '/usr/bin/i586-mingw32msvc-g++'] c_ld = 'gold' cpp_ld = 'gold' ar = '/usr/i586-mingw32msvc/bin/ar' @@ -140,7 +173,7 @@ An incomplete list of internally used programs that can be overridden here is: ### Paths and Directories As of 0.50.0 paths and directories such as libdir can be defined in the native -file in a paths section +and cross files in a paths section. These should be strings ```ini [paths] @@ -186,7 +219,6 @@ build-tests = true build-tests = false ``` - ### Meson built-in options Meson built-in options can be set the same way: @@ -230,9 +262,9 @@ An incomplete list of options is: ## Loading multiple machine files Native files allow layering (cross files can be layered since meson 0.52.0). -More than one native file can be loaded, with values from a previous file being +More than one file can be loaded, with values from a previous file being overridden by the next. The intention of this is not overriding, but to allow -composing native files. This composition is done by passing the command line +composing files. 
This composition is done by passing the command line argument multiple times: ```console -- cgit v1.1 From e981798e0b5ccb53cdb94a616c9bccc20f091e6b Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 16 Jun 2020 11:33:02 -0700 Subject: coredata: fix type annotation --- mesonbuild/coredata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index b7efe30..e3b6dab 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -374,7 +374,7 @@ class CoreData: self.compiler_options = PerMachine( defaultdict(dict), defaultdict(dict), - ) # : PerMachine[T.defaultdict[str, OptionDictType]] + ) # type: PerMachine[T.defaultdict[str, OptionDictType]] self.base_options = {} # type: OptionDictType self.cross_files = self.__load_config_files(options, scratch_dir, 'cross') self.compilers = PerMachine(OrderedDict(), OrderedDict()) -- cgit v1.1 From 601789cc7ce3692fbefe14047d8b8cc68a3d5160 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Tue, 16 Jun 2020 15:23:15 -0700 Subject: machine-files: deprecate the paths section --- docs/markdown/Machine-files.md | 4 +++- mesonbuild/coredata.py | 5 ----- mesonbuild/envconfig.py | 40 ---------------------------------------- mesonbuild/environment.py | 26 +++++++------------------- run_unittests.py | 34 ++++++++++++++++++++++++++++++++++ 5 files changed, 44 insertions(+), 65 deletions(-) diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index 9affdca..60c4dd5 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -172,8 +172,10 @@ An incomplete list of internally used programs that can be overridden here is: ### Paths and Directories +*Deprecated in 0.55.0* use the built-in section instead. + As of 0.50.0 paths and directories such as libdir can be defined in the native -and cross files in a paths section. These should be strings +and cross files in a paths section. These should be strings. ```ini [paths] diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index e3b6dab..99da034 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -778,11 +778,6 @@ class CoreData: k = '{}:{}'.format(subproject, k) cmd_line_options[k] = v - # Override project default_options using conf files (cross or native) - for k, v in env.paths.host: - if v is not None: - cmd_line_options[k] = v - from .compilers import all_languages # Report that [properties]c_args for lang in all_languages: diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index 219b62e..9402d38 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -407,43 +407,3 @@ class BinaryTable: if command is not None and (len(command) == 0 or len(command[0].strip()) == 0): command = None return command - -class Directories: - - """Data class that holds information about directories for native and cross - builds. 
- """ - - def __init__(self, bindir: T.Optional[str] = None, datadir: T.Optional[str] = None, - includedir: T.Optional[str] = None, infodir: T.Optional[str] = None, - libdir: T.Optional[str] = None, libexecdir: T.Optional[str] = None, - localedir: T.Optional[str] = None, localstatedir: T.Optional[str] = None, - mandir: T.Optional[str] = None, prefix: T.Optional[str] = None, - sbindir: T.Optional[str] = None, sharedstatedir: T.Optional[str] = None, - sysconfdir: T.Optional[str] = None): - self.bindir = bindir - self.datadir = datadir - self.includedir = includedir - self.infodir = infodir - self.libdir = libdir - self.libexecdir = libexecdir - self.localedir = localedir - self.localstatedir = localstatedir - self.mandir = mandir - self.prefix = prefix - self.sbindir = sbindir - self.sharedstatedir = sharedstatedir - self.sysconfdir = sysconfdir - - def __contains__(self, key: str) -> bool: - return hasattr(self, key) - - def __getitem__(self, key: str) -> T.Optional[str]: - # Mypy can't figure out what to do with getattr here, so we'll case for it - return T.cast(T.Optional[str], getattr(self, key)) - - def __setitem__(self, key: str, value: T.Optional[str]) -> None: - setattr(self, key, value) - - def __iter__(self) -> T.Iterator[T.Tuple[str, str]]: - return iter(self.__dict__.items()) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index dc674fd..7dfffa2 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -27,7 +27,7 @@ from .mesonlib import ( from . import mlog from .envconfig import ( - BinaryTable, Directories, MachineInfo, + BinaryTable, MachineInfo, Properties, known_cpu_families, ) from . import compilers @@ -548,11 +548,6 @@ class Environment: # Misc other properties about each machine. properties = PerMachineDefaultable() - # Store paths for native and cross build files. 
There is no target - # machine information here because nothing is installed for the target - # architecture, just the build and host architectures - paths = PerMachineDefaultable() - # We only need one of these as project options are not per machine user_options = {} @@ -580,11 +575,9 @@ class Environment: project = '' store[project] = config.get(section, {}) - if self.coredata.config_files is not None: config = coredata.parse_machine_files(self.coredata.config_files) binaries.build = BinaryTable(config.get('binaries', {})) - paths.build = Directories(**config.get('paths', {})) properties.build = Properties(config.get('properties', {})) # Don't run this if there are any cross files, we don't want to use @@ -592,6 +585,9 @@ class Environment: if not self.coredata.cross_files: load_options('project options', user_options) meson_options.build = {} + if config.get('paths') is not None: + mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.') + load_options('paths', meson_options.build) load_options('built-in options', meson_options.build) ## Read in cross file(s) to override host machine configuration @@ -604,9 +600,11 @@ class Environment: machines.host = MachineInfo.from_literal(config['host_machine']) if 'target_machine' in config: machines.target = MachineInfo.from_literal(config['target_machine']) - paths.host = Directories(**config.get('paths', {})) load_options('project options', user_options) meson_options.host = {} + if config.get('paths') is not None: + mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.') + load_options('paths', meson_options.host) load_options('built-in options', meson_options.host) ## "freeze" now initialized configuration, and "save" to the class. @@ -614,19 +612,9 @@ class Environment: self.machines = machines.default_missing() self.binaries = binaries.default_missing() self.properties = properties.default_missing() - self.paths = paths.default_missing() self.user_options = user_options self.meson_options = meson_options.default_missing() - # Ensure that no paths are passed via built-in options: - if '' in self.meson_options.host: - for each in coredata.BUILTIN_DIR_OPTIONS.keys(): - # These are not per-subdirectory and probably never will be - if each in self.meson_options.host['']: - raise EnvironmentException( - 'Invalid entry {} in [built-in options] section. 
' - 'Use the [paths] section instead.'.format(each)) - exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper') if exe_wrapper is not None: from .dependencies import ExternalProgram diff --git a/run_unittests.py b/run_unittests.py index bce0bda..7bab408 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -8151,6 +8151,40 @@ class NativeFileTests(BasePlatformTests): else: self.fail('Did not find c_args in build options?') + def test_builtin_options_paths(self): + # the properties section can have lang_args, and those need to be + # overwritten by the built-in options + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'bindir': 'foo'}, + 'paths': {'bindir': 'bar'}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'bindir': + self.assertEqual(each['value'], 'foo') + break + else: + self.fail('Did not find bindir in build options?') + + def test_builtin_options_paths_legacy(self): + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': {'default_library': 'static'}, + 'paths': {'bindir': 'bar'}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'bindir': + self.assertEqual(each['value'], 'bar') + break + else: + self.fail('Did not find bindir in build options?') + class CrossFileTests(BasePlatformTests): -- cgit v1.1 From 5358765806909de8a7179c7becb3ee6ae1a54215 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 19 Jun 2020 21:13:35 -0700 Subject: mesonlib: Add repr() methods to PerMachine classes Which is super helpful in debuggers --- mesonbuild/mesonlib.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index a43d4c4..2c563e4 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -389,6 +389,9 @@ class PerMachine(T.Generic[_T]): unfreeze.host = None return unfreeze + def __repr__(self) -> str: + return 'PerMachine({!r}, {!r})'.format(self.build, self.host) + class PerThreeMachine(PerMachine[_T]): """Like `PerMachine` but includes `target` too. @@ -421,6 +424,9 @@ class PerThreeMachine(PerMachine[_T]): def matches_build_machine(self, machine: MachineChoice) -> bool: return self.build == self[machine] + def __repr__(self) -> str: + return 'PerThreeMachine({!r}, {!r}, {!r})'.format(self.build, self.host, self.target) + class PerMachineDefaultable(PerMachine[T.Optional[_T]]): """Extends `PerMachine` with the ability to default from `None`s. @@ -439,6 +445,9 @@ class PerMachineDefaultable(PerMachine[T.Optional[_T]]): freeze.host = freeze.build return freeze + def __repr__(self) -> str: + return 'PerMachineDefaultable({!r}, {!r})'.format(self.build, self.host) + class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]): """Extends `PerThreeMachine` with the ability to default from `None`s. 
@@ -460,6 +469,9 @@ class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Option freeze.target = freeze.host return freeze + def __repr__(self) -> str: + return 'PerThreeMachineDefaultable({!r}, {!r}, {!r})'.format(self.build, self.host, self.target) + def is_sunos() -> bool: return platform.system().lower() == 'sunos' -- cgit v1.1 From 5db3860abf6a27b0dd4653fa8c7143f4a70df7a7 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 19 Jun 2020 10:53:20 -0700 Subject: push all config-file settings into coredata.set_default_options This puts all of them together, in the next patch they'll be pulled back out, but it's convenient to start that refactor by moving them all there, then moving them into env as a whole. --- mesonbuild/coredata.py | 69 +++++++++++++++++++++++--------------------------- 1 file changed, 32 insertions(+), 37 deletions(-) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 99da034..aaf31aa 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -751,34 +751,44 @@ class CoreData: 'See: https://mesonbuild.com/Builtin-options.html#build-type-options') cmd_line_options = OrderedDict() - # Set default options as if they were passed to the command line. - # Subprojects can only define default for user options and not yielding - # builtin option. - from . import optinterpreter - for k, v in chain(default_options.items(), env.meson_options.host.get('', {}).items()): - if subproject: - if (k not in builtin_options or builtin_options[k].yielding) \ - and optinterpreter.is_invalid_name(k, log=False): - continue - cmd_line_options[k] = v - # IF the subproject options comes from a machine file, then we need to - # set the option as subproject:option - if subproject: - for k, v in env.meson_options.host.get(subproject, {}).items(): + from . import optinterpreter + from .compilers import all_languages + if not subproject: + # Set default options as if they were passed to the command line. + # Subprojects can only define default for user options and not yielding + # builtin option. 
+ for k, v in chain(default_options.items(), env.meson_options.host.get('', {}).items()): + cmd_line_options[k] = v + + # compiler options are always per-machine, but not per sub-project + if '' in env.meson_options.build: + for lang in all_languages: + prefix = '{}_'.format(lang) + for k in env.meson_options.build['']: + if k.startswith(prefix): + cmd_line_options['build.{}'.format(k)] = env.meson_options.build[subproject][k] + else: + # If the subproject options comes from a machine file, then we need to + # set the option as subproject:option + for k, v in chain(default_options.items(), env.meson_options.host.get('', {}).items(), + env.meson_options.host.get(subproject, {}).items()): if (k not in builtin_options or builtin_options[k].yielding) \ and optinterpreter.is_invalid_name(k, log=False): continue cmd_line_options['{}:{}'.format(subproject, k)] = v + cmd_line_options.update(env.cmd_line_options) + env.cmd_line_options = cmd_line_options + + options = OrderedDict() # load the values for user options out of the appropriate machine file, # then overload the command line for k, v in env.user_options.get(subproject, {}).items(): if subproject: k = '{}:{}'.format(subproject, k) - cmd_line_options[k] = v + options[k] = v - from .compilers import all_languages # Report that [properties]c_args for lang in all_languages: for args in ['{}_args'.format(lang), '{}_link_args'.format(lang)]: @@ -791,28 +801,13 @@ class CoreData: # and per-machine, but when we do this will need to account for that. # For cross builds we need to get the build specifc options if env.meson_options.host != env.meson_options.build and subproject in env.meson_options.build: + if subproject: + template = '{s}:build.{k}' + else: + template = 'build.{k}' for k in builtin_options_per_machine.keys(): if k in env.meson_options.build[subproject]: - cmd_line_options['build.{}'.format(k)] = env.meson_options.build[subproject][k] - - # compiler options are always per-machine - for lang in all_languages: - prefix = '{}_'.format(lang) - for k in env.meson_options.build[subproject]: - if k.startswith(prefix): - cmd_line_options['build.{}'.format(k)] = env.meson_options.build[subproject][k] - - # Override all the above defaults using the command-line arguments - # actually passed to use - cmd_line_options.update(env.cmd_line_options) - env.cmd_line_options = cmd_line_options - - # Create a subset of cmd_line_options, keeping only options for this - # subproject. Also take builtin options if it's the main project. - # Language and backend specific options will be set later when adding - # languages and setting the backend (builtin options must be set first - # to know which backend we'll use). - options = OrderedDict() + options[template.format(s=subproject, k=k)] = env.meson_options.build[subproject][k] # Some options default to environment variables if they are # unset, set those now. 
These will either be overwritten @@ -846,7 +841,7 @@ class CoreData: return text[len(prefix):] return text - for k, v in env.cmd_line_options.items(): + for k, v in cmd_line_options.items(): if subproject: if not k.startswith(subproject + ':'): continue -- cgit v1.1 From 591e6e94b9fccfc49ee7093cb21735a27fd64005 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Fri, 19 Jun 2020 17:01:10 -0700 Subject: Put machine file and cmd line parsing in Environment This creates a full set of option in environment that mirror those in coredata, this mirroring of the coredata structure is convenient because lookups int env (such as when initializing compilers) becomes a straight dict lookup, with no list iteration. It also means that all of the command line and machine files are read and stored in the correct order before they're ever accessed, simplifying the logic of using them. --- mesonbuild/ast/introspection.py | 2 +- mesonbuild/coredata.py | 172 ++++++++++++---------------------------- mesonbuild/environment.py | 130 ++++++++++++++++++++++++++++-- mesonbuild/interpreter.py | 8 +- run_unittests.py | 61 +++++++++++++- 5 files changed, 238 insertions(+), 135 deletions(-) diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py index 142c219..6e6927f 100644 --- a/mesonbuild/ast/introspection.py +++ b/mesonbuild/ast/introspection.py @@ -120,7 +120,7 @@ class IntrospectionInterpreter(AstInterpreter): self.do_subproject(i) self.coredata.init_backend_options(self.backend) - options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')} + options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')} self.coredata.set_options(options) self.func_add_languages(None, proj_langs, None) diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index aaf31aa..724e111 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -20,10 +20,8 @@ from pathlib import PurePath from collections import OrderedDict, defaultdict from .mesonlib import ( MesonException, EnvironmentException, MachineChoice, PerMachine, - OrderedSet, default_libdir, default_libexecdir, default_prefix, - split_args + default_libdir, default_libexecdir, default_prefix, split_args ) -from .envconfig import get_env_var_pair from .wrap import WrapMode import ast import argparse @@ -741,117 +739,54 @@ class CoreData: if not self.is_cross_build(): self.copy_build_options_from_regular_ones() - def set_default_options(self, default_options: T.Mapping[str, str], subproject: str, env: 'Environment') -> None: - # Warn if the user is using two different ways of setting build-type - # options that override each other - if 'buildtype' in env.cmd_line_options and \ - ('optimization' in env.cmd_line_options or 'debug' in env.cmd_line_options): - mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. ' - 'Using both is redundant since they override each other. ' - 'See: https://mesonbuild.com/Builtin-options.html#build-type-options') - - cmd_line_options = OrderedDict() - - from . import optinterpreter - from .compilers import all_languages - if not subproject: - # Set default options as if they were passed to the command line. - # Subprojects can only define default for user options and not yielding - # builtin option. 
- for k, v in chain(default_options.items(), env.meson_options.host.get('', {}).items()): - cmd_line_options[k] = v - - # compiler options are always per-machine, but not per sub-project - if '' in env.meson_options.build: - for lang in all_languages: - prefix = '{}_'.format(lang) - for k in env.meson_options.build['']: - if k.startswith(prefix): - cmd_line_options['build.{}'.format(k)] = env.meson_options.build[subproject][k] - else: - # If the subproject options comes from a machine file, then we need to - # set the option as subproject:option - for k, v in chain(default_options.items(), env.meson_options.host.get('', {}).items(), - env.meson_options.host.get(subproject, {}).items()): - if (k not in builtin_options or builtin_options[k].yielding) \ - and optinterpreter.is_invalid_name(k, log=False): - continue - cmd_line_options['{}:{}'.format(subproject, k)] = v - cmd_line_options.update(env.cmd_line_options) - env.cmd_line_options = cmd_line_options + def set_default_options(self, default_options: 'T.OrderedDict[str, str]', subproject: str, env: 'Environment') -> None: + def make_key(key: str) -> str: + if subproject: + return '{}:{}'.format(subproject, key) + return key options = OrderedDict() - # load the values for user options out of the appropriate machine file, - # then overload the command line - for k, v in env.user_options.get(subproject, {}).items(): - if subproject: - k = '{}:{}'.format(subproject, k) - options[k] = v - - # Report that [properties]c_args - for lang in all_languages: - for args in ['{}_args'.format(lang), '{}_link_args'.format(lang)]: - msg = ('{} in the [properties] section of the machine file is deprecated, ' - 'use the [built-in options] section.') - if args in env.properties.host or args in env.properties.build: - mlog.deprecation(msg.format(args)) - - # Currently we don't support any options that are both per-subproject - # and per-machine, but when we do this will need to account for that. - # For cross builds we need to get the build specifc options - if env.meson_options.host != env.meson_options.build and subproject in env.meson_options.build: - if subproject: - template = '{s}:build.{k}' + # TODO: validate these + from .compilers import all_languages, base_options + lang_prefixes = tuple('{}_'.format(l) for l in all_languages) + # split arguments that can be set now, and those that cannot so they + # can be set later, when they've been initialized. + for k, v in default_options.items(): + if k.startswith(lang_prefixes): + lang, key = k.split('_', 1) + for machine in MachineChoice: + if key not in env.compiler_options[machine][lang]: + env.compiler_options[machine][lang][key] = v + elif k in base_options: + if not subproject and k not in env.base_options: + env.base_options[k] = v else: - template = 'build.{k}' - for k in builtin_options_per_machine.keys(): - if k in env.meson_options.build[subproject]: - options[template.format(s=subproject, k=k)] = env.meson_options.build[subproject][k] - - # Some options default to environment variables if they are - # unset, set those now. These will either be overwritten - # below, or they won't. These should only be set on the first run. - for for_machine in MachineChoice: - p_env_pair = get_env_var_pair(for_machine, self.is_cross_build(), 'PKG_CONFIG_PATH') - if p_env_pair is not None: - p_env_var, p_env = p_env_pair - - # PKG_CONFIG_PATH may contain duplicates, which must be - # removed, else a duplicates-in-array-option warning arises. 
- p_list = list(OrderedSet(p_env.split(':'))) - - key = 'pkg_config_path' - if for_machine == MachineChoice.BUILD: - key = 'build.' + key - - if env.first_invocation: - options[key] = p_list - elif options.get(key, []) != p_list: - mlog.warning( - p_env_var + - ' environment variable has changed ' - 'between configurations, meson ignores this. ' - 'Use -Dpkg_config_path to change pkg-config search ' - 'path instead.' - ) - - def remove_prefix(text, prefix): - if text.startswith(prefix): - return text[len(prefix):] - return text - - for k, v in cmd_line_options.items(): - if subproject: - if not k.startswith(subproject + ':'): - continue - elif k not in builtin_options.keys() \ - and remove_prefix(k, 'build.') not in builtin_options_per_machine.keys(): - if ':' in k: - continue - if optinterpreter.is_invalid_name(k, log=False): + options[make_key(k)] = v + + for k, v in chain(env.meson_options.host.get('', {}).items(), + env.meson_options.host.get(subproject, {}).items()): + options[make_key(k)] = v + + for k, v in chain(env.meson_options.build.get('', {}).items(), + env.meson_options.build.get(subproject, {}).items()): + if k in builtin_options_per_machine: + options[make_key('build.{}'.format(k))] = v + + options.update({make_key(k): v for k, v in env.user_options.get(subproject, {}).items()}) + + # Some options (namely the compiler options) are not preasant in + # coredata until the compiler is fully initialized. As such, we need to + # put those options into env.meson_options, only if they're not already + # in there, as the machine files and command line have precendence. + for k, v in default_options.items(): + if k in builtin_options and not builtin_options[k].yielding: + continue + for machine in MachineChoice: + if machine is MachineChoice.BUILD and not self.is_cross_build(): continue - options[k] = v + if k not in env.meson_options[machine][subproject]: + env.meson_options[machine][subproject][k] = v self.set_options(options, subproject=subproject) @@ -867,24 +802,19 @@ class CoreData: env.is_cross_build(), env.properties[for_machine]).items(): # prefixed compiler options affect just this machine - opt_prefix = for_machine.get_prefix() - user_k = opt_prefix + lang + '_' + k - if user_k in env.cmd_line_options: - o.set_value(env.cmd_line_options[user_k]) + if k in env.compiler_options[for_machine].get(lang, {}): + o.set_value(env.compiler_options[for_machine][lang][k]) self.compiler_options[for_machine][lang].setdefault(k, o) - def process_new_compiler(self, lang: str, comp: T.Type['Compiler'], env: 'Environment') -> None: + def process_new_compiler(self, lang: str, comp: 'Compiler', env: 'Environment') -> None: from . 
import compilers self.compilers[comp.for_machine][lang] = comp - enabled_opts = [] for k, o in comp.get_options().items(): # prefixed compiler options affect just this machine - opt_prefix = comp.for_machine.get_prefix() - user_k = opt_prefix + lang + '_' + k - if user_k in env.cmd_line_options: - o.set_value(env.cmd_line_options[user_k]) + if k in env.compiler_options[comp.for_machine].get(lang, {}): + o.set_value(env.compiler_options[comp.for_machine][lang][k]) self.compiler_options[comp.for_machine][lang].setdefault(k, o) enabled_opts = [] @@ -892,8 +822,8 @@ class CoreData: if optname in self.base_options: continue oobj = compilers.base_options[optname] - if optname in env.cmd_line_options: - oobj.set_value(env.cmd_line_options[optname]) + if optname in env.base_options: + oobj.set_value(env.base_options[optname]) enabled_opts.append(optname) self.base_options[optname] = oobj self.emit_base_options_warnings(enabled_opts) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 7dfffa2..9830b45 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -16,6 +16,7 @@ import os, platform, re, sys, shutil, subprocess import tempfile import shlex import typing as T +import collections from . import coredata from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker, Xc16Linker, C2000Linker, IntelVisualStudioLinker @@ -28,11 +29,13 @@ from . import mlog from .envconfig import ( BinaryTable, MachineInfo, - Properties, known_cpu_families, + Properties, known_cpu_families, get_env_var_pair, ) from . import compilers from .compilers import ( Compiler, + all_languages, + base_options, is_assembly, is_header, is_library, @@ -549,10 +552,10 @@ class Environment: properties = PerMachineDefaultable() # We only need one of these as project options are not per machine - user_options = {} + user_options = collections.defaultdict(dict) # type: T.DefaultDict[str, T.Dict[str, object]] # meson builtin options, as passed through cross or native files - meson_options = PerMachineDefaultable() + meson_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]] ## Setup build machine defaults @@ -564,6 +567,13 @@ class Environment: binaries.build = BinaryTable() properties.build = Properties() + # meson base options + _base_options = {} # type: T.Dict[str, object] + + # Per language compiler arguments + compiler_options = PerMachineDefaultable() # type: PerMachineDefaultable[T.DefaultDict[str, T.Dict[str, object]]] + compiler_options.build = collections.defaultdict(dict) + ## Read in native file(s) to override build machine configuration def load_options(tag: str, store: T.Dict[str, T.Any]) -> None: @@ -573,7 +583,44 @@ class Environment: project = section.split(':')[0] else: project = '' - store[project] = config.get(section, {}) + store[project].update(config.get(section, {})) + + def split_base_options(mopts: T.DefaultDict[str, T.Dict[str, object]]) -> None: + for k, v in list(mopts.get('', {}).items()): + if k in base_options: + _base_options[k] = v + del mopts[k] + + lang_prefixes = tuple('{}_'.format(l) for l in all_languages) + def split_compiler_options(mopts: T.DefaultDict[str, T.Dict[str, object]], machine: MachineChoice) -> None: + for k, v in list(mopts.get('', {}).items()): + if k.startswith(lang_prefixes): + lang, key = k.split('_', 1) + if compiler_options[machine] is None: + compiler_options[machine] = collections.defaultdict(dict) + if lang not in compiler_options[machine]: + 
compiler_options[machine][lang] = collections.defaultdict(dict) + compiler_options[machine][lang][key] = v + del mopts[''][k] + + def move_compiler_options(properties: Properties, compopts: T.Dict[str, T.DefaultDict[str, object]]) -> None: + for k, v in properties.properties.copy().items(): + for lang in all_languages: + if k == '{}_args'.format(lang): + if 'args' not in compopts[lang]: + compopts[lang]['args'] = v + else: + mlog.warning('Ignoring {}_args in [properties] section for those in the [built-in options]'.format(lang)) + elif k == '{}_link_args'.format(lang): + if 'link_args' not in compopts[lang]: + compopts[lang]['link_args'] = v + else: + mlog.warning('Ignoring {}_link_args in [properties] section in favor of the [built-in options] section.') + else: + continue + mlog.deprecation('{} in the [properties] section of the machine file is deprecated, use the [built-in options] section.'.format(k)) + del properties.properties[k] + break if self.coredata.config_files is not None: config = coredata.parse_machine_files(self.coredata.config_files) @@ -584,11 +631,15 @@ class Environment: # the native values if we're doing a cross build if not self.coredata.cross_files: load_options('project options', user_options) - meson_options.build = {} + meson_options.build = collections.defaultdict(dict) if config.get('paths') is not None: mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.') load_options('paths', meson_options.build) load_options('built-in options', meson_options.build) + if not self.coredata.cross_files: + split_base_options(meson_options.build) + split_compiler_options(meson_options.build, MachineChoice.BUILD) + move_compiler_options(properties.build, compiler_options.build) ## Read in cross file(s) to override host machine configuration @@ -601,11 +652,15 @@ class Environment: if 'target_machine' in config: machines.target = MachineInfo.from_literal(config['target_machine']) load_options('project options', user_options) - meson_options.host = {} + meson_options.host = collections.defaultdict(dict) + compiler_options.host = collections.defaultdict(dict) if config.get('paths') is not None: mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.') load_options('paths', meson_options.host) load_options('built-in options', meson_options.host) + split_base_options(meson_options.host) + split_compiler_options(meson_options.host, MachineChoice.HOST) + move_compiler_options(properties.host, compiler_options.host) ## "freeze" now initialized configuration, and "save" to the class. @@ -614,6 +669,67 @@ class Environment: self.properties = properties.default_missing() self.user_options = user_options self.meson_options = meson_options.default_missing() + self.base_options = _base_options + self.compiler_options = compiler_options.default_missing() + + # Some options default to environment variables if they are + # unset, set those now. + + for for_machine in MachineChoice: + p_env_pair = get_env_var_pair(for_machine, self.coredata.is_cross_build(), 'PKG_CONFIG_PATH') + if p_env_pair is not None: + p_env_var, p_env = p_env_pair + + # PKG_CONFIG_PATH may contain duplicates, which must be + # removed, else a duplicates-in-array-option warning arises. 
+ p_list = list(mesonlib.OrderedSet(p_env.split(':'))) + + key = 'pkg_config_path' + + if self.first_invocation: + # Environment variables override config + self.meson_options[for_machine][''][key] = p_list + elif self.meson_options[for_machine][''].get(key, []) != p_list: + mlog.warning( + p_env_var, + 'environment variable does not match configured', + 'between configurations, meson ignores this.', + 'Use -Dpkg_config_path to change pkg-config search', + 'path instead.' + ) + + # Read in command line and populate options + # TODO: validate all of this + all_builtins = set(coredata.builtin_options) | set(coredata.builtin_options_per_machine) | set(coredata.builtin_dir_noprefix_options) + for k, v in options.cmd_line_options.items(): + try: + subproject, k = k.split(':') + except ValueError: + subproject = '' + if k in base_options: + self.base_options[k] = v + elif k.startswith(lang_prefixes): + lang, key = k.split('_', 1) + self.compiler_options.host[lang][key] = v + elif k in all_builtins or k.startswith('backend_'): + self.meson_options.host[subproject][k] = v + elif k.startswith('build.'): + k = k.lstrip('build.') + if k in coredata.builtin_options_per_machine: + if self.meson_options.build is None: + self.meson_options.build = collections.defaultdict(dict) + self.meson_options.build[subproject][k] = v + else: + assert not k.startswith('build.') + self.user_options[subproject][k] = v + + # Warn if the user is using two different ways of setting build-type + # options that override each other + if meson_options.build and 'buildtype' in meson_options.build[''] and \ + ('optimization' in meson_options.build[''] or 'debug' in meson_options.build['']): + mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. ' + 'Using both is redundant since they override each other. ' + 'See: https://mesonbuild.com/Builtin-options.html#build-type-options') exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper') if exe_wrapper is not None: @@ -622,8 +738,6 @@ class Environment: else: self.exe_wrapper = None - self.cmd_line_options = options.cmd_line_options.copy() - # List of potential compilers. if mesonlib.is_windows(): # Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere. 
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 317793d..cf7f282 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -2946,6 +2946,7 @@ external dependencies (including libraries) must go to "dependencies".''') if self.is_subproject(): optname = self.subproject + ':' + optname + for opts in [ self.coredata.base_options, compilers.base_options, self.coredata.builtins, dict(self.coredata.get_prefixed_options_per_machine(self.coredata.builtins_per_machine)), @@ -3031,8 +3032,9 @@ external dependencies (including libraries) must go to "dependencies".''') if self.environment.first_invocation: self.coredata.init_backend_options(backend) - options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')} - self.coredata.set_options(options) + if '' in self.environment.meson_options.host: + options = {k: v for k, v in self.environment.meson_options.host[''].items() if k.startswith('backend_')} + self.coredata.set_options(options) @stringArgs @permittedKwargs(permitted_kwargs['project']) @@ -3065,7 +3067,7 @@ external dependencies (including libraries) must go to "dependencies".''') self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) self.project_default_options = coredata.create_options_dict(self.project_default_options) if self.environment.first_invocation: - default_options = self.project_default_options + default_options = self.project_default_options.copy() default_options.update(self.default_project_options) self.coredata.init_builtins(self.subproject) else: diff --git a/run_unittests.py b/run_unittests.py index 7bab408..21eabde 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -8030,7 +8030,7 @@ class NativeFileTests(BasePlatformTests): for opt, value in [('testoption', 'some other val'), ('other_one', True), ('combo_opt', 'one'), ('array_opt', ['two']), ('integer_opt', 0)]: - config = self.helper_create_native_file({'project options': {'sub:{}'.format(opt): value}}) + config = self.helper_create_native_file({'sub:project options': {opt: value}}) with self.assertRaises(subprocess.CalledProcessError) as cm: self.init(testcase, extra_args=['--native-file', config]) self.assertRegex(cm.exception.stdout, r'Incorrect value to [a-z]+ option') @@ -8076,6 +8076,19 @@ class NativeFileTests(BasePlatformTests): else: self.fail('Did not find werror in build options?') + def test_builtin_options_env_overrides_conf(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_native_file({'built-in options': {'pkg_config_path': '/foo'}}) + + self.init(testcase, extra_args=['--native-file', config], override_envvars={'PKG_CONFIG_PATH': '/bar'}) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/bar']) + break + else: + self.fail('Did not find pkg_config_path in build options?') + def test_builtin_options_subprojects(self): testcase = os.path.join(self.common_test_dir, '102 subproject subdir') config = self.helper_create_native_file({'built-in options': {'default_library': 'both', 'c_args': ['-Dfoo']}, 'sub:built-in options': {'default_library': 'static'}}) @@ -8185,6 +8198,22 @@ class NativeFileTests(BasePlatformTests): else: self.fail('Did not find bindir in build options?') + def test_builtin_options_paths_legacy(self): + testcase = os.path.join(self.common_test_dir, '1 trivial') + config = self.helper_create_native_file({ + 'built-in options': 
{'default_library': 'static'}, + 'paths': {'bindir': 'bar'}, + }) + + self.init(testcase, extra_args=['--native-file', config]) + configuration = self.introspect('--buildoptions') + for each in configuration: + if each['name'] == 'bindir': + self.assertEqual(each['value'], 'bar') + break + else: + self.fail('Did not find bindir in build options?') + class CrossFileTests(BasePlatformTests): @@ -8431,7 +8460,15 @@ class CrossFileTests(BasePlatformTests): cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/cross/path', 'cpp_std': 'c++17'}}) native = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/native/path', 'cpp_std': 'c++14'}}) - self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native]) + # Ensure that PKG_CONFIG_PATH is not set in the environment + with mock.patch.dict('os.environ'): + for k in ['PKG_CONFIG_PATH', 'PKG_CONFIG_PATH_FOR_BUILD']: + try: + del os.environ[k] + except KeyError: + pass + self.init(testcase, extra_args=['--cross-file', cross, '--native-file', native]) + configuration = self.introspect('--buildoptions') found = 0 for each in configuration: @@ -8452,6 +8489,26 @@ class CrossFileTests(BasePlatformTests): break self.assertEqual(found, 4, 'Did not find all sections.') + def test_builtin_options_env_overrides_conf(self): + testcase = os.path.join(self.common_test_dir, '2 cpp') + config = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/foo'}}) + cross = self.helper_create_cross_file({'built-in options': {'pkg_config_path': '/foo'}}) + + self.init(testcase, extra_args=['--native-file', config, '--cross-file', cross], + override_envvars={'PKG_CONFIG_PATH': '/bar', 'PKG_CONFIG_PATH_FOR_BUILD': '/dir'}) + configuration = self.introspect('--buildoptions') + found = 0 + for each in configuration: + if each['name'] == 'pkg_config_path': + self.assertEqual(each['value'], ['/bar']) + found += 1 + elif each['name'] == 'build.pkg_config_path': + self.assertEqual(each['value'], ['/dir']) + found += 1 + if found == 2: + break + self.assertEqual(found, 2, 'Did not find all sections.') + class TAPParserTests(unittest.TestCase): def assert_test(self, events, **kwargs): -- cgit v1.1 From 3a4d8dde52a5755901ec97784e9f3d883162873b Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Jul 2020 19:46:36 -0700 Subject: update version from 0.55. to 0.56 --- docs/markdown/Machine-files.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/markdown/Machine-files.md b/docs/markdown/Machine-files.md index 60c4dd5..5ac66a8 100644 --- a/docs/markdown/Machine-files.md +++ b/docs/markdown/Machine-files.md @@ -48,7 +48,7 @@ The following sections are allowed: ### constants -*Since 0.55.0* +*Since 0.56.0* String and list concatenation is supported using the `+` operator, joining paths is supported using the `/` operator. @@ -172,7 +172,7 @@ An incomplete list of internally used programs that can be overridden here is: ### Paths and Directories -*Deprecated in 0.55.0* use the built-in section instead. +*Deprecated in 0.56.0* use the built-in section instead. As of 0.50.0 paths and directories such as libdir can be defined in the native and cross files in a paths section. These should be strings. @@ -196,13 +196,13 @@ In addition to special data that may be specified in cross files, this section may contain random key value pairs accessed using the `meson.get_external_property()`, or `meson.get_cross_property()`. 
-*Changed in 0.55.0* putting `_args` and `_link_args` in the +*Changed in 0.56.0* putting `_args` and `_link_args` in the properties section has been deprecated, and should be put in the built-in options section. ### Project specific options -*New in 0.55.0* +*New in 0.56.0* Path options are not allowed, those must be set in the `[paths]` section. -- cgit v1.1 From 4d2a17041f0dd54001a7d32b36e75608330f41f5 Mon Sep 17 00:00:00 2001 From: Dylan Baker Date: Thu, 30 Jul 2020 19:46:57 -0700 Subject: run dircondensor.py --- run_unittests.py | 14 +++++++------- test cases/cmake/19 cmake file/foolib.cmake.in | 1 - test cases/cmake/19 cmake file/meson.build | 14 -------------- test cases/cmake/19 cmake file/test.json | 5 ----- test cases/cmake/20 cmake file/foolib.cmake.in | 1 + test cases/cmake/20 cmake file/meson.build | 14 ++++++++++++++ test cases/cmake/20 cmake file/test.json | 5 +++++ test cases/failing/106 number in combo/meson.build | 1 - test cases/failing/106 number in combo/nativefile.ini | 2 -- test cases/failing/106 number in combo/test.json | 5 ----- test cases/failing/107 bool in combo/meson.build | 1 - test cases/failing/107 bool in combo/meson_options.txt | 5 ----- test cases/failing/107 bool in combo/nativefile.ini | 2 -- test cases/failing/107 bool in combo/test.json | 5 ----- test cases/failing/107 number in combo/meson.build | 1 + test cases/failing/107 number in combo/nativefile.ini | 2 ++ test cases/failing/107 number in combo/test.json | 5 +++++ test cases/failing/108 bool in combo/meson.build | 1 + test cases/failing/108 bool in combo/meson_options.txt | 5 +++++ test cases/failing/108 bool in combo/nativefile.ini | 2 ++ test cases/failing/108 bool in combo/test.json | 5 +++++ test cases/unit/75 user options for subproject/.gitignore | 1 - test cases/unit/75 user options for subproject/meson.build | 3 --- test cases/unit/76 pkgconfig prefixes/client/client.c | 8 -------- test cases/unit/76 pkgconfig prefixes/client/meson.build | 3 --- test cases/unit/76 pkgconfig prefixes/val1/meson.build | 5 ----- test cases/unit/76 pkgconfig prefixes/val1/val1.c | 3 --- test cases/unit/76 pkgconfig prefixes/val1/val1.h | 1 - test cases/unit/76 pkgconfig prefixes/val2/meson.build | 8 -------- test cases/unit/76 pkgconfig prefixes/val2/val2.c | 4 ---- test cases/unit/76 pkgconfig prefixes/val2/val2.h | 1 - test cases/unit/76 subdir libdir/meson.build | 2 -- .../unit/76 subdir libdir/subprojects/flub/meson.build | 1 - test cases/unit/77 global-rpath/meson.build | 3 --- test cases/unit/77 global-rpath/rpathified.cpp | 6 ------ test cases/unit/77 global-rpath/yonder/meson.build | 5 ----- test cases/unit/77 global-rpath/yonder/yonder.cpp | 3 --- test cases/unit/77 global-rpath/yonder/yonder.h | 1 - test cases/unit/77 pkgconfig prefixes/client/client.c | 8 ++++++++ test cases/unit/77 pkgconfig prefixes/client/meson.build | 3 +++ test cases/unit/77 pkgconfig prefixes/val1/meson.build | 5 +++++ test cases/unit/77 pkgconfig prefixes/val1/val1.c | 3 +++ test cases/unit/77 pkgconfig prefixes/val1/val1.h | 1 + test cases/unit/77 pkgconfig prefixes/val2/meson.build | 8 ++++++++ test cases/unit/77 pkgconfig prefixes/val2/val2.c | 4 ++++ test cases/unit/77 pkgconfig prefixes/val2/val2.h | 1 + test cases/unit/78 subdir libdir/meson.build | 2 ++ .../unit/78 subdir libdir/subprojects/flub/meson.build | 1 + test cases/unit/78 wrap-git/meson.build | 4 ---- .../subprojects/packagefiles/wrap_git_builddef/meson.build | 3 --- .../unit/78 wrap-git/subprojects/wrap_git_upstream/main.c | 4 ---- .../75 user options for 
subproject/.gitignore | 1 + .../75 user options for subproject/meson.build | 3 +++ test cases/unit/80 global-rpath/meson.build | 3 +++ test cases/unit/80 global-rpath/rpathified.cpp | 6 ++++++ test cases/unit/80 global-rpath/yonder/meson.build | 5 +++++ test cases/unit/80 global-rpath/yonder/yonder.cpp | 3 +++ test cases/unit/80 global-rpath/yonder/yonder.h | 1 + test cases/unit/81 wrap-git/meson.build | 4 ++++ .../subprojects/packagefiles/wrap_git_builddef/meson.build | 3 +++ .../unit/81 wrap-git/subprojects/wrap_git_upstream/main.c | 4 ++++ 61 files changed, 117 insertions(+), 117 deletions(-) delete mode 100644 test cases/cmake/19 cmake file/foolib.cmake.in delete mode 100644 test cases/cmake/19 cmake file/meson.build delete mode 100644 test cases/cmake/19 cmake file/test.json create mode 100644 test cases/cmake/20 cmake file/foolib.cmake.in create mode 100644 test cases/cmake/20 cmake file/meson.build create mode 100644 test cases/cmake/20 cmake file/test.json delete mode 100644 test cases/failing/106 number in combo/meson.build delete mode 100644 test cases/failing/106 number in combo/nativefile.ini delete mode 100644 test cases/failing/106 number in combo/test.json delete mode 100644 test cases/failing/107 bool in combo/meson.build delete mode 100644 test cases/failing/107 bool in combo/meson_options.txt delete mode 100644 test cases/failing/107 bool in combo/nativefile.ini delete mode 100644 test cases/failing/107 bool in combo/test.json create mode 100644 test cases/failing/107 number in combo/meson.build create mode 100644 test cases/failing/107 number in combo/nativefile.ini create mode 100644 test cases/failing/107 number in combo/test.json create mode 100644 test cases/failing/108 bool in combo/meson.build create mode 100644 test cases/failing/108 bool in combo/meson_options.txt create mode 100644 test cases/failing/108 bool in combo/nativefile.ini create mode 100644 test cases/failing/108 bool in combo/test.json delete mode 100644 test cases/unit/75 user options for subproject/.gitignore delete mode 100644 test cases/unit/75 user options for subproject/meson.build delete mode 100644 test cases/unit/76 pkgconfig prefixes/client/client.c delete mode 100644 test cases/unit/76 pkgconfig prefixes/client/meson.build delete mode 100644 test cases/unit/76 pkgconfig prefixes/val1/meson.build delete mode 100644 test cases/unit/76 pkgconfig prefixes/val1/val1.c delete mode 100644 test cases/unit/76 pkgconfig prefixes/val1/val1.h delete mode 100644 test cases/unit/76 pkgconfig prefixes/val2/meson.build delete mode 100644 test cases/unit/76 pkgconfig prefixes/val2/val2.c delete mode 100644 test cases/unit/76 pkgconfig prefixes/val2/val2.h delete mode 100644 test cases/unit/76 subdir libdir/meson.build delete mode 100644 test cases/unit/76 subdir libdir/subprojects/flub/meson.build delete mode 100644 test cases/unit/77 global-rpath/meson.build delete mode 100644 test cases/unit/77 global-rpath/rpathified.cpp delete mode 100644 test cases/unit/77 global-rpath/yonder/meson.build delete mode 100644 test cases/unit/77 global-rpath/yonder/yonder.cpp delete mode 100644 test cases/unit/77 global-rpath/yonder/yonder.h create mode 100644 test cases/unit/77 pkgconfig prefixes/client/client.c create mode 100644 test cases/unit/77 pkgconfig prefixes/client/meson.build create mode 100644 test cases/unit/77 pkgconfig prefixes/val1/meson.build create mode 100644 test cases/unit/77 pkgconfig prefixes/val1/val1.c create mode 100644 test cases/unit/77 pkgconfig prefixes/val1/val1.h create mode 100644 test 
cases/unit/77 pkgconfig prefixes/val2/meson.build create mode 100644 test cases/unit/77 pkgconfig prefixes/val2/val2.c create mode 100644 test cases/unit/77 pkgconfig prefixes/val2/val2.h create mode 100644 test cases/unit/78 subdir libdir/meson.build create mode 100644 test cases/unit/78 subdir libdir/subprojects/flub/meson.build delete mode 100644 test cases/unit/78 wrap-git/meson.build delete mode 100644 test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build delete mode 100644 test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c create mode 100644 test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore create mode 100644 test cases/unit/79 user options for subproject/75 user options for subproject/meson.build create mode 100644 test cases/unit/80 global-rpath/meson.build create mode 100644 test cases/unit/80 global-rpath/rpathified.cpp create mode 100644 test cases/unit/80 global-rpath/yonder/meson.build create mode 100644 test cases/unit/80 global-rpath/yonder/yonder.cpp create mode 100644 test cases/unit/80 global-rpath/yonder/yonder.h create mode 100644 test cases/unit/81 wrap-git/meson.build create mode 100644 test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build create mode 100644 test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c diff --git a/run_unittests.py b/run_unittests.py index 21eabde..6d7eba2 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -5014,7 +5014,7 @@ recommended as it is not supported on some platforms''') def test_wrap_git(self): with tempfile.TemporaryDirectory() as tmpdir: srcdir = os.path.join(tmpdir, 'src') - shutil.copytree(os.path.join(self.unit_test_dir, '78 wrap-git'), srcdir) + shutil.copytree(os.path.join(self.unit_test_dir, '81 wrap-git'), srcdir) upstream = os.path.join(srcdir, 'subprojects', 'wrap_git_upstream') upstream_uri = Path(upstream).as_uri() _git_init(upstream) @@ -6473,7 +6473,7 @@ class LinuxlikeTests(BasePlatformTests): if is_osx(): raise unittest.SkipTest('Global RPATHs via LDFLAGS not yet supported on MacOS (does anybody need it?)') - testdir = os.path.join(self.unit_test_dir, '77 global-rpath') + testdir = os.path.join(self.unit_test_dir, '80 global-rpath') oldinstalldir = self.installdir # Build and install an external library without DESTDIR. 
@@ -6846,7 +6846,7 @@ class LinuxlikeTests(BasePlatformTests): oldinstalldir = self.installdir # Build and install both external libraries without DESTDIR - val1dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val1') + val1dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val1') val1prefix = os.path.join(oldinstalldir, 'val1') self.prefix = val1prefix self.installdir = val1prefix @@ -6857,7 +6857,7 @@ class LinuxlikeTests(BasePlatformTests): env1 = {} env1['PKG_CONFIG_PATH'] = os.path.join(val1prefix, self.libdir, 'pkgconfig') - val2dir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'val2') + val2dir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'val2') val2prefix = os.path.join(oldinstalldir, 'val2') self.prefix = val2prefix self.installdir = val2prefix @@ -6869,7 +6869,7 @@ class LinuxlikeTests(BasePlatformTests): # Build, install, and run the client program env2 = {} env2['PKG_CONFIG_PATH'] = os.path.join(val2prefix, self.libdir, 'pkgconfig') - testdir = os.path.join(self.unit_test_dir, '76 pkgconfig prefixes', 'client') + testdir = os.path.join(self.unit_test_dir, '77 pkgconfig prefixes', 'client') testprefix = os.path.join(oldinstalldir, 'client') self.prefix = testprefix self.installdir = testprefix @@ -7180,7 +7180,7 @@ class LinuxCrossArmTests(BaseLinuxCrossTests): def test_cross_libdir_subproject(self): # Guard against a regression where calling "subproject" # would reset the value of libdir to its default value. - testdir = os.path.join(self.unit_test_dir, '76 subdir libdir') + testdir = os.path.join(self.unit_test_dir, '78 subdir libdir') self.init(testdir, extra_args=['--libdir=fuf']) for i in self.introspect('--buildoptions'): if i['name'] == 'libdir': @@ -8017,7 +8017,7 @@ class NativeFileTests(BasePlatformTests): self.init(testcase, extra_args=['--native-file', config, '-Dother_one=false']) def test_user_options_subproject(self): - testcase = os.path.join(self.unit_test_dir, '75 user options for subproject') + testcase = os.path.join(self.unit_test_dir, '79 user options for subproject') s = os.path.join(testcase, 'subprojects') if not os.path.exists(s): diff --git a/test cases/cmake/19 cmake file/foolib.cmake.in b/test cases/cmake/19 cmake file/foolib.cmake.in deleted file mode 100644 index 16e992b..0000000 --- a/test cases/cmake/19 cmake file/foolib.cmake.in +++ /dev/null @@ -1 +0,0 @@ -@foo@ diff --git a/test cases/cmake/19 cmake file/meson.build b/test cases/cmake/19 cmake file/meson.build deleted file mode 100644 index 758bbee..0000000 --- a/test cases/cmake/19 cmake file/meson.build +++ /dev/null @@ -1,14 +0,0 @@ -project( - 'cmake config file', -) - -cmake = import('cmake') - -cmake_conf = configuration_data() -cmake_conf.set_quoted('foo', 'bar') -cmake.configure_package_config_file( - name : 'foolib', - input : 'foolib.cmake.in', - install_dir : get_option('libdir') / 'cmake', - configuration : cmake_conf, -) diff --git a/test cases/cmake/19 cmake file/test.json b/test cases/cmake/19 cmake file/test.json deleted file mode 100644 index a8c4ba3..0000000 --- a/test cases/cmake/19 cmake file/test.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "installed": [ - {"file": "usr/lib/cmake/foolibConfig.cmake", "type": "file"} - ] -} diff --git a/test cases/cmake/20 cmake file/foolib.cmake.in b/test cases/cmake/20 cmake file/foolib.cmake.in new file mode 100644 index 0000000..16e992b --- /dev/null +++ b/test cases/cmake/20 cmake file/foolib.cmake.in @@ -0,0 +1 @@ +@foo@ diff --git a/test cases/cmake/20 cmake file/meson.build 
b/test cases/cmake/20 cmake file/meson.build new file mode 100644 index 0000000..758bbee --- /dev/null +++ b/test cases/cmake/20 cmake file/meson.build @@ -0,0 +1,14 @@ +project( + 'cmake config file', +) + +cmake = import('cmake') + +cmake_conf = configuration_data() +cmake_conf.set_quoted('foo', 'bar') +cmake.configure_package_config_file( + name : 'foolib', + input : 'foolib.cmake.in', + install_dir : get_option('libdir') / 'cmake', + configuration : cmake_conf, +) diff --git a/test cases/cmake/20 cmake file/test.json b/test cases/cmake/20 cmake file/test.json new file mode 100644 index 0000000..a8c4ba3 --- /dev/null +++ b/test cases/cmake/20 cmake file/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"file": "usr/lib/cmake/foolibConfig.cmake", "type": "file"} + ] +} diff --git a/test cases/failing/106 number in combo/meson.build b/test cases/failing/106 number in combo/meson.build deleted file mode 100644 index 1a647df..0000000 --- a/test cases/failing/106 number in combo/meson.build +++ /dev/null @@ -1 +0,0 @@ -project('number in combo') diff --git a/test cases/failing/106 number in combo/nativefile.ini b/test cases/failing/106 number in combo/nativefile.ini deleted file mode 100644 index 55f10fc..0000000 --- a/test cases/failing/106 number in combo/nativefile.ini +++ /dev/null @@ -1,2 +0,0 @@ -[built-in options] -optimization = 1 diff --git a/test cases/failing/106 number in combo/test.json b/test cases/failing/106 number in combo/test.json deleted file mode 100644 index a32c358..0000000 --- a/test cases/failing/106 number in combo/test.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "stdout": [ - { "line": "test cases/failing/106 number in combo/meson.build:1:0: ERROR: Value \"1\" (of type \"number\") for combo option \"Optimization level\" is not one of the choices. Possible choices are (as string): \"0\", \"g\", \"1\", \"2\", \"3\", \"s\"." } - ] -} diff --git a/test cases/failing/107 bool in combo/meson.build b/test cases/failing/107 bool in combo/meson.build deleted file mode 100644 index c5efd67..0000000 --- a/test cases/failing/107 bool in combo/meson.build +++ /dev/null @@ -1 +0,0 @@ -project('bool in combo') diff --git a/test cases/failing/107 bool in combo/meson_options.txt b/test cases/failing/107 bool in combo/meson_options.txt deleted file mode 100644 index 0c8f5de..0000000 --- a/test cases/failing/107 bool in combo/meson_options.txt +++ /dev/null @@ -1,5 +0,0 @@ -option( - 'opt', - type : 'combo', - choices : ['true', 'false'] -) diff --git a/test cases/failing/107 bool in combo/nativefile.ini b/test cases/failing/107 bool in combo/nativefile.ini deleted file mode 100644 index b423957..0000000 --- a/test cases/failing/107 bool in combo/nativefile.ini +++ /dev/null @@ -1,2 +0,0 @@ -[project options] -opt = true diff --git a/test cases/failing/107 bool in combo/test.json b/test cases/failing/107 bool in combo/test.json deleted file mode 100644 index 37218e8..0000000 --- a/test cases/failing/107 bool in combo/test.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "stdout": [ - { "line": "test cases/failing/107 bool in combo/meson.build:1:0: ERROR: Value \"True\" (of type \"boolean\") for combo option \"opt\" is not one of the choices. Possible choices are (as string): \"true\", \"false\"." 
} - ] -} diff --git a/test cases/failing/107 number in combo/meson.build b/test cases/failing/107 number in combo/meson.build new file mode 100644 index 0000000..1a647df --- /dev/null +++ b/test cases/failing/107 number in combo/meson.build @@ -0,0 +1 @@ +project('number in combo') diff --git a/test cases/failing/107 number in combo/nativefile.ini b/test cases/failing/107 number in combo/nativefile.ini new file mode 100644 index 0000000..55f10fc --- /dev/null +++ b/test cases/failing/107 number in combo/nativefile.ini @@ -0,0 +1,2 @@ +[built-in options] +optimization = 1 diff --git a/test cases/failing/107 number in combo/test.json b/test cases/failing/107 number in combo/test.json new file mode 100644 index 0000000..f5aeb4e --- /dev/null +++ b/test cases/failing/107 number in combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/107 number in combo/meson.build:1:0: ERROR: Value \"1\" (of type \"number\") for combo option \"Optimization level\" is not one of the choices. Possible choices are (as string): \"0\", \"g\", \"1\", \"2\", \"3\", \"s\"." } + ] +} diff --git a/test cases/failing/108 bool in combo/meson.build b/test cases/failing/108 bool in combo/meson.build new file mode 100644 index 0000000..c5efd67 --- /dev/null +++ b/test cases/failing/108 bool in combo/meson.build @@ -0,0 +1 @@ +project('bool in combo') diff --git a/test cases/failing/108 bool in combo/meson_options.txt b/test cases/failing/108 bool in combo/meson_options.txt new file mode 100644 index 0000000..0c8f5de --- /dev/null +++ b/test cases/failing/108 bool in combo/meson_options.txt @@ -0,0 +1,5 @@ +option( + 'opt', + type : 'combo', + choices : ['true', 'false'] +) diff --git a/test cases/failing/108 bool in combo/nativefile.ini b/test cases/failing/108 bool in combo/nativefile.ini new file mode 100644 index 0000000..b423957 --- /dev/null +++ b/test cases/failing/108 bool in combo/nativefile.ini @@ -0,0 +1,2 @@ +[project options] +opt = true diff --git a/test cases/failing/108 bool in combo/test.json b/test cases/failing/108 bool in combo/test.json new file mode 100644 index 0000000..729ad3d --- /dev/null +++ b/test cases/failing/108 bool in combo/test.json @@ -0,0 +1,5 @@ +{ + "stdout": [ + { "line": "test cases/failing/108 bool in combo/meson.build:1:0: ERROR: Value \"True\" (of type \"boolean\") for combo option \"opt\" is not one of the choices. Possible choices are (as string): \"true\", \"false\"." 
} + ] +} diff --git a/test cases/unit/75 user options for subproject/.gitignore b/test cases/unit/75 user options for subproject/.gitignore deleted file mode 100644 index 4976afc..0000000 --- a/test cases/unit/75 user options for subproject/.gitignore +++ /dev/null @@ -1 +0,0 @@ -subprojects/* diff --git a/test cases/unit/75 user options for subproject/meson.build b/test cases/unit/75 user options for subproject/meson.build deleted file mode 100644 index 0bc395b..0000000 --- a/test cases/unit/75 user options for subproject/meson.build +++ /dev/null @@ -1,3 +0,0 @@ -project('user option for subproject') - -p = subproject('sub') diff --git a/test cases/unit/76 pkgconfig prefixes/client/client.c b/test cases/unit/76 pkgconfig prefixes/client/client.c deleted file mode 100644 index be9bead..0000000 --- a/test cases/unit/76 pkgconfig prefixes/client/client.c +++ /dev/null @@ -1,8 +0,0 @@ -#include -#include - -int main(int argc, char **argv) -{ - printf("%d\n", val2()); - return 0; -} diff --git a/test cases/unit/76 pkgconfig prefixes/client/meson.build b/test cases/unit/76 pkgconfig prefixes/client/meson.build deleted file mode 100644 index 491937b..0000000 --- a/test cases/unit/76 pkgconfig prefixes/client/meson.build +++ /dev/null @@ -1,3 +0,0 @@ -project('client', 'c') -val2_dep = dependency('val2') -executable('client', 'client.c', dependencies : [val2_dep], install: true) diff --git a/test cases/unit/76 pkgconfig prefixes/val1/meson.build b/test cases/unit/76 pkgconfig prefixes/val1/meson.build deleted file mode 100644 index cc63e31..0000000 --- a/test cases/unit/76 pkgconfig prefixes/val1/meson.build +++ /dev/null @@ -1,5 +0,0 @@ -project('val1', 'c') -val1 = shared_library('val1', 'val1.c', install: true) -install_headers('val1.h') -pkgconfig = import('pkgconfig') -pkgconfig.generate(val1, libraries : ['-Wl,-rpath,${libdir}']) diff --git a/test cases/unit/76 pkgconfig prefixes/val1/val1.c b/test cases/unit/76 pkgconfig prefixes/val1/val1.c deleted file mode 100644 index 591e521..0000000 --- a/test cases/unit/76 pkgconfig prefixes/val1/val1.c +++ /dev/null @@ -1,3 +0,0 @@ -#include "val1.h" - -int val1(void) { return 1; } diff --git a/test cases/unit/76 pkgconfig prefixes/val1/val1.h b/test cases/unit/76 pkgconfig prefixes/val1/val1.h deleted file mode 100644 index 6bd435e..0000000 --- a/test cases/unit/76 pkgconfig prefixes/val1/val1.h +++ /dev/null @@ -1 +0,0 @@ -int val1(void); diff --git a/test cases/unit/76 pkgconfig prefixes/val2/meson.build b/test cases/unit/76 pkgconfig prefixes/val2/meson.build deleted file mode 100644 index ce69481..0000000 --- a/test cases/unit/76 pkgconfig prefixes/val2/meson.build +++ /dev/null @@ -1,8 +0,0 @@ -project('val2', 'c') -val1_dep = dependency('val1') -val2 = shared_library('val2', 'val2.c', - dependencies : [val1_dep], - install: true) -install_headers('val2.h') -pkgconfig = import('pkgconfig') -pkgconfig.generate(val2, libraries : ['-Wl,-rpath,${libdir}']) diff --git a/test cases/unit/76 pkgconfig prefixes/val2/val2.c b/test cases/unit/76 pkgconfig prefixes/val2/val2.c deleted file mode 100644 index d7d4857..0000000 --- a/test cases/unit/76 pkgconfig prefixes/val2/val2.c +++ /dev/null @@ -1,4 +0,0 @@ -#include "val1.h" -#include "val2.h" - -int val2(void) { return val1() + 2; } diff --git a/test cases/unit/76 pkgconfig prefixes/val2/val2.h b/test cases/unit/76 pkgconfig prefixes/val2/val2.h deleted file mode 100644 index 995023d..0000000 --- a/test cases/unit/76 pkgconfig prefixes/val2/val2.h +++ /dev/null @@ -1 +0,0 @@ -int val2(void); diff 
--git a/test cases/unit/76 subdir libdir/meson.build b/test cases/unit/76 subdir libdir/meson.build deleted file mode 100644 index 5099c91..0000000 --- a/test cases/unit/76 subdir libdir/meson.build +++ /dev/null @@ -1,2 +0,0 @@ -project('toplevel', 'c') -subproject('flub') diff --git a/test cases/unit/76 subdir libdir/subprojects/flub/meson.build b/test cases/unit/76 subdir libdir/subprojects/flub/meson.build deleted file mode 100644 index 7bfd2c5..0000000 --- a/test cases/unit/76 subdir libdir/subprojects/flub/meson.build +++ /dev/null @@ -1 +0,0 @@ -project('subflub', 'c') diff --git a/test cases/unit/77 global-rpath/meson.build b/test cases/unit/77 global-rpath/meson.build deleted file mode 100644 index c67d9e0..0000000 --- a/test cases/unit/77 global-rpath/meson.build +++ /dev/null @@ -1,3 +0,0 @@ -project('global-rpath', 'cpp') -yonder_dep = dependency('yonder') -executable('rpathified', 'rpathified.cpp', dependencies: [yonder_dep], install: true) diff --git a/test cases/unit/77 global-rpath/rpathified.cpp b/test cases/unit/77 global-rpath/rpathified.cpp deleted file mode 100644 index 3788906..0000000 --- a/test cases/unit/77 global-rpath/rpathified.cpp +++ /dev/null @@ -1,6 +0,0 @@ -#include -#include -int main(int argc, char **argv) -{ - return strcmp(yonder(), "AB54 6BR"); -} diff --git a/test cases/unit/77 global-rpath/yonder/meson.build b/test cases/unit/77 global-rpath/yonder/meson.build deleted file mode 100644 index e32f383..0000000 --- a/test cases/unit/77 global-rpath/yonder/meson.build +++ /dev/null @@ -1,5 +0,0 @@ -project('yonder', 'cpp') -yonder = shared_library('yonder', 'yonder.cpp', install: true) -install_headers('yonder.h') -pkgconfig = import('pkgconfig') -pkgconfig.generate(yonder) diff --git a/test cases/unit/77 global-rpath/yonder/yonder.cpp b/test cases/unit/77 global-rpath/yonder/yonder.cpp deleted file mode 100644 index b182d34..0000000 --- a/test cases/unit/77 global-rpath/yonder/yonder.cpp +++ /dev/null @@ -1,3 +0,0 @@ -#include "yonder.h" - -char *yonder(void) { return "AB54 6BR"; } diff --git a/test cases/unit/77 global-rpath/yonder/yonder.h b/test cases/unit/77 global-rpath/yonder/yonder.h deleted file mode 100644 index 9d9ad16..0000000 --- a/test cases/unit/77 global-rpath/yonder/yonder.h +++ /dev/null @@ -1 +0,0 @@ -char *yonder(void); diff --git a/test cases/unit/77 pkgconfig prefixes/client/client.c b/test cases/unit/77 pkgconfig prefixes/client/client.c new file mode 100644 index 0000000..be9bead --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/client/client.c @@ -0,0 +1,8 @@ +#include +#include + +int main(int argc, char **argv) +{ + printf("%d\n", val2()); + return 0; +} diff --git a/test cases/unit/77 pkgconfig prefixes/client/meson.build b/test cases/unit/77 pkgconfig prefixes/client/meson.build new file mode 100644 index 0000000..491937b --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/client/meson.build @@ -0,0 +1,3 @@ +project('client', 'c') +val2_dep = dependency('val2') +executable('client', 'client.c', dependencies : [val2_dep], install: true) diff --git a/test cases/unit/77 pkgconfig prefixes/val1/meson.build b/test cases/unit/77 pkgconfig prefixes/val1/meson.build new file mode 100644 index 0000000..cc63e31 --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/val1/meson.build @@ -0,0 +1,5 @@ +project('val1', 'c') +val1 = shared_library('val1', 'val1.c', install: true) +install_headers('val1.h') +pkgconfig = import('pkgconfig') +pkgconfig.generate(val1, libraries : ['-Wl,-rpath,${libdir}']) diff --git a/test 
cases/unit/77 pkgconfig prefixes/val1/val1.c b/test cases/unit/77 pkgconfig prefixes/val1/val1.c new file mode 100644 index 0000000..591e521 --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/val1/val1.c @@ -0,0 +1,3 @@ +#include "val1.h" + +int val1(void) { return 1; } diff --git a/test cases/unit/77 pkgconfig prefixes/val1/val1.h b/test cases/unit/77 pkgconfig prefixes/val1/val1.h new file mode 100644 index 0000000..6bd435e --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/val1/val1.h @@ -0,0 +1 @@ +int val1(void); diff --git a/test cases/unit/77 pkgconfig prefixes/val2/meson.build b/test cases/unit/77 pkgconfig prefixes/val2/meson.build new file mode 100644 index 0000000..ce69481 --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/val2/meson.build @@ -0,0 +1,8 @@ +project('val2', 'c') +val1_dep = dependency('val1') +val2 = shared_library('val2', 'val2.c', + dependencies : [val1_dep], + install: true) +install_headers('val2.h') +pkgconfig = import('pkgconfig') +pkgconfig.generate(val2, libraries : ['-Wl,-rpath,${libdir}']) diff --git a/test cases/unit/77 pkgconfig prefixes/val2/val2.c b/test cases/unit/77 pkgconfig prefixes/val2/val2.c new file mode 100644 index 0000000..d7d4857 --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/val2/val2.c @@ -0,0 +1,4 @@ +#include "val1.h" +#include "val2.h" + +int val2(void) { return val1() + 2; } diff --git a/test cases/unit/77 pkgconfig prefixes/val2/val2.h b/test cases/unit/77 pkgconfig prefixes/val2/val2.h new file mode 100644 index 0000000..995023d --- /dev/null +++ b/test cases/unit/77 pkgconfig prefixes/val2/val2.h @@ -0,0 +1 @@ +int val2(void); diff --git a/test cases/unit/78 subdir libdir/meson.build b/test cases/unit/78 subdir libdir/meson.build new file mode 100644 index 0000000..5099c91 --- /dev/null +++ b/test cases/unit/78 subdir libdir/meson.build @@ -0,0 +1,2 @@ +project('toplevel', 'c') +subproject('flub') diff --git a/test cases/unit/78 subdir libdir/subprojects/flub/meson.build b/test cases/unit/78 subdir libdir/subprojects/flub/meson.build new file mode 100644 index 0000000..7bfd2c5 --- /dev/null +++ b/test cases/unit/78 subdir libdir/subprojects/flub/meson.build @@ -0,0 +1 @@ +project('subflub', 'c') diff --git a/test cases/unit/78 wrap-git/meson.build b/test cases/unit/78 wrap-git/meson.build deleted file mode 100644 index b0af30a..0000000 --- a/test cases/unit/78 wrap-git/meson.build +++ /dev/null @@ -1,4 +0,0 @@ -project('test-wrap-git') - -exe = subproject('wrap_git').get_variable('exe') -test('test1', exe) diff --git a/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build b/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build deleted file mode 100644 index 2570f77..0000000 --- a/test cases/unit/78 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build +++ /dev/null @@ -1,3 +0,0 @@ -project('foo', 'c') - -exe = executable('app', 'main.c') diff --git a/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c b/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c deleted file mode 100644 index 8488f4e..0000000 --- a/test cases/unit/78 wrap-git/subprojects/wrap_git_upstream/main.c +++ /dev/null @@ -1,4 +0,0 @@ -int main(void) -{ - return 0; -} diff --git a/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore b/test cases/unit/79 user options for subproject/75 user options for subproject/.gitignore new file mode 100644 index 0000000..4976afc --- /dev/null +++ b/test cases/unit/79 
user options for subproject/75 user options for subproject/.gitignore @@ -0,0 +1 @@ +subprojects/* diff --git a/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build b/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build new file mode 100644 index 0000000..0bc395b --- /dev/null +++ b/test cases/unit/79 user options for subproject/75 user options for subproject/meson.build @@ -0,0 +1,3 @@ +project('user option for subproject') + +p = subproject('sub') diff --git a/test cases/unit/80 global-rpath/meson.build b/test cases/unit/80 global-rpath/meson.build new file mode 100644 index 0000000..c67d9e0 --- /dev/null +++ b/test cases/unit/80 global-rpath/meson.build @@ -0,0 +1,3 @@ +project('global-rpath', 'cpp') +yonder_dep = dependency('yonder') +executable('rpathified', 'rpathified.cpp', dependencies: [yonder_dep], install: true) diff --git a/test cases/unit/80 global-rpath/rpathified.cpp b/test cases/unit/80 global-rpath/rpathified.cpp new file mode 100644 index 0000000..3788906 --- /dev/null +++ b/test cases/unit/80 global-rpath/rpathified.cpp @@ -0,0 +1,6 @@ +#include <yonder.h> +#include <string.h> +int main(int argc, char **argv) +{ + return strcmp(yonder(), "AB54 6BR"); +} diff --git a/test cases/unit/80 global-rpath/yonder/meson.build b/test cases/unit/80 global-rpath/yonder/meson.build new file mode 100644 index 0000000..e32f383 --- /dev/null +++ b/test cases/unit/80 global-rpath/yonder/meson.build @@ -0,0 +1,5 @@ +project('yonder', 'cpp') +yonder = shared_library('yonder', 'yonder.cpp', install: true) +install_headers('yonder.h') +pkgconfig = import('pkgconfig') +pkgconfig.generate(yonder) diff --git a/test cases/unit/80 global-rpath/yonder/yonder.cpp b/test cases/unit/80 global-rpath/yonder/yonder.cpp new file mode 100644 index 0000000..b182d34 --- /dev/null +++ b/test cases/unit/80 global-rpath/yonder/yonder.cpp @@ -0,0 +1,3 @@ +#include "yonder.h" + -char *yonder(void) { return "AB54 6BR"; } diff --git a/test cases/unit/80 global-rpath/yonder/yonder.h b/test cases/unit/80 global-rpath/yonder/yonder.h new file mode 100644 index 0000000..9d9ad16 --- /dev/null +++ b/test cases/unit/80 global-rpath/yonder/yonder.h @@ -0,0 +1 @@ +char *yonder(void); diff --git a/test cases/unit/81 wrap-git/meson.build b/test cases/unit/81 wrap-git/meson.build new file mode 100644 index 0000000..b0af30a --- /dev/null +++ b/test cases/unit/81 wrap-git/meson.build @@ -0,0 +1,4 @@ +project('test-wrap-git') + +exe = subproject('wrap_git').get_variable('exe') +test('test1', exe) diff --git a/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build b/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build new file mode 100644 index 0000000..2570f77 --- /dev/null +++ b/test cases/unit/81 wrap-git/subprojects/packagefiles/wrap_git_builddef/meson.build @@ -0,0 +1,3 @@ +project('foo', 'c') + +exe = executable('app', 'main.c') diff --git a/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c b/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c new file mode 100644 index 0000000..8488f4e --- /dev/null +++ b/test cases/unit/81 wrap-git/subprojects/wrap_git_upstream/main.c @@ -0,0 +1,4 @@ +int main(void) +{ + return 0; +} -- cgit v1.1 From 894623ad5aabaac06fab219173183c4533af74ef Mon Sep 17 00:00:00 2001 From: Ebrahim Byagowi Date: Sun, 2 Aug 2020 14:13:00 +0430 Subject: docs/users: Add HarfBuzz [skip ci] Proudly a meson user now --- docs/markdown/Users.md | 1 + 1 file changed, 1 insertion(+)
diff --git a/docs/markdown/Users.md b/docs/markdown/Users.md index 49d30a4..50cd27a 100644 --- a/docs/markdown/Users.md +++ b/docs/markdown/Users.md @@ -56,6 +56,7 @@ topic](https://github.com/topics/meson). - [GtkDApp](https://gitlab.com/csoriano/GtkDApp), an application template for developing Flatpak apps with Gtk+ and D - [GVfs](https://git.gnome.org/browse/gvfs/), a userspace virtual filesystem designed to work with the I/O abstraction of GIO - [Hardcode-Tray](https://github.com/bil-elmoussaoui/Hardcode-Tray), fixes hardcoded tray icons in Linux + - [HarfBuzz](https://github.com/harfbuzz/harfbuzz), a text shaping engine - [HelenOS](http://helenos.org), a portable microkernel-based multiserver operating system - [HexChat](https://github.com/hexchat/hexchat), a cross-platform IRC client in C - [IGT](https://gitlab.freedesktop.org/drm/igt-gpu-tools), Linux kernel graphics driver test suite -- cgit v1.1 From 70edf82c6c77902cd64f44848302bbac92d611d8 Mon Sep 17 00:00:00 2001 From: Antony Chan Date: Thu, 2 Jul 2020 13:16:21 -0700 Subject: Make meson recognize the Qualcomm LLVM toolchain Meson calls `path/to/clang++ --version` to guess which build toolchain the user has picked to build the source code. For the Qualcomm LLVM toolchain, this command has an unusual output, as shown below: ``` clang version 8.0.12 Snapdragon LLVM ARM Compiler 8.0.12 (based on llvm.org 7.0+) Target: arm-unknown-linux-gnueabi Thread model: posix Repository: (ssh://git-hexagon-aus.qualcomm.com:...) InstalledDir: /pkg/qct/software/llvm/release/arm/8.0.12/bin ``` Another unusual pattern is the output of `path/to/ld.qcld --version`: ``` ARM Linker from Snapdragon LLVM ARM Compiler Version 8.0.12 ARM Linker based on LLVM version: 8.0 ``` The Meson logic is modified accordingly so that Meson can correctly determine the toolchain as "LLVM aarch64 cross-compiler on GNU/Linux64 OS". This is the expected output of `meson --native-file native_file.ini --cross-file cross_file.ini build/aarch64-debug/`: ``` ... C++ compiler for the host machine: ... (clang 8.0.12 "clang version 8.0.12") C++ linker for the host machine: ... ld.lld 8.0.12 ... ``` --- mesonbuild/compilers/mixins/clang.py | 5 +++++ mesonbuild/environment.py | 8 ++++++-- mesonbuild/linkers.py | 6 ++++++ 3 files changed, 17 insertions(+), 2 deletions(-) diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py index ecfbc64..7525c12 100644 --- a/mesonbuild/compilers/mixins/clang.py +++ b/mesonbuild/compilers/mixins/clang.py @@ -113,6 +113,11 @@ class ClangCompiler(GnuLikeCompiler): # (and other gcc-like compilers) cannot. This is becuse clang (being # llvm based) is retargetable, while GCC is not.
# + + # qcld: Qualcomm Snapdragon linker, based on LLVM + if linker == 'qcld': + return ['-fuse-ld=qcld'] + if shutil.which(linker): if not shutil.which(linker): raise mesonlib.MesonException( diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 9830b45..bf75c80 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -55,6 +55,7 @@ from .linkers import ( GnuBFDDynamicLinker, GnuGoldDynamicLinker, LLVMDynamicLinker, + QualcommLLVMDynamicLinker, MSVCDynamicLinker, OptlinkDynamicLinker, PGIDynamicLinker, @@ -1028,10 +1029,13 @@ class Environment: check_args += override _, o, e = Popen_safe(compiler + check_args) - v = search_version(o) + v = search_version(o + e) if o.startswith('LLD'): linker = LLVMDynamicLinker( compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker + elif 'Snapdragon' in e and 'LLVM' in e: + linker = QualcommLLVMDynamicLinker( + compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v) # type: DynamicLinker elif e.startswith('lld-link: '): # The LLD MinGW frontend didn't respond to --version before version 9.0.0, # and produced an error message about failing to link (when no object @@ -1227,7 +1231,7 @@ class Environment: return cls( compiler, version, for_machine, is_cross, info, exe_wrap, target, linker=linker) - if 'clang' in out: + if 'clang' in out or 'Clang' in out: linker = None defines = self.get_clang_compiler_defines(compiler) diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index fa30b9a..3ce7111 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -927,6 +927,12 @@ class ArmClangDynamicLinker(ArmDynamicLinker): def import_library_args(self, implibname: str) -> T.List[str]: return ['--symdefs=' + implibname] +class QualcommLLVMDynamicLinker(LLVMDynamicLinker): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # ARM Linker from Snapdragon LLVM ARM Compiler + self.id = 'ld.qcld' class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): -- cgit v1.1
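The Qualcomm patch above detects the Snapdragon toolchain from the `--version` banner, searching stderr as well as stdout before extracting a version number. The following is a minimal, standalone sketch of that detection idea only; it is not Meson's real code, and `probe_linker` plus its return values are hypothetical names invented for illustration.

```python
# Hypothetical sketch -- not Meson's actual API. Only the 'Snapdragon'/'LLVM'
# substring checks and the stdout+stderr version search mirror the patch above.
import re
import subprocess


def probe_linker(linker_cmd):
    """Run '<linker> --version' and guess the dynamic-linker family."""
    proc = subprocess.run(linker_cmd + ['--version'],
                          capture_output=True, text=True)
    out, err = proc.stdout, proc.stderr
    # The Snapdragon banner can land on stderr, so search both streams for a
    # version number (the search_version(o + e) change in environment.py).
    m = re.search(r'\d+\.\d+(\.\d+)?', out + err)
    version = m.group(0) if m else 'unknown'
    if 'Snapdragon' in err and 'LLVM' in err:
        return ('qualcomm-llvm', version)   # would map to QualcommLLVMDynamicLinker
    if out.startswith('LLD'):
        return ('lld', version)             # would map to LLVMDynamicLinker
    return ('unknown', version)


# Hypothetical usage:
#   probe_linker(['ld.qcld'])  ->  ('qualcomm-llvm', '8.0.12')
```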