aboutsummaryrefslogtreecommitdiff
path: root/unittests
diff options
context:
space:
mode:
Diffstat (limited to 'unittests')
-rw-r--r--unittests/__init__.py20
-rw-r--r--unittests/allplatformstests.py465
-rw-r--r--unittests/cargotests.py345
-rw-r--r--unittests/internaltests.py10
-rw-r--r--unittests/linuxliketests.py41
-rw-r--r--unittests/machinefiletests.py17
-rw-r--r--unittests/optiontests.py290
-rw-r--r--unittests/platformagnostictests.py41
-rw-r--r--unittests/rewritetests.py167
9 files changed, 1160 insertions, 236 deletions
diff --git a/unittests/__init__.py b/unittests/__init__.py
index e69de29..fb8fb8e 100644
--- a/unittests/__init__.py
+++ b/unittests/__init__.py
@@ -0,0 +1,20 @@
+import os
+
+import mesonbuild.compilers
+from mesonbuild.mesonlib import setup_vsenv
+
+def unset_envs():
+ # For unit tests we must fully control all command lines
+ # so that there are no unexpected changes coming from the
+ # environment, for example when doing a package build.
+ varnames = ['CPPFLAGS', 'LDFLAGS'] + list(mesonbuild.compilers.compilers.CFLAGS_MAPPING.values())
+ for v in varnames:
+ if v in os.environ:
+ del os.environ[v]
+
+def set_envs():
+ os.environ.setdefault('MESON_UNIT_TEST_BACKEND', 'ninja')
+
+setup_vsenv()
+unset_envs()
+set_envs()
diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py
index 2fee06c..078ab96 100644
--- a/unittests/allplatformstests.py
+++ b/unittests/allplatformstests.py
@@ -13,6 +13,7 @@ import platform
import pickle
import zipfile, tarfile
import sys
+import sysconfig
from unittest import mock, SkipTest, skipIf, skipUnless, expectedFailure
from contextlib import contextmanager
from glob import glob
@@ -29,7 +30,7 @@ import mesonbuild.coredata
import mesonbuild.machinefile
import mesonbuild.modules.gnome
from mesonbuild.mesonlib import (
- BuildDirLock, MachineChoice, is_windows, is_osx, is_cygwin, is_dragonflybsd,
+ DirectoryLock, DirectoryLockAction, MachineChoice, is_windows, is_osx, is_cygwin, is_dragonflybsd,
is_sunos, windows_proof_rmtree, python_command, version_compare, split_args, quote_arg,
relpath, is_linux, git, search_version, do_conf_file, do_conf_str, default_prefix,
MesonException, EnvironmentException,
@@ -222,6 +223,47 @@ class AllPlatformTests(BasePlatformTests):
confdata.values = {'VAR': (['value'], 'description')}
self.assertRaises(MesonException, conf_str, ['#mesondefine VAR'], confdata, 'meson')
+ def test_cmake_configuration(self):
+ if self.backend is not Backend.ninja:
+ raise SkipTest('ninja backend needed to configure with cmake')
+
+ cmake = ExternalProgram('cmake')
+ if not cmake.found():
+ raise SkipTest('cmake not available')
+
+ cmake_version = cmake.get_version()
+ if not version_compare(cmake_version, '>=3.13.5'):
+ raise SkipTest('cmake is too old')
+
+ with tempfile.TemporaryDirectory() as tmpdir:
+ srcdir = os.path.join(tmpdir, 'src')
+
+ shutil.copytree(os.path.join(self.src_root, 'test cases', 'common', '14 configure file'), srcdir)
+ self.init(srcdir)
+
+ cmake_builddir = os.path.join(srcdir, "cmake_builddir")
+ self.assertNotEqual(self.builddir, cmake_builddir)
+ self._run([cmake.path, '-G', 'Ninja', '-S', srcdir, '-B', cmake_builddir])
+
+ header_list = [
+ 'config7.h',
+ 'config10.h',
+ ]
+
+ for header in header_list:
+ meson_header = ""
+ cmake_header = ""
+
+ with open(os.path.join(self.builddir, header), encoding='utf-8') as f:
+ meson_header = f.read()
+
+ cmake_header_path = os.path.join(cmake_builddir, header)
+ with open(os.path.join(cmake_builddir, header), encoding='utf-8') as f:
+ cmake_header = f.read()
+
+ self.assertTrue(cmake_header, f'cmake generated header {header} is empty')
+ self.assertEqual(cmake_header, meson_header)
+
def test_absolute_prefix_libdir(self):
'''
Tests that setting absolute paths for --prefix and --libdir work. Can't
@@ -529,7 +571,8 @@ class AllPlatformTests(BasePlatformTests):
if self.backend is not Backend.ninja:
raise SkipTest(f'{self.backend.name!r} backend can\'t install files')
testdir = os.path.join(self.common_test_dir, '8 install')
- self.init(testdir)
+ # sneak in a test that covers backend options...
+ self.init(testdir, extra_args=['-Dbackend_max_links=4'])
intro = self.introspect('--targets')
if intro[0]['type'] == 'executable':
intro = intro[::-1]
@@ -1098,110 +1141,144 @@ class AllPlatformTests(BasePlatformTests):
for lang, evar in langs:
# Detect with evar and do sanity checks on that
if evar in os.environ:
- ecc = compiler_from_language(env, lang, MachineChoice.HOST)
- self.assertTrue(ecc.version)
- elinker = detect_static_linker(env, ecc)
- # Pop it so we don't use it for the next detection
- evalue = os.environ.pop(evar)
- # Very rough/strict heuristics. Would never work for actual
- # compiler detection, but should be ok for the tests.
- ebase = os.path.basename(evalue)
- if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
- self.assertIsInstance(ecc, gnu)
- self.assertIsInstance(elinker, ar)
- elif 'clang-cl' in ebase:
- self.assertIsInstance(ecc, clangcl)
- self.assertIsInstance(elinker, lib)
- elif 'clang' in ebase:
- self.assertIsInstance(ecc, clang)
- self.assertIsInstance(elinker, ar)
- elif ebase.startswith('ic'):
- self.assertIsInstance(ecc, intel)
- self.assertIsInstance(elinker, ar)
- elif ebase.startswith('cl'):
- self.assertIsInstance(ecc, msvc)
- self.assertIsInstance(elinker, lib)
- else:
- raise AssertionError(f'Unknown compiler {evalue!r}')
- # Check that we actually used the evalue correctly as the compiler
- self.assertEqual(ecc.get_exelist(), split_args(evalue))
+ with self.subTest(lang=lang, evar=evar):
+ try:
+ ecc = compiler_from_language(env, lang, MachineChoice.HOST)
+ except EnvironmentException:
+ # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind
+ if is_ci():
+ self.fail(f'Could not find a compiler for {lang}')
+ if sys.version_info < (3, 11):
+ continue
+ self.skipTest(f'No valid compiler for {lang}.')
+ finally:
+ # Pop it so we don't use it for the next detection
+ evalue = os.environ.pop(evar)
+ assert ecc is not None, "Something went really wrong"
+ self.assertTrue(ecc.version)
+ elinker = detect_static_linker(env, ecc)
+ # Very rough/strict heuristics. Would never work for actual
+ # compiler detection, but should be ok for the tests.
+ ebase = os.path.basename(evalue)
+ if ebase.startswith('g') or ebase.endswith(('-gcc', '-g++')):
+ self.assertIsInstance(ecc, gnu)
+ self.assertIsInstance(elinker, ar)
+ elif 'clang-cl' in ebase:
+ self.assertIsInstance(ecc, clangcl)
+ self.assertIsInstance(elinker, lib)
+ elif 'clang' in ebase:
+ self.assertIsInstance(ecc, clang)
+ self.assertIsInstance(elinker, ar)
+ elif ebase.startswith('ic'):
+ self.assertIsInstance(ecc, intel)
+ self.assertIsInstance(elinker, ar)
+ elif ebase.startswith('cl'):
+ self.assertIsInstance(ecc, msvc)
+ self.assertIsInstance(elinker, lib)
+ else:
+ self.fail(f'Unknown compiler {evalue!r}')
+ # Check that we actually used the evalue correctly as the compiler
+ self.assertEqual(ecc.get_exelist(), split_args(evalue))
+
# Do auto-detection of compiler based on platform, PATH, etc.
- cc = compiler_from_language(env, lang, MachineChoice.HOST)
- self.assertTrue(cc.version)
- linker = detect_static_linker(env, cc)
- # Check compiler type
- if isinstance(cc, gnu):
- self.assertIsInstance(linker, ar)
- if is_osx():
- self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
- elif is_sunos():
- self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
- else:
- self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
- if isinstance(cc, clangcl):
- self.assertIsInstance(linker, lib)
- self.assertIsInstance(cc.linker, linkers.ClangClDynamicLinker)
- if isinstance(cc, clang):
- self.assertIsInstance(linker, ar)
- if is_osx():
- self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
- elif is_windows():
- # This is clang, not clang-cl. This can be either an
- # ld-like linker of link.exe-like linker (usually the
- # former for msys2, the latter otherwise)
- self.assertIsInstance(cc.linker, (linkers.MSVCDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
- elif is_sunos():
- self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
- else:
- self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
- if isinstance(cc, intel):
- self.assertIsInstance(linker, ar)
- if is_osx():
- self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
- elif is_windows():
- self.assertIsInstance(cc.linker, linkers.XilinkDynamicLinker)
- else:
- self.assertIsInstance(cc.linker, linkers.GnuDynamicLinker)
- if isinstance(cc, msvc):
- self.assertTrue(is_windows())
- self.assertIsInstance(linker, lib)
- self.assertEqual(cc.id, 'msvc')
- self.assertTrue(hasattr(cc, 'is_64'))
- self.assertIsInstance(cc.linker, linkers.MSVCDynamicLinker)
- # If we're on Windows CI, we know what the compiler will be
- if 'arch' in os.environ:
- if os.environ['arch'] == 'x64':
- self.assertTrue(cc.is_64)
+ with self.subTest(lang=lang):
+ try:
+ cc = compiler_from_language(env, lang, MachineChoice.HOST)
+ except EnvironmentException:
+ # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind
+ if is_ci():
+ self.fail(f'Could not find a compiler for {lang}')
+ if sys.version_info < (3, 11):
+ continue
+ self.skipTest(f'No valid compiler for {lang}.')
+ assert cc is not None, "Something went really wrong"
+ self.assertTrue(cc.version)
+ linker = detect_static_linker(env, cc)
+ # Check compiler type
+ if isinstance(cc, gnu):
+ self.assertIsInstance(linker, ar)
+ if is_osx():
+ self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
+ elif is_sunos():
+ self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
+ else:
+ self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
+ if isinstance(cc, clangcl):
+ self.assertIsInstance(linker, lib)
+ self.assertIsInstance(cc.linker, linkers.ClangClDynamicLinker)
+ if isinstance(cc, clang):
+ self.assertIsInstance(linker, ar)
+ if is_osx():
+ self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
+ elif is_windows():
+ # This is clang, not clang-cl. This can be either an
+                        # ld-like linker or link.exe-like linker (usually the
+ # former for msys2, the latter otherwise)
+ self.assertIsInstance(cc.linker, (linkers.MSVCDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
+ elif is_sunos():
+ self.assertIsInstance(cc.linker, (linkers.SolarisDynamicLinker, linkers.GnuLikeDynamicLinkerMixin))
+ else:
+ self.assertIsInstance(cc.linker, linkers.GnuLikeDynamicLinkerMixin)
+ if isinstance(cc, intel):
+ self.assertIsInstance(linker, ar)
+ if is_osx():
+ self.assertIsInstance(cc.linker, linkers.AppleDynamicLinker)
+ elif is_windows():
+ self.assertIsInstance(cc.linker, linkers.XilinkDynamicLinker)
else:
- self.assertFalse(cc.is_64)
+ self.assertIsInstance(cc.linker, linkers.GnuDynamicLinker)
+ if isinstance(cc, msvc):
+ self.assertTrue(is_windows())
+ self.assertIsInstance(linker, lib)
+ self.assertEqual(cc.id, 'msvc')
+ self.assertTrue(hasattr(cc, 'is_64'))
+ self.assertIsInstance(cc.linker, linkers.MSVCDynamicLinker)
+ # If we're on Windows CI, we know what the compiler will be
+ if 'arch' in os.environ:
+ if os.environ['arch'] == 'x64':
+ self.assertTrue(cc.is_64)
+ else:
+ self.assertFalse(cc.is_64)
+
# Set evar ourselves to a wrapper script that just calls the same
# exelist + some argument. This is meant to test that setting
# something like `ccache gcc -pipe` or `distcc ccache gcc` works.
- wrapper = os.path.join(testdir, 'compiler wrapper.py')
- wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
- os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc)
-
- # Check static linker too
- wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
- os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker)
-
- # Need a new env to re-run environment loading
- env = get_fake_env(testdir, self.builddir, self.prefix)
-
- wcc = compiler_from_language(env, lang, MachineChoice.HOST)
- wlinker = detect_static_linker(env, wcc)
- # Pop it so we don't use it for the next detection
- os.environ.pop('AR')
- # Must be the same type since it's a wrapper around the same exelist
- self.assertIs(type(cc), type(wcc))
- self.assertIs(type(linker), type(wlinker))
- # Ensure that the exelist is correct
- self.assertEqual(wcc.get_exelist(), wrappercc)
- self.assertEqual(wlinker.get_exelist(), wrapperlinker)
- # Ensure that the version detection worked correctly
- self.assertEqual(cc.version, wcc.version)
- if hasattr(cc, 'is_64'):
- self.assertEqual(cc.is_64, wcc.is_64)
+ with self.subTest('wrapper script', lang=lang):
+ wrapper = os.path.join(testdir, 'compiler wrapper.py')
+ wrappercc = python_command + [wrapper] + cc.get_exelist() + ['-DSOME_ARG']
+ os.environ[evar] = ' '.join(quote_arg(w) for w in wrappercc)
+
+ # Check static linker too
+ wrapperlinker = python_command + [wrapper] + linker.get_exelist() + linker.get_always_args()
+ os.environ['AR'] = ' '.join(quote_arg(w) for w in wrapperlinker)
+
+ # Need a new env to re-run environment loading
+ env = get_fake_env(testdir, self.builddir, self.prefix)
+
+ try:
+ wcc = compiler_from_language(env, lang, MachineChoice.HOST)
+ except EnvironmentException:
+ # always raise in ci, we expect to have a valid ObjC and ObjC++ compiler of some kind
+ if is_ci():
+ self.fail(f'Could not find a compiler for {lang}')
+ if sys.version_info < (3, 11):
+ continue
+ self.skipTest(f'No valid compiler for {lang}.')
+ wlinker = detect_static_linker(env, wcc)
+ del os.environ['AR']
+
+ # Must be the same type since it's a wrapper around the same exelist
+ self.assertIs(type(cc), type(wcc))
+ self.assertIs(type(linker), type(wlinker))
+
+ # Ensure that the exelist is correct
+ self.assertEqual(wcc.get_exelist(), wrappercc)
+ self.assertEqual(wlinker.get_exelist(), wrapperlinker)
+
+ # Ensure that the version detection worked correctly
+ self.assertEqual(cc.version, wcc.version)
+ if hasattr(cc, 'is_64'):
+ self.assertEqual(cc.is_64, wcc.is_64)
def test_always_prefer_c_compiler_for_asm(self):
testdir = os.path.join(self.common_test_dir, '133 c cpp and asm')
@@ -1366,7 +1443,7 @@ class AllPlatformTests(BasePlatformTests):
Test that conflicts between -D for builtin options and the corresponding
long option are detected without false positives or negatives.
'''
- testdir = os.path.join(self.unit_test_dir, '128 long opt vs D')
+ testdir = os.path.join(self.unit_test_dir, '129 long opt vs D')
for opt in ['-Dsysconfdir=/etc', '-Dsysconfdir2=/etc']:
exception_raised = False
@@ -2498,10 +2575,9 @@ class AllPlatformTests(BasePlatformTests):
def test_flock(self):
exception_raised = False
with tempfile.TemporaryDirectory() as tdir:
- os.mkdir(os.path.join(tdir, 'meson-private'))
- with BuildDirLock(tdir):
+ with DirectoryLock(tdir, 'lock', DirectoryLockAction.FAIL, 'failed to lock directory'):
try:
- with BuildDirLock(tdir):
+ with DirectoryLock(tdir, 'lock', DirectoryLockAction.FAIL, 'expected failure'):
pass
except MesonException:
exception_raised = True
@@ -2980,6 +3056,121 @@ class AllPlatformTests(BasePlatformTests):
self.wipe()
self.init(testdir, extra_args=['-Dstart_native=true'], override_envvars=env)
+ @skipIf(is_osx(), 'Not implemented for Darwin yet')
+ @skipIf(is_windows(), 'POSIX only')
+ def test_python_build_config_extensions(self):
+ testdir = os.path.join(self.unit_test_dir,
+ '125 python extension')
+
+ VERSION_INFO_KEYS = ('major', 'minor', 'micro', 'releaselevel', 'serial')
+ EXTENSION_SUFFIX = '.extension-suffix.so'
+ STABLE_ABI_SUFFIX = '.stable-abi-suffix.so'
+ # macOS framework builds put libpython in PYTHONFRAMEWORKPREFIX.
+ LIBDIR = (sysconfig.get_config_var('PYTHONFRAMEWORKPREFIX') or
+ sysconfig.get_config_var('LIBDIR'))
+
+ python_build_config = {
+ 'schema_version': '1.0',
+ 'base_interpreter': sys.executable,
+ 'base_prefix': '/usr',
+ 'platform': sysconfig.get_platform(),
+ 'language': {
+ 'version': sysconfig.get_python_version(),
+ 'version_info': {key: getattr(sys.version_info, key) for key in VERSION_INFO_KEYS}
+ },
+ 'implementation': {
+ attr: (
+ getattr(sys.implementation, attr)
+ if attr != 'version' else
+ {key: getattr(sys.implementation.version, key) for key in VERSION_INFO_KEYS}
+ )
+ for attr in dir(sys.implementation)
+ if not attr.startswith('__')
+ },
+ 'abi': {
+ 'flags': list(sys.abiflags),
+ 'extension_suffix': EXTENSION_SUFFIX,
+ 'stable_abi_suffix': STABLE_ABI_SUFFIX,
+ },
+ 'suffixes': {
+ 'source': ['.py'],
+ 'bytecode': ['.pyc'],
+ 'optimized_bytecode': ['.pyc'],
+ 'debug_bytecode': ['.pyc'],
+ 'extensions': [EXTENSION_SUFFIX, STABLE_ABI_SUFFIX, '.so'],
+ },
+ 'libpython': {
+ 'dynamic': os.path.join(LIBDIR, sysconfig.get_config_var('LDLIBRARY')),
+ 'static': os.path.join(LIBDIR, sysconfig.get_config_var('LIBRARY')),
+ # set it to False on PyPy, since dylib is optional, but also
+ # the value is currently wrong:
+ # https://github.com/pypy/pypy/issues/5249
+ 'link_extensions': '__pypy__' not in sys.builtin_module_names,
+ },
+ 'c_api': {
+ 'headers': sysconfig.get_config_var('INCLUDEPY'),
+ }
+ }
+
+ py3library = sysconfig.get_config_var('PY3LIBRARY')
+ if py3library is not None:
+ python_build_config['libpython']['dynamic_stableabi'] = os.path.join(LIBDIR, py3library)
+
+ build_stable_abi = sysconfig.get_config_var('Py_GIL_DISABLED') != 1 or sys.version_info >= (3, 15)
+ intro_installed_file = os.path.join(self.builddir, 'meson-info', 'intro-installed.json')
+ expected_files = [
+ os.path.join(self.builddir, 'foo' + EXTENSION_SUFFIX),
+ ]
+ if build_stable_abi:
+ expected_files += [
+ os.path.join(self.builddir, 'foo_stable' + STABLE_ABI_SUFFIX),
+ ]
+ if is_cygwin():
+ expected_files += [
+ os.path.join(self.builddir, 'foo' + EXTENSION_SUFFIX.replace('.so', '.dll.a')),
+ ]
+ if build_stable_abi:
+ expected_files += [
+ os.path.join(self.builddir, 'foo_stable' + STABLE_ABI_SUFFIX.replace('.so', '.dll.a')),
+ ]
+
+ for with_pkgconfig in (False, True):
+ with self.subTest(with_pkgconfig=with_pkgconfig):
+ if with_pkgconfig:
+ libpc = sysconfig.get_config_var('LIBPC')
+ if libpc is None:
+ continue
+ python_build_config['c_api']['pkgconfig_path'] = libpc
+ # Old Ubuntu versions have incorrect LIBDIR, skip testing non-pkgconfig variant there.
+ elif not os.path.exists(python_build_config['libpython']['dynamic']):
+ continue
+
+ with tempfile.NamedTemporaryFile(mode='w', delete=False, encoding='utf-8') as python_build_config_file:
+ json.dump(python_build_config, fp=python_build_config_file)
+ with tempfile.NamedTemporaryFile(mode='w', delete=False, encoding='utf-8') as cross_file:
+ cross_file.write(
+ textwrap.dedent(f'''
+ [binaries]
+ pkg-config = 'pkg-config'
+
+ [built-in options]
+ python.build_config = '{python_build_config_file.name}'
+ '''.strip())
+ )
+ cross_file.flush()
+
+ for extra_args in (
+ ['--python.build-config', python_build_config_file.name],
+ ['--cross-file', cross_file.name],
+ ):
+ with self.subTest(extra_args=extra_args):
+ self.init(testdir, extra_args=extra_args)
+ self.build()
+ with open(intro_installed_file) as f:
+ intro_installed = json.load(f)
+ self.assertEqual(sorted(expected_files), sorted(intro_installed))
+ self.wipe()
+
def __reconfigure(self):
# Set an older version to force a reconfigure from scratch
filename = os.path.join(self.privatedir, 'coredata.dat')
@@ -3241,10 +3432,15 @@ class AllPlatformTests(BasePlatformTests):
def test_identity_cross(self):
testdir = os.path.join(self.unit_test_dir, '69 cross')
# Do a build to generate a cross file where the host is this target
- self.init(testdir, extra_args=['-Dgenerate=true'])
+ # build.c_args is ignored here.
+ self.init(testdir, extra_args=['-Dgenerate=true', '-Dc_args=-funroll-loops',
+ '-Dbuild.c_args=-pedantic'])
+ self.meson_native_files = [os.path.join(self.builddir, "nativefile")]
+ self.assertTrue(os.path.exists(self.meson_native_files[0]))
self.meson_cross_files = [os.path.join(self.builddir, "crossfile")]
self.assertTrue(os.path.exists(self.meson_cross_files[0]))
- # Now verify that this is detected as cross
+ # Now verify that this is detected as cross and build options are
+ # processed correctly
self.new_builddir()
self.init(testdir)
@@ -3263,6 +3459,11 @@ class AllPlatformTests(BasePlatformTests):
testdir = os.path.join(self.unit_test_dir, '58 introspect buildoptions')
self._run(self.mconf_command + [testdir])
+ @skip_if_not_language('rust')
+ def test_meson_configure_srcdir(self):
+ testdir = os.path.join(self.rust_test_dir, '20 rust and cpp')
+ self._run(self.mconf_command + [testdir])
+
def test_introspect_buildoptions_cross_only(self):
testdir = os.path.join(self.unit_test_dir, '82 cross only introspect')
testfile = os.path.join(testdir, 'meson.build')
@@ -3611,6 +3812,8 @@ class AllPlatformTests(BasePlatformTests):
# Account for differences in output
res_wb = [i for i in res_wb if i['type'] != 'custom']
for i in res_wb:
+ if i['id'] == 'test1@exe':
+ i['build_by_default'] = 'unknown'
i['filename'] = [os.path.relpath(x, self.builddir) for x in i['filename']]
for k in ('install_filename', 'dependencies', 'win_subsystem'):
if k in i:
@@ -3729,7 +3932,7 @@ class AllPlatformTests(BasePlatformTests):
},
{
'name': 'bugDep1',
- 'required': True,
+ 'required': 'unknown',
'version': [],
'has_fallback': False,
'conditional': False
@@ -3747,7 +3950,21 @@ class AllPlatformTests(BasePlatformTests):
'version': ['>=1.0.0', '<=99.9.9'],
'has_fallback': True,
'conditional': True
- }
+ },
+ {
+ 'conditional': False,
+ 'has_fallback': False,
+ 'name': 'unknown',
+ 'required': False,
+ 'version': 'unknown'
+ },
+ {
+ 'conditional': False,
+ 'has_fallback': False,
+ 'name': 'unknown',
+ 'required': False,
+ 'version': 'unknown'
+ },
]
self.maxDiff = None
self.assertListEqual(res_nb, expected)
@@ -4452,6 +4669,10 @@ class AllPlatformTests(BasePlatformTests):
self.assertIn(f'TEST_C="{expected}"', o)
self.assertIn('export TEST_C', o)
+ cmd = self.meson_command + ['devenv', '-C', self.builddir] + python_command + ['-c', 'import sys; sys.exit(42)']
+ result = subprocess.run(cmd, encoding='utf-8')
+ self.assertEqual(result.returncode, 42)
+
def test_clang_format_check(self):
if self.backend is not Backend.ninja:
raise SkipTest(f'Skipping clang-format tests with {self.backend.name} backend')
@@ -4725,120 +4946,140 @@ class AllPlatformTests(BasePlatformTests):
expected = {
'targets': {
f'{self.builddir}/out1-notag.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out1-notag.txt',
'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/out2-notag.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out2-notag.txt',
'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/libstatic.a': {
+ 'build_rpaths': [],
'destination': '{libdir_static}/libstatic.a',
'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/' + exe_name('app'): {
+ 'build_rpaths': [],
'destination': '{bindir}/' + exe_name('app'),
'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + exe_name('app-otherdir'): {
+ 'build_rpaths': [],
'destination': '{prefix}/otherbin/' + exe_name('app-otherdir'),
'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/subdir/' + exe_name('app2'): {
+ 'build_rpaths': [],
'destination': '{bindir}/' + exe_name('app2'),
'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + shared_lib_name('shared'): {
+ 'build_rpaths': [],
'destination': '{libdir_shared}/' + shared_lib_name('shared'),
'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + shared_lib_name('both'): {
+ 'build_rpaths': [],
'destination': '{libdir_shared}/' + shared_lib_name('both'),
'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/' + static_lib_name('both'): {
+ 'build_rpaths': [],
'destination': '{libdir_static}/' + static_lib_name('both'),
'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/' + shared_lib_name('bothcustom'): {
+ 'build_rpaths': [],
'destination': '{libdir_shared}/' + shared_lib_name('bothcustom'),
'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/' + static_lib_name('bothcustom'): {
+ 'build_rpaths': [],
'destination': '{libdir_static}/' + static_lib_name('bothcustom'),
'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/subdir/' + shared_lib_name('both2'): {
+ 'build_rpaths': [],
'destination': '{libdir_shared}/' + shared_lib_name('both2'),
'install_rpath': None,
'tag': 'runtime',
'subproject': None,
},
f'{self.builddir}/subdir/' + static_lib_name('both2'): {
+ 'build_rpaths': [],
'destination': '{libdir_static}/' + static_lib_name('both2'),
'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/out1-custom.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out1-custom.txt',
'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/out2-custom.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out2-custom.txt',
'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/out3-custom.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out3-custom.txt',
'install_rpath': None,
'tag': 'custom',
'subproject': None,
},
f'{self.builddir}/subdir/out1.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out1.txt',
'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/subdir/out2.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out2.txt',
'install_rpath': None,
'tag': None,
'subproject': None,
},
f'{self.builddir}/out-devel.h': {
+ 'build_rpaths': [],
'destination': '{includedir}/out-devel.h',
'install_rpath': None,
'tag': 'devel',
'subproject': None,
},
f'{self.builddir}/out3-notag.txt': {
+ 'build_rpaths': [],
'destination': '{datadir}/out3-notag.txt',
'install_rpath': None,
'tag': None,
@@ -5184,7 +5425,7 @@ class AllPlatformTests(BasePlatformTests):
self.__test_multi_stds(test_objc=True)
def test_slice(self):
- testdir = os.path.join(self.unit_test_dir, '126 test slice')
+ testdir = os.path.join(self.unit_test_dir, '127 test slice')
self.init(testdir)
self.build()
@@ -5196,7 +5437,7 @@ class AllPlatformTests(BasePlatformTests):
'10/10': [10],
}.items():
output = self._run(self.mtest_command + ['--slice=' + arg])
- tests = sorted([ int(x[5:]) for x in re.findall(r'test-[0-9]*', output) ])
+ tests = sorted([ int(x) for x in re.findall(r'\n[ 0-9]+/[0-9]+ test-([0-9]*)', output) ])
self.assertEqual(tests, expectation)
for arg, expectation in {'': 'error: argument --slice: value does not conform to format \'SLICE/NUM_SLICES\'',
@@ -5216,7 +5457,7 @@ class AllPlatformTests(BasePlatformTests):
env = get_fake_env()
cc = detect_c_compiler(env, MachineChoice.HOST)
has_rsp = cc.linker.id in {
- 'ld.bfd', 'ld.gold', 'ld.lld', 'ld.mold', 'ld.qcld', 'ld.wasm',
+ 'ld.bfd', 'ld.eld', 'ld.gold', 'ld.lld', 'ld.mold', 'ld.qcld', 'ld.wasm',
'link', 'lld-link', 'mwldarm', 'mwldeppc', 'optlink', 'xilink',
}
self.assertEqual(cc.linker.get_accepts_rsp(), has_rsp)
diff --git a/unittests/cargotests.py b/unittests/cargotests.py
index d1ac838..7c09ab9 100644
--- a/unittests/cargotests.py
+++ b/unittests/cargotests.py
@@ -8,8 +8,10 @@ import tempfile
import textwrap
import typing as T
-from mesonbuild.cargo import builder, cfg, load_wraps
+from mesonbuild.cargo import cfg, load_wraps
from mesonbuild.cargo.cfg import TokenType
+from mesonbuild.cargo.manifest import Dependency, Manifest, Package, Workspace
+from mesonbuild.cargo.toml import load_toml
from mesonbuild.cargo.version import convert
@@ -101,6 +103,12 @@ class CargoCfgTest(unittest.TestCase):
(TokenType.IDENTIFIER, 'unix'),
(TokenType.RPAREN, None),
]),
+ ('cfg(windows)', [
+ (TokenType.CFG, None),
+ (TokenType.LPAREN, None),
+ (TokenType.IDENTIFIER, 'windows'),
+ (TokenType.RPAREN, None),
+ ]),
]
for data, expected in cases:
with self.subTest():
@@ -130,63 +138,44 @@ class CargoCfgTest(unittest.TestCase):
cfg.Equal(cfg.Identifier("target_arch"), cfg.String("x86")),
cfg.Equal(cfg.Identifier("target_os"), cfg.String("linux")),
]))),
+ ('cfg(all(any(target_os = "android", target_os = "linux"), any(custom_cfg)))',
+ cfg.All([
+ cfg.Any([
+ cfg.Equal(cfg.Identifier("target_os"), cfg.String("android")),
+ cfg.Equal(cfg.Identifier("target_os"), cfg.String("linux")),
+ ]),
+ cfg.Any([
+ cfg.Identifier("custom_cfg"),
+ ]),
+ ])),
]
for data, expected in cases:
with self.subTest():
self.assertEqual(cfg.parse(iter(cfg.lexer(data))), expected)
- def test_ir_to_meson(self) -> None:
- build = builder.Builder('')
- HOST_MACHINE = build.identifier('host_machine')
-
+ def test_eval_ir(self) -> None:
+ d = {
+ 'target_os': 'unix',
+ 'unix': '',
+ }
cases = [
- ('target_os = "windows"',
- build.equal(build.method('system', HOST_MACHINE),
- build.string('windows'))),
- ('target_arch = "x86"',
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86'))),
- ('target_family = "unix"',
- build.equal(build.method('system', HOST_MACHINE),
- build.string('unix'))),
- ('not(target_arch = "x86")',
- build.not_(build.equal(
- build.method('cpu_family', HOST_MACHINE),
- build.string('x86')))),
- ('any(target_arch = "x86", target_arch = "x86_64")',
- build.or_(
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86')),
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86_64')))),
- ('any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")',
- build.or_(
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86')),
- build.or_(
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86_64')),
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('aarch64'))))),
- ('all(target_arch = "x86", target_arch = "x86_64")',
- build.and_(
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86')),
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86_64')))),
- ('all(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")',
- build.and_(
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86')),
- build.and_(
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('x86_64')),
- build.equal(build.method('cpu_family', HOST_MACHINE),
- build.string('aarch64'))))),
+ ('target_os = "windows"', False),
+ ('target_os = "unix"', True),
+ ('doesnotexist = "unix"', False),
+ ('not(target_os = "windows")', True),
+ ('any(target_os = "windows", target_arch = "x86_64")', False),
+ ('any(target_os = "windows", target_os = "unix")', True),
+ ('all(target_os = "windows", target_os = "unix")', False),
+ ('all(not(target_os = "windows"), target_os = "unix")', True),
+ ('any(unix, windows)', True),
+ ('all()', True),
+ ('any()', False),
+ ('cfg(unix)', True),
+ ('cfg(windows)', False),
]
for data, expected in cases:
with self.subTest():
- value = cfg.ir_to_meson(cfg.parse(iter(cfg.lexer(data))), build)
+ value = cfg.eval_cfg(data, d)
self.assertEqual(value, expected)
class CargoLockTest(unittest.TestCase):
@@ -219,3 +208,261 @@ class CargoLockTest(unittest.TestCase):
self.assertEqual(wraps[1].get('method'), 'cargo')
self.assertEqual(wraps[1].get('url'), 'https://github.com/gtk-rs/gtk-rs-core')
self.assertEqual(wraps[1].get('revision'), '23c5599424cc75ec66618891c915d9f490f6e4c2')
+
+class CargoTomlTest(unittest.TestCase):
+ CARGO_TOML_1 = textwrap.dedent('''\
+ [package]
+ name = "mandelbrot"
+ version = "0.1.0"
+ authors = ["Sebastian Dröge <sebastian@centricular.com>"]
+ edition = "2018"
+ license = "GPL-3.0"
+
+ [package.metadata.docs.rs]
+ all-features = true
+ rustc-args = [
+ "--cfg",
+ "docsrs",
+ ]
+ rustdoc-args = [
+ "--cfg",
+ "docsrs",
+ "--generate-link-to-definition",
+ ]
+
+ [dependencies]
+ gtk = { package = "gtk4", version = "0.9" }
+ num-complex = "0.4"
+ rayon = "1.0"
+ once_cell = "1"
+ async-channel = "2.0"
+ zerocopy = { version = "0.7", features = ["derive"] }
+
+ [dev-dependencies.gir-format-check]
+ version = "^0.1"
+ ''')
+
+ CARGO_TOML_2 = textwrap.dedent('''\
+ [package]
+ name = "pango"
+ edition = "2021"
+ rust-version = "1.70"
+ version = "0.20.4"
+ authors = ["The gtk-rs Project Developers"]
+
+ [package.metadata.system-deps.pango]
+ name = "pango"
+ version = "1.40"
+
+ [package.metadata.system-deps.pango.v1_42]
+ version = "1.42"
+
+ [lib]
+ name = "pango"
+
+ [[test]]
+ name = "check_gir"
+ path = "tests/check_gir.rs"
+
+ [features]
+ v1_42 = ["pango-sys/v1_42"]
+ v1_44 = [
+ "v1_42",
+ "pango-sys/v1_44",
+ ]
+ ''')
+
+ CARGO_TOML_WS = textwrap.dedent('''\
+ [workspace]
+ resolver = "2"
+ members = ["tutorial"]
+
+ [workspace.package]
+ version = "0.14.0-alpha.1"
+ repository = "https://gitlab.freedesktop.org/gstreamer/gst-plugins-rs"
+ edition = "2021"
+ rust-version = "1.83"
+
+ [workspace.dependencies]
+ glib = { path = "glib" }
+ gtk = { package = "gtk4", version = "0.9" }
+ once_cell = "1.0"
+ syn = { version = "2", features = ["parse"] }
+ ''')
+
+ def test_cargo_toml_ws_package(self) -> None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ fname = os.path.join(tmpdir, 'Cargo.toml')
+ with open(fname, 'w', encoding='utf-8') as f:
+ f.write(self.CARGO_TOML_WS)
+ workspace_toml = load_toml(fname)
+
+ workspace = Workspace.from_raw(workspace_toml)
+ pkg = Package.from_raw({'name': 'foo', 'version': {'workspace': True}}, workspace)
+ self.assertEqual(pkg.name, 'foo')
+ self.assertEqual(pkg.version, '0.14.0-alpha.1')
+ self.assertEqual(pkg.edition, '2015')
+ self.assertEqual(pkg.repository, None)
+
+ def test_cargo_toml_ws_dependency(self) -> None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ fname = os.path.join(tmpdir, 'Cargo.toml')
+ with open(fname, 'w', encoding='utf-8') as f:
+ f.write(self.CARGO_TOML_WS)
+ workspace_toml = load_toml(fname)
+
+ workspace = Workspace.from_raw(workspace_toml)
+ dep = Dependency.from_raw('glib', {'workspace': True}, 'member', workspace)
+ self.assertEqual(dep.package, 'glib')
+ self.assertEqual(dep.version, '')
+ self.assertEqual(dep.meson_version, [])
+ self.assertEqual(dep.path, os.path.join('..', 'glib'))
+ self.assertEqual(dep.features, [])
+
+ dep = Dependency.from_raw('gtk', {'workspace': True}, 'member', workspace)
+ self.assertEqual(dep.package, 'gtk4')
+ self.assertEqual(dep.version, '0.9')
+ self.assertEqual(dep.meson_version, ['>= 0.9', '< 0.10'])
+ self.assertEqual(dep.api, '0.9')
+ self.assertEqual(dep.features, [])
+
+ dep = Dependency.from_raw('once_cell', {'workspace': True, 'optional': True}, 'member', workspace)
+ self.assertEqual(dep.package, 'once_cell')
+ self.assertEqual(dep.version, '1.0')
+ self.assertEqual(dep.meson_version, ['>= 1.0', '< 2'])
+ self.assertEqual(dep.api, '1')
+ self.assertEqual(dep.features, [])
+ self.assertTrue(dep.optional)
+
+ dep = Dependency.from_raw('syn', {'workspace': True, 'features': ['full']}, 'member', workspace)
+ self.assertEqual(dep.package, 'syn')
+ self.assertEqual(dep.version, '2')
+ self.assertEqual(dep.meson_version, ['>= 2', '< 3'])
+ self.assertEqual(dep.api, '2')
+ self.assertEqual(sorted(set(dep.features)), ['full', 'parse'])
+
+ def test_cargo_toml_package(self) -> None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ fname = os.path.join(tmpdir, 'Cargo.toml')
+ with open(fname, 'w', encoding='utf-8') as f:
+ f.write(self.CARGO_TOML_1)
+ manifest_toml = load_toml(fname)
+ manifest = Manifest.from_raw(manifest_toml, 'Cargo.toml')
+
+ self.assertEqual(manifest.package.name, 'mandelbrot')
+ self.assertEqual(manifest.package.version, '0.1.0')
+ self.assertEqual(manifest.package.authors[0], 'Sebastian Dröge <sebastian@centricular.com>')
+ self.assertEqual(manifest.package.edition, '2018')
+ self.assertEqual(manifest.package.license, 'GPL-3.0')
+
+ print(manifest.package.metadata)
+ self.assertEqual(len(manifest.package.metadata), 1)
+
+ def test_cargo_toml_dependencies(self) -> None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ fname = os.path.join(tmpdir, 'Cargo.toml')
+ with open(fname, 'w', encoding='utf-8') as f:
+ f.write(self.CARGO_TOML_1)
+ manifest_toml = load_toml(fname)
+ manifest = Manifest.from_raw(manifest_toml, 'Cargo.toml')
+
+ self.assertEqual(len(manifest.dependencies), 6)
+ self.assertEqual(manifest.dependencies['gtk'].package, 'gtk4')
+ self.assertEqual(manifest.dependencies['gtk'].version, '0.9')
+ self.assertEqual(manifest.dependencies['gtk'].meson_version, ['>= 0.9', '< 0.10'])
+ self.assertEqual(manifest.dependencies['gtk'].api, '0.9')
+ self.assertEqual(manifest.dependencies['num-complex'].package, 'num-complex')
+ self.assertEqual(manifest.dependencies['num-complex'].version, '0.4')
+ self.assertEqual(manifest.dependencies['num-complex'].meson_version, ['>= 0.4', '< 0.5'])
+ self.assertEqual(manifest.dependencies['rayon'].package, 'rayon')
+ self.assertEqual(manifest.dependencies['rayon'].version, '1.0')
+ self.assertEqual(manifest.dependencies['rayon'].meson_version, ['>= 1.0', '< 2'])
+ self.assertEqual(manifest.dependencies['rayon'].api, '1')
+ self.assertEqual(manifest.dependencies['once_cell'].package, 'once_cell')
+ self.assertEqual(manifest.dependencies['once_cell'].version, '1')
+ self.assertEqual(manifest.dependencies['once_cell'].meson_version, ['>= 1', '< 2'])
+ self.assertEqual(manifest.dependencies['once_cell'].api, '1')
+ self.assertEqual(manifest.dependencies['async-channel'].package, 'async-channel')
+ self.assertEqual(manifest.dependencies['async-channel'].version, '2.0')
+ self.assertEqual(manifest.dependencies['async-channel'].meson_version, ['>= 2.0', '< 3'])
+ self.assertEqual(manifest.dependencies['async-channel'].api, '2')
+ self.assertEqual(manifest.dependencies['zerocopy'].package, 'zerocopy')
+ self.assertEqual(manifest.dependencies['zerocopy'].version, '0.7')
+ self.assertEqual(manifest.dependencies['zerocopy'].meson_version, ['>= 0.7', '< 0.8'])
+ self.assertEqual(manifest.dependencies['zerocopy'].features, ['derive'])
+ self.assertEqual(manifest.dependencies['zerocopy'].api, '0.7')
+
+ self.assertEqual(len(manifest.dev_dependencies), 1)
+ self.assertEqual(manifest.dev_dependencies['gir-format-check'].package, 'gir-format-check')
+ self.assertEqual(manifest.dev_dependencies['gir-format-check'].version, '^0.1')
+ self.assertEqual(manifest.dev_dependencies['gir-format-check'].meson_version, ['>= 0.1', '< 0.2'])
+ self.assertEqual(manifest.dev_dependencies['gir-format-check'].api, '0.1')
+
+ def test_cargo_toml_targets(self) -> None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ fname = os.path.join(tmpdir, 'Cargo.toml')
+ with open(fname, 'w', encoding='utf-8') as f:
+ f.write(self.CARGO_TOML_2)
+ manifest_toml = load_toml(fname)
+ manifest = Manifest.from_raw(manifest_toml, 'Cargo.toml')
+
+ self.assertEqual(manifest.lib.name, 'pango')
+ self.assertEqual(manifest.lib.crate_type, ['lib'])
+ self.assertEqual(manifest.lib.path, os.path.join('src', 'lib.rs'))
+ self.assertEqual(manifest.lib.test, True)
+ self.assertEqual(manifest.lib.doctest, True)
+ self.assertEqual(manifest.lib.bench, True)
+ self.assertEqual(manifest.lib.doc, True)
+ self.assertEqual(manifest.lib.harness, True)
+ self.assertEqual(manifest.lib.edition, '2015')
+ self.assertEqual(manifest.lib.required_features, [])
+ self.assertEqual(manifest.lib.plugin, False)
+ self.assertEqual(manifest.lib.proc_macro, False)
+ self.assertEqual(manifest.lib.doc_scrape_examples, True)
+
+ self.assertEqual(len(manifest.test), 1)
+ self.assertEqual(manifest.test[0].name, 'check_gir')
+ self.assertEqual(manifest.test[0].crate_type, ['bin'])
+ self.assertEqual(manifest.test[0].path, 'tests/check_gir.rs')
+ self.assertEqual(manifest.lib.path, os.path.join('src', 'lib.rs'))
+ self.assertEqual(manifest.test[0].test, True)
+ self.assertEqual(manifest.test[0].doctest, False)
+ self.assertEqual(manifest.test[0].bench, True)
+ self.assertEqual(manifest.test[0].doc, False)
+ self.assertEqual(manifest.test[0].harness, True)
+ self.assertEqual(manifest.test[0].edition, '2015')
+ self.assertEqual(manifest.test[0].required_features, [])
+ self.assertEqual(manifest.test[0].plugin, False)
+
+ def test_cargo_toml_system_deps(self) -> None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ fname = os.path.join(tmpdir, 'Cargo.toml')
+ with open(fname, 'w', encoding='utf-8') as f:
+ f.write(self.CARGO_TOML_2)
+ manifest_toml = load_toml(fname)
+ manifest = Manifest.from_raw(manifest_toml, 'Cargo.toml')
+
+ self.assertIn('system-deps', manifest.package.metadata)
+
+ self.assertEqual(len(manifest.system_dependencies), 1)
+ self.assertEqual(manifest.system_dependencies['pango'].name, 'pango')
+ self.assertEqual(manifest.system_dependencies['pango'].version, '1.40')
+ self.assertEqual(manifest.system_dependencies['pango'].meson_version, ['>=1.40'])
+ self.assertEqual(manifest.system_dependencies['pango'].optional, False)
+ self.assertEqual(manifest.system_dependencies['pango'].feature, None)
+
+ self.assertEqual(len(manifest.system_dependencies['pango'].feature_overrides), 1)
+ self.assertEqual(manifest.system_dependencies['pango'].feature_overrides['v1_42'], {'version': '1.42'})
+
+ def test_cargo_toml_features(self) -> None:
+ with tempfile.TemporaryDirectory() as tmpdir:
+ fname = os.path.join(tmpdir, 'Cargo.toml')
+ with open(fname, 'w', encoding='utf-8') as f:
+ f.write(self.CARGO_TOML_2)
+ manifest_toml = load_toml(fname)
+ manifest = Manifest.from_raw(manifest_toml, 'Cargo.toml')
+
+ self.assertEqual(len(manifest.features), 3)
+ self.assertEqual(manifest.features['v1_42'], ['pango-sys/v1_42'])
+ self.assertEqual(manifest.features['v1_44'], ['v1_42', 'pango-sys/v1_44'])
+ self.assertEqual(manifest.features['default'], [])
diff --git a/unittests/internaltests.py b/unittests/internaltests.py
index d7994ee..e8742eb 100644
--- a/unittests/internaltests.py
+++ b/unittests/internaltests.py
@@ -44,10 +44,7 @@ from mesonbuild.programs import ExternalProgram
import mesonbuild.modules.pkgconfig
from mesonbuild import utils
-
-from run_tests import (
- FakeCompilerOptions, get_fake_env, get_fake_options
-)
+from run_tests import get_fake_env, get_fake_options
from .helpers import *
@@ -555,9 +552,9 @@ class InternalTests(unittest.TestCase):
'libbar.so.7.10', 'libbar.so.7.9', 'libbar.so.7.9.3']:
libpath = Path(tmpdir) / i
libpath.write_text('', encoding='utf-8')
- found = cc._find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True)
+ found = cc._find_library_real('foo', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True, ignore_system_dirs=False)
self.assertEqual(os.path.basename(found[0]), 'libfoo.so.54.0')
- found = cc._find_library_real('bar', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True)
+ found = cc._find_library_real('bar', env, [tmpdir], '', LibType.PREFER_SHARED, lib_prefix_warning=True, ignore_system_dirs=False)
self.assertEqual(os.path.basename(found[0]), 'libbar.so.7.10')
def test_find_library_patterns(self):
@@ -629,7 +626,6 @@ class InternalTests(unittest.TestCase):
env = get_fake_env()
compiler = detect_c_compiler(env, MachineChoice.HOST)
env.coredata.compilers.host = {'c': compiler}
- env.coredata.optstore.set_value_object(OptionKey('c_link_args'), FakeCompilerOptions())
p1 = Path(tmpdir) / '1'
p2 = Path(tmpdir) / '2'
p1.mkdir()
diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py
index 6b896d7..1ec23a0 100644
--- a/unittests/linuxliketests.py
+++ b/unittests/linuxliketests.py
@@ -446,6 +446,24 @@ class LinuxlikeTests(BasePlatformTests):
libdir = self.installdir + os.path.join(self.prefix, self.libdir)
self._test_soname_impl(libdir, True)
+ @skip_if_not_base_option('b_sanitize')
+ def test_c_link_args_and_env(self):
+ '''
+ Test that the CFLAGS / CXXFLAGS environment variables are
+ included on the linker command line when c_link_args is
+ set but c_args is not.
+ '''
+ if is_cygwin():
+ raise SkipTest('asan not available on Cygwin')
+ if is_openbsd():
+ raise SkipTest('-fsanitize=address is not supported on OpenBSD')
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = {'CFLAGS': '-fsanitize=address'}
+ self.init(testdir, extra_args=['-Dc_link_args="-L/usr/lib"'],
+ override_envvars=env)
+ self.build()
+
def test_compiler_check_flags_order(self):
'''
Test that compiler check flags override all other flags. This can't be
@@ -986,6 +1004,22 @@ class LinuxlikeTests(BasePlatformTests):
self.assertEqual(got_rpath, yonder_libdir, rpath_format)
@skip_if_not_base_option('b_sanitize')
+ def test_env_cflags_ldflags(self):
+ if is_cygwin():
+ raise SkipTest('asan not available on Cygwin')
+ if is_openbsd():
+ raise SkipTest('-fsanitize=address is not supported on OpenBSD')
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = {'CFLAGS': '-fsanitize=address', 'LDFLAGS': '-I.'}
+ self.init(testdir, override_envvars=env)
+ self.build()
+ compdb = self.get_compdb()
+ for i in compdb:
+ self.assertIn("-fsanitize=address", i["command"])
+ self.wipe()
+
+ @skip_if_not_base_option('b_sanitize')
def test_pch_with_address_sanitizer(self):
if is_cygwin():
raise SkipTest('asan not available on Cygwin')
@@ -1227,8 +1261,9 @@ class LinuxlikeTests(BasePlatformTests):
myenv['PKG_CONFIG_PATH'] = _prepend_pkg_config_path(self.privatedir)
stdo = subprocess.check_output([PKG_CONFIG, '--libs-only-l', 'libsomething'], env=myenv)
deps = [b'-lgobject-2.0', b'-lgio-2.0', b'-lglib-2.0', b'-lsomething']
- if is_windows() or is_cygwin() or is_osx() or is_openbsd():
+ if is_windows() or is_osx() or is_openbsd():
# On Windows, libintl is a separate library
+ # It used to be on Cygwin as well, but no longer is.
deps.append(b'-lintl')
self.assertEqual(set(deps), set(stdo.split()))
@@ -1859,7 +1894,7 @@ class LinuxlikeTests(BasePlatformTests):
self.assertIn('build t13-e1: c_LINKER t13-e1.p/main.c.o | libt12-s1.a libt13-s3.a\n', content)
def test_top_options_in_sp(self):
- testdir = os.path.join(self.unit_test_dir, '125 pkgsubproj')
+ testdir = os.path.join(self.unit_test_dir, '126 pkgsubproj')
self.init(testdir)
def test_unreadable_dir_in_declare_dep(self):
@@ -1938,7 +1973,7 @@ class LinuxlikeTests(BasePlatformTests):
self.check_has_flag(compdb, sub2src, '-O2')
def test_sanitizers(self):
- testdir = os.path.join(self.unit_test_dir, '127 sanitizers')
+ testdir = os.path.join(self.unit_test_dir, '128 sanitizers')
with self.subTest('no b_sanitize value'):
try:
diff --git a/unittests/machinefiletests.py b/unittests/machinefiletests.py
index b2839e6..7f88a54 100644
--- a/unittests/machinefiletests.py
+++ b/unittests/machinefiletests.py
@@ -550,8 +550,8 @@ class NativeFileTests(BasePlatformTests):
# into augments.
self.assertEqual(found, 2, 'Did not find all two sections')
- def test_builtin_options_subprojects_overrides_buildfiles(self):
- # If the buildfile says subproject(... default_library: shared), ensure that's overwritten
+ def test_builtin_options_machinefile_overrides_subproject(self):
+ # The buildfile says subproject(... default_library: static), the machinefile overrides it
testcase = os.path.join(self.common_test_dir, '223 persubproject options')
config = self.helper_create_native_file({'sub2:built-in options': {'default_library': 'shared'}})
@@ -563,11 +563,18 @@ class NativeFileTests(BasePlatformTests):
check = cm.exception.stdout
self.assertIn(check, 'Parent should override default_library')
- def test_builtin_options_subprojects_dont_inherits_parent_override(self):
- # If the buildfile says subproject(... default_library: shared), ensure that's overwritten
+ def test_builtin_options_machinefile_global_overrides_subproject(self):
+ # The buildfile says subproject(... default_library: static), ensure that's overridden
testcase = os.path.join(self.common_test_dir, '223 persubproject options')
config = self.helper_create_native_file({'built-in options': {'default_library': 'both'}})
- self.init(testcase, extra_args=['--native-file', config])
+
+ with self.assertRaises((RuntimeError, subprocess.CalledProcessError)) as cm:
+ self.init(testcase, extra_args=['--native-file', config])
+ if isinstance(cm.exception, RuntimeError):
+ check = str(cm.exception)
+ else:
+ check = cm.exception.stdout
+ self.assertIn('Parent should override default_library', check)
def test_builtin_options_compiler_properties(self):
# the properties section can have lang_args, and those need to be
diff --git a/unittests/optiontests.py b/unittests/optiontests.py
index 5ed601f..8f49a80 100644
--- a/unittests/optiontests.py
+++ b/unittests/optiontests.py
@@ -35,13 +35,29 @@ class OptionTests(unittest.TestCase):
optstore.initialize_from_top_level_project_call({OptionKey('someoption'): new_value}, {}, {})
self.assertEqual(optstore.get_value_for(k), new_value)
+ def test_machine_vs_project(self):
+ optstore = OptionStore(False)
+ name = 'backend'
+ default_value = 'ninja'
+ proj_value = 'xcode'
+ mfile_value = 'vs2010'
+ k = OptionKey(name)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ vo = UserStringOption(k.name, 'You know what this is', default_value)
+ optstore.add_system_option(k.name, vo)
+ self.assertEqual(optstore.get_value_for(k), default_value)
+ optstore.initialize_from_top_level_project_call({OptionKey(name): proj_value}, {},
+ {OptionKey(name): mfile_value})
+ self.assertEqual(optstore.get_value_for(k), mfile_value)
+
def test_subproject_system_option(self):
"""Test that subproject system options get their default value from the global
option (e.g. "sub:b_lto" can be initialized from "b_lto")."""
optstore = OptionStore(False)
- name = 'someoption'
- default_value = 'somevalue'
- new_value = 'new_value'
+ name = 'b_lto'
+ default_value = 'false'
+ new_value = 'true'
k = OptionKey(name)
subk = k.evolve(subproject='sub')
optstore.initialize_from_top_level_project_call({}, {}, {OptionKey(name): new_value})
@@ -105,6 +121,14 @@ class OptionTests(unittest.TestCase):
self.assertEqual(optstore.get_value_for(name, 'sub'), sub_value)
self.assertEqual(num_options(optstore), 2)
+ def test_toplevel_project_yielding(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ top_value = 'top'
+ vo = UserStringOption(name, 'A top level option', top_value, True)
+ optstore.add_project_option(OptionKey(name, ''), vo)
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+
def test_project_yielding(self):
optstore = OptionStore(False)
name = 'someoption'
@@ -136,6 +160,30 @@ class OptionTests(unittest.TestCase):
self.assertEqual(optstore.get_value_for(sub_name, 'sub'), sub_value)
self.assertEqual(num_options(optstore), 2)
+ def test_project_yielding_initialize(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ top_value = 'top'
+ sub_value = 'sub'
+ subp = 'subp'
+ cmd_line = { OptionKey(name): top_value, OptionKey(name, subp): sub_value }
+
+ vo = UserStringOption(name, 'A top level option', 'default1')
+ optstore.add_project_option(OptionKey(name, ''), vo)
+ optstore.initialize_from_top_level_project_call({}, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(num_options(optstore), 1)
+
+ vo2 = UserStringOption(name, 'A subproject option', 'default2', True)
+ optstore.add_project_option(OptionKey(name, 'subp'), vo2)
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(optstore.get_value_for(name, subp), top_value)
+ self.assertEqual(num_options(optstore), 2)
+
+ optstore.initialize_from_subproject_call(subp, {}, {}, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(optstore.get_value_for(name, subp), sub_value)
+
def test_augments(self):
optstore = OptionStore(False)
name = 'cpp_std'
@@ -155,25 +203,25 @@ class OptionTests(unittest.TestCase):
# First augment a subproject
with self.subTest('set subproject override'):
- optstore.set_from_configure_command([f'{sub_name}:{name}={aug_value}'], [])
+ optstore.set_from_configure_command({OptionKey.from_string(f'{sub_name}:{name}'): aug_value})
self.assertEqual(optstore.get_value_for(name), top_value)
self.assertEqual(optstore.get_value_for(name, sub_name), aug_value)
self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
with self.subTest('unset subproject override'):
- optstore.set_from_configure_command([], [f'{sub_name}:{name}'])
+ optstore.set_from_configure_command({OptionKey.from_string(f'{sub_name}:{name}'): None})
self.assertEqual(optstore.get_value_for(name), top_value)
self.assertEqual(optstore.get_value_for(name, sub_name), top_value)
self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
# And now augment the top level option
- optstore.set_from_configure_command([f':{name}={aug_value}'], [])
+ optstore.set_from_configure_command({OptionKey.from_string(f':{name}'): aug_value})
self.assertEqual(optstore.get_value_for(name, None), top_value)
self.assertEqual(optstore.get_value_for(name, ''), aug_value)
self.assertEqual(optstore.get_value_for(name, sub_name), top_value)
self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
- optstore.set_from_configure_command([], [f':{name}'])
+ optstore.set_from_configure_command({OptionKey.from_string(f':{name}'): None})
self.assertEqual(optstore.get_value_for(name), top_value)
self.assertEqual(optstore.get_value_for(name, sub_name), top_value)
self.assertEqual(optstore.get_value_for(name, sub2_name), top_value)
@@ -193,12 +241,236 @@ class OptionTests(unittest.TestCase):
choices=['c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23'],
)
optstore.add_system_option(name, co)
- optstore.set_from_configure_command([f'{sub_name}:{name}={aug_value}'], [])
- optstore.set_from_configure_command([f'{sub_name}:{name}={set_value}'], [])
+ optstore.set_from_configure_command({OptionKey.from_string(f'{sub_name}:{name}'): aug_value})
+ optstore.set_from_configure_command({OptionKey.from_string(f'{sub_name}:{name}'): set_value})
self.assertEqual(optstore.get_value_for(name), top_value)
self.assertEqual(optstore.get_value_for(name, sub_name), set_value)
+ def test_build_to_host(self):
+ key = OptionKey('cpp_std')
+ def_value = 'c++98'
+ opt_value = 'c++17'
+ optstore = OptionStore(False)
+ co = UserComboOption(key.name,
+ 'C++ language standard to use',
+ def_value,
+ choices=['c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23'],
+ )
+ optstore.add_compiler_option('cpp', key, co)
+
+ cmd_line = {key: opt_value}
+ optstore.initialize_from_top_level_project_call({}, cmd_line, {})
+ self.assertEqual(optstore.get_value_object_and_value_for(key.as_build())[1], opt_value)
+ self.assertEqual(optstore.get_value(key.as_build()), opt_value)
+ self.assertEqual(optstore.get_value_for(key.as_build()), opt_value)
+
+ def test_build_to_host_subproject(self):
+ key = OptionKey('cpp_std')
+ def_value = 'c++98'
+ opt_value = 'c++17'
+ subp = 'subp'
+ optstore = OptionStore(False)
+ co = UserComboOption(key.name,
+ 'C++ language standard to use',
+ def_value,
+ choices=['c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23'],
+ )
+ optstore.add_compiler_option('cpp', key, co)
+
+ spcall = {key: opt_value}
+ optstore.initialize_from_top_level_project_call({}, {}, {})
+ optstore.initialize_from_subproject_call(subp, spcall, {}, {}, {})
+ self.assertEqual(optstore.get_value_object_and_value_for(key.evolve(subproject=subp,
+ machine=MachineChoice.BUILD))[1], opt_value)
+ self.assertEqual(optstore.get_value(key.evolve(subproject=subp,
+ machine=MachineChoice.BUILD)), opt_value)
+ self.assertEqual(optstore.get_value_for(key.evolve(subproject=subp,
+ machine=MachineChoice.BUILD)), opt_value)
+
+ def test_build_to_host_cross(self):
+ key = OptionKey('cpp_std')
+ def_value = 'c++98'
+ opt_value = 'c++17'
+ optstore = OptionStore(True)
+ for k in [key, key.as_build()]:
+ co = UserComboOption(key.name,
+ 'C++ language standard to use',
+ def_value,
+ choices=['c++98', 'c++11', 'c++14', 'c++17', 'c++20', 'c++23'],
+ )
+ optstore.add_compiler_option('cpp', k, co)
+
+ cmd_line = {key: opt_value}
+ optstore.initialize_from_top_level_project_call({}, cmd_line, {})
+ print(optstore.options)
+
+ self.assertEqual(optstore.get_value_object_and_value_for(key)[1], opt_value)
+ self.assertEqual(optstore.get_value_object_and_value_for(key.as_build())[1], def_value)
+ self.assertEqual(optstore.get_value(key), opt_value)
+ self.assertEqual(optstore.get_value(key.as_build()), def_value)
+ self.assertEqual(optstore.get_value_for(key), opt_value)
+ self.assertEqual(optstore.get_value_for(key.as_build()), def_value)
+
def test_b_default(self):
optstore = OptionStore(False)
value = optstore.get_default_for_b_option(OptionKey('b_vscrt'))
self.assertEqual(value, 'from_buildtype')
+
+ def test_b_nonexistent(self):
+ optstore = OptionStore(False)
+ self.assertTrue(optstore.accept_as_pending_option(OptionKey('b_ndebug')))
+ self.assertFalse(optstore.accept_as_pending_option(OptionKey('b_whatever')))
+
+ def test_backend_option_pending(self):
+ optstore = OptionStore(False)
+ # backend options are known after the first invocation
+ self.assertTrue(optstore.accept_as_pending_option(OptionKey('backend_whatever'), True))
+ self.assertFalse(optstore.accept_as_pending_option(OptionKey('backend_whatever'), False))
+
+ def test_reconfigure_b_nonexistent(self):
+ optstore = OptionStore(False)
+ optstore.set_from_configure_command({OptionKey('b_ndebug'): True})
+
+ def test_unconfigure_nonexistent(self):
+ optstore = OptionStore(False)
+ with self.assertRaises(MesonException):
+ optstore.set_from_configure_command({OptionKey('nonexistent'): None})
+
+ def test_subproject_proj_opt_with_same_name(self):
+ name = 'tests'
+ subp = 'subp'
+
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserBooleanOption(name, 'Tests', False)
+ optstore.add_project_option(OptionKey(name, subproject=''), o)
+ o = UserBooleanOption(name, 'Tests', True)
+ optstore.add_project_option(OptionKey(name, subproject=subp), o)
+
+ cmd_line = {OptionKey(name): True}
+ spcall = {OptionKey(name): False}
+
+ optstore.initialize_from_top_level_project_call({}, cmd_line, {})
+ optstore.initialize_from_subproject_call(subp, spcall, {}, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, ''), True)
+ self.assertEqual(optstore.get_value_for(name, subp), False)
+
+ def test_subproject_cmdline_override_global(self):
+ name = 'optimization'
+ subp = 'subp'
+ new_value = '0'
+
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserComboOption(name, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])
+ optstore.add_system_option(name, o)
+
+ toplevel_proj_default = {OptionKey(name): 's'}
+ subp_proj_default = {OptionKey(name): '3'}
+ cmd_line = {OptionKey(name): new_value}
+
+ optstore.initialize_from_top_level_project_call(toplevel_proj_default, cmd_line, {})
+ optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, subp), new_value)
+ self.assertEqual(optstore.get_value_for(name), new_value)
+
+ def test_subproject_parent_override_subp(self):
+ name = 'optimization'
+ subp = 'subp'
+ default_value = 's'
+ subp_value = '0'
+
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserComboOption(name, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])
+ optstore.add_system_option(name, o)
+
+ toplevel_proj_default = {OptionKey(name, subproject=subp): subp_value, OptionKey(name): default_value}
+ subp_proj_default = {OptionKey(name): '3'}
+
+ optstore.initialize_from_top_level_project_call(toplevel_proj_default, {}, {})
+ optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, {}, {})
+ self.assertEqual(optstore.get_value_for(name, subp), subp_value)
+ self.assertEqual(optstore.get_value_for(name), default_value)
+
+ def test_subproject_cmdline_override_global_and_augment(self):
+ name = 'optimization'
+ subp = 'subp'
+ global_value = 's'
+ new_value = '0'
+
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserComboOption(name, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])
+ optstore.add_system_option(name, o)
+
+ toplevel_proj_default = {OptionKey(name): '1'}
+ subp_proj_default = {OptionKey(name): '3'}
+ cmd_line = {OptionKey(name): global_value, OptionKey(name, subproject=subp): new_value}
+
+ optstore.initialize_from_top_level_project_call(toplevel_proj_default, cmd_line, {})
+ optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, subp), new_value)
+ self.assertEqual(optstore.get_value_for(name), global_value)
+
+ def test_subproject_cmdline_override_toplevel(self):
+ name = 'default_library'
+ subp = 'subp'
+ toplevel_value = 'both'
+ subp_value = 'static'
+
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserComboOption(name, 'Kind of library', 'both', choices=['shared', 'static', 'both'])
+ optstore.add_system_option(name, o)
+
+ toplevel_proj_default = {OptionKey(name): 'shared'}
+ subp_proj_default = {OptionKey(name): subp_value}
+ cmd_line = {OptionKey(name, subproject=''): toplevel_value}
+
+ optstore.initialize_from_top_level_project_call(toplevel_proj_default, cmd_line, {})
+ optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, subp), subp_value)
+ self.assertEqual(optstore.get_value_for(name, ''), toplevel_value)
+
+ def test_subproject_buildtype(self):
+ subp = 'subp'
+ main1 = {OptionKey('buildtype'): 'release'}
+ main2 = {OptionKey('optimization'): '3', OptionKey('debug'): 'false'}
+ sub1 = {OptionKey('buildtype'): 'debug'}
+ sub2 = {OptionKey('optimization'): '0', OptionKey('debug'): 'true'}
+
+ for mainopt, subopt in ((main1, sub1),
+ (main2, sub2),
+ ({**main1, **main2}, {**sub1, **sub2})):
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserComboOption('buildtype', 'Build type to use', 'debug', choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])
+ optstore.add_system_option(o.name, o)
+ o = UserComboOption('optimization', 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])
+ optstore.add_system_option(o.name, o)
+ o = UserBooleanOption('debug', 'Enable debug symbols and other information', True)
+ optstore.add_system_option(o.name, o)
+
+ optstore.initialize_from_top_level_project_call(mainopt, {}, {})
+ optstore.initialize_from_subproject_call(subp, {}, subopt, {}, {})
+ self.assertEqual(optstore.get_value_for('buildtype', subp), 'debug')
+ self.assertEqual(optstore.get_value_for('optimization', subp), '0')
+ self.assertEqual(optstore.get_value_for('debug', subp), True)
+
+ def test_deprecated_nonstring_value(self):
+ # TODO: add a lot more deprecated option tests
+ optstore = OptionStore(False)
+ name = 'deprecated'
+ do = UserStringOption(name, 'An option with some deprecation', '0',
+ deprecated={'true': '1'})
+ optstore.add_system_option(name, do)
+ optstore.set_option(OptionKey(name), True)
+ value = optstore.get_value(name)
+ self.assertEqual(value, '1')
diff --git a/unittests/platformagnostictests.py b/unittests/platformagnostictests.py
index d6c0078..ebd5317 100644
--- a/unittests/platformagnostictests.py
+++ b/unittests/platformagnostictests.py
@@ -95,6 +95,23 @@ class PlatformAgnosticTests(BasePlatformTests):
testdir = os.path.join(self.unit_test_dir, '102 python without pkgconfig')
self.init(testdir, override_envvars={'PKG_CONFIG': 'notfound'})
def test_vala_target_with_internal_glib(self):
    """Vala targets built against an internal glib get a capped --target-glib flag.

    Both 2.84.4 and the development release 2.85.2 must map to the stable
    series '2.84' in the valac command line.
    """
    testdir = os.path.join(self.unit_test_dir, '130 vala internal glib')
    for run in [{'version': '2.84.4', 'expected': '2.84'}, {'version': '2.85.2', 'expected': '2.84'}]:
        self.new_builddir()
        self.init(testdir, extra_args=[f'-Dglib-version={run["version"]}'])
        intro_file = os.path.join(self.builddir, 'meson-info', 'intro-targets.json')
        # Keep the try block minimal: only opening the introspection file can
        # raise FileNotFoundError, so do not let the handler swallow one
        # raised from an unrelated place inside the assertions.
        try:
            with open(intro_file, 'r', encoding='utf-8') as tgt_intro:
                intro = json.load(tgt_intro)
        except FileNotFoundError:
            self.skipTest('Current backend does not produce introspection data')
        target = [tgt for tgt in intro if tgt['name'] == 'vala-tgt']
        self.assertLength(target, 1)
        vala_sources = [src for src in target[0]['target_sources'] if src.get('language') == 'vala']
        for src in vala_sources:
            # The flag and its value must appear as consecutive arguments.
            self.assertIn(('--target-glib', run['expected']), zip(src['parameters'], src['parameters'][1:]))
def test_debug_function_outputs_to_meson_log(self):
testdir = os.path.join(self.unit_test_dir, '104 debug function')
log_msg = 'This is an example debug output, should only end up in debug log'
@@ -175,7 +192,7 @@ class PlatformAgnosticTests(BasePlatformTests):
with self.subTest('Changing the backend'):
with self.assertRaises(subprocess.CalledProcessError) as cm:
self.setconf('-Dbackend=none')
- self.assertIn("ERROR: Tried to modify read only option 'backend'", cm.exception.stdout)
+ self.assertIn('ERROR: Tried to modify read only option "backend"', cm.exception.stdout)
# Check that the new value was not written in the store.
with self.subTest('option is stored correctly'):
@@ -203,10 +220,10 @@ class PlatformAgnosticTests(BasePlatformTests):
# Reconfigure of not empty builddir should work
self.new_builddir()
Path(self.builddir, 'dummy').touch()
- self.init(testdir, extra_args=['--reconfigure'])
+ self.init(testdir, extra_args=['--reconfigure', '--buildtype=custom'])
# Setup a valid builddir should update options but not reconfigure
- self.assertEqual(self.getconf('buildtype'), 'debug')
+ self.assertEqual(self.getconf('buildtype'), 'custom')
o = self.init(testdir, extra_args=['-Dbuildtype=release'])
self.assertIn('Directory already configured', o)
self.assertNotIn('The Meson build system', o)
@@ -421,12 +438,12 @@ class PlatformAgnosticTests(BasePlatformTests):
with self.subTest('unknown user option'):
out = self.init(testdir, extra_args=['-Dnot_an_option=1'], allow_fail=True)
- self.assertIn('ERROR: Unknown options: "not_an_option"', out)
+ self.assertIn('ERROR: Unknown option: "not_an_option"', out)
with self.subTest('unknown builtin option'):
self.new_builddir()
out = self.init(testdir, extra_args=['-Db_not_an_option=1'], allow_fail=True)
- self.assertIn('ERROR: Unknown options: "b_not_an_option"', out)
+ self.assertIn('ERROR: Unknown option: "b_not_an_option"', out)
def test_configure_new_option(self) -> None:
@@ -451,7 +468,17 @@ class PlatformAgnosticTests(BasePlatformTests):
f.write(line)
with self.assertRaises(subprocess.CalledProcessError) as e:
self.setconf('-Dneg_int_opt=0')
- self.assertIn('Unknown options: ":neg_int_opt"', e.exception.stdout)
+ self.assertIn('Unknown option: "neg_int_opt"', e.exception.stdout)
+
def test_reconfigure_option(self) -> None:
    """--reconfigure must reject invalid option values and apply valid ones."""
    testdir = self.copy_srcdir(os.path.join(self.common_test_dir, '40 options'))
    self.init(testdir)
    self.assertEqual(self.getconf('neg_int_opt'), -3)
    # An out-of-range value must fail and leave the stored value untouched.
    with self.assertRaises(subprocess.CalledProcessError):
        self.init(testdir, extra_args=['--reconfigure', '-Dneg_int_opt=0'])
    self.assertEqual(self.getconf('neg_int_opt'), -3)
    # A valid value must be written to the option store.
    self.init(testdir, extra_args=['--reconfigure', '-Dneg_int_opt=-2'])
    self.assertEqual(self.getconf('neg_int_opt'), -2)
def test_configure_option_changed_constraints(self) -> None:
"""Changing the constraints of an option without reconfiguring should work."""
@@ -491,7 +518,7 @@ class PlatformAgnosticTests(BasePlatformTests):
os.unlink(os.path.join(testdir, 'meson_options.txt'))
with self.assertRaises(subprocess.CalledProcessError) as e:
self.setconf('-Dneg_int_opt=0')
- self.assertIn('Unknown options: ":neg_int_opt"', e.exception.stdout)
+ self.assertIn('Unknown option: "neg_int_opt"', e.exception.stdout)
def test_configure_options_file_added(self) -> None:
"""A new project option file should be detected."""
diff --git a/unittests/rewritetests.py b/unittests/rewritetests.py
index 57a6782..a479f3d 100644
--- a/unittests/rewritetests.py
+++ b/unittests/rewritetests.py
@@ -46,6 +46,18 @@ class RewriterTests(BasePlatformTests):
args = [args]
return self.rewrite_raw(directory, ['command'] + args)
# The rewriter sorts the sources alphabetically, but this is very unstable
# and buggy, so we do not test it.
def assertEqualIgnoreOrder(self, a, b):
    """Assert that dicts a and b are equal, ignoring the order of list elements.

    Lists are recursively sorted before comparison and dict values are
    normalized recursively as well.
    """
    def deepsort(x):
        if isinstance(x, list):
            items = [deepsort(el) for el in x]
            try:
                return sorted(items)
            except TypeError:
                # Heterogeneous or unorderable elements (e.g. dicts mixed
                # with strings) cannot be compared with '<'; fall back to a
                # deterministic universal sort key instead of crashing.
                return sorted(items, key=repr)
        elif isinstance(x, dict):
            return {k: deepsort(v) for k, v in x.items()}
        else:
            return x
    self.assertDictEqual(deepsort(a), deepsort(b))
+
def test_target_source_list(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
@@ -61,32 +73,40 @@ class RewriterTests(BasePlatformTests):
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp'], 'extra_files': []},
+ 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []},
}
}
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
def test_target_add_sources(self):
self.prime('1 basic')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
expected = {
'target': {
- 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []},
- 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []},
- 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['a7.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []},
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp'], 'extra_files': []},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp', 'a7.cpp'], 'extra_files': []},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['a5.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []},
- 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['a5.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []},
- 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['a3.cpp', 'main.cpp', 'a7.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []},
+ 'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp', 'a5.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['fileB.cpp', 'fileC.cpp', 'a3.cpp', 'main.cpp'], 'extra_files': []},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp', 'a4.cpp'], 'extra_files': []},
- 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []},
- 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []},
- 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a6.cpp' ], 'extra_files': []},
+ 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'a1.cpp'], 'extra_files': []},
+ 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['a1.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []},
+ 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['a1.cpp', 'fileA.cpp', 'fileB.cpp', 'main.cpp'], 'extra_files': []},
+ 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []},
}
}
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
def test_target_add_sources_abs(self):
self.prime('1 basic')
@@ -95,7 +115,7 @@ class RewriterTests(BasePlatformTests):
inf = json.dumps([{"type": "target", "target": "trivialprog1", "operation": "info"}])
self.rewrite(self.builddir, add)
out = self.rewrite(self.builddir, inf)
- expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['a1.cpp', 'a2.cpp', 'a6.cpp', 'fileA.cpp', 'main.cpp'], 'extra_files': []}}}
+ expected = {'target': {'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp', 'a1.cpp', 'a2.cpp', 'a6.cpp'], 'extra_files': []}}}
self.assertDictEqual(out, expected)
def test_target_remove_sources(self):
@@ -103,28 +123,32 @@ class RewriterTests(BasePlatformTests):
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmSrc.json'))
expected = {
'target': {
- 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileC.cpp'], 'extra_files': []},
- 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': []},
- 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileC.cpp'], 'extra_files': []},
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp', 'fileC.cpp'], 'extra_files': []},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['fileB.cpp', 'fileC.cpp'], 'extra_files': []},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp'], 'extra_files': []},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp'], 'extra_files': []},
- 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp', 'fileC.cpp'], 'extra_files': []},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['fileB.cpp', 'fileC.cpp'], 'extra_files': []},
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': []},
- 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp'], 'extra_files': []},
- 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp'], 'extra_files': []},
- 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp'], 'extra_files': []},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp'], 'extra_files': []},
+ 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp'], 'extra_files': []},
+ 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []},
}
}
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
def test_target_subdir(self):
self.prime('2 subdirs')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
- expected = {'name': 'something', 'sources': ['first.c', 'second.c', 'third.c'], 'extra_files': []}
+ expected = {'name': 'something', 'sources': ['third.c', f'sub2{os.path.sep}first.c', f'sub2{os.path.sep}second.c'], 'extra_files': []}
self.assertDictEqual(list(out['target'].values())[0], expected)
# Check the written file
@@ -145,9 +169,12 @@ class RewriterTests(BasePlatformTests):
'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp'], 'extra_files': []},
+ 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
}
}
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
def test_target_add(self):
self.prime('1 basic')
@@ -166,10 +193,14 @@ class RewriterTests(BasePlatformTests):
'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['fileB.cpp', 'fileC.cpp', 'main.cpp', 'fileA.cpp'], 'extra_files': []},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
- 'trivialprog10@sha': {'name': 'trivialprog10', 'sources': ['new1.cpp', 'new2.cpp'], 'extra_files': []},
+ 'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp', 'fileA.cpp', 'fileB.cpp'], 'extra_files': []},
+ 'trivialprog11@exe': {'name': 'trivialprog11', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog12@exe': {'name': 'trivialprog12', 'sources': ['main.cpp', 'fileA.cpp'], 'extra_files': []},
+ 'trivialprog13@sha': {'name': 'trivialprog13', 'sources': ['new1.cpp', 'new2.cpp'], 'extra_files': []},
+ 'rightName@exe': {'name': 'rightName', 'sources': ['main.cpp'], 'extra_files': []},
}
}
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
def test_target_remove_subdir(self):
self.prime('2 subdirs')
@@ -181,7 +212,7 @@ class RewriterTests(BasePlatformTests):
self.prime('2 subdirs')
self.rewrite(self.builddir, os.path.join(self.builddir, 'addTgt.json'))
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- expected = {'name': 'something', 'sources': ['first.c', 'second.c'], 'extra_files': []}
+ expected = {'name': 'something', 'sources': [f'sub2{os.path.sep}first.c', f'sub2{os.path.sep}second.c'], 'extra_files': []}
self.assertDictEqual(out['target']['94b671c@@something@exe'], expected)
def test_target_source_sorting(self):
@@ -228,16 +259,23 @@ class RewriterTests(BasePlatformTests):
}
}
}
+ for k1, v1 in expected.items():
+ for k2, v2 in v1.items():
+ for k3, v3 in v2.items():
+ if isinstance(v3, list):
+ for i in range(len(v3)):
+ v3[i] = v3[i].replace('/', os.path.sep)
self.assertDictEqual(out, expected)
def test_target_same_name_skip(self):
    """Targets sharing a name must still be reported separately under unique IDs."""
    self.prime('4 same name targets')
    # The first rewrite applies addSrc.json; only the final info query is inspected.
    self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
    out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
    targets = out['target']
    self.assertEqual(len(targets), 2)
    self.assertDictEqual({'name': 'myExe', 'sources': ['main.cpp'], 'extra_files': []},
                         targets['myExe@exe'])
    self.assertDictEqual({'name': 'myExe', 'sources': [f'sub1{os.path.sep}main.cpp'], 'extra_files': []},
                         targets['9a11041@@myExe@exe'])
def test_kwargs_info(self):
self.prime('3 kwargs')
@@ -347,48 +385,68 @@ class RewriterTests(BasePlatformTests):
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addExtraFiles.json'))
expected = {
'target': {
- 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp', 'a7.hpp', 'fileB.hpp', 'fileC.hpp']},
- 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']},
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['fileA.hpp', 'main.hpp', 'fileB.hpp', 'fileC.hpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'fileA.hpp', 'main.hpp']},
'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['main.cpp'], 'extra_files': ['a7.hpp', 'fileB.hpp', 'fileC.hpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp'], 'extra_files': ['a5.hpp', 'fileA.hpp', 'main.hpp']},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp'], 'extra_files': ['a5.hpp', 'main.hpp', 'fileA.hpp']},
- 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['a3.hpp', 'main.hpp', 'a7.hpp', 'fileB.hpp', 'fileC.hpp']},
- 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']},
- 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a2.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['a3.hpp', 'main.hpp', 'fileB.hpp', 'fileC.hpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['fileA.hpp', 'main.hpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a6.hpp', 'fileA.hpp', 'main.hpp']},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp'], 'extra_files': ['a2.hpp', 'a7.hpp']},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp'], 'extra_files': ['a8.hpp', 'a9.hpp']},
'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp'], 'extra_files': ['a1.hpp', 'a4.hpp']},
}
}
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
def test_target_remove_extra_files(self):
self.prime('6 extra_files')
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'rmExtraFiles.json'))
expected = {
'target': {
- 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileC.hpp']},
- 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']},
- 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['main.cpp'], 'extra_files': ['fileC.hpp']},
+ 'trivialprog0@exe': {'name': 'trivialprog0', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp', 'fileB.hpp', 'fileC.hpp']},
+ 'trivialprog1@exe': {'name': 'trivialprog1', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp']},
+ 'trivialprog2@exe': {'name': 'trivialprog2', 'sources': ['main.cpp'], 'extra_files': ['fileB.hpp', 'fileC.hpp']},
'trivialprog3@exe': {'name': 'trivialprog3', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']},
'trivialprog4@exe': {'name': 'trivialprog4', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']},
- 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileC.hpp']},
- 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']},
- 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['main.hpp']},
+ 'trivialprog5@exe': {'name': 'trivialprog5', 'sources': ['main.cpp'], 'extra_files': ['fileB.hpp', 'fileC.hpp', 'main.hpp']},
+ 'trivialprog6@exe': {'name': 'trivialprog6', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp']},
+ 'trivialprog7@exe': {'name': 'trivialprog7', 'sources': ['main.cpp'], 'extra_files': ['main.hpp', 'fileA.hpp']},
'trivialprog8@exe': {'name': 'trivialprog8', 'sources': ['main.cpp'], 'extra_files': []},
'trivialprog9@exe': {'name': 'trivialprog9', 'sources': ['main.cpp'], 'extra_files': []},
'trivialprog10@exe': {'name': 'trivialprog10', 'sources': ['main.cpp'], 'extra_files': []},
}
}
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
# Check the written file
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
- self.assertDictEqual(out, expected)
+ self.assertEqualIgnoreOrder(out, expected)
+
def test_tricky_dataflow(self):
    """Source additions must survive targets whose source lists flow through tricky constructs."""
    self.prime('8 tricky dataflow')
    out = self.rewrite(self.builddir, os.path.join(self.builddir, 'addSrc.json'))
    # Map target id -> expected source list; the full expected dict is derived below.
    expected_sources = {
        'tgt1@sha': ['foo.c', 'new.c'],
        'tgt2@exe': ['new.c', 'unknown'],
        'tgt3@exe': ['foo.c', 'new.c'],
        'tgt4@exe': ['unknown'],
        'tgt5@exe': ['unknown', 'new.c'],
        'tgt6@exe': ['unknown', 'new.c'],
        'tgt7@exe': ['unknown', 'unknown'],
    }
    expected = {
        'target': {
            tid: {'name': tid.split('@')[0], 'sources': srcs, 'extra_files': []}
            for tid, srcs in expected_sources.items()
        }
    }
    self.assertEqualIgnoreOrder(out, expected)

    # The rewritten build files must report the same state when queried again.
    out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
    self.assertEqualIgnoreOrder(out, expected)
def test_raw_printer_is_idempotent(self):
test_path = Path(self.unit_test_dir, '120 rewrite')
@@ -421,3 +479,24 @@ class RewriterTests(BasePlatformTests):
}
}
self.assertDictEqual(out, expected)
+
# Asserts that AstInterpreter.dataflow_dag is what it should be
def test_dataflow_dag(self):
    """Snapshot-test the dataflow DAG built while introspecting '1 basic'.

    The DAG is rendered into a deterministic, position-sorted text form and
    compared against the checked-in expected_dag.txt.
    """
    test_path = Path(self.rewrite_test_dir, '1 basic')
    interpreter = IntrospectionInterpreter(test_path, '', 'ninja')
    interpreter.analyze()

    def sortkey(node):
        # Order nodes by their position in the parsed source file.
        return (node.lineno, node.colno, node.end_lineno, node.end_colno)

    def node_to_str(node):
        return f"{node.__class__.__name__}({node.lineno}:{node.colno})"

    # Collect lines and join once instead of repeated string concatenation.
    lines = []
    for target in sorted(interpreter.dataflow_dag.tgt_to_srcs.keys(), key=sortkey):
        lines.append(f"Data flowing to {node_to_str(target)}:")
        for source in sorted(interpreter.dataflow_dag.tgt_to_srcs[target], key=sortkey):
            lines.append(f"  {node_to_str(source)}")
    dag_as_str = "\n".join(lines)

    # test_path is already a Path, so no extra Path() wrapper is needed.
    # Specify the encoding explicitly so the comparison does not depend on
    # the platform locale (also the project's convention for file I/O).
    expected = (test_path / "expected_dag.txt").read_text(encoding='utf-8').strip()
    self.assertEqual(dag_as_str.strip(), expected)