42 files changed, 438 insertions, 148 deletions
diff --git a/docs/markdown/Fs-module.md b/docs/markdown/Fs-module.md index e230ec5..cc67355 100644 --- a/docs/markdown/Fs-module.md +++ b/docs/markdown/Fs-module.md @@ -7,6 +7,14 @@ Since 0.59.0, all functions accept `files()` objects if they can do something useful with them (this excludes `exists`, `is_dir`, `is_file`, `is_absolute` since a `files()` object is always the absolute path to an existing file). +## Usage + +The module may be imported as follows: + +``` meson +fs = [[#import]]('fs') +``` + ## File lookup rules Non-absolute paths are looked up relative to the directory where the diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 3b87623..73741a4 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -945,6 +945,12 @@ class Backend: mesonlib.replace_if_different(pch_file, pch_file_tmp) return pch_rel_to_build + def target_uses_pch(self, target: build.BuildTarget) -> bool: + try: + return T.cast('bool', target.get_option(OptionKey('b_pch'))) + except KeyError: + return False + @staticmethod def escape_extra_args(args: T.List[str]) -> T.List[str]: # all backslashes in defines are doubly-escaped @@ -1605,7 +1611,6 @@ class Backend: mlog.log(f'Running postconf script {name!r}') run_exe(s, env) - @lru_cache(maxsize=1) def create_install_data(self) -> InstallData: strip_bin = self.environment.lookup_binary_entry(MachineChoice.HOST, 'strip') if strip_bin is None: diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 6ed1e46..1786fef 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -970,7 +970,7 @@ class NinjaBackend(backends.Backend): if s.split('.')[-1] in compilers.lang_suffixes['d']: d_generated_deps.append(o) - use_pch = self.environment.coredata.options.get(OptionKey('b_pch')) + use_pch = self.target_uses_pch(target) if use_pch and target.has_pch(): pch_objects = self.generate_pch(target, header_deps=header_deps) else: @@ -1068,7 +1068,7 @@ class NinjaBackend(backends.Backend): cpp = target.compilers['cpp'] if cpp.get_id() != 'msvc': return False - cppversion = self.environment.coredata.options[OptionKey('std', machine=target.for_machine, lang='cpp')].value + cppversion = target.get_option(OptionKey('std', machine=target.for_machine, lang='cpp')) if cppversion not in ('latest', 'c++latest', 'vc++latest'): return False if not mesonlib.current_vs_supports_modules(): @@ -1662,7 +1662,7 @@ class NinjaBackend(backends.Backend): valac_outputs.append(vala_c_file) args = self.generate_basic_compiler_args(target, valac) - args += valac.get_colorout_args(self.environment.coredata.options.get(OptionKey('b_colorout')).value) + args += valac.get_colorout_args(target.get_option(OptionKey('b_colorout'))) # Tell Valac to output everything in our private directory. Sadly this # means it will also preserve the directory components of Vala sources # found inside the build tree (generated sources). 
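The backends.py and ninjabackend.py hunks above replace global `coredata.options` lookups with per-target queries (`target.get_option(...)`), guarded by the new `target_uses_pch()` helper so that options such as `b_pch` can differ per target. Below is a minimal standalone sketch of that lookup-with-fallback pattern, assuming a simplified stand-in class; `FakeTarget` and its plain string keys are illustrative only, not Meson's real `build.BuildTarget`/`OptionKey` API.

```python
# Sketch of a per-target option lookup with a safe fallback, mirroring the
# target_uses_pch() helper added above. Class and key names are illustrative.
import typing as T


class FakeTarget:
    """Stands in for build.BuildTarget: holds per-target option overrides."""

    def __init__(self, overrides: T.Dict[str, object]) -> None:
        self._overrides = overrides

    def get_option(self, key: str) -> object:
        # Raises KeyError when the option is unknown for this target,
        # which is exactly the case the helper guards against.
        return self._overrides[key]


def target_uses_pch(target: FakeTarget) -> bool:
    try:
        return bool(target.get_option('b_pch'))
    except KeyError:
        # Option not set for this target: fall back to "no PCH".
        return False


if __name__ == '__main__':
    print(target_uses_pch(FakeTarget({'b_pch': True})))  # True
    print(target_uses_pch(FakeTarget({})))               # False
```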
@@ -1984,8 +1984,8 @@ class NinjaBackend(backends.Backend): crt_link_args: T.List[str] = [] try: - buildtype = self.environment.coredata.options[OptionKey('buildtype')].value - crt = self.environment.coredata.options[OptionKey('b_vscrt')].value + buildtype = target.get_option(OptionKey('buildtype')) + crt = target.get_option(OptionKey('b_vscrt')) is_debug = buildtype == 'debug' if crt == 'from_buildtype': @@ -2659,10 +2659,10 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) what = f'{sole_output!r}' else: # since there are multiple outputs, we log the source that caused the rebuild - what = f'from {sole_output!r}.' + what = f'from {sole_output!r}' if reason: reason = f' (wrapped by meson {reason})' - elem.add_item('DESC', f'Generating {what}{reason}.') + elem.add_item('DESC', f'Generating {what}{reason}') if isinstance(exe, build.BuildTarget): elem.add_dep(self.get_target_filename(exe)) @@ -2943,7 +2943,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) def generate_common_compile_args_per_src_type(self, target: build.BuildTarget) -> dict[str, list[str]]: src_type_to_args = {} - use_pch = self.environment.coredata.options.get(OptionKey('b_pch')) + use_pch = self.target_uses_pch(target) for src_type_str in target.compilers.keys(): compiler = target.compilers[src_type_str] @@ -2984,7 +2984,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) # Include PCH header as first thing as it must be the first one or it will be # ignored by gcc https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100462 - use_pch = self.environment.coredata.options.get(OptionKey('b_pch')) and is_generated != 'pch' + use_pch = self.target_uses_pch(target) and is_generated != 'pch' if use_pch and 'mw' not in compiler.id: commands += self.get_pch_include_args(compiler, target) @@ -3023,7 +3023,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) commands += self.get_compile_debugfile_args(compiler, target, rel_obj) # PCH handling - if self.environment.coredata.options.get(OptionKey('b_pch')): + if self.target_uses_pch(target): pchlist = target.get_pch(compiler.language) else: pchlist = [] @@ -3245,8 +3245,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) commands += linker.get_pie_link_args() elif isinstance(target, build.SharedLibrary): if isinstance(target, build.SharedModule): - options = self.environment.coredata.options - commands += linker.get_std_shared_module_link_args(options) + commands += linker.get_std_shared_module_link_args(target.get_options()) else: commands += linker.get_std_shared_lib_link_args() # All shared libraries are PIC @@ -3528,7 +3527,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) # # We shouldn't check whether we are making a static library, because # in the LTO case we do use a real compiler here. - commands += linker.get_option_link_args(self.environment.coredata.options) + commands += linker.get_option_link_args(target.get_options()) dep_targets = [] dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal)) diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 1962fc2..340c1a5 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -1059,8 +1059,7 @@ class Vs2010Backend(backends.Backend): # Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these # to override all the defaults, but not the per-target compile args. 
for l in file_args.keys(): - opts = self.environment.coredata.options[OptionKey('args', machine=target.for_machine, lang=l)] - file_args[l] += opts.value + file_args[l] += target.get_option(OptionKey('args', machine=target.for_machine, lang=l)) for args in file_args.values(): # This is where Visual Studio will insert target_args, target_defines, # etc, which are added later from external deps (see below). @@ -1355,29 +1354,29 @@ class Vs2010Backend(backends.Backend): if True in ((dep.name == 'openmp') for dep in target.get_external_deps()): ET.SubElement(clconf, 'OpenMPSupport').text = 'true' # CRT type; debug or release - vscrt_type = self.environment.coredata.options[OptionKey('b_vscrt')] - if vscrt_type.value == 'from_buildtype': + vscrt_type = target.get_option(OptionKey('b_vscrt')) + if vscrt_type == 'from_buildtype': if self.buildtype == 'debug': ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' else: ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' - elif vscrt_type.value == 'static_from_buildtype': + elif vscrt_type == 'static_from_buildtype': if self.buildtype == 'debug': ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug' else: ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' - elif vscrt_type.value == 'mdd': + elif vscrt_type == 'mdd': ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' - elif vscrt_type.value == 'mt': + elif vscrt_type == 'mt': # FIXME, wrong ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' - elif vscrt_type.value == 'mtd': + elif vscrt_type == 'mtd': # FIXME, wrong ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug' @@ -1406,12 +1405,12 @@ class Vs2010Backend(backends.Backend): # Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise # cl will give warning D9025: overriding '/Ehs' with cpp_eh value if 'cpp' in target.compilers: - eh = self.environment.coredata.options[OptionKey('eh', machine=target.for_machine, lang='cpp')] - if eh.value == 'a': + eh = target.get_option(OptionKey('eh', machine=target.for_machine, lang='cpp')) + if eh == 'a': ET.SubElement(clconf, 'ExceptionHandling').text = 'Async' - elif eh.value == 's': + elif eh == 's': ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow' - elif eh.value == 'none': + elif eh == 'none': ET.SubElement(clconf, 'ExceptionHandling').text = 'false' else: # 'sc' or 'default' ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync' @@ -1470,8 +1469,7 @@ class Vs2010Backend(backends.Backend): ET.SubElement(link, 'GenerateDebugInformation').text = 'false' if not isinstance(target, build.StaticLibrary): if isinstance(target, build.SharedModule): - options = self.environment.coredata.options - extra_link_args += compiler.get_std_shared_module_link_args(options) + extra_link_args += compiler.get_std_shared_module_link_args(target.get_options()) # Add link args added using add_project_link_arguments() extra_link_args += self.build.get_project_link_args(compiler, target.subproject, target.for_machine) # Add link args added 
using add_global_link_arguments() @@ -1504,7 +1502,7 @@ class Vs2010Backend(backends.Backend): # to be after all internal and external libraries so that unresolved # symbols from those can be found here. This is needed when the # *_winlibs that we want to link to are static mingw64 libraries. - extra_link_args += compiler.get_option_link_args(self.environment.coredata.options) + extra_link_args += compiler.get_option_link_args(target.get_options()) (additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native()) # Add more libraries to be linked if needed @@ -1605,7 +1603,7 @@ class Vs2010Backend(backends.Backend): # /nologo ET.SubElement(link, 'SuppressStartupBanner').text = 'true' # /release - if not self.environment.coredata.get_option(OptionKey('debug')): + if not target.get_option(OptionKey('debug')): ET.SubElement(link, 'SetChecksum').text = 'true' # Visual studio doesn't simply allow the src files of a project to be added with the 'Condition=...' attribute, @@ -1732,7 +1730,7 @@ class Vs2010Backend(backends.Backend): return False pch_sources = {} - if self.environment.coredata.options.get(OptionKey('b_pch')): + if self.target_uses_pch(target): for lang in ['c', 'cpp']: pch = target.get_pch(lang) if not pch: diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index c24074b..bc4ba0b 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -1495,8 +1495,7 @@ class XCodeBackend(backends.Backend): else: raise RuntimeError(o) if isinstance(target, build.SharedModule): - options = self.environment.coredata.options - ldargs += linker.get_std_shared_module_link_args(options) + ldargs += linker.get_std_shared_module_link_args(target.get_options()) elif isinstance(target, build.SharedLibrary): ldargs += linker.get_std_shared_lib_link_args() ldstr = ' '.join(ldargs) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index d9e0480..0a4160f 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -755,8 +755,19 @@ class BuildTarget(Target): self.process_objectlist(objects) self.process_kwargs(kwargs) self.missing_languages = self.process_compilers() - self.link(extract_as_list(kwargs, 'link_with')) - self.link_whole(extract_as_list(kwargs, 'link_whole')) + + # self.link_targets and self.link_whole_targets contains libraries from + # dependencies (see add_deps()). They have not been processed yet because + # we have to call process_compilers() first and we need to process libraries + # from link_with and link_whole first. + # See https://github.com/mesonbuild/meson/pull/11957#issuecomment-1629243208. + link_targets = extract_as_list(kwargs, 'link_with') + self.link_targets + link_whole_targets = extract_as_list(kwargs, 'link_whole') + self.link_whole_targets + self.link_targets.clear() + self.link_whole_targets.clear() + self.link(link_targets) + self.link_whole(link_whole_targets) + if not any([self.sources, self.generated, self.objects, self.link_whole_targets, self.structured_sources, kwargs.pop('_allow_no_sources', False)]): mlog.warning(f'Build target {name} has no sources. 
' @@ -1337,8 +1348,8 @@ class BuildTarget(Target): self.extra_files.extend(f for f in dep.extra_files if f not in self.extra_files) self.add_include_dirs(dep.include_directories, dep.get_include_type()) self.objects.extend(dep.objects) - self.link(dep.libraries) - self.link_whole(dep.whole_libraries) + self.link_targets.extend(dep.libraries) + self.link_whole_targets.extend(dep.whole_libraries) if dep.get_compile_args() or dep.get_link_args(): # Those parts that are external. extpart = dependencies.InternalDependency('undefined', @@ -1398,7 +1409,8 @@ You probably should put it in link_with instead.''') elif t.is_internal(): # When we're a static library and we link_with to an # internal/convenience library, promote to link_whole. - return self.link_whole([t]) + self.link_whole([t]) + continue if not isinstance(t, (Target, CustomTargetIndex)): if isinstance(t, dependencies.ExternalLibrary): raise MesonException(textwrap.dedent('''\ @@ -1704,6 +1716,9 @@ class FileInTargetPrivateDir: def __init__(self, fname: str): self.fname = fname + def __str__(self) -> str: + return self.fname + class FileMaybeInTargetPrivateDir: """Union between 'File' and 'FileInTargetPrivateDir'""" @@ -1724,6 +1739,9 @@ class FileMaybeInTargetPrivateDir: raise RuntimeError('Unreachable code') return self.inner.absolute_path(srcdir, builddir) + def __str__(self) -> str: + return self.fname + class Generator(HoldableObject): def __init__(self, exe: T.Union['Executable', programs.ExternalProgram], arguments: T.List[str], diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index bb1ebb5..7e8c327 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -206,22 +206,19 @@ class _StdCPPLibMixin(CompilerMixinBase): machine = env.machines[self.for_machine] assert machine is not None, 'for mypy' - # We need to determine whether to us libc++ or libstdc++ In some cases - # we know the answer, so we'll hardcode those cases. There are other - # cases where we can't know the answer just by looking at the OS, namely - # on Linux. In that case we have to fallback to manually checking - stdlib: str + # We need to determine whether to use libc++ or libstdc++. We can't + # really know the answer in most cases, only the most likely answer, + # because a user can install things themselves or build custom images. + search_order: T.List[str] = [] if machine.system in {'android', 'darwin', 'dragonfly', 'freebsd', 'netbsd', 'openbsd'}: - stdlib = 'c++' - elif self.find_library('c++', env, []) is not None: - stdlib = 'c++' - elif self.find_library('stdc++', env, []) is not None: - stdlib = 'stdc++' + search_order = ['c++', 'stdc++'] else: - # TODO: maybe a bug exception? - raise MesonException('Could not detect either libc++ or libstdc++ as your C++ stdlib implementation.') - - return search_dirs + [f'-l{stdlib}'] + search_order = ['stdc++', 'c++'] + for lib in search_order: + if self.find_library(lib, env, []) is not None: + return search_dirs + [f'-l{lib}'] + # TODO: maybe a bug exception? 
+ raise MesonException('Could not detect either libc++ or libstdc++ as your C++ stdlib implementation.') class ClangCPPCompiler(_StdCPPLibMixin, ClangCompiler, CPPCompiler): diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py index 5ac19c9..210ec4d 100644 --- a/mesonbuild/compilers/detect.py +++ b/mesonbuild/compilers/detect.py @@ -927,13 +927,19 @@ def detect_cython_compiler(env: 'Environment', for_machine: MachineChoice) -> Co popen_exceptions: T.Dict[str, Exception] = {} for comp in compilers: try: - err = Popen_safe_logged(comp + ['-V'], msg='Detecting compiler via')[2] + _, out, err = Popen_safe_logged(comp + ['-V'], msg='Detecting compiler via') except OSError as e: popen_exceptions[join_args(comp + ['-V'])] = e continue - version = search_version(err) - if 'Cython' in err: + version: T.Optional[str] = None + # 3.0 + if 'Cython' in out: + version = search_version(out) + # older + elif 'Cython' in err: + version = search_version(err) + if version is not None: comp_class = CythonCompiler env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) return comp_class([], comp, version, for_machine, info, is_cross=is_cross) diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py index 13674ed..ef0390e 100644 --- a/mesonbuild/compilers/rust.py +++ b/mesonbuild/compilers/rust.py @@ -217,7 +217,7 @@ class RustCompiler(Compiler): def get_assert_args(self, disable: bool) -> T.List[str]: action = "no" if disable else "yes" - return ['-C', f'debug-assertions={action}'] + return ['-C', f'debug-assertions={action}', '-C', 'overflow-checks=no'] class ClippyRustCompiler(RustCompiler): diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 4d087f8..a6178f0 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -59,7 +59,7 @@ if T.TYPE_CHECKING: # # Pip requires that RCs are named like this: '0.1.0.rc1' # But the corresponding Git tag needs to be '0.1.0rc1' -version = '1.2.0.rc2' +version = '1.2.99' # The next stable version when we are in dev. This is used to allow projects to # require meson version >=1.2.0 when using 1.1.99. 
FeatureNew won't warn when diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py index c9ea880..52737c4 100644 --- a/mesonbuild/interpreter/compiler.py +++ b/mesonbuild/interpreter/compiler.py @@ -448,9 +448,12 @@ class CompilerHolder(ObjectHolder['Compiler']): def compiles_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool: code = args[0] if isinstance(code, mesonlib.File): + if code.is_built: + FeatureNew.single_use('compiler.compiles with file created at setup time', '1.2.0', self.subproject, + 'It was broken and either errored or returned false.', self.current_node) self.interpreter.add_build_def_file(code) code = mesonlib.File.from_absolute_file( - code.rel_to_builddir(self.environment.source_dir)) + code.absolute_path(self.environment.source_dir, self.environment.build_dir)) testname = kwargs['name'] extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args']) deps, msg = self._determine_dependencies(kwargs['dependencies'], endl=None) @@ -472,9 +475,12 @@ class CompilerHolder(ObjectHolder['Compiler']): code = args[0] compiler = None if isinstance(code, mesonlib.File): + if code.is_built: + FeatureNew.single_use('compiler.links with file created at setup time', '1.2.0', self.subproject, + 'It was broken and either errored or returned false.', self.current_node) self.interpreter.add_build_def_file(code) code = mesonlib.File.from_absolute_file( - code.rel_to_builddir(self.environment.source_dir)) + code.absolute_path(self.environment.source_dir, self.environment.build_dir)) suffix = code.suffix if suffix not in self.compiler.file_suffixes: for_machine = self.compiler.for_machine diff --git a/mesonbuild/interpreter/primitives/string.py b/mesonbuild/interpreter/primitives/string.py index d4daab9..b825128 100644 --- a/mesonbuild/interpreter/primitives/string.py +++ b/mesonbuild/interpreter/primitives/string.py @@ -117,6 +117,7 @@ class StringHolder(ObjectHolder[str]): return self.held_object.join(args[0]) @noKwargs + @FeatureNew('str.replace', '0.58.0') @typed_pos_args('str.replace', str, str) def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str: return self.held_object.replace(args[0], args[1]) @@ -129,9 +130,12 @@ class StringHolder(ObjectHolder[str]): @noKwargs @typed_pos_args('str.strip', optargs=[str]) def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str: + if args[0]: + FeatureNew.single_use('str.strip with a positional argument', '0.43.0', self.subproject, location=self.current_node) return self.held_object.strip(args[0]) @noKwargs + @FeatureNew('str.substring', '0.56.0') @typed_pos_args('str.substring', optargs=[int, int]) def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str: start = args[0] if args[0] is not None else 0 diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py index 60be10f..e09a28e 100644 --- a/mesonbuild/linkers/detect.py +++ b/mesonbuild/linkers/detect.py @@ -194,7 +194,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty _, newo, newerr = Popen_safe_logged(cmd, msg='Detecting Apple linker via') for line in newerr.split('\n'): - if 'PROJECT:ld' in line: + if 'PROJECT:ld' in line or 'PROJECT:dyld' in line: v = line.split('-')[1] break else: diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py index d37c832..c6048f9 100644 --- a/mesonbuild/modules/cmake.py +++ 
b/mesonbuild/modules/cmake.py @@ -391,7 +391,8 @@ class CmakeModule(ExtensionModule): if not os.path.isabs(abs_install_dir): abs_install_dir = os.path.join(prefix, install_dir) - PACKAGE_RELATIVE_PATH = os.path.relpath(prefix, abs_install_dir) + # path used in cmake scripts are POSIX even on Windows + PACKAGE_RELATIVE_PATH = pathlib.PurePath(os.path.relpath(prefix, abs_install_dir)).as_posix() extra = '' if re.match('^(/usr)?/lib(64)?/.+', abs_install_dir): extra = PACKAGE_INIT_EXT.replace('@absInstallDir@', abs_install_dir) diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index 2056c0a..f9c7c57 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -97,6 +97,7 @@ class WindowsModule(ExtensionModule): for (arg, match, rc_type) in [ ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc), + ('/?', 'LLVM Resource Converter.*$', ResourceCompilerType.rc), ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres), ('--version', '^.*Wine Resource Compiler.*$', ResourceCompilerType.wrc), ]: diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py index db9db85..3ecfba1 100755 --- a/mesonbuild/msubprojects.py +++ b/mesonbuild/msubprojects.py @@ -14,6 +14,7 @@ import tarfile import zipfile from . import mlog +from .ast import IntrospectionInterpreter, AstIDGenerator from .mesonlib import quiet_git, GitException, Popen_safe, MesonException, windows_proof_rmtree from .wrap.wrap import (Resolver, WrapException, ALL_TYPES, PackageDefinition, parse_patch_url, update_wrap_file, get_releases) @@ -685,15 +686,20 @@ def add_arguments(parser: argparse.ArgumentParser) -> None: p.set_defaults(subprojects_func=Runner.packagefiles) def run(options: 'Arguments') -> int: - src_dir = os.path.relpath(os.path.realpath(options.sourcedir)) - if not os.path.isfile(os.path.join(src_dir, 'meson.build')): - mlog.error('Directory', mlog.bold(src_dir), 'does not seem to be a Meson source directory.') + source_dir = os.path.relpath(os.path.realpath(options.sourcedir)) + if not os.path.isfile(os.path.join(source_dir, 'meson.build')): + mlog.error('Directory', mlog.bold(source_dir), 'does not seem to be a Meson source directory.') return 1 - subprojects_dir = os.path.join(src_dir, 'subprojects') - if not os.path.isdir(subprojects_dir): - mlog.log('Directory', mlog.bold(src_dir), 'does not seem to have subprojects.') + with mlog.no_logging(): + intr = IntrospectionInterpreter(source_dir, '', 'none', visitors = [AstIDGenerator()]) + intr.load_root_meson_file() + intr.sanity_check_ast() + intr.parse_project() + subproject_dir = intr.subproject_dir + if not os.path.isdir(os.path.join(source_dir, subproject_dir)): + mlog.log('Directory', mlog.bold(source_dir), 'does not seem to have subprojects.') return 0 - r = Resolver(src_dir, 'subprojects', wrap_frontend=True, allow_insecure=options.allow_insecure, silent=True) + r = Resolver(source_dir, subproject_dir, wrap_frontend=True, allow_insecure=options.allow_insecure, silent=True) if options.subprojects: wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects] else: @@ -714,7 +720,7 @@ def run(options: 'Arguments') -> int: pre_func(options) logger = Logger(len(wraps)) for wrap in wraps: - dirname = Path(subprojects_dir, wrap.directory).as_posix() + dirname = Path(subproject_dir, wrap.directory).as_posix() runner = Runner(logger, r, wrap, dirname, options) task = loop.run_in_executor(executor, runner.run) tasks.append(task) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py 
index 63041bd..eb56c42 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -72,6 +72,26 @@ GNU_ERROR_RETURNCODE = 99 # Exit if 3 Ctrl-C's are received within one second MAX_CTRLC = 3 +# Define unencodable xml characters' regex for replacing them with their +# printable representation +UNENCODABLE_XML_UNICHRS: T.List[T.Tuple[int, int]] = [ + (0x00, 0x08), (0x0B, 0x0C), (0x0E, 0x1F), (0x7F, 0x84), + (0x86, 0x9F), (0xFDD0, 0xFDEF), (0xFFFE, 0xFFFF)] +# Not narrow build +if sys.maxunicode >= 0x10000: + UNENCODABLE_XML_UNICHRS.extend([ + (0x1FFFE, 0x1FFFF), (0x2FFFE, 0x2FFFF), + (0x3FFFE, 0x3FFFF), (0x4FFFE, 0x4FFFF), + (0x5FFFE, 0x5FFFF), (0x6FFFE, 0x6FFFF), + (0x7FFFE, 0x7FFFF), (0x8FFFE, 0x8FFFF), + (0x9FFFE, 0x9FFFF), (0xAFFFE, 0xAFFFF), + (0xBFFFE, 0xBFFFF), (0xCFFFE, 0xCFFFF), + (0xDFFFE, 0xDFFFF), (0xEFFFE, 0xEFFFF), + (0xFFFFE, 0xFFFFF), (0x10FFFE, 0x10FFFF)]) +UNENCODABLE_XML_CHR_RANGES = [fr'{chr(low)}-{chr(high)}' for (low, high) in UNENCODABLE_XML_UNICHRS] +UNENCODABLE_XML_CHRS_RE = re.compile('([' + ''.join(UNENCODABLE_XML_CHR_RANGES) + '])') + + def is_windows() -> bool: platname = platform.system().lower() return platname == 'windows' @@ -1148,14 +1168,21 @@ class TestRunRust(TestRun): TestRun.PROTOCOL_TO_CLASS[TestProtocol.RUST] = TestRunRust +# Check unencodable characters in xml output and replace them with +# their printable representation +def replace_unencodable_xml_chars(original_str: str) -> str: + # [1:-1] is needed for removing `'` characters from both start and end + # of the string + replacement_lambda = lambda illegal_chr: repr(illegal_chr.group())[1:-1] + return UNENCODABLE_XML_CHRS_RE.sub(replacement_lambda, original_str) def decode(stream: T.Union[None, bytes]) -> str: if stream is None: return '' try: - return stream.decode('utf-8') + return replace_unencodable_xml_chars(stream.decode('utf-8')) except UnicodeDecodeError: - return stream.decode('iso-8859-1', errors='ignore') + return replace_unencodable_xml_chars(stream.decode('iso-8859-1', errors='ignore')) async def read_decode(reader: asyncio.StreamReader, queue: T.Optional['asyncio.Queue[T.Optional[str]]'], @@ -1606,9 +1633,12 @@ class TestHarness: # happen before rebuild_deps(), because we need the correct list of # tests and their dependencies to compute if not self.options.no_rebuild: - ret = subprocess.run(self.ninja + ['build.ninja']).returncode - if ret != 0: - raise TestException(f'Could not configure {self.options.wd!r}') + teststdo = subprocess.run(self.ninja + ['-n', 'build.ninja'], capture_output=True).stdout + if b'ninja: no work to do.' not in teststdo and b'samu: nothing to do' not in teststdo: + stdo = sys.stderr if self.options.list else sys.stdout + ret = subprocess.run(self.ninja + ['build.ninja'], stdout=stdo.fileno()) + if ret.returncode != 0: + raise TestException(f'Could not configure {self.options.wd!r}') self.build_data = build.load(os.getcwd()) if not self.options.setup: @@ -1903,9 +1933,9 @@ class TestHarness: # succeed on an invalid pattern. 
raise MesonException(f'{arg} test name does not match any test') - def get_tests(self) -> T.List[TestSerialisation]: + def get_tests(self, errorfile: T.Optional[T.IO] = sys.stdout) -> T.List[TestSerialisation]: if not self.tests: - print('No tests defined.') + print('No tests defined.', file=errorfile) return [] tests = [t for t in self.tests if self.test_suitable(t)] @@ -1913,7 +1943,7 @@ class TestHarness: tests = list(self.tests_from_args(tests)) if not tests: - print('No suitable tests defined.') + print('No suitable tests defined.', file=errorfile) return [] return tests @@ -2071,7 +2101,7 @@ class TestHarness: await l.finish(self) def list_tests(th: TestHarness) -> bool: - tests = th.get_tests() + tests = th.get_tests(errorfile=sys.stderr) for t in tests: print(th.get_pretty_suite(t)) return not tests diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py index bbd5029..d78fadd 100644 --- a/mesonbuild/utils/universal.py +++ b/mesonbuild/utils/universal.py @@ -703,15 +703,23 @@ def darwin_get_object_archs(objpath: str) -> 'ImmutableListProtocol[str]': mlog.debug(f'lipo {objpath}: {stderr}') return None stdo = stdo.rsplit(': ', 1)[1] + # Convert from lipo-style archs to meson-style CPUs - stdo = stdo.replace('i386', 'x86') - stdo = stdo.replace('arm64', 'aarch64') - stdo = stdo.replace('ppc7400', 'ppc') - stdo = stdo.replace('ppc970', 'ppc') + map_arch = { + 'i386': 'x86', + 'arm64': 'aarch64', + 'arm64e': 'aarch64', + 'ppc7400': 'ppc', + 'ppc970': 'ppc', + } + lipo_archs = stdo.split() + meson_archs = [map_arch.get(lipo_arch, lipo_arch) for lipo_arch in lipo_archs] + # Add generic name for armv7 and armv7s if 'armv7' in stdo: - stdo += ' arm' - return stdo.split() + meson_archs.append('arm') + + return meson_archs def windows_detect_native_arch() -> str: """ diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 9e423ea..2b0a0ba 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -54,7 +54,7 @@ try: except ImportError: has_ssl = False -REQ_TIMEOUT = 600.0 +REQ_TIMEOUT = 30.0 WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com' ALL_TYPES = ['file', 'git', 'hg', 'svn'] diff --git a/test cases/cmake/26 cmake package prefix dir/cmakePackagePrefixDirConfig.cmake.in b/test cases/cmake/26 cmake package prefix dir/cmakePackagePrefixDirConfig.cmake.in new file mode 100644 index 0000000..a18cb7d --- /dev/null +++ b/test cases/cmake/26 cmake package prefix dir/cmakePackagePrefixDirConfig.cmake.in @@ -0,0 +1 @@ +@PACKAGE_INIT@ diff --git a/test cases/cmake/26 cmake package prefix dir/meson.build b/test cases/cmake/26 cmake package prefix dir/meson.build new file mode 100644 index 0000000..851371b --- /dev/null +++ b/test cases/cmake/26 cmake package prefix dir/meson.build @@ -0,0 +1,19 @@ +project('cmakePackagePrefixDir', 'c', version: '1.0.0') + +cmake = import('cmake') + +cmake.configure_package_config_file( + name: 'cmakePackagePrefixDir', + input: 'cmakePackagePrefixDirConfig.cmake.in', + configuration: configuration_data(), +) + +# NOTE: can't use fs.read because cmakePackagePrefixDirConfig.cmake is in build_dir +python = find_program('python3') +lines = run_command(python, '-c', + '[print(line, end="") for line in open("@0@")]'.format(meson.current_build_dir() / 'cmakePackagePrefixDirConfig.cmake'), check : true, +).stdout().split('\n') + +message(lines) + +assert(lines[5] == 'get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/../../.." 
ABSOLUTE)') diff --git a/test cases/cmake/26 cmake package prefix dir/test.json b/test cases/cmake/26 cmake package prefix dir/test.json new file mode 100644 index 0000000..d6a9505 --- /dev/null +++ b/test cases/cmake/26 cmake package prefix dir/test.json @@ -0,0 +1,5 @@ +{ + "installed": [ + {"type": "file", "file": "usr/lib/cmake/cmakePackagePrefixDir/cmakePackagePrefixDirConfig.cmake"} + ] +} diff --git a/test cases/common/100 postconf with args/postconf.py b/test cases/common/100 postconf with args/postconf.py index cef7f79..af6abe4 100644 --- a/test cases/common/100 postconf with args/postconf.py +++ b/test cases/common/100 postconf with args/postconf.py @@ -12,7 +12,7 @@ template = '''#pragma once input_file = os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat') output_file = os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h') -with open(input_file) as f: +with open(input_file, encoding='utf-8') as f: data = f.readline().strip() -with open(output_file, 'w') as f: +with open(output_file, 'w', encoding='utf-8') as f: f.write(template.format(data, sys.argv[1], sys.argv[2])) diff --git a/test cases/common/28 try compile/meson.build b/test cases/common/28 try compile/meson.build index cb41e1d..3480d1d 100644 --- a/test cases/common/28 try compile/meson.build +++ b/test cases/common/28 try compile/meson.build @@ -8,20 +8,27 @@ breakcode = '''#include<nonexisting.h> void func(void) { printf("This won't work.\n"); } ''' -foreach compiler : [meson.get_compiler('c'), meson.get_compiler('cpp')] - if compiler.compiles(code, name : 'should succeed') == false +foreach lang : ['c', 'cpp'] + compiler = meson.get_compiler(lang) + + if compiler.compiles(code, name : 'code should succeed') == false + error('Compiler ' + compiler.get_id() + ' is fail.') + endif + + if compiler.compiles(files('valid.c'), name : 'file should succeed') == false error('Compiler ' + compiler.get_id() + ' is fail.') endif - if compiler.compiles(files('valid.c'), name : 'should succeed') == false + copied = configure_file(input: 'valid.c', output: lang + '-valid-copy.c', copy: true) + if compiler.compiles(copied, name : 'built file should succeed') == false error('Compiler ' + compiler.get_id() + ' is fail.') endif - if compiler.compiles(breakcode, name : 'should fail') + if compiler.compiles(breakcode, name : 'code should fail') error('Compiler ' + compiler.get_id() + ' returned true on broken code.') endif - if compiler.compiles(files('invalid.c'), name : 'should fail') + if compiler.compiles(files('invalid.c'), name : 'file should fail') error('Compiler ' + compiler.get_id() + ' returned true on broken code.') endif endforeach diff --git a/test cases/common/99 postconf/postconf.py b/test cases/common/99 postconf/postconf.py index 950c706..8cf576c 100644 --- a/test cases/common/99 postconf/postconf.py +++ b/test cases/common/99 postconf/postconf.py @@ -10,7 +10,7 @@ template = '''#pragma once input_file = os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat') output_file = os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h') -with open(input_file) as f: +with open(input_file, encoding='utf-8') as f: data = f.readline().strip() -with open(output_file, 'w') as f: +with open(output_file, 'w', encoding='utf-8') as f: f.write(template.format(data)) diff --git a/test cases/d/13 declare dep/meson.build b/test cases/d/13 declare dep/meson.build index eef9816..2293934 100644 --- a/test cases/d/13 declare dep/meson.build +++ b/test cases/d/13 declare dep/meson.build @@ -3,7 +3,7 @@ project('meson-d-sample', 'd', ) 
my_dep = declare_dependency( - d_module_versions: ['TestVersion', 1], + d_module_versions: ['TestVersion'], d_import_dirs: include_directories('views'), ) diff --git a/test cases/d/9 features/meson.build b/test cases/d/9 features/meson.build index 06f0341..50059f1 100644 --- a/test cases/d/9 features/meson.build +++ b/test cases/d/9 features/meson.build @@ -1,5 +1,16 @@ project('D Features', 'd', default_options : ['debug=false']) +dc = meson.get_compiler('d') + +# GDC 13 hard errors if options are given number values. +# https://github.com/mesonbuild/meson/pull/11996 + +if dc.get_id() == 'gcc' and dc.version().version_compare('>=13') + number_options_supported = false +else + number_options_supported = true +endif + # ONLY FOR BACKWARDS COMPATIBILITY. # DO NOT DO THIS IN NEW CODE! # USE include_directories() INSTEAD OF BUILDING @@ -46,12 +57,13 @@ e_test = executable('dapp_test', test('dapp_test', e_test) # test version level -e_version_int = executable('dapp_version_int', - test_src, - d_import_dirs: [data_dir], - d_module_versions: ['With_VersionInteger', 3], -) -test('dapp_version_int_t', e_version_int, args: ['debug']) +if number_options_supported + e_version_int = executable('dapp_version_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_VersionInteger', 3], + ) + test('dapp_version_int_t', e_version_int, args: ['debug']) # test version level failure e_version_int_fail = executable('dapp_version_int_fail', @@ -60,6 +72,7 @@ e_version_int_fail = executable('dapp_version_int_fail', d_module_versions: ['With_VersionInteger', 2], ) test('dapp_version_int_t_fail', e_version_int_fail, args: ['debug'], should_fail: true) +endif # test debug conditions: disabled e_no_debug = executable('dapp_no_debug', @@ -69,23 +82,34 @@ e_no_debug = executable('dapp_no_debug', ) test('dapp_no_debug_t_fail', e_no_debug, args: ['debug'], should_fail: true) -# test debug conditions: enabled -e_debug = executable('dapp_debug', - test_src, - d_import_dirs: [data_dir], - d_module_versions: ['With_Debug'], - d_debug: 1, -) -test('dapp_debug_t', e_debug, args: ['debug']) +if number_options_supported + # test debug conditions: enabled + e_debug = executable('dapp_debug', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_Debug'], + d_debug: 1, + ) + test('dapp_debug_t', e_debug, args: ['debug']) -# test debug conditions: integer -e_debug_int = executable('dapp_debug_int', - test_src, - d_import_dirs: [data_dir], - d_module_versions: ['With_DebugInteger'], - d_debug: 3, -) -test('dapp_debug_int_t', e_debug_int, args: ['debug']) + # test debug conditions: integer + e_debug_int = executable('dapp_debug_int', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugInteger'], + d_debug: 3, + ) + test('dapp_debug_int_t', e_debug_int, args: ['debug']) + + # test with all debug conditions at once, and with redundant values + e_debug_all = executable('dapp_debug_all', + test_src, + d_import_dirs: [data_dir], + d_module_versions: ['With_DebugAll'], + d_debug: ['4', 'DebugIdentifier', 2, 'DebugIdentifierUnused'], + ) + test('dapp_debug_all_t', e_debug_all, args: ['debug']) +endif # test debug conditions: identifier e_debug_ident = executable('dapp_debug_ident', @@ -95,12 +119,3 @@ e_debug_ident = executable('dapp_debug_ident', d_debug: 'DebugIdentifier', ) test('dapp_debug_ident_t', e_debug_ident, args: ['debug']) - -# test with all debug conditions at once, and with redundant values -e_debug_all = executable('dapp_debug_all', - test_src, - d_import_dirs: 
[data_dir], - d_module_versions: ['With_DebugAll'], - d_debug: ['4', 'DebugIdentifier', 2, 'DebugIdentifierUnused'], -) -test('dapp_debug_all_t', e_debug_all, args: ['debug']) diff --git a/test cases/rust/5 polyglot static/clib.c b/test cases/rust/5 polyglot static/clib.c index 366dbe5..84749de 100644 --- a/test cases/rust/5 polyglot static/clib.c +++ b/test cases/rust/5 polyglot static/clib.c @@ -1,6 +1,7 @@ #include <stdio.h> +#include <stdint.h> -void hello_from_rust(void); +int32_t hello_from_rust(const int32_t a, const int32_t b); static void hello_from_c(void) { printf("Hello from C!\n"); @@ -8,5 +9,6 @@ static void hello_from_c(void) { void hello_from_both(void) { hello_from_c(); - hello_from_rust(); + if (hello_from_rust(2, 3) == 5) + printf("Hello from Rust!\n"); } diff --git a/test cases/rust/5 polyglot static/meson.build b/test cases/rust/5 polyglot static/meson.build index 22c0cd0..5d1f023 100644 --- a/test cases/rust/5 polyglot static/meson.build +++ b/test cases/rust/5 polyglot static/meson.build @@ -17,3 +17,10 @@ e = executable('prog', 'prog.c', link_with : l, install : true) test('polyglottest', e) + +# Create a version that has overflow-checks on, then run a test to ensure that +# the overflow-checks is larger than the other version by some ammount +r2 = static_library('stuff2', 'stuff.rs', rust_crate_type : 'staticlib', rust_args : ['-C', 'overflow-checks=on']) +l2 = static_library('clib2', 'clib.c') +e2 = executable('prog2', 'prog.c', link_with : [r2, l2]) +test('overflow-checks', find_program('overflow_size_checks.py'), args : [e, e2]) diff --git a/test cases/rust/5 polyglot static/overflow_size_checks.py b/test cases/rust/5 polyglot static/overflow_size_checks.py new file mode 100755 index 0000000..9a6a64a --- /dev/null +++ b/test cases/rust/5 polyglot static/overflow_size_checks.py @@ -0,0 +1,29 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2023 Intel Corporation + +from __future__ import annotations +import argparse +import os +import typing as T + +if T.TYPE_CHECKING: + class Arguments(T.Protocol): + checks_off: str + checks_on: str + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument('checks_off') + parser.add_argument('checks_on') + args: Arguments = parser.parse_args() + + off = os.stat(args.checks_off).st_size + on = os.stat(args.checks_on).st_size + + assert on > off, f'Expected binary built with overflow-checks to be bigger, but it was smaller. 
with: "{on}"B, without: "{off}"B' + + +if __name__ == "__main__": + main() diff --git a/test cases/rust/5 polyglot static/stuff.rs b/test cases/rust/5 polyglot static/stuff.rs index 3777ae8..c312441 100644 --- a/test cases/rust/5 polyglot static/stuff.rs +++ b/test cases/rust/5 polyglot static/stuff.rs @@ -1,6 +1,4 @@ -#![crate_name = "stuff"] - #[no_mangle] -pub extern "C" fn hello_from_rust() { - println!("Hello from Rust!"); +pub extern "C" fn hello_from_rust(a: i32, b: i32) -> i32 { + a + b } diff --git a/test cases/unit/110 replace unencodable xml chars/meson.build b/test cases/unit/110 replace unencodable xml chars/meson.build new file mode 100644 index 0000000..2e6b1b7 --- /dev/null +++ b/test cases/unit/110 replace unencodable xml chars/meson.build @@ -0,0 +1,4 @@ +project('replace unencodable xml chars') + +test_script = find_program('script.py') +test('main', test_script) diff --git a/test cases/unit/110 replace unencodable xml chars/script.py b/test cases/unit/110 replace unencodable xml chars/script.py new file mode 100644 index 0000000..2f2d4d6 --- /dev/null +++ b/test cases/unit/110 replace unencodable xml chars/script.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 + +import sys + +# Print base string(\nHello Meson\n) to see valid chars are not replaced +print('\n\x48\x65\x6c\x6c\x6f\x20\x4d\x65\x73\x6f\x6e\n') +# Print invalid input from all known unencodable chars +print( + '\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f\x10\x11' + '\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f') + +# Cover for potential encoding issues +try: + print( + '\x80\x81\x82\x83\x84\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f' + '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e' + '\x9f\ufdd0\ufdd1\ufdd2\ufdd3\ufdd4\ufdd5\ufdd6\ufdd7\ufdd8' + '\ufdd9\ufdda\ufddb\ufddc\ufddd\ufdde\ufddf\ufde0\ufde1' + '\ufde2\ufde3\ufde4\ufde5\ufde6\ufde7\ufde8\ufde9\ufdea' + '\ufdeb\ufdec\ufded\ufdee\ufdef\ufffe\uffff') +except: + pass + +# Cover for potential encoding issues +try: + if sys.maxunicode >= 0x10000: + print( + '\U0001fffe\U0001ffff\U0002fffe\U0002ffff' + '\U0003fffe\U0003ffff\U0004fffe\U0004ffff' + '\U0005fffe\U0005ffff\U0006fffe\U0006ffff' + '\U0007fffe\U0007ffff\U0008fffe\U0008ffff' + '\U0009fffe\U0009ffff\U000afffe\U000affff' + '\U000bfffe\U000bffff\U000cfffe\U000cffff' + '\U000dfffe\U000dffff\U000efffe\U000effff' + '\U000ffffe\U000fffff\U0010fffe\U0010ffff') +except: + pass diff --git a/test cases/unit/113 complex link cases/meson.build b/test cases/unit/113 complex link cases/meson.build index d3387c2..04e6281 100644 --- a/test cases/unit/113 complex link cases/meson.build +++ b/test cases/unit/113 complex link cases/meson.build @@ -38,3 +38,23 @@ s1 = static_library('t6-s1', 's1.c') s2 = static_library('t6-s2', 's2.c', link_with: s1, install: true) s3 = static_library('t6-s3', 's3.c', link_with: s2, install: true) e = executable('t6-e1', 'main.c', link_with: s3) + +# Regression test: s1 gets promoted to link_whole and that used to make all other +# libraries in the list (s2) to be ignored. +# Executable only needs to link with s3. +# See https://github.com/mesonbuild/meson/issues/11956. +s1 = static_library('t7-s1', 's1.c') +s2 = static_library('t7-s2', 's2.c') +s3 = static_library('t7-s3', 's3.c', link_with: [s1, s2], install: true) +e = executable('t7-e1', 'main.c', link_with: s3) + +# Regression test: s3 should come last in the linker command. 
This seems to be +# required for at least backward compatibility reasons: +# https://github.com/mesonbuild/meson/pull/11957#issuecomment-1629243208 +s1 = static_library('t8-s1', 's1.c') +s2 = static_library('t8-s2', 's2.c') +s3 = static_library('t8-s3', 's3.c') +e = executable('t8-e1', 'main.c', + link_with: [s1, s2], + dependencies: declare_dependency(link_with: s3), +) diff --git a/test cases/unit/39 external, internal library rpath/built library/foo.py b/test cases/unit/39 external, internal library rpath/built library/foo.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/test cases/unit/39 external, internal library rpath/built library/foo.py diff --git a/test cases/unit/39 external, internal library rpath/built library/meson.build b/test cases/unit/39 external, internal library rpath/built library/meson.build index 07fe7bb..6399cdc 100644 --- a/test cases/unit/39 external, internal library rpath/built library/meson.build +++ b/test cases/unit/39 external, internal library rpath/built library/meson.build @@ -2,6 +2,8 @@ project('built library', 'c') cc = meson.get_compiler('c') +import('python').find_installation().install_sources('foo.py') + if host_machine.system() != 'cygwin' # bar_in_system has undefined symbols, but still must be found bar_system_dep = cc.find_library('bar_in_system') diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 438e4fe..b4bf371 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -59,6 +59,7 @@ from mesonbuild.linkers import linkers from mesonbuild.dependencies.pkgconfig import PkgConfigDependency from mesonbuild.build import Target, ConfigurationData, Executable, SharedLibrary, StaticLibrary +from mesonbuild import mtest import mesonbuild.modules.pkgconfig from mesonbuild.scripts import destdir_join @@ -398,6 +399,56 @@ class AllPlatformTests(BasePlatformTests): self.assertTrue(compdb[3]['file'].endswith("libfile4.c")) # FIXME: We don't have access to the linker command + def test_replace_unencodable_xml_chars(self): + ''' + Test that unencodable xml chars are replaced with their + printable representation + https://github.com/mesonbuild/meson/issues/9894 + ''' + # Create base string(\nHello Meson\n) to see valid chars are not replaced + base_string_invalid = '\n\x48\x65\x6c\x6c\x6f\x20\x4d\x65\x73\x6f\x6e\n' + base_string_valid = '\nHello Meson\n' + # Create invalid input from all known unencodable chars + invalid_string = ( + '\x00\x01\x02\x03\x04\x05\x06\x07\x08\x0b\x0c\x0e\x0f\x10\x11' + '\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f\x7f' + '\x80\x81\x82\x83\x84\x86\x87\x88\x89\x8a\x8b\x8c\x8d\x8e\x8f' + '\x90\x91\x92\x93\x94\x95\x96\x97\x98\x99\x9a\x9b\x9c\x9d\x9e' + '\x9f\ufdd0\ufdd1\ufdd2\ufdd3\ufdd4\ufdd5\ufdd6\ufdd7\ufdd8' + '\ufdd9\ufdda\ufddb\ufddc\ufddd\ufdde\ufddf\ufde0\ufde1' + '\ufde2\ufde3\ufde4\ufde5\ufde6\ufde7\ufde8\ufde9\ufdea' + '\ufdeb\ufdec\ufded\ufdee\ufdef\ufffe\uffff') + if sys.maxunicode >= 0x10000: + invalid_string = invalid_string + ( + '\U0001fffe\U0001ffff\U0002fffe\U0002ffff' + '\U0003fffe\U0003ffff\U0004fffe\U0004ffff' + '\U0005fffe\U0005ffff\U0006fffe\U0006ffff' + '\U0007fffe\U0007ffff\U0008fffe\U0008ffff' + '\U0009fffe\U0009ffff\U000afffe\U000affff' + '\U000bfffe\U000bffff\U000cfffe\U000cffff' + '\U000dfffe\U000dffff\U000efffe\U000effff' + '\U000ffffe\U000fffff\U0010fffe\U0010ffff') + + valid_string = base_string_valid + repr(invalid_string)[1:-1] + base_string_valid + invalid_string = base_string_invalid + invalid_string + 
base_string_invalid + broken_xml_stream = invalid_string.encode() + decoded_broken_stream = mtest.decode(broken_xml_stream) + self.assertEqual(decoded_broken_stream, valid_string) + + def test_replace_unencodable_xml_chars_unit(self): + ''' + Test that unencodable xml chars are replaced with their + printable representation + https://github.com/mesonbuild/meson/issues/9894 + ''' + if not shutil.which('xmllint'): + raise SkipTest('xmllint not installed') + testdir = os.path.join(self.unit_test_dir, '110 replace unencodable xml chars') + self.init(testdir) + self.run_tests() + junit_xml_logs = Path(self.logdir, 'testlog.junit.xml') + subprocess.run(['xmllint', junit_xml_logs], check=True) + def test_run_target_files_path(self): ''' Test that run_targets are run from the correct directory @@ -2667,7 +2718,7 @@ class AllPlatformTests(BasePlatformTests): def test_native_dep_pkgconfig(self): testdir = os.path.join(self.unit_test_dir, '45 native dep pkgconfig var') - with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile: + with tempfile.NamedTemporaryFile(mode='w', delete=False, encoding='utf-8') as crossfile: crossfile.write(textwrap.dedent( '''[binaries] pkgconfig = '{}' @@ -2694,7 +2745,7 @@ class AllPlatformTests(BasePlatformTests): def test_pkg_config_libdir(self): testdir = os.path.join(self.unit_test_dir, '45 native dep pkgconfig var') - with tempfile.NamedTemporaryFile(mode='w', delete=False) as crossfile: + with tempfile.NamedTemporaryFile(mode='w', delete=False, encoding='utf-8') as crossfile: crossfile.write(textwrap.dedent( '''[binaries] pkgconfig = 'pkg-config' @@ -4145,7 +4196,7 @@ class AllPlatformTests(BasePlatformTests): cmd = self.meson_command + ['devenv', '-C', self.builddir, '--dump', fname] o = self._run(cmd) self.assertEqual(o, '') - o = Path(fname).read_text() + o = Path(fname).read_text(encoding='utf-8') expected = os.pathsep.join(['/prefix', '$TEST_C', '/suffix']) self.assertIn(f'TEST_C="{expected}"', o) self.assertIn('export TEST_C', o) @@ -4646,7 +4697,7 @@ class AllPlatformTests(BasePlatformTests): testdir = os.path.join(self.unit_test_dir, '102 rlib linkage') gen_file = os.path.join(testdir, 'lib.rs') - with open(gen_file, 'w') as f: + with open(gen_file, 'w', encoding='utf-8') as f: f.write(template.format(0)) self.addCleanup(windows_proof_rm, gen_file) @@ -4654,7 +4705,7 @@ class AllPlatformTests(BasePlatformTests): self.build() self.run_tests() - with open(gen_file, 'w') as f: + with open(gen_file, 'w', encoding='utf-8') as f: f.write(template.format(39)) self.build() diff --git a/unittests/darwintests.py b/unittests/darwintests.py index 254b3d0..1f17760 100644 --- a/unittests/darwintests.py +++ b/unittests/darwintests.py @@ -148,3 +148,8 @@ class DarwinTests(BasePlatformTests): testdir = os.path.join(self.objcpp_test_dir, '1 simple') self.init(testdir) self.assertIn('-std=c++14', self.get_compdb()[0]['command']) + + def test_darwin_get_object_archs(self): + from mesonbuild.mesonlib import darwin_get_object_archs + archs = darwin_get_object_archs('/System/Library/CoreServices/Encodings/libSymbolConverter.dylib') + self.assertEqual(archs, ['x86_64', 'aarch64']) diff --git a/unittests/internaltests.py b/unittests/internaltests.py index f22adb2..672a5a0 100644 --- a/unittests/internaltests.py +++ b/unittests/internaltests.py @@ -453,7 +453,7 @@ class InternalTests(unittest.TestCase): # Can not be used as context manager because we need to # open it a second time and this is not possible on # Windows. 
- configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False) + configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False, encoding='utf-8') configfilename = configfile.name config.write(configfile) configfile.flush() @@ -469,7 +469,7 @@ class InternalTests(unittest.TestCase): 'needs_exe_wrapper': 'true' if desired_value else 'false' } - configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False) + configfile = tempfile.NamedTemporaryFile(mode='w+', delete=False, encoding='utf-8') configfilename = configfile.name config.write(configfile) configfile.close() diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py index 4bb6809..37388740 100644 --- a/unittests/linuxliketests.py +++ b/unittests/linuxliketests.py @@ -1024,7 +1024,7 @@ class LinuxlikeTests(BasePlatformTests): def test_cross_find_program(self): testdir = os.path.join(self.unit_test_dir, '11 cross prog') - crossfile = tempfile.NamedTemporaryFile(mode='w') + crossfile = tempfile.NamedTemporaryFile(mode='w', encoding='utf-8') print(os.path.join(testdir, 'some_cross_tool.py')) tool_path = os.path.join(testdir, 'some_cross_tool.py') @@ -1517,14 +1517,14 @@ class LinuxlikeTests(BasePlatformTests): def test_identity_cross(self): testdir = os.path.join(self.unit_test_dir, '60 identity cross') - constantsfile = tempfile.NamedTemporaryFile(mode='w') + constantsfile = tempfile.NamedTemporaryFile(mode='w', encoding='utf-8') constantsfile.write(textwrap.dedent('''\ [constants] py_ext = '.py' ''')) constantsfile.flush() - nativefile = tempfile.NamedTemporaryFile(mode='w') + nativefile = tempfile.NamedTemporaryFile(mode='w', encoding='utf-8') nativefile.write(textwrap.dedent('''\ [binaries] c = ['{}' + py_ext] @@ -1532,7 +1532,7 @@ class LinuxlikeTests(BasePlatformTests): nativefile.flush() self.meson_native_files = [constantsfile.name, nativefile.name] - crossfile = tempfile.NamedTemporaryFile(mode='w') + crossfile = tempfile.NamedTemporaryFile(mode='w', encoding='utf-8') crossfile.write(textwrap.dedent('''\ [binaries] c = ['{}' + py_ext] @@ -1549,7 +1549,7 @@ class LinuxlikeTests(BasePlatformTests): 'CC_FOR_BUILD': '"' + os.path.join(testdir, 'build_wrapper.py') + '"', 'CC': '"' + os.path.join(testdir, 'host_wrapper.py') + '"', } - crossfile = tempfile.NamedTemporaryFile(mode='w') + crossfile = tempfile.NamedTemporaryFile(mode='w', encoding='utf-8') crossfile.write('') crossfile.flush() self.meson_cross_files = [crossfile.name] @@ -1845,3 +1845,5 @@ class LinuxlikeTests(BasePlatformTests): self.assertIn('build t4-e1: c_LINKER t4-e1.p/main.c.o | libt4-s2.so.p/libt4-s2.so.symbols libt4-s3.a\n', content) self.assertIn('build t5-e1: c_LINKER t5-e1.p/main.c.o | libt5-s1.so.p/libt5-s1.so.symbols libt5-s3.a\n', content) self.assertIn('build t6-e1: c_LINKER t6-e1.p/main.c.o | libt6-s2.a libt6-s3.a\n', content) + self.assertIn('build t7-e1: c_LINKER t7-e1.p/main.c.o | libt7-s3.a\n', content) + self.assertIn('build t8-e1: c_LINKER t8-e1.p/main.c.o | libt8-s1.a libt8-s2.a libt8-s3.a\n', content) diff --git a/unittests/machinefiletests.py b/unittests/machinefiletests.py index 3807c88..9e71810 100644 --- a/unittests/machinefiletests.py +++ b/unittests/machinefiletests.py @@ -754,7 +754,7 @@ class CrossFileTests(BasePlatformTests): with tempfile.TemporaryDirectory() as d: dir_ = os.path.join(d, 'meson', 'cross') os.makedirs(dir_) - with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False, encoding='utf-8') as f: f.write(cross_content) name = 
os.path.basename(f.name) @@ -770,7 +770,7 @@ class CrossFileTests(BasePlatformTests): with tempfile.TemporaryDirectory() as d: dir_ = os.path.join(d, '.local', 'share', 'meson', 'cross') os.makedirs(dir_) - with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False) as f: + with tempfile.NamedTemporaryFile('w', dir=dir_, delete=False, encoding='utf-8') as f: f.write(cross_content) name = os.path.basename(f.name) diff --git a/unittests/platformagnostictests.py b/unittests/platformagnostictests.py index a88a514..fd01293 100644 --- a/unittests/platformagnostictests.py +++ b/unittests/platformagnostictests.py @@ -246,10 +246,10 @@ class PlatformAgnosticTests(BasePlatformTests): self.init(testdir) self._run(self.meson_command + ['--internal', 'regenerate', '--profile-self', testdir, self.builddir]) - with open(os.path.join(self.builddir, 'meson-logs', 'profile-startup-modules.json')) as f: + with open(os.path.join(self.builddir, 'meson-logs', 'profile-startup-modules.json'), encoding='utf-8') as f: data = json.load(f)['meson'] - with open(os.path.join(testdir, 'expected_mods.json')) as f: + with open(os.path.join(testdir, 'expected_mods.json'), encoding='utf-8') as f: expected = json.load(f)['meson']['modules'] self.assertEqual(data['modules'], expected) |
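A few of the changes above lend themselves to short standalone sketches. The mtest.py hunk builds a regex over the XML-unencodable code-point ranges and substitutes each match with its printable `repr()`. The sketch below reproduces that idea with a reduced, illustrative subset of the ranges; it is not the full table used in the patch.

```python
# Sketch of replacing XML-unencodable characters with their printable
# representation, as in mtest.decode() above. Ranges are a reduced subset.
import re

UNENCODABLE_RANGES = [(0x00, 0x08), (0x0B, 0x0C), (0x0E, 0x1F), (0x7F, 0x84)]
UNENCODABLE_RE = re.compile(
    '([' + ''.join(fr'{chr(lo)}-{chr(hi)}' for lo, hi in UNENCODABLE_RANGES) + '])')


def replace_unencodable_xml_chars(text: str) -> str:
    # repr('\x00') == "'\\x00'"; the [1:-1] slice strips the surrounding quotes.
    return UNENCODABLE_RE.sub(lambda m: repr(m.group())[1:-1], text)


print(replace_unencodable_xml_chars('ok\x00\x1fstill ok'))  # control chars become \x00, \x1f
```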
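The compilers/cpp.py hunk stops hardcoding the C++ standard library per OS and instead probes a platform-dependent search order, falling back from the most likely library to the other one. A hedged sketch of that control flow, with `find_library` stubbed out as a plain callable (in Meson it actually invokes the compiler), looks like this:

```python
# Sketch of the stdlib search-order logic from _StdCPPLibMixin above.
# find_library() is a stub here; real detection goes through the compiler.
import typing as T


def detect_cpp_stdlib(system: str,
                      find_library: T.Callable[[str], T.Optional[str]]) -> str:
    if system in {'android', 'darwin', 'dragonfly', 'freebsd', 'netbsd', 'openbsd'}:
        search_order = ['c++', 'stdc++']   # libc++ is the likely default
    else:
        search_order = ['stdc++', 'c++']   # e.g. Linux with GCC's libstdc++
    for lib in search_order:
        if find_library(lib) is not None:
            return f'-l{lib}'
    raise RuntimeError('Could not detect either libc++ or libstdc++.')


# Pretend only libstdc++ is installed.
print(detect_cpp_stdlib('linux', lambda lib: '/usr/lib' if lib == 'stdc++' else None))
```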
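The universal.py hunk replaces chained `str.replace()` calls in `darwin_get_object_archs()` with a per-token mapping from lipo architecture names to Meson CPU names. The sketch below isolates just that mapping step; the input string is made-up example lipo output, not real data.

```python
# Sketch of the lipo-arch to meson-CPU mapping used above.
import typing as T


def map_lipo_archs(lipo_output: str) -> T.List[str]:
    map_arch = {
        'i386': 'x86',
        'arm64': 'aarch64',
        'arm64e': 'aarch64',
        'ppc7400': 'ppc',
        'ppc970': 'ppc',
    }
    meson_archs = [map_arch.get(a, a) for a in lipo_output.split()]
    # armv7 variants also get the generic 'arm' name appended.
    if 'armv7' in lipo_output:
        meson_archs.append('arm')
    return meson_archs


print(map_lipo_archs('x86_64 arm64e'))  # ['x86_64', 'aarch64']
```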
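Finally, the compilers/detect.py hunk starts looking for the Cython banner on stdout as well as stderr, because Cython 3.0 prints its `-V` output to stdout while older releases used stderr. A small hedged sketch of checking both streams follows; the inline regex is a simplified stand-in for Meson's `search_version()`, not its real implementation.

```python
# Sketch of picking the Cython version from stdout (3.0+) or stderr (older),
# as in detect_cython_compiler() above.
import re
import typing as T


def find_cython_version(out: str, err: str) -> T.Optional[str]:
    def search_version(text: str) -> T.Optional[str]:
        m = re.search(r'(\d+\.\d+(\.\d+)?)', text)
        return m.group(1) if m else None

    if 'Cython' in out:    # Cython 3.0 prints the banner to stdout
        return search_version(out)
    if 'Cython' in err:    # older releases print it to stderr
        return search_version(err)
    return None


print(find_cython_version('Cython version 3.0.0', ''))    # 3.0.0
print(find_cython_version('', 'Cython version 0.29.36'))  # 0.29.36
```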