Diffstat (limited to 'mesonbuild')
-rw-r--r--  mesonbuild/ast/introspection.py            |   2
-rw-r--r--  mesonbuild/backend/backends.py             | 128
-rw-r--r--  mesonbuild/backend/ninjabackend.py         | 172
-rw-r--r--  mesonbuild/backend/xcodebackend.py         |  55
-rw-r--r--  mesonbuild/build.py                        | 158
-rw-r--r--  mesonbuild/cargo/interpreter.py            | 501
-rw-r--r--  mesonbuild/cargo/manifest.py               | 645
-rw-r--r--  mesonbuild/cargo/raw.py                    | 192
-rw-r--r--  mesonbuild/cargo/toml.py                   |  49
-rw-r--r--  mesonbuild/cargo/version.py                |  14
-rw-r--r--  mesonbuild/cmake/interpreter.py            |   2
-rw-r--r--  mesonbuild/cmake/tracetargets.py           |   1
-rw-r--r--  mesonbuild/compilers/__init__.py           |   2
-rw-r--r--  mesonbuild/compilers/c.py                  |   2
-rw-r--r--  mesonbuild/compilers/compilers.py          |  64
-rw-r--r--  mesonbuild/compilers/cuda.py               |  15
-rw-r--r--  mesonbuild/compilers/d.py                  |  11
-rw-r--r--  mesonbuild/compilers/detect.py             |  60
-rw-r--r--  mesonbuild/compilers/mixins/clike.py       |  15
-rw-r--r--  mesonbuild/compilers/mixins/islinker.py    |   5
-rw-r--r--  mesonbuild/compilers/rust.py               |  43
-rw-r--r--  mesonbuild/compilers/swift.py              |  22
-rw-r--r--  mesonbuild/coredata.py                     |  89
-rw-r--r--  mesonbuild/dependencies/detect.py          |  10
-rw-r--r--  mesonbuild/dependencies/misc.py            |  16
-rw-r--r--  mesonbuild/dependencies/qt.py              |   5
-rw-r--r--  mesonbuild/dependencies/scalapack.py       |  11
-rw-r--r--  mesonbuild/environment.py                  |  49
-rw-r--r--  mesonbuild/interpreter/interpreter.py      |  13
-rw-r--r--  mesonbuild/interpreter/kwargs.py           |   7
-rw-r--r--  mesonbuild/interpreter/primitives/array.py |  14
-rw-r--r--  mesonbuild/interpreter/type_checking.py    |   7
-rw-r--r--  mesonbuild/linkers/detect.py               |   6
-rw-r--r--  mesonbuild/linkers/linkers.py              | 112
-rw-r--r--  mesonbuild/mconf.py                        |  18
-rw-r--r--  mesonbuild/mformat.py                      |  21
-rw-r--r--  mesonbuild/mintro.py                       |  18
-rw-r--r--  mesonbuild/modules/__init__.py             |  18
-rw-r--r--  mesonbuild/modules/fs.py                   | 121
-rw-r--r--  mesonbuild/modules/gnome.py                |  85
-rw-r--r--  mesonbuild/modules/pkgconfig.py            |   8
-rw-r--r--  mesonbuild/modules/python.py               |   3
-rw-r--r--  mesonbuild/modules/rust.py                 |   4
-rw-r--r--  mesonbuild/msetup.py                       |  22
-rwxr-xr-x  mesonbuild/msubprojects.py                 |  17
-rw-r--r--  mesonbuild/options.py                      | 219
-rw-r--r--  mesonbuild/scripts/clangtidy.py            |   4
-rw-r--r--  mesonbuild/scripts/run_tool.py             |  20
-rw-r--r--  mesonbuild/utils/universal.py              |   2
-rw-r--r--  mesonbuild/wrap/wrap.py                    |  78
50 files changed, 1837 insertions(+), 1318 deletions(-)
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 147436d..decce4b 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -284,7 +284,7 @@ class IntrospectionInterpreter(AstInterpreter):
return new_target
def build_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
- default_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
+ default_library = self.coredata.optstore.get_value_for(OptionKey('default_library', subproject=self.subproject))
if default_library == 'shared':
return self.build_target(node, args, kwargs, SharedLibrary)
elif default_library == 'static':
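The hunk above scopes the default_library lookup to the current subproject, so
a subproject-level override wins over the top-level value. A minimal sketch of
the two key forms, assuming the OptionKey constructor used in the hunk (the
subproject name 'mysub' is hypothetical):

    from mesonbuild.options import OptionKey

    # Unscoped: resolves against the top-level project's default_library.
    top_key = OptionKey('default_library')
    # Scoped: resolves a subproject's own override, e.g. one set on the
    # command line with -Dmysub:default_library=static.
    sub_key = OptionKey('default_library', subproject='mysub')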
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index ed57a4c..e3d6c60 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -24,12 +24,12 @@ from .. import dependencies
from .. import programs
from .. import mesonlib
from .. import mlog
-from ..compilers import LANGUAGES_USING_LDFLAGS, detect, lang_suffixes
+from ..compilers import detect, lang_suffixes
from ..mesonlib import (
File, MachineChoice, MesonException, MesonBugException, OrderedSet,
ExecutableSerialisation, EnvironmentException,
classify_unity_sources, get_compiler_for_source,
- is_parent_path, get_rsp_threshold,
+ get_rsp_threshold,
)
from ..options import OptionKey
@@ -61,7 +61,7 @@ if T.TYPE_CHECKING:
# Languages that can mix with C or C++ but don't support unity builds yet
# because the syntax we use for unity builds is specific to C/++/ObjC/++.
# Assembly files cannot be unitified and neither can LLVM IR files
-LANGS_CANT_UNITY = ('d', 'fortran', 'vala')
+LANGS_CANT_UNITY = ('d', 'fortran', 'vala', 'rust')
@dataclass(eq=False)
class RegenInfo:
@@ -150,7 +150,7 @@ class TargetInstallData:
def __post_init__(self, outdir_name: T.Optional[str]) -> None:
if outdir_name is None:
outdir_name = os.path.join('{prefix}', self.outdir)
- self.out_name = os.path.join(outdir_name, os.path.basename(self.fname))
+ self.out_name = Path(outdir_name, os.path.basename(self.fname)).as_posix()
@dataclass(eq=False)
class InstallEmptyDir:
@@ -307,16 +307,16 @@ class Backend:
else:
assert isinstance(t, build.BuildTarget), t
filename = t.get_filename()
- return os.path.join(self.get_target_dir(t), filename)
+ return Path(self.get_target_dir(t), filename).as_posix()
def get_target_filename_abs(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str:
- return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))
+ return Path(self.environment.get_build_dir(), self.get_target_filename(target)).as_posix()
def get_target_debug_filename(self, target: build.BuildTarget) -> T.Optional[str]:
assert isinstance(target, build.BuildTarget), target
if target.get_debug_filename():
debug_filename = target.get_debug_filename()
- return os.path.join(self.get_target_dir(target), debug_filename)
+ return Path(self.get_target_dir(target), debug_filename).as_posix()
else:
return None
@@ -324,7 +324,7 @@ class Backend:
assert isinstance(target, build.BuildTarget), target
if not target.get_debug_filename():
return None
- return os.path.join(self.environment.get_build_dir(), self.get_target_debug_filename(target))
+ return Path(self.environment.get_build_dir(), self.get_target_debug_filename(target)).as_posix()
def get_source_dir_include_args(self, target: build.BuildTarget, compiler: 'Compiler', *, absolute_path: bool = False) -> T.List[str]:
curdir = target.get_subdir()
@@ -733,118 +733,6 @@ class Backend:
return l, stdlib_args
@staticmethod
- def _libdir_is_system(libdir: str, compilers: T.Mapping[str, 'Compiler'], env: 'Environment') -> bool:
- libdir = os.path.normpath(libdir)
- for cc in compilers.values():
- if libdir in cc.get_library_dirs(env):
- return True
- return False
-
- def get_external_rpath_dirs(self, target: build.BuildTarget) -> T.Set[str]:
- args: T.List[str] = []
- for lang in LANGUAGES_USING_LDFLAGS:
- try:
- e = self.environment.coredata.get_external_link_args(target.for_machine, lang)
- if isinstance(e, str):
- args.append(e)
- else:
- args.extend(e)
- except Exception:
- pass
- return self.get_rpath_dirs_from_link_args(args)
-
- @staticmethod
- def get_rpath_dirs_from_link_args(args: T.List[str]) -> T.Set[str]:
- dirs: T.Set[str] = set()
- # Match rpath formats:
- # -Wl,-rpath=
- # -Wl,-rpath,
- rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
- # Match solaris style compat runpath formats:
- # -Wl,-R
- # -Wl,-R,
- runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
- # Match symbols formats:
- # -Wl,--just-symbols=
- # -Wl,--just-symbols,
- symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
- for arg in args:
- rpath_match = rpath_regex.match(arg)
- if rpath_match:
- for dir in rpath_match.group(1).split(':'):
- dirs.add(dir)
- runpath_match = runpath_regex.match(arg)
- if runpath_match:
- for dir in runpath_match.group(1).split(':'):
- # The symbols arg is an rpath if the path is a directory
- if Path(dir).is_dir():
- dirs.add(dir)
- symbols_match = symbols_regex.match(arg)
- if symbols_match:
- for dir in symbols_match.group(1).split(':'):
- # Prevent usage of --just-symbols to specify rpath
- if Path(dir).is_dir():
- raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.')
- return dirs
-
- @lru_cache(maxsize=None)
- def rpaths_for_non_system_absolute_shared_libraries(self, target: build.BuildTarget, exclude_system: bool = True) -> 'ImmutableListProtocol[str]':
- paths: OrderedSet[str] = OrderedSet()
- srcdir = self.environment.get_source_dir()
-
- for dep in target.external_deps:
- if dep.type_name not in {'library', 'pkgconfig', 'cmake'}:
- continue
- for libpath in dep.link_args:
- # For all link args that are absolute paths to a library file, add RPATH args
- if not os.path.isabs(libpath):
- continue
- libdir = os.path.dirname(libpath)
- if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
- # No point in adding system paths.
- continue
- # Don't remove rpaths specified in LDFLAGS.
- if libdir in self.get_external_rpath_dirs(target):
- continue
- # Windows doesn't support rpaths, but we use this function to
- # emulate rpaths by setting PATH
- # .dll is there for mingw gcc
- # .so's may be extended with version information, e.g. libxyz.so.1.2.3
- if not (
- os.path.splitext(libpath)[1] in {'.dll', '.lib', '.so', '.dylib'}
- or re.match(r'.+\.so(\.|$)', os.path.basename(libpath))
- ):
- continue
-
- if is_parent_path(srcdir, libdir):
- rel_to_src = libdir[len(srcdir) + 1:]
- assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute'
- paths.add(os.path.join(self.build_to_src, rel_to_src))
- else:
- paths.add(libdir)
- # Don't remove rpaths specified by the dependency
- paths.difference_update(self.get_rpath_dirs_from_link_args(dep.link_args))
- for i in chain(target.link_targets, target.link_whole_targets):
- if isinstance(i, build.BuildTarget):
- paths.update(self.rpaths_for_non_system_absolute_shared_libraries(i, exclude_system))
- return list(paths)
-
- # This may take other types
- def determine_rpath_dirs(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]
- ) -> T.Tuple[str, ...]:
- result: OrderedSet[str]
- if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
- # Need a copy here
- result = OrderedSet(target.get_link_dep_subdirs())
- else:
- result = OrderedSet()
- result.add('meson-out')
- if isinstance(target, build.BuildTarget):
- result.update(self.rpaths_for_non_system_absolute_shared_libraries(target))
- target.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result])
- return tuple(result)
-
- @staticmethod
@lru_cache(maxsize=None)
def canonicalize_filename(fname: str) -> str:
if os.path.altsep is not None:
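The helpers removed here (they reappear as BuildTarget methods in build.py
below) parse rpath directories out of raw link arguments. A small
self-contained sketch of the -Wl,-rpath matching, using the same regex as the
code above with made-up input paths:

    import re

    rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')

    dirs = set()
    # Both the '=' and ',' spellings match, and one argument may carry
    # several colon-separated directories.
    for arg in ['-Wl,-rpath=/opt/lib:/usr/local/lib', '-Wl,-rpath,/opt/lib']:
        m = rpath_regex.match(arg)
        if m:
            dirs.update(m.group(1).split(':'))
    print(sorted(dirs))  # ['/opt/lib', '/usr/local/lib']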
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 73f2db7..595a27a 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -891,14 +891,14 @@ class NinjaBackend(backends.Backend):
self.generate_shlib_aliases(target, self.get_target_dir(target))
+ # Generate rules for GeneratedLists
+ self.generate_generator_list_rules(target)
+
# If target uses a language that cannot link to C objects,
# just generate for that language and return.
if isinstance(target, build.Jar):
self.generate_jar_target(target)
return
- if target.uses_rust():
- self.generate_rust_target(target)
- return
if 'cs' in target.compilers:
self.generate_cs_target(target)
return
@@ -935,8 +935,6 @@ class NinjaBackend(backends.Backend):
generated_sources = self.get_target_generated_sources(target)
transpiled_sources = []
self.scan_fortran_module_outputs(target)
- # Generate rules for GeneratedLists
- self.generate_generator_list_rules(target)
# Generate rules for building the remaining source files in this target
outname = self.get_target_filename(target)
@@ -992,6 +990,8 @@ class NinjaBackend(backends.Backend):
# this target. We create the Ninja build file elements for this here
# because we need `header_deps` to be fully generated in the above loop.
for src in generated_source_files:
+ if not self.environment.is_separate_compile(src):
+ continue
if self.environment.is_llvm_ir(src):
o, s = self.generate_llvm_ir_compile(target, src)
else:
@@ -1050,21 +1050,24 @@ class NinjaBackend(backends.Backend):
# Generate compile targets for all the preexisting sources for this target
for src in target_sources.values():
- if not self.environment.is_header(src) or is_compile_target:
- if self.environment.is_llvm_ir(src):
- o, s = self.generate_llvm_ir_compile(target, src)
- obj_list.append(o)
- elif is_unity and self.get_target_source_can_unity(target, src):
- abs_src = os.path.join(self.environment.get_build_dir(),
- src.rel_to_builddir(self.build_to_src))
- unity_src.append(abs_src)
- else:
- o, s = self.generate_single_compile(target, src, False, [],
- header_deps + d_generated_deps + fortran_order_deps,
- fortran_inc_args)
- obj_list.append(o)
- compiled_sources.append(s)
- source2object[s] = o
+ if not self.environment.is_separate_compile(src):
+ continue
+ if self.environment.is_header(src) and not is_compile_target:
+ continue
+ if self.environment.is_llvm_ir(src):
+ o, s = self.generate_llvm_ir_compile(target, src)
+ obj_list.append(o)
+ elif is_unity and self.get_target_source_can_unity(target, src):
+ abs_src = os.path.join(self.environment.get_build_dir(),
+ src.rel_to_builddir(self.build_to_src))
+ unity_src.append(abs_src)
+ else:
+ o, s = self.generate_single_compile(target, src, False, [],
+ header_deps + d_generated_deps + fortran_order_deps,
+ fortran_inc_args)
+ obj_list.append(o)
+ compiled_sources.append(s)
+ source2object[s] = o
if is_unity:
for src in self.generate_unity_files(target, unity_src):
@@ -1084,8 +1087,14 @@ class NinjaBackend(backends.Backend):
final_obj_list = self.generate_prelink(target, obj_list)
else:
final_obj_list = obj_list
- elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
+
self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps)
+
+ if target.uses_rust():
+ self.generate_rust_target(target, outname, final_obj_list, fortran_order_deps)
+ return
+
+ elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
self.add_build(elem)
#In AIX, we archive shared libraries. If the instance is a shared library, we add a command to archive the shared library
#object and create the build element.
@@ -1556,7 +1565,6 @@ class NinjaBackend(backends.Backend):
elem.add_item('ARGS', commands)
self.add_build(elem)
- self.generate_generator_list_rules(target)
self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs)
def determine_java_compile_args(self, target, compiler) -> T.List[str]:
@@ -1769,7 +1777,7 @@ class NinjaBackend(backends.Backend):
valac_outputs.append(girname)
shared_target = target.get('shared')
if isinstance(shared_target, build.SharedLibrary):
- args += ['--shared-library', self.get_target_filename_for_linking(shared_target)]
+ args += ['--shared-library', shared_target.get_filename()]
# Install GIR to default location if requested by user
if len(target.install_dir) > 3 and target.install_dir[3] is True:
target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
@@ -1972,6 +1980,7 @@ class NinjaBackend(backends.Backend):
for s in f.get_outputs()])
self.all_structured_sources.update(_ods)
orderdeps.extend(_ods)
+ return orderdeps, main_rust_file
for i in target.get_sources():
if main_rust_file is None:
@@ -2010,7 +2019,8 @@ class NinjaBackend(backends.Backend):
args += target.get_extra_args('rust')
return args
- def get_rust_compiler_deps_and_args(self, target: build.BuildTarget, rustc: Compiler) -> T.Tuple[T.List[str], T.List[str], T.List[RustDep], T.List[str]]:
+ def get_rust_compiler_deps_and_args(self, target: build.BuildTarget, rustc: Compiler,
+ obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[RustDep], T.List[str]]:
deps: T.List[str] = []
project_deps: T.List[RustDep] = []
args: T.List[str] = []
@@ -2042,11 +2052,9 @@ class NinjaBackend(backends.Backend):
type_ += ':' + ','.join(modifiers)
args.append(f'-l{type_}={libname}')
- objs, od = self.flatten_object_list(target)
- for o in objs:
+ for o in obj_list:
args.append(f'-Clink-arg={o}')
deps.append(o)
- fortran_order_deps = self.get_fortran_order_deps(od)
linkdirs = mesonlib.OrderedSet()
external_deps = target.external_deps.copy()
@@ -2096,20 +2104,24 @@ class NinjaBackend(backends.Backend):
for a in e.get_link_args():
if a in rustc.native_static_libs:
# Exclude link args that rustc already add by default
- pass
+ continue
elif a.startswith('-L'):
args.append(a)
- elif a.endswith(('.dll', '.so', '.dylib', '.a', '.lib')) and isinstance(target, build.StaticLibrary):
+ continue
+ elif a.endswith(('.dll', '.so', '.dylib', '.a', '.lib')):
dir_, lib = os.path.split(a)
linkdirs.add(dir_)
- if not verbatim:
- lib, ext = os.path.splitext(lib)
- if lib.startswith('lib'):
- lib = lib[3:]
- static = a.endswith(('.a', '.lib'))
- _link_library(lib, static)
- else:
- args.append(f'-Clink-arg={a}')
+
+ if isinstance(target, build.StaticLibrary):
+ if not verbatim:
+ lib, ext = os.path.splitext(lib)
+ if lib.startswith('lib'):
+ lib = lib[3:]
+ static = a.endswith(('.a', '.lib'))
+ _link_library(lib, static)
+ continue
+
+ args.append(f'-Clink-arg={a}')
for d in linkdirs:
d = d or '.'
@@ -2124,40 +2136,44 @@ class NinjaBackend(backends.Backend):
and dep.rust_crate_type == 'dylib'
for dep in target_deps)
- if target.rust_crate_type in {'dylib', 'proc-macro'} or has_rust_shared_deps:
- # add prefer-dynamic if any of the Rust libraries we link
+ if target.rust_crate_type in {'dylib', 'proc-macro'}:
+ # also add prefer-dynamic if any of the Rust libraries we link
# against are dynamic or this is a dynamic library itself,
# otherwise we'll end up with multiple implementations of libstd.
+ has_rust_shared_deps = True
+ elif self.get_target_option(target, 'rust_dynamic_std'):
+ if target.rust_crate_type == 'staticlib':
+ # staticlib crates always include a copy of the Rust libstd,
+ # therefore it is not possible to also link it dynamically.
+ # The options to avoid this (-Z staticlib-allow-rdylib-deps and
+ # -Z staticlib-prefer-dynamic) are not yet stable; alternatively,
+ # one could use "--emit obj" (implemented in the pull request at
+ # https://github.com/mesonbuild/meson/pull/11213) or "--emit rlib"
+ # (officially not recommended for linking with C programs).
+ raise MesonException('rust_dynamic_std does not support staticlib crates yet')
+ # want libstd as a shared dep
+ has_rust_shared_deps = True
+
+ if has_rust_shared_deps:
args += ['-C', 'prefer-dynamic']
-
- if isinstance(target, build.SharedLibrary) or has_shared_deps:
+ if has_shared_deps or has_rust_shared_deps:
args += self.get_build_rpath_args(target, rustc)
- return deps, fortran_order_deps, project_deps, args
-
- def generate_rust_target(self, target: build.BuildTarget) -> None:
- rustc = T.cast('RustCompiler', target.compilers['rust'])
- self.generate_generator_list_rules(target)
-
- for i in target.get_sources():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
- for g in target.get_generated_sources():
- for i in g.get_outputs():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
+ return deps, project_deps, args
+ def generate_rust_target(self, target: build.BuildTarget, target_name: str, obj_list: T.List[str],
+ fortran_order_deps: T.List[str]) -> None:
orderdeps, main_rust_file = self.generate_rust_sources(target)
- target_name = self.get_target_filename(target)
if main_rust_file is None:
raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. Please report')
+ rustc = T.cast('RustCompiler', target.compilers['rust'])
args = rustc.compiler_args()
depfile = os.path.join(self.get_target_private_dir(target), target.name + '.d')
args += self.get_rust_compiler_args(target, rustc, target.rust_crate_type, depfile)
- deps, fortran_order_deps, project_deps, deps_args = self.get_rust_compiler_deps_and_args(target, rustc)
+ deps, project_deps, deps_args = self.get_rust_compiler_deps_and_args(target, rustc, obj_list)
args += deps_args
proc_macro_dylib_path = None
@@ -2192,7 +2208,10 @@ class NinjaBackend(backends.Backend):
rustdoc = rustc.get_rustdoc(self.environment)
args = rustdoc.get_exe_args()
args += self.get_rust_compiler_args(target.doctests.target, rustdoc, target.rust_crate_type)
- _, _, _, deps_args = self.get_rust_compiler_deps_and_args(target.doctests.target, rustdoc)
+ # There can be no non-Rust objects: the doctests are gathered from Rust
+ # sources and the tests are linked with the target (which is where the
+ # obj_list was linked into)
+ _, _, deps_args = self.get_rust_compiler_deps_and_args(target.doctests.target, rustdoc, [])
args += deps_args
target.doctests.cmd_args = args.to_native() + [main_rust_file] + target.doctests.cmd_args
@@ -2214,10 +2233,7 @@ class NinjaBackend(backends.Backend):
def swift_module_file_name(self, target):
return os.path.join(self.get_target_private_dir(target),
- self.target_swift_modulename(target) + '.swiftmodule')
-
- def target_swift_modulename(self, target):
- return target.name
+ target.swift_module_name + '.swiftmodule')
def determine_swift_dep_modules(self, target):
result = []
@@ -2244,12 +2260,26 @@ class NinjaBackend(backends.Backend):
return srcs, others
def generate_swift_target(self, target) -> None:
- module_name = self.target_swift_modulename(target)
+ module_name = target.swift_module_name
swiftc = target.compilers['swift']
abssrc = []
relsrc = []
abs_headers = []
header_imports = []
+
+ if not target.uses_swift_cpp_interop():
+ cpp_targets = [t for t in target.link_targets if t.uses_swift_cpp_interop()]
+ if cpp_targets != []:
+ target_word = 'targets' if len(cpp_targets) > 1 else 'target'
+ first = ', '.join(repr(t.name) for t in cpp_targets[:-1])
+ and_word = ' and ' if len(cpp_targets) > 1 else ''
+ last = repr(cpp_targets[-1].name)
+ enable_word = 'enable' if len(cpp_targets) > 1 else 'enables'
+ raise MesonException('Swift target {0} links against {1} {2}{3}{4} which {5} C++ interoperability. '
+ 'This requires {0} to also have it enabled. '
+ 'Add "swift_interoperability_mode: \'cpp\'" to the definition of {0}.'
+ .format(repr(target.name), target_word, first, and_word, last, enable_word))
+
for i in target.get_sources():
if swiftc.can_compile(i):
rels = i.rel_to_builddir(self.build_to_src)
@@ -2266,10 +2296,16 @@ class NinjaBackend(backends.Backend):
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
compile_args = self.generate_basic_compiler_args(target, swiftc)
compile_args += swiftc.get_module_args(module_name)
- if mesonlib.version_compare(swiftc.version, '>=5.9'):
- compile_args += swiftc.get_cxx_interoperability_args(target.compilers)
+ compile_args += swiftc.get_cxx_interoperability_args(target)
compile_args += self.build.get_project_args(swiftc, target.subproject, target.for_machine)
compile_args += self.build.get_global_args(swiftc, target.for_machine)
+ if isinstance(target, (build.StaticLibrary, build.SharedLibrary)):
+ # swiftc treats modules with a single source file, and the main.swift file in multi-source file modules
+ # as top-level code. This is undesirable in library targets since it emits a main function. Add the
+ # -parse-as-library option as necessary to prevent emitting the main function while keeping files explicitly
+ # named main.swift treated as the entrypoint of the module in case this is desired.
+ if len(abssrc) == 1 and os.path.basename(abssrc[0]) != 'main.swift':
+ compile_args += swiftc.get_library_args()
for i in reversed(target.get_include_dirs()):
basedir = i.get_curdir()
for d in i.get_incdirs():
@@ -3353,7 +3389,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
def get_target_shsym_filename(self, target):
# Always name the .symbols file after the primary build output because it always exists
targetdir = self.get_target_private_dir(target)
- return os.path.join(targetdir, target.get_filename() + '.symbols')
+ return Path(targetdir, target.get_filename() + '.symbols').as_posix()
def generate_shsym(self, target) -> None:
target_file = self.get_target_filename(target)
@@ -3372,7 +3408,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
self.add_build(elem)
def get_import_filename(self, target) -> str:
- return os.path.join(self.get_target_dir(target), target.import_filename)
+ return Path(self.get_target_dir(target), target.import_filename).as_posix()
def get_target_type_link_args(self, target, linker):
commands = []
@@ -3560,9 +3596,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
linker.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
- self.determine_rpath_dirs(target),
- target.build_rpath,
- target.install_rpath))
+ target))
return rpath_args
def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.Union['Compiler', 'StaticLinker'], extra_args=None, stdlib_args=None):
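One detail worth calling out from the Swift changes above: swiftc treats a
single-source module as top-level code, which would emit a main function into
library targets. A sketch of just the guard applied in the hunk (the function
name here is hypothetical):

    import os

    def needs_parse_as_library(abssrc: list) -> bool:
        # A library with exactly one Swift source that is not main.swift
        # gets -parse-as-library to avoid an unwanted main entry point;
        # an explicit main.swift keeps its entrypoint meaning.
        return len(abssrc) == 1 and os.path.basename(abssrc[0]) != 'main.swift'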
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 6ad982d..e7bd487 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -176,6 +176,15 @@ class PbxDict:
self.keys.add(key)
self.items.append(item)
+ def get_item(self, key: str) -> PbxDictItem:
+ assert key in self.keys
+ for item in self.items:
+ if not isinstance(item, PbxDictItem):
+ continue
+ if item.key == key:
+ return item
+ return None
+
def has_item(self, key: str) -> bool:
return key in self.keys
@@ -396,10 +405,23 @@ class XCodeBackend(backends.Backend):
def generate_filemap(self) -> None:
self.filemap = {} # Key is source file relative to src root.
+ self.foldermap = {}
self.target_filemap = {}
for name, t in self.build_targets.items():
for s in t.sources:
if isinstance(s, mesonlib.File):
+ if '/' in s.fname:
+ # From the top level down, add the folders containing the source file.
+ folder = os.path.split(os.path.dirname(s.fname))
+ while folder:
+ fpath = os.path.join(*folder)
+ # Multiple targets might use the same folders, so store their targets with them.
+ # Otherwise, folders and their source files will appear in the wrong places in Xcode.
+ if (fpath, t) not in self.foldermap:
+ self.foldermap[(fpath, t)] = self.gen_id()
+ else:
+ break
+ folder = folder[:-1]
s = os.path.join(s.subdir, s.fname)
self.filemap[s] = self.gen_id()
for o in t.objects:
@@ -1052,6 +1074,24 @@ class XCodeBackend(backends.Backend):
main_children.add_item(frameworks_id, 'Frameworks')
main_dict.add_item('sourceTree', '<group>')
+ # Define each folder as a group in Xcode. That way, it can build the file tree correctly.
+ # This must be done before the project tree group is generated, as source files are added during that phase.
+ for (path, target), id in self.foldermap.items():
+ folder_dict = PbxDict()
+ objects_dict.add_item(id, folder_dict, path)
+ folder_dict.add_item('isa', 'PBXGroup')
+ folder_children = PbxArray()
+ folder_dict.add_item('children', folder_children)
+ folder_dict.add_item('name', '"{}"'.format(path.rsplit('/', 1)[-1]))
+ folder_dict.add_item('path', f'"{path}"')
+ folder_dict.add_item('sourceTree', 'SOURCE_ROOT')
+
+ # Add any detected subdirectories (not declared as subdir()) here, but only one level higher.
+ # Example: In "root", add "root/sub", but not "root/sub/subtwo".
+ for path_dep, target_dep in self.foldermap:
+ if path_dep.startswith(path) and path_dep.split('/', 1)[0] == path.split('/', 1)[0] and path_dep != path and path_dep.count('/') == path.count('/') + 1 and target == target_dep:
+ folder_children.add_item(self.foldermap[(path_dep, target)], path_dep)
+
self.add_projecttree(objects_dict, projecttree_id)
resource_dict = PbxDict()
@@ -1121,6 +1161,7 @@ class XCodeBackend(backends.Backend):
tid = t.get_id()
group_id = self.gen_id()
target_dict = PbxDict()
+ folder_ids = set()
objects_dict.add_item(group_id, target_dict, tid)
target_dict.add_item('isa', 'PBXGroup')
target_children = PbxArray()
@@ -1130,6 +1171,18 @@ class XCodeBackend(backends.Backend):
source_files_dict = PbxDict()
for s in t.sources:
if isinstance(s, mesonlib.File):
+ # If the file is in a folder, add it to the group representing that folder.
+ if '/' in s.fname:
+ folder = '/'.join(s.fname.split('/')[:-1])
+ folder_dict = objects_dict.get_item(self.foldermap[(folder, t)]).value.get_item('children').value
+ temp = os.path.join(s.subdir, s.fname)
+ folder_dict.add_item(self.fileref_ids[(tid, temp)], temp)
+ if self.foldermap[(folder, t)] in folder_ids:
+ continue
+ if len(folder.split('/')) == 1:
+ target_children.add_item(self.foldermap[(folder, t)], folder)
+ folder_ids.add(self.foldermap[(folder, t)])
+ continue
s = os.path.join(s.subdir, s.fname)
elif isinstance(s, str):
s = os.path.join(t.subdir, s)
@@ -1778,7 +1831,7 @@ class XCodeBackend(backends.Backend):
settings_dict.add_item('SECTORDER_FLAGS', '')
if is_swift and bridging_header:
settings_dict.add_item('SWIFT_OBJC_BRIDGING_HEADER', bridging_header)
- if self.objversion >= 60 and 'cpp' in langs:
+ if self.objversion >= 60 and target.uses_swift_cpp_interop():
settings_dict.add_item('SWIFT_OBJC_INTEROP_MODE', 'objcxx')
settings_dict.add_item('BUILD_DIR', symroot)
settings_dict.add_item('OBJROOT', f'{symroot}/build')
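The foldermap logic above registers the folders containing each source file so
Xcode can build a correct group tree, stopping early once a prefix is already
known. A sketch of the upward walk for one file, assuming POSIX-style paths and
a placeholder in place of gen_id():

    import os

    seen = {}
    # For 'gui/widgets/button.c' this records 'gui/widgets' and then 'gui';
    # a folder already seen (shared with an earlier source) ends the walk.
    folder = os.path.split(os.path.dirname('gui/widgets/button.c'))
    while folder:
        fpath = os.path.join(*folder)
        if fpath in seen:
            break
        seen[fpath] = object()  # stands in for self.gen_id()
        folder = folder[:-1]
    print(sorted(seen))  # ['gui', 'gui/widgets']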
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 72d376d..2adfb98 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -24,14 +24,14 @@ from .mesonlib import (
File, MesonException, MachineChoice, PerMachine, OrderedSet, listify,
extract_as_list, typeslistify, stringlistify, classify_unity_sources,
get_filenames_templates_dict, substitute_values, has_path_sep,
- is_parent_path, PerMachineDefaultable,
+ is_parent_path, relpath, PerMachineDefaultable,
MesonBugException, EnvironmentVariables, pickle_load, lazy_property,
)
from .options import OptionKey
from .compilers import (
is_header, is_object, is_source, clink_langs, sort_clink, all_languages,
- is_known_suffix, detect_static_linker
+ is_known_suffix, detect_static_linker, LANGUAGES_USING_LDFLAGS
)
from .interpreterbase import FeatureNew, FeatureDeprecated, UnknownValue
@@ -75,6 +75,7 @@ lang_arg_kwargs |= {
vala_kwargs = {'vala_header', 'vala_gir', 'vala_vapi'}
rust_kwargs = {'rust_crate_type', 'rust_dependency_map'}
cs_kwargs = {'resources', 'cs_args'}
+swift_kwargs = {'swift_interoperability_mode', 'swift_module_name'}
buildtarget_kwargs = {
'build_by_default',
@@ -110,7 +111,8 @@ known_build_target_kwargs = (
pch_kwargs |
vala_kwargs |
rust_kwargs |
- cs_kwargs)
+ cs_kwargs |
+ swift_kwargs)
known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie', 'vs_module_defs', 'android_exe_type'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions', 'rust_abi'}
@@ -769,14 +771,23 @@ class BuildTarget(Target):
''' Initialisations and checks requiring the final list of compilers to be known
'''
self.validate_sources()
- if self.structured_sources and any([self.sources, self.generated]):
- raise MesonException('cannot mix structured sources and unstructured sources')
- if self.structured_sources and 'rust' not in self.compilers:
- raise MesonException('structured sources are only supported in Rust targets')
if self.uses_rust():
+ if self.link_language and self.link_language != 'rust':
+ raise MesonException('cannot build Rust sources with a different link_language')
+ if self.structured_sources:
+ # TODO: the interpreter should be able to generate a better error message?
+ if any((s.endswith('.rs') for s in self.sources)) or \
+ any(any((s.endswith('.rs') for s in g.get_outputs())) for g in self.generated):
+ raise MesonException('cannot mix Rust structured sources and unstructured sources')
+
# relocation-model=pic is rustc's default and Meson does not
# currently have a way to disable PIC.
self.pic = True
+ self.pie = True
+ else:
+ if self.structured_sources:
+ raise MesonException('structured sources are only supported in Rust targets')
+
if 'vala' in self.compilers and self.is_linkable_target():
self.outputs += [self.vala_header, self.vala_vapi]
self.install_tag += ['devel', 'devel']
@@ -878,6 +889,10 @@ class BuildTarget(Target):
if isinstance(t, (CustomTarget, CustomTargetIndex)):
continue # We can't know anything about these.
for name, compiler in t.compilers.items():
+ if name == 'rust':
+ # Rust is always linked through a C-ABI target, so do not add
+ # the compiler here
+ continue
if name in link_langs and name not in self.compilers:
self.compilers[name] = compiler
@@ -963,7 +978,7 @@ class BuildTarget(Target):
self.compilers[lang] = compiler
break
else:
- if is_known_suffix(s):
+ if is_known_suffix(s) and not is_header(s):
path = pathlib.Path(str(s)).as_posix()
m = f'No {self.for_machine.get_lower_case_name()} machine compiler for {path!r}'
raise MesonException(m)
@@ -1260,6 +1275,12 @@ class BuildTarget(Target):
raise InvalidArguments(f'Invalid rust_dependency_map "{rust_dependency_map}": must be a dictionary with string values.')
self.rust_dependency_map = rust_dependency_map
+ self.swift_interoperability_mode = kwargs.get('swift_interoperability_mode')
+
+ self.swift_module_name = kwargs.get('swift_module_name')
+ if self.swift_module_name == '':
+ self.swift_module_name = self.name
+
def _extract_pic_pie(self, kwargs: T.Dict[str, T.Any], arg: str, option: str) -> bool:
# Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags
all_flags = self.extra_args['c'] + self.extra_args['cpp']
@@ -1589,6 +1610,9 @@ class BuildTarget(Target):
if isinstance(link_target, (CustomTarget, CustomTargetIndex)):
continue
for language in link_target.compilers:
+ if language == 'rust' and not link_target.uses_rust_abi():
+ # All Rust dependencies must go through a C-ABI dependency, so ignore it
+ continue
if language not in langs:
langs.append(language)
@@ -1680,6 +1704,9 @@ class BuildTarget(Target):
def uses_fortran(self) -> bool:
return 'fortran' in self.compilers
+ def uses_swift_cpp_interop(self) -> bool:
+ return self.swift_interoperability_mode == 'cpp' and 'swift' in self.compilers
+
def get_using_msvc(self) -> bool:
'''
Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary,
@@ -1774,6 +1801,121 @@ class BuildTarget(Target):
"""Base case used by BothLibraries"""
return self
+ def determine_rpath_dirs(self) -> T.Tuple[str, ...]:
+ result: OrderedSet[str]
+ if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
+ # Need a copy here
+ result = OrderedSet(self.get_link_dep_subdirs())
+ else:
+ result = OrderedSet()
+ result.add('meson-out')
+ result.update(self.rpaths_for_non_system_absolute_shared_libraries())
+ self.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result])
+ return tuple(result)
+
+ @lru_cache(maxsize=None)
+ def rpaths_for_non_system_absolute_shared_libraries(self, exclude_system: bool = True) -> ImmutableListProtocol[str]:
+ paths: OrderedSet[str] = OrderedSet()
+ srcdir = self.environment.get_source_dir()
+
+ system_dirs = set()
+ if exclude_system:
+ for cc in self.compilers.values():
+ system_dirs.update(cc.get_library_dirs(self.environment))
+
+ external_rpaths = self.get_external_rpath_dirs()
+ build_to_src = relpath(self.environment.get_source_dir(),
+ self.environment.get_build_dir())
+
+ for dep in self.external_deps:
+ if dep.type_name not in {'library', 'pkgconfig', 'cmake'}:
+ continue
+ for libpath in dep.link_args:
+ if libpath.startswith('-'):
+ continue
+ # For all link args that are absolute paths to a library file, add RPATH args
+ if not os.path.isabs(libpath):
+ continue
+ libdir, libname = os.path.split(libpath)
+ # Windows doesn't support rpaths, but we use this function to
+ # emulate rpaths by setting PATH
+ # .dll is there for mingw gcc
+ # .so's may be extended with version information, e.g. libxyz.so.1.2.3
+ if not (
+ libname.endswith(('.dll', '.lib', '.so', '.dylib'))
+ or '.so.' in libname
+ ):
+ continue
+
+ # Don't remove rpaths specified in LDFLAGS.
+ if libdir in external_rpaths:
+ continue
+ if system_dirs and os.path.normpath(libdir) in system_dirs:
+ # No point in adding system paths.
+ continue
+
+ if is_parent_path(srcdir, libdir):
+ rel_to_src = libdir[len(srcdir) + 1:]
+ assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute'
+ paths.add(os.path.join(build_to_src, rel_to_src))
+ else:
+ paths.add(libdir)
+ # Don't remove rpaths specified by the dependency
+ paths.difference_update(self.get_rpath_dirs_from_link_args(dep.link_args))
+ for i in itertools.chain(self.link_targets, self.link_whole_targets):
+ if isinstance(i, BuildTarget):
+ paths.update(i.rpaths_for_non_system_absolute_shared_libraries(exclude_system))
+ return list(paths)
+
+ def get_external_rpath_dirs(self) -> T.Set[str]:
+ args: T.List[str] = []
+ for lang in LANGUAGES_USING_LDFLAGS:
+ try:
+ args += self.environment.coredata.get_external_link_args(self.for_machine, lang)
+ except KeyError:
+ pass
+ return self.get_rpath_dirs_from_link_args(args)
+
+ # Match rpath formats:
+ # -Wl,-rpath=
+ # -Wl,-rpath,
+ _rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
+ # Match solaris style compat runpath formats:
+ # -Wl,-R
+ # -Wl,-R,
+ _runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
+ # Match symbols formats:
+ # -Wl,--just-symbols=
+ # -Wl,--just-symbols,
+ _symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
+
+ @classmethod
+ def get_rpath_dirs_from_link_args(cls, args: T.List[str]) -> T.Set[str]:
+ dirs: T.Set[str] = set()
+
+ for arg in args:
+ if not arg.startswith('-Wl,'):
+ continue
+
+ rpath_match = cls._rpath_regex.match(arg)
+ if rpath_match:
+ for dir in rpath_match.group(1).split(':'):
+ dirs.add(dir)
+ runpath_match = cls._runpath_regex.match(arg)
+ if runpath_match:
+ for dir in runpath_match.group(1).split(':'):
+ # The symbols arg is an rpath if the path is a directory
+ if os.path.isdir(dir):
+ dirs.add(dir)
+ symbols_match = cls._symbols_regex.match(arg)
+ if symbols_match:
+ for dir in symbols_match.group(1).split(':'):
+ # Prevent usage of --just-symbols to specify rpath
+ if os.path.isdir(dir):
+ raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.')
+ return dirs
+
+
class FileInTargetPrivateDir:
"""Represents a file with the path '/path/to/build/target_private_dir/fname'.
target_private_dir is the return value of get_target_private_dir which is e.g. 'subdir/target.p'.
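A note on the filename filter in rpaths_for_non_system_absolute_shared_libraries
above: versioned ELF names such as libxyz.so.1.2.3 do not end in '.so', so the
check also looks for '.so.' inside the name. A sketch of just that predicate
(the function name is hypothetical):

    # '.dll'/'.lib' cover Windows and mingw, where PATH is used to
    # emulate rpaths; '.so.' catches version-suffixed ELF names.
    def looks_like_linkable_library(libname: str) -> bool:
        return libname.endswith(('.dll', '.lib', '.so', '.dylib')) or '.so.' in libname

    assert looks_like_linkable_library('libxyz.so.1.2.3')
    assert looks_like_linkable_library('foo.dll')
    assert not looks_like_linkable_library('libxyz.a')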
diff --git a/mesonbuild/cargo/interpreter.py b/mesonbuild/cargo/interpreter.py
index a5d703e..a0d4371 100644
--- a/mesonbuild/cargo/interpreter.py
+++ b/mesonbuild/cargo/interpreter.py
@@ -11,439 +11,30 @@ port will be required.
from __future__ import annotations
import dataclasses
-import importlib
-import json
import os
-import shutil
import collections
import urllib.parse
import itertools
import typing as T
from . import builder, version, cfg
-from ..mesonlib import MesonException, Popen_safe, MachineChoice
+from .toml import load_toml, TomlImplementationMissing
+from .manifest import Manifest, CargoLock, fixup_meson_varname
+from ..mesonlib import MesonException, MachineChoice
from .. import coredata, mlog
from ..wrap.wrap import PackageDefinition
if T.TYPE_CHECKING:
- from types import ModuleType
-
- from typing_extensions import Protocol, Self
-
- from . import manifest
+ from . import raw
from .. import mparser
+ from .manifest import Dependency, SystemDependency
from ..environment import Environment
from ..interpreterbase import SubProject
from ..compilers.rust import RustCompiler
- # Copied from typeshed. Blarg that they don't expose this
- class DataclassInstance(Protocol):
- __dataclass_fields__: T.ClassVar[dict[str, dataclasses.Field[T.Any]]]
-
- _UnknownKeysT = T.TypeVar('_UnknownKeysT', manifest.FixedPackage,
- manifest.FixedDependency, manifest.FixedLibTarget,
- manifest.FixedBuildTarget)
-
-
-# tomllib is present in python 3.11, before that it is a pypi module called tomli,
-# we try to import tomllib, then tomli,
-# TODO: add a fallback to toml2json?
-tomllib: T.Optional[ModuleType] = None
-toml2json: T.Optional[str] = None
-for t in ['tomllib', 'tomli']:
- try:
- tomllib = importlib.import_module(t)
- break
- except ImportError:
- pass
-else:
- # TODO: it would be better to use an Executable here, which could be looked
- # up in the cross file or provided by a wrap. However, that will have to be
- # passed in externally, since we don't have (and I don't think we should),
- # have access to the `Environment` for that in this module.
- toml2json = shutil.which('toml2json')
-
-
-_EXTRA_KEYS_WARNING = (
- "This may (unlikely) be an error in the cargo manifest, or may be a missing "
- "implementation in Meson. If this issue can be reproduced with the latest "
- "version of Meson, please help us by opening an issue at "
- "https://github.com/mesonbuild/meson/issues. Please include the crate and "
- "version that is generating this warning if possible."
-)
-
-class TomlImplementationMissing(MesonException):
- pass
-
-
-def load_toml(filename: str) -> T.Dict[object, object]:
- if tomllib:
- with open(filename, 'rb') as f:
- raw = tomllib.load(f)
- else:
- if toml2json is None:
- raise TomlImplementationMissing('Could not find an implementation of tomllib, nor toml2json')
-
- p, out, err = Popen_safe([toml2json, filename])
- if p.returncode != 0:
- raise MesonException('toml2json failed to decode output\n', err)
-
- raw = json.loads(out)
-
- if not isinstance(raw, dict):
- raise MesonException("Cargo.toml isn't a dictionary? How did that happen?")
-
- return raw
-
-
-def fixup_meson_varname(name: str) -> str:
- """Fixup a meson variable name
-
- :param name: The name to fix
- :return: the fixed name
- """
- return name.replace('-', '_')
-
-
-# Pylance can figure out that these do not, in fact, overlap, but mypy can't
-@T.overload
-def _fixup_raw_mappings(d: manifest.BuildTarget) -> manifest.FixedBuildTarget: ... # type: ignore
-
-@T.overload
-def _fixup_raw_mappings(d: manifest.LibTarget) -> manifest.FixedLibTarget: ... # type: ignore
-
-@T.overload
-def _fixup_raw_mappings(d: manifest.Dependency) -> manifest.FixedDependency: ...
-
-def _fixup_raw_mappings(d: T.Union[manifest.BuildTarget, manifest.LibTarget, manifest.Dependency]
- ) -> T.Union[manifest.FixedBuildTarget, manifest.FixedLibTarget,
- manifest.FixedDependency]:
- """Fixup raw cargo mappings to ones more suitable for python to consume.
-
- This does the following:
- * replaces any `-` with `_`, cargo likes the former, but python dicts make
- keys with `-` in them awkward to work with
- * Convert Dependency versions from the cargo format to something meson
- understands
-
- :param d: The mapping to fix
- :return: the fixed string
- """
- raw = {fixup_meson_varname(k): v for k, v in d.items()}
- if 'version' in raw:
- assert isinstance(raw['version'], str), 'for mypy'
- raw['version'] = version.convert(raw['version'])
- return T.cast('T.Union[manifest.FixedBuildTarget, manifest.FixedLibTarget, manifest.FixedDependency]', raw)
-
-
-def _handle_unknown_keys(data: _UnknownKeysT, cls: T.Union[DataclassInstance, T.Type[DataclassInstance]],
- msg: str) -> _UnknownKeysT:
- """Remove and warn on keys that are coming from cargo, but are unknown to
- our representations.
-
- This is intended to give users the possibility of things proceeding when a
- new key is added to Cargo.toml that we don't yet handle, but to still warn
- them that things might not work.
-
- :param data: The raw data to look at
- :param cls: The Dataclass derived type that will be created
- :param msg: the header for the error message. Usually something like "In N structure".
- :return: The original data structure, but with all unknown keys removed.
- """
- unexpected = set(data) - {x.name for x in dataclasses.fields(cls)}
- if unexpected:
- mlog.warning(msg, 'has unexpected keys', '"{}".'.format(', '.join(sorted(unexpected))),
- _EXTRA_KEYS_WARNING)
- for k in unexpected:
- # Mypy and Pyright can't prove that this is okay
- del data[k] # type: ignore[misc]
- return data
-
-
-@dataclasses.dataclass
-class Package:
-
- """Representation of a Cargo Package entry, with defaults filled in."""
-
- name: str
- version: str
- description: T.Optional[str] = None
- resolver: T.Optional[str] = None
- authors: T.List[str] = dataclasses.field(default_factory=list)
- edition: manifest.EDITION = '2015'
- rust_version: T.Optional[str] = None
- documentation: T.Optional[str] = None
- readme: T.Optional[str] = None
- homepage: T.Optional[str] = None
- repository: T.Optional[str] = None
- license: T.Optional[str] = None
- license_file: T.Optional[str] = None
- keywords: T.List[str] = dataclasses.field(default_factory=list)
- categories: T.List[str] = dataclasses.field(default_factory=list)
- workspace: T.Optional[str] = None
- build: T.Optional[str] = None
- links: T.Optional[str] = None
- exclude: T.List[str] = dataclasses.field(default_factory=list)
- include: T.List[str] = dataclasses.field(default_factory=list)
- publish: bool = True
- metadata: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
- default_run: T.Optional[str] = None
- autolib: bool = True
- autobins: bool = True
- autoexamples: bool = True
- autotests: bool = True
- autobenches: bool = True
- api: str = dataclasses.field(init=False)
-
- def __post_init__(self) -> None:
- self.api = _version_to_api(self.version)
-
- @classmethod
- def from_raw(cls, raw: manifest.Package) -> Self:
- pkg = T.cast('manifest.FixedPackage',
- {fixup_meson_varname(k): v for k, v in raw.items()})
- pkg = _handle_unknown_keys(pkg, cls, f'Package entry {pkg["name"]}')
- return cls(**pkg)
-
-@dataclasses.dataclass
-class SystemDependency:
-
- """ Representation of a Cargo system-deps entry
- https://docs.rs/system-deps/latest/system_deps
- """
-
- name: str
- version: T.List[str]
- optional: bool = False
- feature: T.Optional[str] = None
- feature_overrides: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
-
- @classmethod
- def from_raw(cls, name: str, raw: T.Any) -> SystemDependency:
- if isinstance(raw, str):
- return cls(name, SystemDependency.convert_version(raw))
- name = raw.get('name', name)
- version = SystemDependency.convert_version(raw.get('version'))
- optional = raw.get('optional', False)
- feature = raw.get('feature')
- # Everything else are overrides when certain features are enabled.
- feature_overrides = {k: v for k, v in raw.items() if k not in {'name', 'version', 'optional', 'feature'}}
- return cls(name, version, optional, feature, feature_overrides)
-
- @staticmethod
- def convert_version(version: T.Optional[str]) -> T.List[str]:
- vers = version.split(',') if version is not None else []
- result: T.List[str] = []
- for v in vers:
- v = v.strip()
- if v[0] not in '><=':
- v = f'>={v}'
- result.append(v)
- return result
-
- def enabled(self, features: T.Set[str]) -> bool:
- return self.feature is None or self.feature in features
-
-@dataclasses.dataclass
-class Dependency:
-
- """Representation of a Cargo Dependency Entry."""
-
- name: dataclasses.InitVar[str]
- version: T.List[str]
- registry: T.Optional[str] = None
- git: T.Optional[str] = None
- branch: T.Optional[str] = None
- rev: T.Optional[str] = None
- path: T.Optional[str] = None
- optional: bool = False
- package: str = ''
- default_features: bool = True
- features: T.List[str] = dataclasses.field(default_factory=list)
- api: str = dataclasses.field(init=False)
-
- def __post_init__(self, name: str) -> None:
- self.package = self.package or name
- # Extract wanted API version from version constraints.
- api = set()
- for v in self.version:
- if v.startswith(('>=', '==')):
- api.add(_version_to_api(v[2:].strip()))
- elif v.startswith('='):
- api.add(_version_to_api(v[1:].strip()))
- if not api:
- self.api = '0'
- elif len(api) == 1:
- self.api = api.pop()
- else:
- raise MesonException(f'Cannot determine minimum API version from {self.version}.')
-
- @classmethod
- def from_raw(cls, name: str, raw: manifest.DependencyV) -> Dependency:
- """Create a dependency from a raw cargo dictionary"""
- if isinstance(raw, str):
- return cls(name, version.convert(raw))
- fixed = _handle_unknown_keys(_fixup_raw_mappings(raw), cls, f'Dependency entry {name}')
- return cls(name, **fixed)
-
-
-@dataclasses.dataclass
-class BuildTarget:
-
- name: str
- crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib'])
- path: dataclasses.InitVar[T.Optional[str]] = None
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-test-field
- # True for lib, bin, test
- test: bool = True
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doctest-field
- # True for lib
- doctest: bool = False
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-bench-field
- # True for lib, bin, benchmark
- bench: bool = True
-
- # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doc-field
- # True for libraries and binaries
- doc: bool = False
-
- harness: bool = True
- edition: manifest.EDITION = '2015'
- required_features: T.List[str] = dataclasses.field(default_factory=list)
- plugin: bool = False
-
- @classmethod
- def from_raw(cls, raw: manifest.BuildTarget) -> Self:
- name = raw.get('name', '<anonymous>')
- build = _handle_unknown_keys(_fixup_raw_mappings(raw), cls, f'Binary entry {name}')
- return cls(**build)
-
-@dataclasses.dataclass
-class Library(BuildTarget):
-
- """Representation of a Cargo Library Entry."""
-
- doctest: bool = True
- doc: bool = True
- path: str = os.path.join('src', 'lib.rs')
- proc_macro: bool = False
- crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib'])
- doc_scrape_examples: bool = True
-
- @classmethod
- def from_raw(cls, raw: manifest.LibTarget, fallback_name: str) -> Self: # type: ignore[override]
- fixed = _fixup_raw_mappings(raw)
-
- # We need to set the name field if it's not set manually, including if
- # other fields are set in the lib section
- if 'name' not in fixed:
- fixed['name'] = fallback_name
- fixed = _handle_unknown_keys(fixed, cls, f'Library entry {fixed["name"]}')
-
- return cls(**fixed)
-
-
-@dataclasses.dataclass
-class Binary(BuildTarget):
-
- """Representation of a Cargo Bin Entry."""
-
- doc: bool = True
-
-
-@dataclasses.dataclass
-class Test(BuildTarget):
-
- """Representation of a Cargo Test Entry."""
-
- bench: bool = True
-
-
-@dataclasses.dataclass
-class Benchmark(BuildTarget):
-
- """Representation of a Cargo Benchmark Entry."""
-
- test: bool = True
-
-
-@dataclasses.dataclass
-class Example(BuildTarget):
-
- """Representation of a Cargo Example Entry."""
-
- crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
-
-
-@dataclasses.dataclass
-class Manifest:
-
- """Cargo Manifest definition.
-
- Most of these values map up to the Cargo Manifest, but with default values
- if not provided.
-
- Cargo subprojects can contain what Meson wants to treat as multiple,
- interdependent, subprojects.
-
- :param path: the path within the cargo subproject.
- """
-
- package: Package
- dependencies: T.Dict[str, Dependency]
- dev_dependencies: T.Dict[str, Dependency]
- build_dependencies: T.Dict[str, Dependency]
- system_dependencies: T.Dict[str, SystemDependency] = dataclasses.field(init=False)
- lib: Library
- bin: T.List[Binary]
- test: T.List[Test]
- bench: T.List[Benchmark]
- example: T.List[Example]
- features: T.Dict[str, T.List[str]]
- target: T.Dict[str, T.Dict[str, Dependency]]
- path: str = ''
-
- def __post_init__(self) -> None:
- self.features.setdefault('default', [])
- self.system_dependencies = {k: SystemDependency.from_raw(k, v) for k, v in self.package.metadata.get('system-deps', {}).items()}
-
-
-def _convert_manifest(raw_manifest: manifest.Manifest, subdir: str, path: str = '') -> Manifest:
- return Manifest(
- Package.from_raw(raw_manifest['package']),
- {k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('dependencies', {}).items()},
- {k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('dev-dependencies', {}).items()},
- {k: Dependency.from_raw(k, v) for k, v in raw_manifest.get('build-dependencies', {}).items()},
- Library.from_raw(raw_manifest.get('lib', {}), raw_manifest['package']['name']),
- [Binary.from_raw(b) for b in raw_manifest.get('bin', {})],
- [Test.from_raw(b) for b in raw_manifest.get('test', {})],
- [Benchmark.from_raw(b) for b in raw_manifest.get('bench', {})],
- [Example.from_raw(b) for b in raw_manifest.get('example', {})],
- raw_manifest.get('features', {}),
- {k: {k2: Dependency.from_raw(k2, v2) for k2, v2 in v.get('dependencies', {}).items()}
- for k, v in raw_manifest.get('target', {}).items()},
- path,
- )
-
-
-def _version_to_api(version: str) -> str:
- # x.y.z -> x
- # 0.x.y -> 0.x
- # 0.0.x -> 0
- vers = version.split('.')
- if int(vers[0]) != 0:
- return vers[0]
- elif len(vers) >= 2 and int(vers[1]) != 0:
- return f'0.{vers[1]}'
- return '0'
-
-
-def _dependency_name(package_name: str, api: str) -> str:
- basename = package_name[:-3] if package_name.endswith('-rs') else package_name
- return f'{basename}-{api}-rs'
+def _dependency_name(package_name: str, api: str, suffix: str = '-rs') -> str:
+ basename = package_name[:-len(suffix)] if package_name.endswith(suffix) else package_name
+ return f'{basename}-{api}{suffix}'
def _dependency_varname(package_name: str) -> str:
@@ -458,13 +49,13 @@ def _extra_deps_varname() -> str:
return 'extra_deps'
+@dataclasses.dataclass
class PackageState:
- def __init__(self, manifest: Manifest, downloaded: bool) -> None:
- self.manifest = manifest
- self.downloaded = downloaded
- self.features: T.Set[str] = set()
- self.required_deps: T.Set[str] = set()
- self.optional_deps_features: T.Dict[str, T.Set[str]] = collections.defaultdict(set)
+ manifest: Manifest
+ downloaded: bool = False
+ features: T.Set[str] = dataclasses.field(default_factory=set)
+ required_deps: T.Set[str] = dataclasses.field(default_factory=set)
+ optional_deps_features: T.Dict[str, T.Set[str]] = dataclasses.field(default_factory=lambda: collections.defaultdict(set))
@dataclasses.dataclass(frozen=True)
@@ -484,6 +75,9 @@ class Interpreter:
# Rustc's config
self.cfgs = self._get_cfgs()
+ def get_build_def_files(self) -> T.List[str]:
+ return [os.path.join(subdir, 'Cargo.toml') for subdir in self.manifests]
+
def interpret(self, subdir: str) -> mparser.CodeBlockNode:
manifest = self._load_manifest(subdir)
pkg, cached = self._fetch_package(manifest.package.name, manifest.package.api)
@@ -506,9 +100,7 @@ class Interpreter:
ast += self._create_dependencies(pkg, build)
ast += self._create_meson_subdir(build)
- # Libs are always auto-discovered and there's no other way to handle them,
- # which is unfortunate for reproducability
- if os.path.exists(os.path.join(self.environment.source_dir, subdir, pkg.manifest.path, pkg.manifest.lib.path)):
+ if pkg.manifest.lib:
for crate_type in pkg.manifest.lib.crate_type:
ast.extend(self._create_lib(pkg, build, crate_type))
@@ -545,11 +137,12 @@ class Interpreter:
def _load_manifest(self, subdir: str) -> Manifest:
manifest_ = self.manifests.get(subdir)
if not manifest_:
- filename = os.path.join(self.environment.source_dir, subdir, 'Cargo.toml')
- raw = load_toml(filename)
- if 'package' in raw:
- raw_manifest = T.cast('manifest.Manifest', raw)
- manifest_ = _convert_manifest(raw_manifest, subdir)
+ path = os.path.join(self.environment.source_dir, subdir)
+ filename = os.path.join(path, 'Cargo.toml')
+ toml = load_toml(filename)
+ if 'package' in toml:
+ raw_manifest = T.cast('raw.Manifest', toml)
+ manifest_ = Manifest.from_raw(raw_manifest, path)
self.manifests[subdir] = manifest_
else:
raise MesonException(f'{subdir}/Cargo.toml does not have [package] section')
@@ -668,8 +261,9 @@ class Interpreter:
return ast
def _create_system_dependency(self, name: str, dep: SystemDependency, build: builder.Builder) -> T.List[mparser.BaseNode]:
+ # TODO: handle feature_overrides
kw = {
- 'version': build.array([build.string(s) for s in dep.version]),
+ 'version': build.array([build.string(s) for s in dep.meson_version]),
'required': build.bool(not dep.optional),
}
varname = f'{fixup_meson_varname(name)}_system_dep'
@@ -696,7 +290,7 @@ class Interpreter:
def _create_dependency(self, dep: Dependency, build: builder.Builder) -> T.List[mparser.BaseNode]:
pkg = self._dep_package(dep)
kw = {
- 'version': build.array([build.string(s) for s in dep.version]),
+ 'version': build.array([build.string(s) for s in dep.meson_version]),
}
# Lookup for this dependency with the features we want in default_options kwarg.
#
@@ -772,7 +366,7 @@ class Interpreter:
build.block([build.function('subdir', [build.string('meson')])]))
]
- def _create_lib(self, pkg: PackageState, build: builder.Builder, crate_type: manifest.CRATE_TYPE) -> T.List[mparser.BaseNode]:
+ def _create_lib(self, pkg: PackageState, build: builder.Builder, crate_type: raw.CRATE_TYPE) -> T.List[mparser.BaseNode]:
dependencies: T.List[mparser.BaseNode] = []
dependency_map: T.Dict[mparser.BaseNode, mparser.BaseNode] = {}
for name in pkg.required_deps:
@@ -805,6 +399,9 @@ class Interpreter:
'rust_args': build.array(rust_args),
}
+ depname_suffix = '-rs' if crate_type in {'lib', 'rlib', 'proc-macro'} else f'-{crate_type}'
+ depname = _dependency_name(pkg.manifest.package.name, pkg.manifest.package.api, depname_suffix)
+
lib: mparser.BaseNode
if pkg.manifest.lib.proc_macro or crate_type == 'proc-macro':
lib = build.method('proc_macro', build.identifier('rust'), posargs, kwargs)
@@ -837,7 +434,8 @@ class Interpreter:
'link_with': build.identifier('lib'),
'variables': build.dict({
build.string('features'): build.string(','.join(pkg.features)),
- })
+ }),
+ 'version': build.string(pkg.manifest.package.version),
},
),
'dep'
@@ -846,7 +444,7 @@ class Interpreter:
'override_dependency',
build.identifier('meson'),
[
- build.string(_dependency_name(pkg.manifest.package.name, pkg.manifest.package.api)),
+ build.string(depname),
build.identifier('dep'),
],
),
@@ -860,24 +458,23 @@ def load_wraps(source_dir: str, subproject_dir: str) -> T.List[PackageDefinition
filename = os.path.join(source_dir, 'Cargo.lock')
if os.path.exists(filename):
try:
- cargolock = T.cast('manifest.CargoLock', load_toml(filename))
+ toml = load_toml(filename)
except TomlImplementationMissing as e:
mlog.warning('Failed to load Cargo.lock:', str(e), fatal=False)
return wraps
- for package in cargolock['package']:
- name = package['name']
- version = package['version']
- subp_name = _dependency_name(name, _version_to_api(version))
- source = package.get('source')
- if source is None:
+ raw_cargolock = T.cast('raw.CargoLock', toml)
+ cargolock = CargoLock.from_raw(raw_cargolock)
+ for package in cargolock.package:
+ subp_name = _dependency_name(package.name, version.api(package.version))
+ if package.source is None:
# This is project's package, or one of its workspace members.
pass
- elif source == 'registry+https://github.com/rust-lang/crates.io-index':
- checksum = package.get('checksum')
+ elif package.source == 'registry+https://github.com/rust-lang/crates.io-index':
+ checksum = package.checksum
if checksum is None:
- checksum = cargolock['metadata'][f'checksum {name} {version} ({source})']
- url = f'https://crates.io/api/v1/crates/{name}/{version}/download'
- directory = f'{name}-{version}'
+ checksum = cargolock.metadata[f'checksum {package.name} {package.version} ({package.source})']
+ url = f'https://crates.io/api/v1/crates/{package.name}/{package.version}/download'
+ directory = f'{package.name}-{package.version}'
wraps.append(PackageDefinition.from_values(subp_name, subproject_dir, 'file', {
'directory': directory,
'source_url': url,
@@ -885,18 +482,18 @@ def load_wraps(source_dir: str, subproject_dir: str) -> T.List[PackageDefinition
'source_hash': checksum,
'method': 'cargo',
}))
- elif source.startswith('git+'):
- parts = urllib.parse.urlparse(source[4:])
+ elif package.source.startswith('git+'):
+ parts = urllib.parse.urlparse(package.source[4:])
query = urllib.parse.parse_qs(parts.query)
branch = query['branch'][0] if 'branch' in query else ''
revision = parts.fragment or branch
url = urllib.parse.urlunparse(parts._replace(params='', query='', fragment=''))
wraps.append(PackageDefinition.from_values(subp_name, subproject_dir, 'git', {
- 'directory': name,
+ 'directory': package.name,
'url': url,
'revision': revision,
'method': 'cargo',
}))
else:
- mlog.warning(f'Unsupported source URL in {filename}: {source}')
+ mlog.warning(f'Unsupported source URL in {filename}: {package.source}')
return wraps
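For illustration, a minimal sketch of the wrap the registry branch above produces for a hypothetical Cargo.lock entry (name 'rand', version '0.8.5'); the subproject name assumes _dependency_name() appends the API version and an '-rs' suffix:

    # Hypothetical input from Cargo.lock:
    #   [[package]]
    #   name = "rand"
    #   version = "0.8.5"
    #   source = "registry+https://github.com/rust-lang/crates.io-index"
    #   checksum = "deadbeef..."
    # Resulting wrap values, per the code above:
    #   subproject name: 'rand-0.8-rs'
    #   directory:       'rand-0.8.5'
    #   source_url:      'https://crates.io/api/v1/crates/rand/0.8.5/download'
    #   source_hash:     'deadbeef...'
    #   method:          'cargo'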
diff --git a/mesonbuild/cargo/manifest.py b/mesonbuild/cargo/manifest.py
index d95df7f..ab059b0 100644
--- a/mesonbuild/cargo/manifest.py
+++ b/mesonbuild/cargo/manifest.py
@@ -4,244 +4,505 @@
"""Type definitions for cargo manifest files."""
from __future__ import annotations
+
+import dataclasses
+import os
import typing as T
-from typing_extensions import Literal, TypedDict, Required
-
-EDITION = Literal['2015', '2018', '2021']
-CRATE_TYPE = Literal['bin', 'lib', 'dylib', 'staticlib', 'cdylib', 'rlib', 'proc-macro']
-
-Package = TypedDict(
- 'Package',
- {
- 'name': Required[str],
- 'version': Required[str],
- 'authors': T.List[str],
- 'edition': EDITION,
- 'rust-version': str,
- 'description': str,
- 'readme': str,
- 'license': str,
- 'license-file': str,
- 'keywords': T.List[str],
- 'categories': T.List[str],
- 'workspace': str,
- 'build': str,
- 'links': str,
- 'include': T.List[str],
- 'exclude': T.List[str],
- 'publish': bool,
- 'metadata': T.Dict[str, T.Dict[str, str]],
- 'default-run': str,
- 'autolib': bool,
- 'autobins': bool,
- 'autoexamples': bool,
- 'autotests': bool,
- 'autobenches': bool,
- },
- total=False,
-)
-"""A description of the Package Dictionary."""
-
-class FixedPackage(TypedDict, total=False):
-
- """A description of the Package Dictionary, fixed up."""
-
- name: Required[str]
- version: Required[str]
- authors: T.List[str]
- edition: EDITION
- rust_version: str
- description: str
- readme: str
- license: str
- license_file: str
- keywords: T.List[str]
- categories: T.List[str]
- workspace: str
- build: str
- links: str
- include: T.List[str]
- exclude: T.List[str]
- publish: bool
- metadata: T.Dict[str, T.Dict[str, str]]
- default_run: str
- autolib: bool
- autobins: bool
- autoexamples: bool
- autotests: bool
- autobenches: bool
-
-
-class Badge(TypedDict):
-
- """An entry in the badge section."""
-
- status: Literal['actively-developed', 'passively-developed', 'as-is', 'experimental', 'deprecated', 'none']
-
-
-Dependency = TypedDict(
- 'Dependency',
- {
- 'version': str,
- 'registry': str,
- 'git': str,
- 'branch': str,
- 'rev': str,
- 'path': str,
- 'optional': bool,
- 'package': str,
- 'default-features': bool,
- 'features': T.List[str],
- },
- total=False,
-)
-"""An entry in the *dependencies sections."""
+from . import version
+from ..mesonlib import MesonException, lazy_property
+from .. import mlog
+if T.TYPE_CHECKING:
+ from typing_extensions import Protocol, Self
-class FixedDependency(TypedDict, total=False):
+ from . import raw
+ from .raw import EDITION, CRATE_TYPE
- """An entry in the *dependencies sections, fixed up."""
+ # Copied from typeshed. Blarg that they don't expose this
+ class DataclassInstance(Protocol):
+ __dataclass_fields__: T.ClassVar[dict[str, dataclasses.Field[T.Any]]]
- version: T.List[str]
- registry: str
- git: str
- branch: str
- rev: str
- path: str
- optional: bool
- package: str
- default_features: bool
- features: T.List[str]
-
-
-DependencyV = T.Union[Dependency, str]
-"""A Dependency entry, either a string or a Dependency Dict."""
-
-
-_BaseBuildTarget = TypedDict(
- '_BaseBuildTarget',
- {
- 'path': str,
- 'test': bool,
- 'doctest': bool,
- 'bench': bool,
- 'doc': bool,
- 'plugin': bool,
- 'proc-macro': bool,
- 'harness': bool,
- 'edition': EDITION,
- 'crate-type': T.List[CRATE_TYPE],
- 'required-features': T.List[str],
- },
- total=False,
+_DI = T.TypeVar('_DI', bound='DataclassInstance')
+_R = T.TypeVar('_R', bound='raw._BaseBuildTarget')
+
+_EXTRA_KEYS_WARNING = (
+ "This may (unlikely) be an error in the cargo manifest, or may be a missing "
+ "implementation in Meson. If this issue can be reproduced with the latest "
+ "version of Meson, please help us by opening an issue at "
+ "https://github.com/mesonbuild/meson/issues. Please include the crate and "
+ "version that is generating this warning if possible."
)
-class BuildTarget(_BaseBuildTarget, total=False):
+def fixup_meson_varname(name: str) -> str:
+ """Fixup a meson variable name
+
+ :param name: The name to fix
+ :return: the fixed name
+ """
+ return name.replace('-', '_')
+
+
+@T.overload
+def _depv_to_dep(depv: raw.FromWorkspace) -> raw.FromWorkspace: ...
+
+@T.overload
+def _depv_to_dep(depv: raw.DependencyV) -> raw.Dependency: ...
+
+def _depv_to_dep(depv: T.Union[raw.FromWorkspace, raw.DependencyV]) -> T.Union[raw.FromWorkspace, raw.Dependency]:
+ return {'version': depv} if isinstance(depv, str) else depv
+
- name: Required[str]
+def _raw_to_dataclass(raw: T.Mapping[str, object], cls: T.Type[_DI],
+ msg: str, **kwargs: T.Callable[[T.Any], object]) -> _DI:
+ """Fixup raw cargo mappings to ones more suitable for python to consume as dataclass.
-class LibTarget(_BaseBuildTarget, total=False):
+ * Replaces any `-` with `_` in the keys.
+ * Optionally pass values through the functions in kwargs, in order to do
+ recursive conversions.
+ * Remove and warn on keys that are coming from cargo, but are unknown to
+ our representations.
+
+ This is intended to give users the possibility of things proceeding when a
+ new key is added to Cargo.toml that we don't yet handle, but to still warn
+ them that things might not work.
+
+ :param raw: The raw mapping to convert
+ :param cls: The dataclass-derived type that will be created
+ :param msg: The header for the warning message. Usually something like "In N structure".
+ :return: An instance of `cls` built from the known keys.
+ """
+ new_dict = {}
+ unexpected = set()
+ fields = {x.name for x in dataclasses.fields(cls)}
+ for orig_k, v in raw.items():
+ k = fixup_meson_varname(orig_k)
+ if k not in fields:
+ unexpected.add(orig_k)
+ continue
+ if k in kwargs:
+ v = kwargs[k](v)
+ new_dict[k] = v
+
+ if unexpected:
+ mlog.warning(msg, 'has unexpected keys', '"{}".'.format(', '.join(sorted(unexpected))),
+ _EXTRA_KEYS_WARNING)
+ return cls(**new_dict)
+
+
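As a minimal sketch of what _raw_to_dataclass does (the Demo dataclass and input are hypothetical, not part of the patch):

    @dataclasses.dataclass
    class Demo:
        name: str
        default_features: bool = True

    # '-' in keys becomes '_'; unknown keys are dropped with a warning
    # that includes _EXTRA_KEYS_WARNING.
    d = _raw_to_dataclass({'name': 'foo', 'default-features': False, 'frobnicate': 1},
                          Demo, 'Demo entry foo')
    assert d == Demo('foo', False)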
+@T.overload
+def _inherit_from_workspace(raw: raw.Package,
+ raw_from_workspace: T.Optional[T.Mapping[str, object]],
+ msg: str,
+ **kwargs: T.Callable[[T.Any, T.Any], object]) -> raw.Package: ...
+
+@T.overload
+def _inherit_from_workspace(raw: T.Union[raw.FromWorkspace, raw.Dependency],
+ raw_from_workspace: T.Optional[T.Mapping[str, object]],
+ msg: str,
+ **kwargs: T.Callable[[T.Any, T.Any], object]) -> raw.Dependency: ...
+
+def _inherit_from_workspace(raw_: T.Union[raw.FromWorkspace, raw.Package, raw.Dependency], # type: ignore[misc]
+ raw_from_workspace: T.Optional[T.Mapping[str, object]],
+ msg: str,
+ **kwargs: T.Callable[[T.Any, T.Any], object]) -> T.Mapping[str, object]:
+ # allow accesses by non-literal key below
+ raw = T.cast('T.Mapping[str, object]', raw_)
+
+ if not raw_from_workspace:
+ if raw.get('workspace', False) or \
+ any(isinstance(v, dict) and v.get('workspace', False) for v in raw.values()):
+ raise MesonException(f'Cargo.toml file requests {msg} from workspace')
+
+ return raw
+
+ result = {k: v for k, v in raw.items() if k != 'workspace'}
+ for k, v in raw.items():
+ if isinstance(v, dict) and v.get('workspace', False):
+ if k in raw_from_workspace:
+ result[k] = raw_from_workspace[k]
+ if k in kwargs:
+ result[k] = kwargs[k](v, result[k])
+ else:
+ del result[k]
+
+ if raw.get('workspace', False):
+ for k, v in raw_from_workspace.items():
+ if k not in result or k in kwargs:
+ if k in kwargs:
+ v = kwargs[k](raw.get(k), v)
+ result[k] = v
+ return result
+
+
+@dataclasses.dataclass
+class Package:
+
+ """Representation of a Cargo Package entry, with defaults filled in."""
+
+ name: str
+ version: str
+ description: T.Optional[str] = None
+ resolver: T.Optional[str] = None
+ authors: T.List[str] = dataclasses.field(default_factory=list)
+ edition: EDITION = '2015'
+ rust_version: T.Optional[str] = None
+ documentation: T.Optional[str] = None
+ readme: T.Optional[str] = None
+ homepage: T.Optional[str] = None
+ repository: T.Optional[str] = None
+ license: T.Optional[str] = None
+ license_file: T.Optional[str] = None
+ keywords: T.List[str] = dataclasses.field(default_factory=list)
+ categories: T.List[str] = dataclasses.field(default_factory=list)
+ workspace: T.Optional[str] = None
+ build: T.Optional[str] = None
+ links: T.Optional[str] = None
+ exclude: T.List[str] = dataclasses.field(default_factory=list)
+ include: T.List[str] = dataclasses.field(default_factory=list)
+ publish: bool = True
+ metadata: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
+ default_run: T.Optional[str] = None
+ autolib: bool = True
+ autobins: bool = True
+ autoexamples: bool = True
+ autotests: bool = True
+ autobenches: bool = True
+
+ @lazy_property
+ def api(self) -> str:
+ return version.api(self.version)
+
+ @classmethod
+ def from_raw(cls, raw_pkg: raw.Package, workspace: T.Optional[Workspace] = None) -> Self:
+ raw_ws_pkg = None
+ if workspace is not None:
+ raw_ws_pkg = workspace.package
+
+ raw_pkg = _inherit_from_workspace(raw_pkg, raw_ws_pkg, f'Package entry {raw_pkg["name"]}')
+ return _raw_to_dataclass(raw_pkg, cls, f'Package entry {raw_pkg["name"]}')
+
+@dataclasses.dataclass
+class SystemDependency:
+
+ """ Representation of a Cargo system-deps entry
+ https://docs.rs/system-deps/latest/system_deps
+ """
name: str
+ version: str = ''
+ optional: bool = False
+ feature: T.Optional[str] = None
+ # TODO: convert values to dataclass
+ feature_overrides: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
+
+ @classmethod
+ def from_raw(cls, name: str, raw: T.Union[T.Dict[str, T.Any], str]) -> SystemDependency:
+ if isinstance(raw, str):
+ raw = {'version': raw}
+ name = raw.get('name', name)
+ version = raw.get('version', '')
+ optional = raw.get('optional', False)
+ feature = raw.get('feature')
+ # Everything else is an override applied when certain features are enabled.
+ feature_overrides = {k: v for k, v in raw.items() if k not in {'name', 'version', 'optional', 'feature'}}
+ return cls(name, version, optional, feature, feature_overrides)
+
+ @lazy_property
+ def meson_version(self) -> T.List[str]:
+ vers = self.version.split(',') if self.version else []
+ result: T.List[str] = []
+ for v in vers:
+ v = v.strip()
+ if v[0] not in '><=':
+ v = f'>={v}'
+ result.append(v)
+ return result
+
+ def enabled(self, features: T.Set[str]) -> bool:
+ return self.feature is None or self.feature in features
+
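A small illustration of SystemDependency.from_raw and the meson_version conversion (values are hypothetical): bare versions get a '>=' prefix, while explicit '>', '<' and '=' constraints pass through unchanged:

    sd = SystemDependency.from_raw('glib-2.0', '2.60, <3')
    assert sd.meson_version == ['>=2.60', '<3']
    assert sd.enabled(set())   # no gating feature, so always enabled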
+@dataclasses.dataclass
+class Dependency:
+
+ """Representation of a Cargo Dependency Entry."""
+ package: str
+ version: str = ''
+ registry: T.Optional[str] = None
+ git: T.Optional[str] = None
+ branch: T.Optional[str] = None
+ rev: T.Optional[str] = None
+ path: T.Optional[str] = None
+ optional: bool = False
+ default_features: bool = True
+ features: T.List[str] = dataclasses.field(default_factory=list)
+
+ @lazy_property
+ def meson_version(self) -> T.List[str]:
+ return version.convert(self.version)
+
+ @lazy_property
+ def api(self) -> str:
+ # Extract wanted API version from version constraints.
+ api = set()
+ for v in self.meson_version:
+ if v.startswith(('>=', '==')):
+ api.add(version.api(v[2:].strip()))
+ elif v.startswith('='):
+ api.add(version.api(v[1:].strip()))
+ if not api:
+ return '0'
+ elif len(api) == 1:
+ return api.pop()
+ else:
+ raise MesonException(f'Cannot determine minimum API version from {self.version}.')
+
+ @classmethod
+ def from_raw_dict(cls, name: str, raw_dep: T.Union[raw.FromWorkspace, raw.Dependency], member_path: str = '', raw_ws_dep: T.Optional[raw.Dependency] = None) -> Dependency:
+ raw_dep = _inherit_from_workspace(raw_dep, raw_ws_dep,
+ f'Dependency entry {name}',
+ path=lambda pkg_path, ws_path: os.path.relpath(ws_path, member_path),
+ features=lambda pkg_path, ws_path: (pkg_path or []) + (ws_path or []))
+ raw_dep.setdefault('package', name)
+ return _raw_to_dataclass(raw_dep, cls, f'Dependency entry {name}')
+
+ @classmethod
+ def from_raw(cls, name: str, raw_depv: T.Union[raw.FromWorkspace, raw.DependencyV], member_path: str = '', workspace: T.Optional[Workspace] = None) -> Dependency:
+ """Create a dependency from a raw cargo dictionary or string"""
+ raw_ws_dep: T.Optional[raw.Dependency] = None
+ if workspace is not None:
+ raw_ws_depv = workspace.dependencies.get(name, {})
+ raw_ws_dep = _depv_to_dep(raw_ws_depv)
+
+ raw_dep = _depv_to_dep(raw_depv)
+ return cls.from_raw_dict(name, raw_dep, member_path, raw_ws_dep)
+
+
+@dataclasses.dataclass
+class BuildTarget(T.Generic[_R]):
-class _BaseFixedBuildTarget(TypedDict, total=False):
+ name: str
path: str
- test: bool
- doctest: bool
- bench: bool
- doc: bool
- plugin: bool
- harness: bool
- edition: EDITION
crate_type: T.List[CRATE_TYPE]
- required_features: T.List[str]
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-test-field
+ # True for lib, bin, test
+ test: bool = True
-class FixedBuildTarget(_BaseFixedBuildTarget, total=False):
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doctest-field
+ # True for lib
+ doctest: bool = False
- name: str
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-bench-field
+ # True for lib, bin, benchmark
+ bench: bool = True
-class FixedLibTarget(_BaseFixedBuildTarget, total=False):
+ # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doc-field
+ # True for libraries and binaries
+ doc: bool = False
- name: Required[str]
- proc_macro: bool
+ harness: bool = True
+ edition: EDITION = '2015'
+ required_features: T.List[str] = dataclasses.field(default_factory=list)
+ plugin: bool = False
+ @classmethod
+ def from_raw(cls, raw: _R) -> Self:
+ name = raw.get('name', '<anonymous>')
+ return _raw_to_dataclass(raw, cls, f'Build target entry {name}')
-class Target(TypedDict):
+@dataclasses.dataclass
+class Library(BuildTarget['raw.LibTarget']):
- """Target entry in the Manifest File."""
+ """Representation of a Cargo Library Entry."""
- dependencies: T.Dict[str, DependencyV]
+ doctest: bool = True
+ doc: bool = True
+ path: str = os.path.join('src', 'lib.rs')
+ proc_macro: bool = False
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib'])
+ doc_scrape_examples: bool = True
+ @classmethod
+ def from_raw(cls, raw: raw.LibTarget, fallback_name: str) -> Self: # type: ignore[override]
+ # We need to set the name field if it's not set manually, including if
+ # other fields are set in the lib section
+ raw.setdefault('name', fallback_name)
+ return _raw_to_dataclass(raw, cls, f'Library entry {raw["name"]}')
-class Workspace(TypedDict):
- """The representation of a workspace.
+@dataclasses.dataclass
+class Binary(BuildTarget['raw.BuildTarget']):
- In a vritual manifest the :attribute:`members` is always present, but in a
- project manifest, an empty workspace may be provided, in which case the
- workspace is implicitly filled in by values from the path based dependencies.
+ """Representation of a Cargo Bin Entry."""
- the :attribute:`exclude` is always optional
- """
+ doc: bool = True
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
- members: T.List[str]
- exclude: T.List[str]
-
-
-Manifest = TypedDict(
- 'Manifest',
- {
- 'package': Required[Package],
- 'badges': T.Dict[str, Badge],
- 'dependencies': T.Dict[str, DependencyV],
- 'dev-dependencies': T.Dict[str, DependencyV],
- 'build-dependencies': T.Dict[str, DependencyV],
- 'lib': LibTarget,
- 'bin': T.List[BuildTarget],
- 'test': T.List[BuildTarget],
- 'bench': T.List[BuildTarget],
- 'example': T.List[BuildTarget],
- 'features': T.Dict[str, T.List[str]],
- 'target': T.Dict[str, Target],
- 'workspace': Workspace,
-
- # TODO: patch?
- # TODO: replace?
- },
- total=False,
-)
-"""The Cargo Manifest format."""
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('bin', raw['name'] + '.rs')
+ return super().from_raw(raw)
-class VirtualManifest(TypedDict):
+@dataclasses.dataclass
+class Test(BuildTarget['raw.BuildTarget']):
- """The Representation of a virtual manifest.
+ """Representation of a Cargo Test Entry."""
- Cargo allows a root manifest that contains only a workspace, this is called
- a virtual manifest. This doesn't really map 1:1 with any meson concept,
- except perhaps the proposed "meta project".
+ bench: bool = True
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
+
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('tests', raw['name'] + '.rs')
+ return super().from_raw(raw)
+
+@dataclasses.dataclass
+class Benchmark(BuildTarget['raw.BuildTarget']):
+
+ """Representation of a Cargo Benchmark Entry."""
+
+ test: bool = True
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
+
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('benches', raw['name'] + '.rs')
+ return super().from_raw(raw)
+
+
+@dataclasses.dataclass
+class Example(BuildTarget['raw.BuildTarget']):
+
+ """Representation of a Cargo Example Entry."""
+
+ crate_type: T.List[CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin'])
+
+ @classmethod
+ def from_raw(cls, raw: raw.BuildTarget) -> Self:
+ if 'path' not in raw:
+ raw['path'] = os.path.join('examples', raw['name'] + '.rs')
+ return super().from_raw(raw)
+
+
+@dataclasses.dataclass
+class Manifest:
+
+ """Cargo Manifest definition.
+
+ Most of these values map directly to the Cargo manifest, with default
+ values filled in when not provided.
+
+ Cargo subprojects can contain what Meson wants to treat as multiple,
+ interdependent subprojects.
+
+ :param path: the path within the cargo subproject.
"""
- workspace: Workspace
+ package: Package
+ dependencies: T.Dict[str, Dependency] = dataclasses.field(default_factory=dict)
+ dev_dependencies: T.Dict[str, Dependency] = dataclasses.field(default_factory=dict)
+ build_dependencies: T.Dict[str, Dependency] = dataclasses.field(default_factory=dict)
+ lib: T.Optional[Library] = None
+ bin: T.List[Binary] = dataclasses.field(default_factory=list)
+ test: T.List[Test] = dataclasses.field(default_factory=list)
+ bench: T.List[Benchmark] = dataclasses.field(default_factory=list)
+ example: T.List[Example] = dataclasses.field(default_factory=list)
+ features: T.Dict[str, T.List[str]] = dataclasses.field(default_factory=dict)
+ target: T.Dict[str, T.Dict[str, Dependency]] = dataclasses.field(default_factory=dict)
+
+ path: str = ''
+
+ def __post_init__(self) -> None:
+ self.features.setdefault('default', [])
+
+ @lazy_property
+ def system_dependencies(self) -> T.Dict[str, SystemDependency]:
+ return {k: SystemDependency.from_raw(k, v) for k, v in self.package.metadata.get('system-deps', {}).items()}
+
+ @classmethod
+ def from_raw(cls, raw: raw.Manifest, path: str = '', workspace: T.Optional[Workspace] = None, member_path: str = '') -> Self:
+ # Libs are always auto-discovered and there's no other way to handle them,
+ # which is unfortunate for reproducibility.
+ pkg = Package.from_raw(raw['package'], workspace)
+ if pkg.autolib and 'lib' not in raw and \
+ os.path.exists(os.path.join(path, 'src/lib.rs')):
+ raw['lib'] = {}
+ fixed = _raw_to_dataclass(raw, cls, f'Cargo.toml package {raw["package"]["name"]}',
+ package=lambda x: pkg,
+ dependencies=lambda x: {k: Dependency.from_raw(k, v, member_path, workspace) for k, v in x.items()},
+ dev_dependencies=lambda x: {k: Dependency.from_raw(k, v, member_path, workspace) for k, v in x.items()},
+ build_dependencies=lambda x: {k: Dependency.from_raw(k, v, member_path, workspace) for k, v in x.items()},
+ lib=lambda x: Library.from_raw(x, raw['package']['name']),
+ bin=lambda x: [Binary.from_raw(b) for b in x],
+ test=lambda x: [Test.from_raw(b) for b in x],
+ bench=lambda x: [Benchmark.from_raw(b) for b in x],
+ example=lambda x: [Example.from_raw(b) for b in x],
+ target=lambda x: {k: {k2: Dependency.from_raw(k2, v2, member_path, workspace) for k2, v2 in v.get('dependencies', {}).items()}
+ for k, v in x.items()})
+ fixed.path = path
+ return fixed
+
+
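A minimal sketch of turning a parsed Cargo.toml into a Manifest (hypothetical content; no workspace involved):

    raw_manifest = {
        'package': {'name': 'foo', 'version': '0.2.1'},
        'dependencies': {'serde': '1.0'},
    }
    m = Manifest.from_raw(raw_manifest, path='subprojects/foo-0.2-rs')
    assert m.package.api == '0.2'
    assert m.features == {'default': []}   # added by __post_init__
    # m.dependencies['serde'].meson_version holds the Meson-style constraints
    # derived from the cargo caret requirement '1.0'.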
+@dataclasses.dataclass
+class Workspace:
+
+ """Cargo Workspace definition.
+ """
+
+ resolver: str = '2'
+ members: T.List[str] = dataclasses.field(default_factory=list)
+ exclude: T.List[str] = dataclasses.field(default_factory=list)
+ default_members: T.List[str] = dataclasses.field(default_factory=list)
+
+ # inheritable settings are kept in raw format, for use with _inherit_from_workspace
+ package: T.Optional[raw.Package] = None
+ dependencies: T.Dict[str, raw.Dependency] = dataclasses.field(default_factory=dict)
+ lints: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
+ metadata: T.Dict[str, T.Any] = dataclasses.field(default_factory=dict)
-class CargoLockPackage(TypedDict, total=False):
+ # A workspace can also have a root package.
+ root_package: T.Optional[Manifest] = dataclasses.field(init=False)
+
+ @classmethod
+ def from_raw(cls, raw: raw.VirtualManifest) -> Workspace:
+ ws_raw = raw['workspace']
+ fixed = _raw_to_dataclass(ws_raw, cls, 'Workspace')
+ return fixed
+
+
+@dataclasses.dataclass
+class CargoLockPackage:
"""A description of a package in the Cargo.lock file format."""
name: str
version: str
- source: str
- checksum: str
+ source: T.Optional[str] = None
+ checksum: T.Optional[str] = None
+ dependencies: T.List[str] = dataclasses.field(default_factory=list)
+ @classmethod
+ def from_raw(cls, raw: raw.CargoLockPackage) -> CargoLockPackage:
+ return _raw_to_dataclass(raw, cls, 'Cargo.lock package')
-class CargoLock(TypedDict, total=False):
+
+@dataclasses.dataclass
+class CargoLock:
"""A description of the Cargo.lock file format."""
- version: str
- package: T.List[CargoLockPackage]
- metadata: T.Dict[str, str]
+ version: int = 1
+ package: T.List[CargoLockPackage] = dataclasses.field(default_factory=list)
+ metadata: T.Dict[str, str] = dataclasses.field(default_factory=dict)
+
+ @classmethod
+ def from_raw(cls, raw: raw.CargoLock) -> CargoLock:
+ return _raw_to_dataclass(raw, cls, 'Cargo.lock',
+ package=lambda x: [CargoLockPackage.from_raw(p) for p in x])
diff --git a/mesonbuild/cargo/raw.py b/mesonbuild/cargo/raw.py
new file mode 100644
index 0000000..67dd58a
--- /dev/null
+++ b/mesonbuild/cargo/raw.py
@@ -0,0 +1,192 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2022-2024 Intel Corporation
+
+"""Type definitions for cargo manifest files."""
+
+from __future__ import annotations
+import typing as T
+
+from typing_extensions import Literal, TypedDict, Required
+
+EDITION = Literal['2015', '2018', '2021']
+CRATE_TYPE = Literal['bin', 'lib', 'dylib', 'staticlib', 'cdylib', 'rlib', 'proc-macro']
+
+
+class FromWorkspace(TypedDict):
+
+ """An entry or section that is copied from the workspace."""
+
+ workspace: bool
+
+
+Package = TypedDict(
+ 'Package',
+ {
+ 'name': Required[str],
+ 'version': Required[T.Union[FromWorkspace, str]],
+ 'authors': T.Union[FromWorkspace, T.List[str]],
+ 'edition': T.Union[FromWorkspace, EDITION],
+ 'rust-version': T.Union[FromWorkspace, str],
+ 'description': T.Union[FromWorkspace, str],
+ 'readme': T.Union[FromWorkspace, str],
+ 'license': T.Union[FromWorkspace, str],
+ 'license-file': T.Union[FromWorkspace, str],
+ 'keywords': T.Union[FromWorkspace, T.List[str]],
+ 'categories': T.Union[FromWorkspace, T.List[str]],
+ 'homepage': T.Union[FromWorkspace, str],
+ 'repository': T.Union[FromWorkspace, str],
+ 'documentation': T.Union[FromWorkspace, str],
+ 'workspace': str,
+ 'build': str,
+ 'links': str,
+ 'include': T.Union[FromWorkspace, T.List[str]],
+ 'exclude': T.Union[FromWorkspace, T.List[str]],
+ 'publish': T.Union[FromWorkspace, bool],
+ 'metadata': T.Dict[str, T.Dict[str, str]],
+ 'default-run': str,
+ 'autolib': bool,
+ 'autobins': bool,
+ 'autoexamples': bool,
+ 'autotests': bool,
+ 'autobenches': bool,
+ },
+ total=False,
+)
+"""A description of the Package Dictionary."""
+
+class Badge(TypedDict):
+
+ """An entry in the badge section."""
+
+ status: Literal['actively-developed', 'passively-developed', 'as-is', 'experimental', 'deprecated', 'none']
+
+
+Dependency = TypedDict(
+ 'Dependency',
+ {
+ 'version': str,
+ 'registry': str,
+ 'git': str,
+ 'branch': str,
+ 'rev': str,
+ 'path': str,
+ 'optional': bool,
+ 'package': str,
+ 'default-features': bool,
+ 'features': T.List[str],
+ },
+ total=False,
+)
+"""An entry in the *dependencies sections."""
+
+
+DependencyV = T.Union[Dependency, str]
+"""A Dependency entry, either a string or a Dependency Dict."""
+
+
+_BaseBuildTarget = TypedDict(
+ '_BaseBuildTarget',
+ {
+ 'path': str,
+ 'test': bool,
+ 'doctest': bool,
+ 'bench': bool,
+ 'doc': bool,
+ 'plugin': bool,
+ 'proc-macro': bool,
+ 'harness': bool,
+ 'edition': EDITION,
+ 'crate-type': T.List[CRATE_TYPE],
+ 'required-features': T.List[str],
+ },
+ total=False,
+)
+
+
+class BuildTarget(_BaseBuildTarget, total=False):
+
+ name: Required[str]
+
+
+class LibTarget(_BaseBuildTarget, total=False):
+
+ name: str
+
+
+class Target(TypedDict):
+
+ """Target entry in the Manifest File."""
+
+ dependencies: T.Dict[str, T.Union[FromWorkspace, DependencyV]]
+
+
+class Workspace(TypedDict):
+
+ """The representation of a workspace.
+
+ In a virtual manifest the :attribute:`members` key is always present, but in a
+ project manifest an empty workspace may be provided, in which case the
+ workspace is implicitly filled in by values from the path-based dependencies.
+
+ The :attribute:`exclude` key is always optional.
+ """
+
+ members: T.List[str]
+ exclude: T.List[str]
+ package: Package
+ dependencies: T.Dict[str, DependencyV]
+
+
+Manifest = TypedDict(
+ 'Manifest',
+ {
+ 'package': Required[Package],
+ 'badges': T.Dict[str, Badge],
+ 'dependencies': T.Dict[str, T.Union[FromWorkspace, DependencyV]],
+ 'dev-dependencies': T.Dict[str, T.Union[FromWorkspace, DependencyV]],
+ 'build-dependencies': T.Dict[str, T.Union[FromWorkspace, DependencyV]],
+ 'lib': LibTarget,
+ 'bin': T.List[BuildTarget],
+ 'test': T.List[BuildTarget],
+ 'bench': T.List[BuildTarget],
+ 'example': T.List[BuildTarget],
+ 'features': T.Dict[str, T.List[str]],
+ 'target': T.Dict[str, Target],
+ 'workspace': Workspace,
+
+ # TODO: patch?
+ # TODO: replace?
+ },
+ total=False,
+)
+"""The Cargo Manifest format."""
+
+
+class VirtualManifest(TypedDict, total=False):
+
+ """The Representation of a virtual manifest.
+
+ Cargo allows a root manifest that contains only a workspace; this is called
+ a virtual manifest. This doesn't really map 1:1 with any meson concept,
+ except perhaps the proposed "meta project".
+ """
+
+ workspace: Workspace
+
+class CargoLockPackage(TypedDict, total=False):
+
+ """A description of a package in the Cargo.lock file format."""
+
+ name: str
+ version: str
+ source: str
+ checksum: str
+
+
+class CargoLock(TypedDict, total=False):
+
+ """A description of the Cargo.lock file format."""
+
+ version: int
+ package: T.List[CargoLockPackage]
+ metadata: T.Dict[str, str]
diff --git a/mesonbuild/cargo/toml.py b/mesonbuild/cargo/toml.py
new file mode 100644
index 0000000..601510e
--- /dev/null
+++ b/mesonbuild/cargo/toml.py
@@ -0,0 +1,49 @@
+from __future__ import annotations
+
+import importlib
+import shutil
+import json
+import typing as T
+
+from ..mesonlib import MesonException, Popen_safe
+if T.TYPE_CHECKING:
+ from types import ModuleType
+
+
+# tomllib is present in Python 3.11; before that it is a PyPI module called tomli.
+# We try to import tomllib first, then tomli, and fall back to toml2json otherwise.
+tomllib: T.Optional[ModuleType] = None
+toml2json: T.Optional[str] = None
+for t in ['tomllib', 'tomli']:
+ try:
+ tomllib = importlib.import_module(t)
+ break
+ except ImportError:
+ pass
+else:
+ # TODO: it would be better to use an Executable here, which could be looked
+ # up in the cross file or provided by a wrap. However, that will have to be
+ # passed in externally, since we don't (and I don't think we should)
+ # have access to the `Environment` in this module.
+ toml2json = shutil.which('toml2json')
+
+class TomlImplementationMissing(MesonException):
+ pass
+
+
+def load_toml(filename: str) -> T.Dict[str, object]:
+ if tomllib:
+ with open(filename, 'rb') as f:
+ raw = tomllib.load(f)
+ else:
+ if toml2json is None:
+ raise TomlImplementationMissing('Could not find an implementation of tomllib, nor toml2json')
+
+ p, out, err = Popen_safe([toml2json, filename])
+ if p.returncode != 0:
+ raise MesonException('toml2json failed to decode output\n', err)
+
+ raw = json.loads(out)
+
+ # tomllib.load() returns T.Dict[str, T.Any], but the other implementations do not.
+ return T.cast('T.Dict[str, object]', raw)
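A usage sketch mirroring _load_manifest above (the path is hypothetical); load_toml returns a generic mapping that callers narrow with a cast:

    raw = load_toml('/src/foo/Cargo.toml')
    if 'package' in raw:
        manifest = Manifest.from_raw(T.cast('raw.Manifest', raw), '/src/foo')
    else:
        raise MesonException('Cargo.toml does not have [package] section')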
diff --git a/mesonbuild/cargo/version.py b/mesonbuild/cargo/version.py
index cde7a83..ce58945 100644
--- a/mesonbuild/cargo/version.py
+++ b/mesonbuild/cargo/version.py
@@ -7,6 +7,18 @@ from __future__ import annotations
import typing as T
+def api(version: str) -> str:
+ # x.y.z -> x
+ # 0.x.y -> 0.x
+ # 0.0.x -> 0
+ vers = version.split('.')
+ if int(vers[0]) != 0:
+ return vers[0]
+ elif len(vers) >= 2 and int(vers[1]) != 0:
+ return f'0.{vers[1]}'
+ return '0'
+
+
def convert(cargo_ver: str) -> T.List[str]:
"""Convert a Cargo compatible version into a Meson compatible one.
@@ -15,6 +27,8 @@ def convert(cargo_ver: str) -> T.List[str]:
"""
# Cleanup, just for safety
cargo_ver = cargo_ver.strip()
+ if not cargo_ver:
+ return []
cargo_vers = [c.strip() for c in cargo_ver.split(',')]
out: T.List[str] = []
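Concretely, api() implements the mapping in its comments, and the new guard makes convert() tolerate an empty requirement:

    assert api('1.2.3') == '1'
    assert api('0.2.5') == '0.2'
    assert api('0.0.7') == '0'
    assert convert('') == []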
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index 609038d..c68cb60 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -125,7 +125,7 @@ TRANSFER_DEPENDENCIES_FROM: T.Collection[str] = ['header_only']
_cmake_name_regex = re.compile(r'[^_a-zA-Z0-9]')
def _sanitize_cmake_name(name: str) -> str:
name = _cmake_name_regex.sub('_', name)
- if name in FORBIDDEN_TARGET_NAMES or name.startswith('meson'):
+ if name in FORBIDDEN_TARGET_NAMES or name.startswith('meson') or name[0].isdigit():
name = 'cm_' + name
return name
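With the added leading-digit check, for example (assuming none of these appear in FORBIDDEN_TARGET_NAMES):

    assert _sanitize_cmake_name('foo-bar')  == 'foo_bar'
    assert _sanitize_cmake_name('9base')    == 'cm_9base'   # new: leading digit
    assert _sanitize_cmake_name('mesonfoo') == 'cm_mesonfoo'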
diff --git a/mesonbuild/cmake/tracetargets.py b/mesonbuild/cmake/tracetargets.py
index 9873845..2b2b93d 100644
--- a/mesonbuild/cmake/tracetargets.py
+++ b/mesonbuild/cmake/tracetargets.py
@@ -87,6 +87,7 @@ def resolve_cmake_trace_targets(target_name: str,
curr_path = Path(*path_to_framework)
framework_path = curr_path.parent
framework_name = curr_path.stem
+ res.public_compile_opts += [f"-F{framework_path}"]
res.libraries += [f'-F{framework_path}', '-framework', framework_name]
else:
res.libraries += [curr]
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index aab761a..f645090 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -18,6 +18,7 @@ __all__ = [
'is_library',
'is_llvm_ir',
'is_object',
+ 'is_separate_compile',
'is_source',
'is_java',
'is_known_suffix',
@@ -62,6 +63,7 @@ from .compilers import (
is_object,
is_library,
is_known_suffix,
+ is_separate_compile,
lang_suffixes,
LANGUAGES_USING_LDFLAGS,
sort_clink,
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index 7a2fec5..424b612 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -504,7 +504,7 @@ class IntelClCCompiler(IntelVisualStudioLikeCompiler, VisualStudioLikeCCompilerM
def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- std = self.get_compileropt_value('winlibs', env, target, subproject)
+ std = self.get_compileropt_value('std', env, target, subproject)
assert isinstance(std, str)
if std == 'c89':
mlog.log("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True)
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 0376922..a823aeb 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -42,7 +42,7 @@ _T = T.TypeVar('_T')
about. To support a new compiler, add its information below.
Also add corresponding autodetection code in detect.py."""
-header_suffixes = {'h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di'}
+header_suffixes = {'h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di', 'pxd', 'pxi'}
obj_suffixes = {'o', 'obj', 'res'}
# To the emscripten compiler, .js files are libraries
lib_suffixes = {'a', 'lib', 'dll', 'dll.a', 'dylib', 'so', 'js'}
@@ -84,7 +84,7 @@ clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'nasm', 'fortran')
# List of languages that can be linked with C code directly by the linker
# used in build.py:process_compilers() and build.py:get_dynamic_linker()
# This must be sorted, see sort_clink().
-clink_langs = ('d', 'cuda') + clib_langs
+clink_langs = ('rust', 'd', 'cuda') + clib_langs
SUFFIX_TO_LANG = dict(itertools.chain(*(
[(suffix, lang) for suffix in v] for lang, v in lang_suffixes.items())))
@@ -154,6 +154,9 @@ def is_java(fname: mesonlib.FileOrString) -> bool:
suffix = fname.split('.')[-1]
return suffix in lang_suffixes['java']
+def is_separate_compile(fname: mesonlib.FileOrString) -> bool:
+ return not fname.endswith('.rs')
+
def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool:
if isinstance(fname, mesonlib.File):
fname = fname.fname
@@ -933,11 +936,10 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
"""
return None
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return self.linker.build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target, extra_paths)
def get_archive_name(self, filename: str) -> str:
return self.linker.get_archive_name(filename)
@@ -1119,9 +1121,6 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
def get_compile_only_args(self) -> T.List[str]:
return []
- def get_cxx_interoperability_args(self, lang: T.Dict[str, Compiler]) -> T.List[str]:
- raise EnvironmentException('This compiler does not support CXX interoperability')
-
def get_preprocess_only_args(self) -> T.List[str]:
raise EnvironmentException('This compiler does not have a preprocessor')
@@ -1417,50 +1416,3 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
if 'none' not in value:
value = ['none'] + value
std.choices = value
-
-
-def get_global_options(lang: str,
- comp: T.Type[Compiler],
- for_machine: MachineChoice,
- env: 'Environment') -> dict[OptionKey, options.AnyOptionType]:
- """Retrieve options that apply to all compilers for a given language."""
- description = f'Extra arguments passed to the {lang}'
- argkey = OptionKey(f'{lang}_args', machine=for_machine)
- largkey = OptionKey(f'{lang}_link_args', machine=for_machine)
-
- comp_args_from_envvar = False
- comp_options = env.coredata.optstore.get_pending_value(argkey)
- if comp_options is None:
- comp_args_from_envvar = True
- comp_options = env.env_opts.get(argkey, [])
-
- link_args_from_envvar = False
- link_options = env.coredata.optstore.get_pending_value(largkey)
- if link_options is None:
- link_args_from_envvar = True
- link_options = env.env_opts.get(largkey, [])
-
- assert isinstance(comp_options, (str, list)), 'for mypy'
- assert isinstance(link_options, (str, list)), 'for mypy'
-
- cargs = options.UserStringArrayOption(
- argkey.name,
- description + ' compiler',
- comp_options, split_args=True, allow_dups=True)
-
- largs = options.UserStringArrayOption(
- largkey.name,
- description + ' linker',
- link_options, split_args=True, allow_dups=True)
-
- if comp.INVOKES_LINKER and comp_args_from_envvar and link_args_from_envvar:
- # If the compiler acts as a linker driver, and we're using the
- # environment variable flags for both the compiler and linker
- # arguments, then put the compiler flags in the linker flags as well.
- # This is how autotools works, and the env vars feature is for
- # autotools compatibility.
- largs.extend_value(comp_options)
-
- opts: dict[OptionKey, options.AnyOptionType] = {argkey: cargs, largkey: largs}
-
- return opts
diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py
index fd747d1..7e050f1 100644
--- a/mesonbuild/compilers/cuda.py
+++ b/mesonbuild/compilers/cuda.py
@@ -198,6 +198,7 @@ class CudaCompiler(Compiler):
for level, flags in host_compiler.warn_args.items()
}
self.host_werror_args = ['-Xcompiler=' + x for x in self.host_compiler.get_werror_args()]
+ self.debug_macros_available = version_compare(self.version, '>=12.9')
@classmethod
def _shield_nvcc_list_arg(cls, arg: str, listmode: bool = True) -> str:
@@ -730,11 +731,10 @@ class CudaCompiler(Compiler):
def get_optimization_link_args(self, optimization_level: str) -> T.List[str]:
return self._to_host_flags(self.host_compiler.get_optimization_link_args(optimization_level), Phase.LINKER)
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
(rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target, extra_paths)
return (self._to_host_flags(rpath_args, Phase.LINKER), rpath_dirs_to_remove)
def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
@@ -808,7 +808,12 @@ class CudaCompiler(Compiler):
return ['-Xcompiler=' + x for x in self.host_compiler.get_profile_use_args()]
def get_assert_args(self, disable: bool, env: 'Environment') -> T.List[str]:
- return self.host_compiler.get_assert_args(disable, env)
+ cccl_macros = []
+ if not disable and self.debug_macros_available:
+ # https://github.com/NVIDIA/cccl/pull/2382
+ cccl_macros = ['-DCCCL_ENABLE_ASSERTIONS=1']
+
+ return self.host_compiler.get_assert_args(disable, env) + cccl_macros
def has_multi_arguments(self, args: T.List[str], env: Environment) -> T.Tuple[bool, bool]:
args = self._to_host_flags(args)
diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py
index 51f2436..9f662ad 100644
--- a/mesonbuild/compilers/d.py
+++ b/mesonbuild/compilers/d.py
@@ -26,7 +26,7 @@ from .mixins.gnu import gnu_common_warning_args
if T.TYPE_CHECKING:
from . import compilers
- from ..build import DFeatures
+ from ..build import BuildTarget, DFeatures
from ..dependencies import Dependency
from ..envconfig import MachineInfo
from ..environment import Environment
@@ -175,9 +175,8 @@ class DmdLikeCompilerMixin(CompilerMixinBase):
def gen_import_library_args(self, implibname: str) -> T.List[str]:
return self.linker.import_library_args(implibname)
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
if self.info.is_windows():
return ([], set())
@@ -188,7 +187,7 @@ class DmdLikeCompilerMixin(CompilerMixinBase):
# split into two separate arguments both prefaced with the -L=.
args: T.List[str] = []
(rpath_args, rpath_dirs_to_remove) = super().build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target)
for r in rpath_args:
if ',' in r:
a, b = r.split(',', maxsplit=1)
@@ -199,7 +198,7 @@ class DmdLikeCompilerMixin(CompilerMixinBase):
return (args, rpath_dirs_to_remove)
return super().build_rpath_args(
- env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+ env, build_dir, from_dir, target)
@classmethod
def _translate_args_to_nongnu(cls, args: T.List[str], info: MachineInfo, link_id: str) -> T.List[str]:
diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py
index 040c42f..f57957f 100644
--- a/mesonbuild/compilers/detect.py
+++ b/mesonbuild/compilers/detect.py
@@ -366,7 +366,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'Emscripten' in out:
cls = c.EmscriptenCCompiler if lang == 'c' else cpp.EmscriptenCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
# emcc requires a file input in order to pass arguments to the
# linker. It'll exit with an error code, but still print the
@@ -410,7 +410,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version = arm_ver_str
cls = c.ArmclangCCompiler if lang == 'c' else cpp.ArmclangCPPCompiler
linker = linkers.ArmClangDynamicLinker(for_machine, version=version)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
full_version=full_version, linker=linker)
@@ -445,7 +445,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if identifier in out:
cls = compiler_classes[0] if lang == 'c' else compiler_classes[1]
lnk = compiler_classes[2]
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = lnk(compiler, for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -482,7 +482,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = c.IntelClCCompiler if lang == 'c' else cpp.IntelClCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info, target,
@@ -491,7 +491,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = c.IntelLLVMClCCompiler if lang == 'c' else cpp.IntelLLVMClCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info, target,
@@ -524,14 +524,14 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version=cl_signature, linker=linker)
if 'PGI Compilers' in out:
cls = c.PGICCompiler if lang == 'c' else cpp.PGICPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
info, linker=linker)
if 'NVIDIA Compilers and Tools' in out:
cls = c.NvidiaHPC_CCompiler if lang == 'c' else cpp.NvidiaHPC_CPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.NvidiaHPC_DynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
@@ -550,14 +550,14 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version=full_version, linker=l)
if 'ARM' in out and not ('Metrowerks' in out or 'Freescale' in out):
cls = c.ArmCCompiler if lang == 'c' else cpp.ArmCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.ArmDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
info, full_version=full_version, linker=linker)
if 'RX Family' in out:
cls = c.CcrxCCompiler if lang == 'c' else cpp.CcrxCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.CcrxDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -565,7 +565,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'Microchip Technology' in out:
cls = c.Xc16CCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.Xc16DynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -573,7 +573,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'CompCert' in out:
cls = c.CompCertCCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.CompCertDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -591,7 +591,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
assert mwcc_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None
compiler_version = '.'.join(x for x in mwcc_ver_match.groups() if x is not None)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
if ld is not None:
@@ -616,7 +616,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
assert tasking_ver_match is not None, 'for mypy'
tasking_version = '.'.join(x for x in tasking_ver_match.groups() if x is not None)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
if ld is None:
raise MesonException(f'{cls.language}_ld was not properly defined in your cross file')
@@ -668,7 +668,7 @@ def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = out.strip().rsplit('V', maxsplit=1)[-1]
cpp_compiler = detect_cpp_compiler(env, for_machine)
cls = CudaCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
key = OptionKey('cuda_link_args', machine=for_machine)
if key in env.options:
# To fix LDFLAGS issue
@@ -759,7 +759,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = fortran.IntelLLVMClFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info,
@@ -769,7 +769,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = fortran.IntelClFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info,
@@ -796,7 +796,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if 'PGI Compilers' in out:
cls = fortran.PGIFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine,
cls.LINKER_PREFIX, [], version=version)
return cls(
@@ -805,7 +805,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if 'NVIDIA Compilers and Tools' in out:
cls = fortran.NvidiaHPC_FortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine,
cls.LINKER_PREFIX, [], version=version)
return cls(
@@ -856,7 +856,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
full_version = err.split('\n', 1)[0]
version = full_version.split()[-1]
cls = fortran.NAGFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.NAGDynamicLinker(
compiler, for_machine, cls.LINKER_PREFIX, [],
version=version)
@@ -948,7 +948,7 @@ def detect_java_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
if len(parts) > 1:
version = parts[1]
comp_class = JavaCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class(exelist, version, for_machine, info)
raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
@@ -972,7 +972,7 @@ def detect_cs_compiler(env: 'Environment', for_machine: MachineChoice) -> Compil
cls = cs.VisualStudioCsCompiler
else:
continue
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(comp, version, for_machine, info)
_handle_exceptions(popen_exceptions, compilers)
@@ -1002,7 +1002,7 @@ def detect_cython_compiler(env: 'Environment', for_machine: MachineChoice) -> Co
version = search_version(err)
if version is not None:
comp_class = CythonCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, is_cross=is_cross)
_handle_exceptions(popen_exceptions, compilers)
raise EnvironmentException('Unreachable code (exception to make mypy happy)')
@@ -1023,7 +1023,7 @@ def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = search_version(out)
if 'Vala' in out:
comp_class = ValaCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class(exelist, version, for_machine, is_cross, info)
raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
@@ -1145,7 +1145,7 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
c = linker.exelist[1] if linker.exelist[0].endswith('ccache') else linker.exelist[0]
compiler.extend(cls.use_linker_args(c, ''))
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(
compiler, version, for_machine, is_cross, info,
linker=linker, full_version=full_version)
@@ -1329,20 +1329,20 @@ def detect_nasm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = search_version(output)
if 'NASM' in output:
comp_class = NasmCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
elif 'yasm' in output:
comp_class = YasmCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
elif 'Metrowerks' in output or 'Freescale' in output:
if 'ARM' in output:
comp_class_mwasmarm = MetrowerksAsmCompilerARM
- env.coredata.add_lang_args(comp_class_mwasmarm.language, comp_class_mwasmarm, for_machine, env)
+ env.add_lang_args(comp_class_mwasmarm.language, comp_class_mwasmarm, for_machine)
return comp_class_mwasmarm([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
else:
comp_class_mwasmeppc = MetrowerksAsmCompilerEmbeddedPowerPC
- env.coredata.add_lang_args(comp_class_mwasmeppc.language, comp_class_mwasmeppc, for_machine, env)
+ env.add_lang_args(comp_class_mwasmeppc.language, comp_class_mwasmeppc, for_machine)
return comp_class_mwasmeppc([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
_handle_exceptions(popen_exceptions, compilers)
@@ -1383,7 +1383,7 @@ def detect_masm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
try:
output = Popen_safe(comp + [arg])[2]
version = search_version(output)
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
except OSError as e:
popen_exceptions[' '.join(comp + [arg])] = e
@@ -1403,7 +1403,7 @@ def detect_linearasm_compiler(env: Environment, for_machine: MachineChoice) -> C
try:
output = Popen_safe(comp + [arg])[2]
version = search_version(output)
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
except OSError as e:
popen_exceptions[' '.join(comp + [arg])] = e
diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py
index 1c875a3..d2eb611 100644
--- a/mesonbuild/compilers/mixins/clike.py
+++ b/mesonbuild/compilers/mixins/clike.py
@@ -1272,12 +1272,19 @@ class CLikeCompiler(Compiler):
# check the equivalent enable flag too "-Wforgotten-towel".
if arg.startswith('-Wno-'):
# Make an exception for -Wno-attributes=x as -Wattributes=x is invalid
- # for GCC at least. Also, the opposite of -Wno-vla-larger-than is
- # -Wvla-larger-than=N
+ # for GCC at least. Also, the positive form of some flags requires a
+ # value to be specified, i.e. we need to pass -Wfoo=N rather than just
+ # -Wfoo.
if arg.startswith('-Wno-attributes='):
pass
- elif arg == '-Wno-vla-larger-than':
- new_args.append('-Wvla-larger-than=1000')
+ elif arg in {'-Wno-alloc-size-larger-than',
+ '-Wno-alloca-larger-than',
+ '-Wno-frame-larger-than',
+ '-Wno-stack-usage',
+ '-Wno-vla-larger-than'}:
+ # Pass an arbitrary value to the enabling flag; since the test program
+ # is trivial, it is unlikely to provoke any of these warnings.
+ new_args.append('-W' + arg[5:] + '=1000')
else:
new_args.append('-W' + arg[5:])
if arg.startswith('-Wl,'):
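
To illustrate the rewrite above: every -Wno-foo is probed via its positive form, and for the handful of flags whose positive form takes a mandatory value the probe becomes -Wfoo=1000 (an arbitrary value, harmless because the test program is trivial). A minimal standalone sketch of this mapping, assuming the set below matches the flags named in the hunk; this is not the Meson code path itself:

    # Sketch of the -Wno- -> -W probe rewrite (illustrative only).
    NEEDS_VALUE = {'-Wno-alloc-size-larger-than', '-Wno-alloca-larger-than',
                   '-Wno-frame-larger-than', '-Wno-stack-usage', '-Wno-vla-larger-than'}

    def positive_form(arg: str) -> str:
        if arg.startswith('-Wno-attributes='):
            return arg                       # exempt: the real check skips this flag entirely
        if arg in NEEDS_VALUE:
            return '-W' + arg[5:] + '=1000'  # positive form requires a value
        return '-W' + arg[5:]

    assert positive_form('-Wno-vla-larger-than') == '-Wvla-larger-than=1000'
    assert positive_form('-Wno-unused') == '-Wunused'
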
diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py
index 3f35619..e359fb3 100644
--- a/mesonbuild/compilers/mixins/islinker.py
+++ b/mesonbuild/compilers/mixins/islinker.py
@@ -101,9 +101,8 @@ class BasicLinkerIsCompilerMixin(Compiler):
darwin_versions: T.Tuple[str, str]) -> T.List[str]:
raise MesonException("This linker doesn't support soname args")
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
def get_asneeded_args(self) -> T.List[str]:
diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py
index 5ebb093..bc27779 100644
--- a/mesonbuild/compilers/rust.py
+++ b/mesonbuild/compilers/rust.py
@@ -197,18 +197,15 @@ class RustCompiler(Compiler):
def get_optimization_args(self, optimization_level: str) -> T.List[str]:
return rust_optimization_args[optimization_level]
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- args, to_remove = super().build_rpath_args(env, build_dir, from_dir, rpath_paths,
- build_rpath, install_rpath)
-
- # ... but then add rustc's sysroot to account for rustup
- # installations
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ # add rustc's sysroot to account for rustup installations
+ args, to_remove = super().build_rpath_args(env, build_dir, from_dir, target, [self.get_target_libdir()])
+
rustc_rpath_args = []
for arg in args:
rustc_rpath_args.append('-C')
- rustc_rpath_args.append(f'link-arg={arg}:{self.get_target_libdir()}')
+ rustc_rpath_args.append(f'link-arg={arg}')
return rustc_rpath_args, to_remove
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
@@ -241,6 +238,12 @@ class RustCompiler(Compiler):
'none',
choices=['none', '2015', '2018', '2021', '2024'])
+ key = self.form_compileropt_key('dynamic_std')
+ opts[key] = options.UserBooleanOption(
+ self.make_option_name(key),
+ 'Whether to link Rust build targets to a dynamic libstd',
+ False)
+
return opts
def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
@@ -335,7 +338,7 @@ class RustCompiler(Compiler):
return RustdocTestCompiler(exelist, self.version, self.for_machine,
self.is_cross, self.info, full_version=self.full_version,
- linker=self.linker)
+ linker=self.linker, rustc=self)
class ClippyRustCompiler(RustCompiler):
@@ -355,6 +358,26 @@ class RustdocTestCompiler(RustCompiler):
id = 'rustdoc --test'
+ def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+ is_cross: bool, info: 'MachineInfo',
+ full_version: T.Optional[str],
+ linker: T.Optional['DynamicLinker'], rustc: RustCompiler):
+ super().__init__(exelist, version, for_machine,
+ is_cross, info, full_version, linker)
+ self.rustc = rustc
+
+ @functools.lru_cache(maxsize=None)
+ def get_sysroot(self) -> str:
+ return self.rustc.get_sysroot()
+
+ @functools.lru_cache(maxsize=None)
+ def get_target_libdir(self) -> str:
+ return self.rustc.get_target_libdir()
+
+ @functools.lru_cache(maxsize=None)
+ def get_cfgs(self) -> T.List[str]:
+ return self.rustc.get_cfgs()
+
def get_debug_args(self, is_debug: bool) -> T.List[str]:
return []
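
The rpath change above relies on rustc accepting linker flags only through -C link-arg=...; the sysroot libdir is now handed to the base implementation as an extra rpath path instead of being appended onto each flag string. A hedged sketch of the wrapping step (the example flag value is illustrative):

    # Each linker-level argument is forwarded to rustc as its own -C link-arg=... pair.
    def wrap_for_rustc(linker_args: list) -> list:
        out: list = []
        for arg in linker_args:
            out += ['-C', f'link-arg={arg}']
        return out

    assert wrap_for_rustc(['-Wl,-rpath,$ORIGIN/../lib']) == \
        ['-C', 'link-arg=-Wl,-rpath,$ORIGIN/../lib']
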
diff --git a/mesonbuild/compilers/swift.py b/mesonbuild/compilers/swift.py
index 47d254b..4ad3aff 100644
--- a/mesonbuild/compilers/swift.py
+++ b/mesonbuild/compilers/swift.py
@@ -140,7 +140,11 @@ class SwiftCompiler(Compiler):
args += ['-swift-version', std]
# Pass C compiler -std=... arg to swiftc
- c_lang = first(['objc', 'c'], lambda x: x in target.compilers)
+ c_langs = ['objc', 'c']
+ if target.uses_swift_cpp_interop():
+ c_langs = ['objcpp', 'cpp', *c_langs]
+
+ c_lang = first(c_langs, lambda x: x in target.compilers)
if c_lang is not None:
cc = target.compilers[c_lang]
args.extend(arg for c_arg in cc.get_option_std_args(target, env, subproject) for arg in ['-Xcc', c_arg])
@@ -153,11 +157,17 @@ class SwiftCompiler(Compiler):
return ['-working-directory', path]
- def get_cxx_interoperability_args(self, lang: T.Dict[str, Compiler]) -> T.List[str]:
- if 'cpp' in lang or 'objcpp' in lang:
- return ['-cxx-interoperability-mode=default']
- else:
- return ['-cxx-interoperability-mode=off']
+ def get_cxx_interoperability_args(self, target: T.Optional[build.BuildTarget] = None) -> T.List[str]:
+ if target is not None and not target.uses_swift_cpp_interop():
+ return []
+
+ if version_compare(self.version, '<5.9'):
+ raise MesonException(f'Compiler {self} does not support C++ interoperability')
+
+ return ['-cxx-interoperability-mode=default']
+
+ def get_library_args(self) -> T.List[str]:
+ return ['-parse-as-library']
def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
build_dir: str) -> T.List[str]:
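
The new interoperability gate has three outcomes: no flags when the target does not opt in, a hard error on swiftc older than 5.9, and the mode flag otherwise. A simplified sketch of that logic, using tuple comparison as a stand-in for Meson's version_compare:

    # Stand-in for get_cxx_interoperability_args (version handling simplified).
    def cxx_interop_args(version: tuple, interop_enabled: bool) -> list:
        if not interop_enabled:
            return []
        if version < (5, 9):
            raise RuntimeError('this swiftc does not support C++ interoperability')
        return ['-cxx-interoperability-mode=default']

    assert cxx_interop_args((5, 10), True) == ['-cxx-interoperability-mode=default']
    assert cxx_interop_args((5, 10), False) == []
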
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 26ef1b8..27795b0 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -7,6 +7,7 @@ from __future__ import annotations
import copy
from . import mlog, options
+import argparse
import pickle, os, uuid
import sys
from functools import lru_cache
@@ -18,7 +19,6 @@ from .mesonlib import (
MesonException, MachineChoice, PerMachine,
PerMachineDefaultable,
default_prefix,
- stringlistify,
pickle_load
)
@@ -32,13 +32,11 @@ import shlex
import typing as T
if T.TYPE_CHECKING:
- import argparse
from typing_extensions import Protocol
from . import dependencies
from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode
from .dependencies.detect import TV_DepID
- from .environment import Environment
from .mesonlib import FileOrString
from .cmake.traceparser import CMakeCacheEntry
from .interpreterbase import SubProject
@@ -50,13 +48,11 @@ if T.TYPE_CHECKING:
"""Representation of command line options from Meson setup, configure,
and dist.
- :param projectoptions: The raw list of command line options given
:param cmd_line_options: command line options parsed into an OptionKey:
    T.Optional[str] mapping; a value of None requests unsetting that option
"""
- cmd_line_options: T.Dict[OptionKey, str]
- projectoptions: T.List[str]
+ cmd_line_options: T.Dict[OptionKey, T.Optional[str]]
cross_file: T.List[str]
native_file: T.List[str]
@@ -72,7 +68,7 @@ if T.TYPE_CHECKING:
#
# Pip requires that RCs are named like this: '0.1.0.rc1'
# But the corresponding Git tag needs to be '0.1.0rc1'
-version = '1.8.99'
+version = '1.9.0.rc1'
# The next stable version when we are in dev. This is used to allow projects to
# require meson version >=1.2.0 when using 1.1.99. FeatureNew won't warn when
@@ -147,13 +143,13 @@ class DependencyCache:
def __init__(self, builtins: options.OptionStore, for_machine: MachineChoice):
self.__cache: T.MutableMapping[TV_DepID, DependencySubCache] = OrderedDict()
self.__builtins = builtins
- self.__pkg_conf_key = options.OptionKey('pkg_config_path')
- self.__cmake_key = options.OptionKey('cmake_prefix_path')
+ self.__pkg_conf_key = options.OptionKey('pkg_config_path', machine=for_machine)
+ self.__cmake_key = options.OptionKey('cmake_prefix_path', machine=for_machine)
def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
data: T.Dict[DependencyCacheType, T.List[str]] = {
- DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins.get_value_for(self.__pkg_conf_key)),
- DependencyCacheType.CMAKE: stringlistify(self.__builtins.get_value_for(self.__cmake_key)),
+ DependencyCacheType.PKG_CONFIG: T.cast('T.List[str]', self.__builtins.get_value_for(self.__pkg_conf_key)),
+ DependencyCacheType.CMAKE: T.cast('T.List[str]', self.__builtins.get_value_for(self.__cmake_key)),
DependencyCacheType.OTHER: [],
}
assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
@@ -414,11 +410,7 @@ class CoreData:
return value
def set_from_configure_command(self, options: SharedCMDOptions) -> bool:
- unset_opts = getattr(options, 'unset_opts', [])
- all_D = options.projectoptions[:]
- for key, valstr in options.cmd_line_options.items():
- all_D.append(f'{key!s}={valstr}')
- return self.optstore.set_from_configure_command(all_D, unset_opts)
+ return self.optstore.set_from_configure_command(options.cmd_line_options)
def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool:
dirty = False
@@ -584,16 +576,6 @@ class CoreData:
else:
self.optstore.add_compiler_option(lang, k, o)
- def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
- for_machine: MachineChoice, env: 'Environment') -> None:
- """Add global language arguments that are needed before compiler/linker detection."""
- from .compilers import compilers
- # These options are all new at this point, because the compiler is
- # responsible for adding its own options, thus calling
- # `self.optstore.update()`` is perfectly safe.
- for gopt_key, gopt_valobj in compilers.get_global_options(lang, comp, for_machine, env).items():
- self.optstore.add_compiler_option(lang, gopt_key, gopt_valobj)
-
def process_compiler_options(self, lang: str, comp: Compiler, subproject: str) -> None:
self.add_compiler_options(comp.get_options(), lang, comp.for_machine)
@@ -699,25 +681,60 @@ def save(obj: CoreData, build_dir: str) -> str:
return filename
+class KeyNoneAction(argparse.Action):
+ """
+ Custom argparse Action that stores each argument as a dictionary key mapped to the value None.
+ """
+
+ def __init__(self, option_strings, dest, nargs=None, **kwargs: object) -> None:
+ assert nargs is None or nargs == 1
+ super().__init__(option_strings, dest, nargs=1, **kwargs)
+
+ def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+ arg: T.List[str], option_string: T.Optional[str] = None) -> None:
+ current_dict = getattr(namespace, self.dest)
+ if current_dict is None:
+ current_dict = {}
+ setattr(namespace, self.dest, current_dict)
+
+ key = OptionKey.from_string(arg[0])
+ current_dict[key] = None
+
+
+class KeyValueAction(argparse.Action):
+ """
+ Custom argparse Action that parses KEY=VAL arguments and stores them in a dictionary.
+ """
+
+ def __init__(self, option_strings, dest, nargs=None, **kwargs: object) -> None:
+ assert nargs is None or nargs == 1
+ super().__init__(option_strings, dest, nargs=1, **kwargs)
+
+ def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+ arg: T.List[str], option_string: T.Optional[str] = None) -> None:
+ current_dict = getattr(namespace, self.dest)
+ if current_dict is None:
+ current_dict = {}
+ setattr(namespace, self.dest, current_dict)
+
+ try:
+ keystr, value = arg[0].split('=', 1)
+ key = OptionKey.from_string(keystr)
+ current_dict[key] = value
+ except ValueError:
+ parser.error(f'The argument for option {option_string!r} must be in OPTION=VALUE format.')
+
+
def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
for n, b in options.BUILTIN_OPTIONS.items():
options.option_to_argparse(b, n, parser, '')
for n, b in options.BUILTIN_OPTIONS_PER_MACHINE.items():
options.option_to_argparse(b, n, parser, ' (just for host machine)')
options.option_to_argparse(b, n.as_build(), parser, ' (just for build machine)')
- parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
+ parser.add_argument('-D', action=KeyValueAction, dest='cmd_line_options', default={}, metavar="option=value",
help='Set the value of an option, can be used several times to set multiple options.')
def parse_cmd_line_options(args: SharedCMDOptions) -> None:
- args.cmd_line_options = {}
- for o in args.projectoptions:
- try:
- keystr, value = o.split('=', 1)
- except ValueError:
- raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
- key = OptionKey.from_string(keystr)
- args.cmd_line_options[key] = value
-
# Merge builtin options set with --option into the dict.
for key in chain(
options.BUILTIN_OPTIONS.keys(),
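
With both actions writing into the same cmd_line_options dict, -D records an OptionKey mapped to its value and -U (wired up in mconf below) records an OptionKey mapped to None, which later means "unset". A self-contained mimic of that behaviour, using plain strings in place of OptionKey so the example runs on its own:

    import argparse

    class KV(argparse.Action):
        def __init__(self, option_strings, dest, **kwargs):
            super().__init__(option_strings, dest, nargs=1, **kwargs)
        def __call__(self, parser, namespace, values, option_string=None):
            d = getattr(namespace, self.dest) or {}  # fresh dict; never mutate the shared default
            setattr(namespace, self.dest, d)
            try:
                k, v = values[0].split('=', 1)
            except ValueError:
                parser.error(f'The argument for option {option_string!r} must be in OPTION=VALUE format.')
            d[k] = v

    class KNone(argparse.Action):
        def __init__(self, option_strings, dest, **kwargs):
            super().__init__(option_strings, dest, nargs=1, **kwargs)
        def __call__(self, parser, namespace, values, option_string=None):
            d = getattr(namespace, self.dest) or {}
            setattr(namespace, self.dest, d)
            d[values[0]] = None  # None marks the option for removal

    p = argparse.ArgumentParser()
    p.add_argument('-D', action=KV, dest='cmd_line_options', default={})
    p.add_argument('-U', action=KNone, dest='cmd_line_options', default={})
    ns = p.parse_args(['-Dbuildtype=debug', '-Ucpp_std'])
    assert ns.cmd_line_options == {'buildtype': 'debug', 'cpp_std': None}
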
diff --git a/mesonbuild/dependencies/detect.py b/mesonbuild/dependencies/detect.py
index aa62c66..4cdf16d 100644
--- a/mesonbuild/dependencies/detect.py
+++ b/mesonbuild/dependencies/detect.py
@@ -15,7 +15,7 @@ if T.TYPE_CHECKING:
from ..environment import Environment
from .factory import DependencyFactory, WrappedFactoryFunc, DependencyGenerator
- TV_DepIDEntry = T.Union[str, bool, int, T.Tuple[str, ...]]
+ TV_DepIDEntry = T.Union[str, bool, int, None, T.Tuple[str, ...]]
TV_DepID = T.Tuple[T.Tuple[str, TV_DepIDEntry], ...]
PackageTypes = T.Union[T.Type[ExternalDependency], DependencyFactory, WrappedFactoryFunc]
@@ -40,10 +40,14 @@ _packages_accept_language: T.Set[str] = set()
def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
identifier: 'TV_DepID' = (('name', name), )
+ from ..interpreter.type_checking import DEPENDENCY_KWS
+ nkwargs = {k.name: k.default for k in DEPENDENCY_KWS}
+ nkwargs.update(kwargs)
+
from ..interpreter import permitted_dependency_kwargs
assert len(permitted_dependency_kwargs) == 19, \
'Extra kwargs have been added to dependency(), please review if it makes sense to handle it here'
- for key, value in kwargs.items():
+ for key, value in nkwargs.items():
# 'version' is irrelevant for caching; the caller must check version matches
# 'native' is handled above with `for_machine`
# 'required' is irrelevant for caching; the caller handles it separately
@@ -62,7 +66,7 @@ def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
assert isinstance(i, str), i
value = tuple(frozenset(listify(value)))
else:
- assert isinstance(value, (str, bool, int)), value
+ assert value is None or isinstance(value, (str, bool, int)), value
identifier = (*identifier, (key, value),)
return identifier
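
The effect of seeding the kwargs with DEPENDENCY_KWS defaults is that a dependency() call which omits a keyword and one which passes that keyword's default now produce the same cache identifier. A reduced sketch of the idea, with an invented defaults table standing in for DEPENDENCY_KWS:

    # Normalizing kwargs against declared defaults stabilizes the cache key.
    DEFAULTS = {'default_options': None}  # illustrative only

    def identifier(name: str, kwargs: dict) -> tuple:
        nkwargs = dict(DEFAULTS)
        nkwargs.update(kwargs)
        return (('name', name), *sorted(nkwargs.items()))

    assert identifier('zlib', {}) == identifier('zlib', {'default_options': None})
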
diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py
index 1b45418..3ab2194 100644
--- a/mesonbuild/dependencies/misc.py
+++ b/mesonbuild/dependencies/misc.py
@@ -56,24 +56,22 @@ packages['netcdf'] = netcdf_factory
class AtomicBuiltinDependency(BuiltinDependency):
def __init__(self, name: str, env: Environment, kwargs: T.Dict[str, T.Any]):
super().__init__(name, env, kwargs)
- self.feature_since = ('1.7.0', "consider checking for `atomic_fetch_add` of a 64-bit type with and without `find_library('atomic')`")
+ self.feature_since = ('1.7.0', "consider checking for `atomic_flag_clear` with and without `find_library('atomic')`")
- code = '''#include <stdatomic.h>\n\nint main() {\n atomic_int_least64_t a;\n return atomic_fetch_add(&b, 1);\n}''' # [ignore encoding] this is C, not python, Mr. Lint
-
- self.is_found = bool(self.clib_compiler.links(code, env)[0])
+ if self.clib_compiler.has_function('atomic_flag_clear', '#include <stdatomic.h>', env)[0]:
+ self.is_found = True
class AtomicSystemDependency(SystemDependency):
def __init__(self, name: str, env: Environment, kwargs: T.Dict[str, T.Any]):
super().__init__(name, env, kwargs)
- self.feature_since = ('1.7.0', "consider checking for `atomic_fetch_add` of a 64-bit type with and without `find_library('atomic')`")
+ self.feature_since = ('1.7.0', "consider checking for `atomic_flag_clear` with and without `find_library('atomic')`")
h = self.clib_compiler.has_header('stdatomic.h', '', env)
- if not h[0]:
- return
-
self.link_args = self.clib_compiler.find_library('atomic', env, [], self.libtype)
- self.is_found = bool(self.link_args)
+
+ if h[0] and self.link_args:
+ self.is_found = True
class DlBuiltinDependency(BuiltinDependency):
diff --git a/mesonbuild/dependencies/qt.py b/mesonbuild/dependencies/qt.py
index a3a9388..8bb269e 100644
--- a/mesonbuild/dependencies/qt.py
+++ b/mesonbuild/dependencies/qt.py
@@ -9,6 +9,7 @@ from __future__ import annotations
import abc
import re
import os
+from pathlib import Path
import typing as T
from .base import DependencyException, DependencyMethods
@@ -50,7 +51,7 @@ def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) ->
if len(dirname.split('.')) == 3:
private_dir = dirname
break
- return [private_dir, os.path.join(private_dir, 'Qt' + module)]
+ return [private_dir, Path(private_dir, f'Qt{module}').as_posix()]
def get_qmake_host_bins(qvars: T.Dict[str, str]) -> str:
@@ -303,7 +304,7 @@ class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta):
modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug)
for module in self.requested_modules:
- mincdir = os.path.join(incdir, 'Qt' + module)
+ mincdir = Path(incdir, f'Qt{module}').as_posix()
self.compile_args.append('-I' + mincdir)
if module == 'QuickTest':
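
Using Path(...).as_posix() pins the separator to '/', so the generated -I flags (and anything introspected from them) look the same on every host. PureWindowsPath models the Windows behaviour regardless of the platform this snippet runs on:

    from pathlib import PureWindowsPath

    # os.path.join would yield 'include\\QtCore' on Windows; as_posix() normalizes it.
    assert PureWindowsPath('include', 'QtCore').as_posix() == 'include/QtCore'
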
diff --git a/mesonbuild/dependencies/scalapack.py b/mesonbuild/dependencies/scalapack.py
index c04d1f5..f34692c 100644
--- a/mesonbuild/dependencies/scalapack.py
+++ b/mesonbuild/dependencies/scalapack.py
@@ -9,7 +9,7 @@ import os
import typing as T
from ..options import OptionKey
-from .base import DependencyMethods
+from .base import DependencyException, DependencyMethods
from .cmake import CMakeDependency
from .detect import packages
from .pkgconfig import PkgConfigDependency
@@ -65,8 +65,7 @@ class MKLPkgConfigDependency(PkgConfigDependency):
super().__init__(name, env, kwargs, language=language)
# Doesn't work with gcc on windows, but does on Linux
- if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
- and self.clib_compiler.id == 'gcc')):
+ if env.machines[self.for_machine].is_windows() and self.clib_compiler.id == 'gcc':
self.is_found = False
# This can happen either because we're using GCC, we couldn't find the
@@ -96,6 +95,9 @@ class MKLPkgConfigDependency(PkgConfigDependency):
self.version = v
def _set_libs(self) -> None:
+ if self.__mklroot is None:
+ raise DependencyException('MKLROOT not set')
+
super()._set_libs()
if self.env.machines[self.for_machine].is_windows():
@@ -133,6 +135,9 @@ class MKLPkgConfigDependency(PkgConfigDependency):
self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
def _set_cargs(self) -> None:
+ if self.__mklroot is None:
+ raise DependencyException('MKLROOT not set')
+
allow_system = False
if self.language == 'fortran':
# gfortran doesn't appear to look in system paths for INCLUDE files,
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 2c3bdec..489ef50 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -12,6 +12,7 @@ import collections
from . import coredata
from . import mesonlib
from . import machinefile
+from . import options
CmdLineFileParser = machinefile.CmdLineFileParser
@@ -34,6 +35,7 @@ from .compilers import (
is_library,
is_llvm_ir,
is_object,
+ is_separate_compile,
is_source,
)
@@ -728,13 +730,14 @@ class Environment:
def mfilestr2key(self, machine_file_string: str, section: T.Optional[str], section_subproject: T.Optional[str], machine: MachineChoice) -> OptionKey:
key = OptionKey.from_string(machine_file_string)
- assert key.machine == MachineChoice.HOST
if key.subproject:
suggestion = section if section == 'project options' else 'built-in options'
raise MesonException(f'Do not set subproject options in [{section}] section, use [subproject:{suggestion}] instead.')
if section_subproject:
key = key.evolve(subproject=section_subproject)
if machine == MachineChoice.BUILD:
+ if key.machine == MachineChoice.BUILD:
+ mlog.deprecation('Setting build machine options in the native file does not need the "build." prefix', once=True)
return key.evolve(machine=machine)
return key
@@ -935,6 +938,9 @@ class Environment:
def is_assembly(self, fname: 'mesonlib.FileOrString') -> bool:
return is_assembly(fname)
+ def is_separate_compile(self, fname: 'mesonlib.FileOrString') -> bool:
+ return is_separate_compile(fname)
+
def is_llvm_ir(self, fname: 'mesonlib.FileOrString') -> bool:
return is_llvm_ir(fname)
@@ -1071,3 +1077,44 @@ class Environment:
if extra_paths:
env.prepend('PATH', list(extra_paths))
return env
+
+ def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
+ for_machine: MachineChoice) -> None:
+ """Add global language arguments that are needed before compiler/linker detection."""
+ description = f'Extra arguments passed to the {lang}'
+ argkey = OptionKey(f'{lang}_args', machine=for_machine)
+ largkey = OptionKey(f'{lang}_link_args', machine=for_machine)
+
+ comp_args_from_envvar = False
+ comp_options = self.coredata.optstore.get_pending_value(argkey)
+ if comp_options is None:
+ comp_args_from_envvar = True
+ comp_options = self.env_opts.get(argkey, [])
+
+ link_options = self.coredata.optstore.get_pending_value(largkey)
+ if link_options is None:
+ link_options = self.env_opts.get(largkey, [])
+
+ assert isinstance(comp_options, (str, list)), 'for mypy'
+ assert isinstance(link_options, (str, list)), 'for mypy'
+
+ cargs = options.UserStringArrayOption(
+ argkey.name,
+ description + ' compiler',
+ comp_options, split_args=True, allow_dups=True)
+
+ largs = options.UserStringArrayOption(
+ largkey.name,
+ description + ' linker',
+ link_options, split_args=True, allow_dups=True)
+
+ self.coredata.optstore.add_compiler_option(lang, argkey, cargs)
+ self.coredata.optstore.add_compiler_option(lang, largkey, largs)
+
+ if comp.INVOKES_LINKER and comp_args_from_envvar:
+ # If the compiler acts as a linker driver, and we're using the
+ # environment variable flags for both the compiler and linker
+ # arguments, then put the compiler flags in the linker flags as well.
+ # This is how autotools works, and the env vars feature is for
+ # autotools compatibility.
+ largs.extend_value(comp_options)
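
The precedence implemented above is: a pending command-line value for {lang}_args / {lang}_link_args wins, otherwise the environment-variable value is used; and when the compiler drives the link step itself and the compiler flags came from the environment, those flags are mirrored into the link flags, as autotools does. A minimal mimic with plain dicts standing in for the option store and the env-var table:

    def seed_lang_args(pending: dict, env_opts: dict, lang: str, invokes_linker: bool):
        argkey, largkey = f'{lang}_args', f'{lang}_link_args'
        comp = pending.get(argkey)
        from_env = comp is None
        if from_env:
            comp = env_opts.get(argkey, [])
        link = pending.get(largkey)
        if link is None:
            link = env_opts.get(largkey, [])
        if invokes_linker and from_env:
            link = link + comp  # e.g. CFLAGS also reach the link line
        return comp, link

    comp, link = seed_lang_args({}, {'c_args': ['-O2'], 'c_link_args': ['-s']}, 'c', True)
    assert (comp, link) == (['-O2'], ['-s', '-O2'])
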
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index 29bb705..2cf5b7a 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -62,6 +62,7 @@ from .type_checking import (
OUTPUT_KW,
DEFAULT_OPTIONS,
DEPENDENCIES_KW,
+ DEPENDENCY_KWS,
DEPENDS_KW,
DEPEND_FILES_KW,
DEPFILE_KW,
@@ -523,6 +524,8 @@ class Interpreter(InterpreterBase, HoldableObject):
self.handle_meson_version(val.value, val)
def get_build_def_files(self) -> mesonlib.OrderedSet[str]:
+ if self.environment.cargo:
+ self.build_def_files.update(self.environment.cargo.get_build_def_files())
return self.build_def_files
def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
@@ -1085,7 +1088,7 @@ class Interpreter(InterpreterBase, HoldableObject):
value_object: T.Optional[options.AnyOptionType]
try:
- optkey = options.OptionKey(optname, self.subproject)
+ optkey = options.OptionKey.from_string(optname).evolve(subproject=self.subproject)
value_object, value = self.coredata.optstore.get_value_object_and_value_for(optkey)
except KeyError:
if self.coredata.optstore.is_base_option(optkey):
@@ -1787,8 +1790,8 @@ class Interpreter(InterpreterBase, HoldableObject):
@disablerIfNotFound
@permittedKwargs(permitted_dependency_kwargs)
@typed_pos_args('dependency', varargs=str, min_varargs=1)
- @typed_kwargs('dependency', DEFAULT_OPTIONS.evolve(since='0.38.0'), allow_unknown=True)
- def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs) -> Dependency:
+ @typed_kwargs('dependency', *DEPENDENCY_KWS, allow_unknown=True)
+ def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs: kwtypes.FuncDependency) -> Dependency:
# Replace '' by empty list of names
names = [n for n in args[0] if n]
if len(names) > 1:
@@ -3255,9 +3258,9 @@ class Interpreter(InterpreterBase, HoldableObject):
def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library) -> build.BothLibraries:
shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
- preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_both_libraries'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_both_libraries', subproject=self.subproject))
if preferred_library == 'auto':
- preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_library', subproject=self.subproject))
if preferred_library == 'both':
preferred_library = 'shared'
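
The pattern used in these hunks is to parse the option name first and then attach the current subproject, so plain and qualified spellings resolve through the same path. A stand-in dataclass shows the from_string-then-evolve shape (OptionKey's real parsing is richer than this sketch):

    from dataclasses import dataclass, replace

    @dataclass(frozen=True)
    class Key:  # simplified stand-in for options.OptionKey
        name: str
        subproject: str = ''

    def from_string(s: str) -> Key:
        sub, _, name = s.rpartition(':')
        return Key(name=name, subproject=sub)

    # get_option('cpp_std') evaluated inside subproject 'sub' resolves as sub:cpp_std.
    assert replace(from_string('cpp_std'), subproject='sub') == Key('cpp_std', 'sub')
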
diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py
index d741aab..7dd49a1 100644
--- a/mesonbuild/interpreter/kwargs.py
+++ b/mesonbuild/interpreter/kwargs.py
@@ -363,6 +363,8 @@ class _BuildTarget(_BaseBuildTarget):
d_module_versions: T.List[T.Union[str, int]]
d_unittest: bool
rust_dependency_map: T.Dict[str, str]
+ swift_interoperability_mode: Literal['c', 'cpp']
+ swift_module_name: str
sources: SourcesVarargsType
c_args: T.List[str]
cpp_args: T.List[str]
@@ -486,3 +488,8 @@ class FuncDeclareDependency(TypedDict):
sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
variables: T.Dict[str, str]
version: T.Optional[str]
+
+
+class FuncDependency(TypedDict):
+
+ default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
diff --git a/mesonbuild/interpreter/primitives/array.py b/mesonbuild/interpreter/primitives/array.py
index ff520a2..d0a2441 100644
--- a/mesonbuild/interpreter/primitives/array.py
+++ b/mesonbuild/interpreter/primitives/array.py
@@ -97,3 +97,17 @@ class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
return self.held_object[other]
except IndexError:
raise InvalidArguments(f'Index {other} out of bounds of array of size {len(self.held_object)}.')
+
+ @noPosargs
+ @noKwargs
+ @FeatureNew('array.flatten', '1.9.0')
+ @InterpreterObject.method('flatten')
+ def flatten_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
+ def flatten(obj: TYPE_var) -> T.Iterable[TYPE_var]:
+ if isinstance(obj, list):
+ for o in obj:
+ yield from flatten(o)
+ else:
+ yield obj
+
+ return list(flatten(self.held_object))
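
Standalone, the helper behaves like this (same recursion as above):

    def flatten(obj):
        # Yield non-list items from arbitrarily nested lists, depth first.
        if isinstance(obj, list):
            for o in obj:
                yield from flatten(o)
        else:
            yield obj

    assert list(flatten([1, [2, [3, []]], 4])) == [1, 2, 3, 4]
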
diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py
index fbe3e3e..a551d0f 100644
--- a/mesonbuild/interpreter/type_checking.py
+++ b/mesonbuild/interpreter/type_checking.py
@@ -633,6 +633,8 @@ _BUILD_TARGET_KWS: T.List[KwargInfo] = [
default={},
since='1.2.0',
),
+ KwargInfo('swift_interoperability_mode', str, default='c', validator=in_set_validator({'c', 'cpp'}), since='1.9.0'),
+ KwargInfo('swift_module_name', str, default='', since='1.9.0'),
KwargInfo('build_rpath', str, default='', since='0.42.0'),
KwargInfo(
'gnu_symbol_visibility',
@@ -865,3 +867,8 @@ PKGCONFIG_DEFINE_KW: KwargInfo = KwargInfo(
default=[],
convertor=_pkgconfig_define_convertor,
)
+
+
+DEPENDENCY_KWS: T.List[KwargInfo] = [
+ DEFAULT_OPTIONS.evolve(since='0.38.0'),
+]
diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py
index f6c0fbc..6fbe6e4 100644
--- a/mesonbuild/linkers/detect.py
+++ b/mesonbuild/linkers/detect.py
@@ -39,7 +39,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
use_linker_prefix: bool = True, invoked_directly: bool = True,
extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
from . import linkers
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
if invoked_directly or comp_class.get_argument_syntax() == 'msvc':
rsp_syntax = RSPFileSyntax.MSVC
@@ -128,7 +128,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
:extra_args: Any additional arguments required (such as a source file)
"""
from . import linkers
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
extra_args = extra_args or []
system = env.machines[for_machine].system
@@ -166,7 +166,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
linker = lld_cls(
compiler, for_machine, comp_class.LINKER_PREFIX, override, system=system, version=v)
- elif 'Hexagon' in o and 'LLVM' in o:
+ elif o.startswith("eld"):
linker = linkers.ELDDynamicLinker(
compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
elif 'Snapdragon' in e and 'LLVM' in e:
diff --git a/mesonbuild/linkers/linkers.py b/mesonbuild/linkers/linkers.py
index d81892b..c528db7 100644
--- a/mesonbuild/linkers/linkers.py
+++ b/mesonbuild/linkers/linkers.py
@@ -65,9 +65,8 @@ class StaticLinker:
def get_coverage_link_args(self) -> T.List[str]:
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
def thread_link_flags(self, env: 'Environment') -> T.List[str]:
@@ -297,9 +296,8 @@ class DynamicLinker(metaclass=abc.ABCMeta):
def bitcode_args(self) -> T.List[str]:
raise MesonException('This linker does not support bitcode bundles')
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
@@ -703,13 +701,13 @@ class GnuLikeDynamicLinkerMixin(DynamicLinkerBase):
sostr = '' if soversion is None else '.' + soversion
return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
m = env.machines[self.for_machine]
if m.is_windows() or m.is_cygwin():
return ([], set())
- if not rpath_paths and not install_rpath and not build_rpath:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
args: T.List[str] = []
origin_placeholder = '$ORIGIN'
@@ -722,10 +720,12 @@ class GnuLikeDynamicLinkerMixin(DynamicLinkerBase):
for p in all_paths:
rpath_dirs_to_remove.add(p.encode('utf8'))
# Build_rpath is used as-is (it is usually absolute).
- if build_rpath != '':
- all_paths.add(build_rpath)
- for p in build_rpath.split(':'):
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ for p in target.build_rpath.split(':'):
rpath_dirs_to_remove.add(p.encode('utf8'))
+ if extra_paths:
+ all_paths.update(extra_paths)
# TODO: should this actually be "for (dragonfly|open)bsd"?
if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
@@ -740,7 +740,7 @@ class GnuLikeDynamicLinkerMixin(DynamicLinkerBase):
# enough space in the ELF header to hold the final installation RPATH.
paths = ':'.join(all_paths)
paths_length = len(paths.encode('utf-8'))
- install_rpath_length = len(install_rpath.encode('utf-8'))
+ install_rpath_length = len(target.install_rpath.encode('utf-8'))
if paths_length < install_rpath_length:
padding = 'X' * (install_rpath_length - paths_length)
if not paths:
@@ -873,10 +873,10 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
'-current_version', darwin_versions[1]])
return args
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- if not rpath_paths and not install_rpath and not build_rpath:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
args: T.List[str] = []
rpath_dirs_to_remove: T.Set[bytes] = set()
@@ -885,8 +885,10 @@ class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
origin_placeholder = '@loader_path'
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
- if build_rpath != '':
- all_paths.update(build_rpath.split(':'))
+ if target.build_rpath != '':
+ all_paths.update(target.build_rpath.split(':'))
+ if extra_paths:
+ all_paths.update(extra_paths)
for rp in all_paths:
rpath_dirs_to_remove.add(rp.encode('utf8'))
args.extend(self._apply_prefix('-rpath,' + rp))
@@ -1022,9 +1024,8 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna
def get_asneeded_args(self) -> T.List[str]:
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
@@ -1100,9 +1101,8 @@ class Xc16DynamicLinker(DynamicLinker):
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
class CompCertDynamicLinker(DynamicLinker):
@@ -1143,9 +1143,8 @@ class CompCertDynamicLinker(DynamicLinker):
suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
raise MesonException(f'{self.id} does not support shared libraries.')
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
return ([], set())
class TIDynamicLinker(DynamicLinker):
@@ -1255,17 +1254,19 @@ class NAGDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
id = 'nag'
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- if not rpath_paths and not install_rpath and not build_rpath:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
args: T.List[str] = []
origin_placeholder = '$ORIGIN'
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
- if build_rpath != '':
- all_paths.add(build_rpath)
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ if extra_paths:
+ all_paths.update(extra_paths)
for rp in all_paths:
args.extend(self._apply_prefix('-Wl,-Wl,,-rpath,,' + rp))
@@ -1300,10 +1301,10 @@ class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
return ['-shared']
return []
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not env.machines[self.for_machine].is_windows():
+ rpath_paths = target.determine_rpath_dirs()
return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
return ([], set())
@@ -1511,26 +1512,28 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
def fatal_warnings(self) -> T.List[str]:
return ['-z', 'fatal-warnings']
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
- if not rpath_paths and not install_rpath and not build_rpath:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ rpath_paths = target.determine_rpath_dirs()
+ if not rpath_paths and not target.install_rpath and not target.build_rpath and not extra_paths:
return ([], set())
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
rpath_dirs_to_remove: T.Set[bytes] = set()
for p in all_paths:
rpath_dirs_to_remove.add(p.encode('utf8'))
- if build_rpath != '':
- all_paths.add(build_rpath)
- for p in build_rpath.split(':'):
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ for p in target.build_rpath.split(':'):
rpath_dirs_to_remove.add(p.encode('utf8'))
+ if extra_paths:
+ all_paths.update(extra_paths)
# In order to avoid relinking for RPATH removal, the binary needs to contain just
# enough space in the ELF header to hold the final installation RPATH.
paths = ':'.join(all_paths)
paths_length = len(paths.encode('utf-8'))
- install_rpath_length = len(install_rpath.encode('utf-8'))
+ install_rpath_length = len(target.install_rpath.encode('utf-8'))
if paths_length < install_rpath_length:
padding = 'X' * (install_rpath_length - paths_length)
if not paths:
@@ -1581,16 +1584,15 @@ class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
# archives or not."
return args
- def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
- rpath_paths: T.Tuple[str, ...], build_rpath: str,
- install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+ def build_rpath_args(self, env: Environment, build_dir: str, from_dir: str,
+ target: BuildTarget, extra_paths: T.Optional[T.List[str]] = None) -> T.Tuple[T.List[str], T.Set[bytes]]:
all_paths: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
# install_rpath first, followed by other paths, and the system path last
- if install_rpath != '':
- all_paths.add(install_rpath)
- if build_rpath != '':
- all_paths.add(build_rpath)
- for p in rpath_paths:
+ if target.install_rpath != '':
+ all_paths.add(target.install_rpath)
+ if target.build_rpath != '':
+ all_paths.add(target.build_rpath)
+ for p in target.determine_rpath_dirs():
all_paths.add(os.path.join(build_dir, p))
# We should consider allowing the $LIBPATH environment variable
# to override sys_path.
@@ -1604,6 +1606,8 @@ class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
for p in sys_path:
if os.path.isdir(p):
all_paths.add(p)
+ if extra_paths:
+ all_paths.update(extra_paths)
return (self._apply_prefix('-blibpath:' + ':'.join(all_paths)), set())
def thread_flags(self, env: 'Environment') -> T.List[str]:
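
The padding arithmetic that now reads its inputs from the target (in the GNU-like and Solaris linkers above) reserves enough space in the build-time RPATH that the installed RPATH can later be written in place without relinking. A worked example with an invented install path:

    paths = '$ORIGIN/../lib'                         # build-time rpath string
    install_rpath = '/usr/lib/myapp/private/libdir'  # illustrative final rpath
    shortfall = len(install_rpath.encode('utf-8')) - len(paths.encode('utf-8'))
    if shortfall > 0:
        padding = 'X' * shortfall
        paths = padding if not paths else paths + ':' + padding
    assert len(paths.encode('utf-8')) >= len(install_rpath.encode('utf-8'))
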
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 217379f..7f62ba0 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -33,7 +33,6 @@ if T.TYPE_CHECKING:
builddir: str
clearcache: bool
pager: bool
- unset_opts: T.List[str]
# cannot be TV_Loggable, because non-ansidecorators do direct string concat
LOGLINE = T.Union[str, mlog.AnsiDecorator]
@@ -47,7 +46,7 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None:
help='Clear cached state (e.g. found dependencies)')
parser.add_argument('--no-pager', action='store_false', dest='pager',
help='Do not redirect output to a pager')
- parser.add_argument('-U', action='append', dest='unset_opts', default=[],
+ parser.add_argument('-U', action=coredata.KeyNoneAction, dest='cmd_line_options', default={},
help='Remove a subproject option.')
def stringify(val: T.Any) -> str:
@@ -147,7 +146,7 @@ class Conf:
Each column will have a specific width, and will be line wrapped.
"""
total_width = shutil.get_terminal_size(fallback=(160, 0))[0]
- _col = max(total_width // 5, 20)
+ _col = max(total_width // 5, 24)
last_column = total_width - (3 * _col) - 3
four_column = (_col, _col, _col, last_column if last_column > 1 else _col)
@@ -207,11 +206,12 @@ class Conf:
self.choices_col.append(choices)
self.descr_col.append(descr)
- def add_option(self, name: str, descr: str, value: T.Any, choices: T.Any) -> None:
+ def add_option(self, key: OptionKey, descr: str, value: T.Any, choices: T.Any) -> None:
self._add_section()
value = stringify(value)
choices = stringify(choices)
- self._add_line(mlog.green(name), mlog.yellow(value), mlog.blue(choices), descr)
+ self._add_line(mlog.green(str(key.evolve(subproject=None))), mlog.yellow(value),
+ mlog.blue(choices), descr)
def add_title(self, title: str) -> None:
self._add_section()
@@ -248,7 +248,7 @@ class Conf:
# printable_value = '<inherited from main project>'
#if isinstance(o, options.UserFeatureOption) and o.is_auto():
# printable_value = auto.printable_value()
- self.add_option(k.name, o.description, printable_value, o.printable_choices())
+ self.add_option(k, o.description, printable_value, o.printable_choices())
def print_conf(self, pager: bool) -> None:
if pager:
@@ -354,11 +354,7 @@ class Conf:
mlog.log('\nThere are no option augments.')
def has_option_flags(options: CMDOptions) -> bool:
- if options.cmd_line_options:
- return True
- if options.unset_opts:
- return True
- return False
+ return bool(options.cmd_line_options)
def is_print_only(options: CMDOptions) -> bool:
if has_option_flags(options):
diff --git a/mesonbuild/mformat.py b/mesonbuild/mformat.py
index 1e134f5..2131ff7 100644
--- a/mesonbuild/mformat.py
+++ b/mesonbuild/mformat.py
@@ -837,7 +837,15 @@ class Formatter:
# See https://editorconfig.org/
config = EditorConfig()
- for p in source_file.parents:
+ if source_file == Path('STDIN'):
+ raise MesonException('Using editorconfig with stdin requires --source-file-path argument')
+
+ try:
+ source_file_path = source_file.resolve()
+ except FileNotFoundError:
+ raise MesonException(f'Unable to resolve path for "{source_file}"')
+
+ for p in source_file_path.parents:
editorconfig_file = p / '.editorconfig'
if not editorconfig_file.exists():
continue
@@ -956,6 +964,11 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
help='output file (implies having exactly one input)'
)
parser.add_argument(
+ '--source-file-path',
+ type=Path,
+ help='path to use, when reading from stdin'
+ )
+ parser.add_argument(
'sources',
nargs='*',
type=Path,
@@ -981,6 +994,10 @@ def run(options: argparse.Namespace) -> int:
raise MesonException('--recursive argument is not compatible with stdin input')
if options.inplace and from_stdin:
raise MesonException('--inplace argument is not compatible with stdin input')
+ if options.source_file_path and not from_stdin:
+ raise MesonException('--source-file-path argument is only compatible with stdin input')
+ if from_stdin and options.editor_config and not options.source_file_path:
+ raise MesonException('using --editor-config with stdin input requires --source-file-path argument')
sources: T.List[Path] = options.sources.copy() or [Path(build_filename)]
@@ -996,7 +1013,7 @@ def run(options: argparse.Namespace) -> int:
try:
if from_stdin:
- src_file = Path('STDIN') # used for error messages and introspection
+ src_file = options.source_file_path or Path('STDIN') # used for error messages and introspection
code = sys.stdin.read()
else:
code = src_file.read_text(encoding='utf-8')
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 6986186..e19e528 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -125,14 +125,15 @@ def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
res[basename] = os.path.join(installdata.prefix, s.install_path, basename)
return res
-def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]:
- plan: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]] = {
+def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Union[str, T.List[str], None]]]]:
+ plan: T.Dict[str, T.Dict[str, T.Dict[str, T.Union[str, T.List[str], None]]]] = {
'targets': {
- os.path.join(installdata.build_dir, target.fname): {
+ Path(installdata.build_dir, target.fname).as_posix(): {
'destination': target.out_name,
'tag': target.tag or None,
'subproject': target.subproject or None,
- 'install_rpath': target.install_rpath or None
+ 'install_rpath': target.install_rpath or None,
+ 'build_rpaths': sorted(x.decode('utf8') for x in target.rpath_dirs_to_remove),
}
for target in installdata.targets
},
@@ -145,13 +146,14 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
}.items():
# Mypy doesn't recognize SubdirInstallData as a subclass of InstallDataBase
for data in data_list: # type: ignore[attr-defined]
+ data_path = Path(data.path).as_posix()
data_type = data.data_type or key
- install_path_name = data.install_path_name
+ install_path_name = Path(data.install_path_name)
if key == 'headers': # in the headers, install_path_name is the directory
- install_path_name = os.path.join(install_path_name, os.path.basename(data.path))
+ install_path_name = install_path_name / os.path.basename(data.path)
entry = {
- 'destination': install_path_name,
+ 'destination': install_path_name.as_posix(),
'tag': data.tag or None,
'subproject': data.subproject or None,
}
@@ -162,7 +164,7 @@ def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[s
entry['exclude_files'] = list(exclude_files)
plan[data_type] = plan.get(data_type, {})
- plan[data_type][data.path] = entry
+ plan[data_type][data_path] = entry
return plan
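
After this change every plan key is POSIX-normalized and each target entry carries a sorted build_rpaths list alongside install_rpath. A hypothetical 'targets' entry (all paths invented for illustration) might look like:

    entry = {
        'builddir/src/libfoo.so': {
            'destination': '{libdir_shared}/libfoo.so',
            'tag': 'runtime',
            'subproject': None,
            'install_rpath': None,
            'build_rpaths': ['$ORIGIN/../lib'],  # decoded from rpath_dirs_to_remove
        },
    }
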
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index 87892e6..3938101 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -82,19 +82,21 @@ class ModuleState:
wanted=wanted, silent=silent, for_machine=for_machine)
def find_tool(self, name: str, depname: str, varname: str, required: bool = True,
- wanted: T.Optional[str] = None) -> T.Union[build.OverrideExecutable, ExternalProgram, 'OverrideProgram']:
- # Look in overrides in case it's built as subproject
- progobj = self._interpreter.program_from_overrides([name], [])
- if progobj is not None:
- return progobj
+ wanted: T.Optional[str] = None, for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[build.OverrideExecutable, ExternalProgram, 'OverrideProgram']:
+ if for_machine is MachineChoice.HOST:
+ # Look in overrides in case it's built as subproject
+ progobj = self._interpreter.program_from_overrides([name], [])
+ if progobj is not None:
+ return progobj
# Look in machine file
- prog_list = self.environment.lookup_binary_entry(MachineChoice.HOST, name)
+ prog_list = self.environment.lookup_binary_entry(for_machine, name)
if prog_list is not None:
return ExternalProgram.from_entry(name, prog_list)
# Check if pkgconfig has a variable
- dep = self.dependency(depname, native=True, required=False, wanted=wanted)
+ dep = self.dependency(depname, native=for_machine is MachineChoice.BUILD,
+ required=False, wanted=wanted)
if dep.found() and dep.type_name == 'pkgconfig':
value = dep.get_variable(pkgconfig=varname)
if value:
@@ -106,7 +108,7 @@ class ModuleState:
return progobj
# Normal program lookup
- return self.find_program(name, required=required, wanted=wanted)
+ return self.find_program(name, required=required, wanted=wanted, for_machine=for_machine)
def dependency(self, depname: str, native: bool = False, required: bool = True,
wanted: T.Optional[str] = None) -> 'Dependency':
diff --git a/mesonbuild/modules/fs.py b/mesonbuild/modules/fs.py
index 1fa368e..57a6b6d 100644
--- a/mesonbuild/modules/fs.py
+++ b/mesonbuild/modules/fs.py
@@ -2,7 +2,9 @@
# Copyright 2019 The Meson development team
from __future__ import annotations
-from pathlib import Path, PurePath, PureWindowsPath
+from ntpath import sep as ntsep
+from pathlib import Path
+from posixpath import sep as posixsep
import hashlib
import os
import typing as T
@@ -12,7 +14,7 @@ from .. import mlog
from ..build import BuildTarget, CustomTarget, CustomTargetIndex, InvalidArguments
from ..interpreter.type_checking import INSTALL_KW, INSTALL_MODE_KW, INSTALL_TAG_KW, NoneType
from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs
-from ..mesonlib import File, MesonException, has_path_sep, path_is_in_root, relpath
+from ..mesonlib import File, MesonException, has_path_sep, is_windows, path_is_in_root, relpath
if T.TYPE_CHECKING:
from . import ModuleState
@@ -42,7 +44,7 @@ class FSModule(ExtensionModule):
INFO = ModuleInfo('fs', '0.53.0')
- def __init__(self, interpreter: 'Interpreter') -> None:
+ def __init__(self, interpreter: Interpreter) -> None:
super().__init__(interpreter)
self.methods.update({
'as_posix': self.as_posix,
@@ -62,29 +64,30 @@ class FSModule(ExtensionModule):
'replace_suffix': self.replace_suffix,
'size': self.size,
'stem': self.stem,
+ 'suffix': self.suffix,
})
- def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+ def _absolute_dir(self, state: ModuleState, arg: FileOrString) -> str:
"""
make an absolute path from a relative path, WITHOUT resolving symlinks
"""
if isinstance(arg, File):
- return Path(arg.absolute_path(state.source_root, state.environment.get_build_dir()))
- return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser()
+ return arg.absolute_path(state.source_root, state.environment.get_build_dir())
+ return os.path.join(state.source_root, state.subdir, os.path.expanduser(arg))
@staticmethod
- def _obj_to_path(feature_new_prefix: str, obj: T.Union[FileOrString, BuildTargetTypes], state: ModuleState) -> PurePath:
+ def _obj_to_pathstr(feature_new_prefix: str, obj: T.Union[FileOrString, BuildTargetTypes], state: ModuleState) -> str:
if isinstance(obj, str):
- return PurePath(obj)
+ return obj
if isinstance(obj, File):
FeatureNew(f'{feature_new_prefix} with file', '0.59.0').use(state.subproject, location=state.current_node)
- return PurePath(str(obj))
+ return str(obj)
FeatureNew(f'{feature_new_prefix} with build_tgt, custom_tgt, and custom_idx', '1.4.0').use(state.subproject, location=state.current_node)
- return PurePath(state.backend.get_target_filename(obj))
+ return state.backend.get_target_filename(obj)
- def _resolve_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+ def _resolve_dir(self, state: ModuleState, arg: FileOrString) -> str:
"""
resolves symlinks and makes absolute a directory relative to calling meson.build,
if not already absolute
@@ -92,7 +95,7 @@ class FSModule(ExtensionModule):
path = self._absolute_dir(state, arg)
try:
# accommodate unresolvable paths e.g. symlink loops
- path = path.resolve()
+ path = os.path.realpath(path)
except Exception:
# return the best we could do
pass
@@ -101,123 +104,139 @@ class FSModule(ExtensionModule):
@noKwargs
@FeatureNew('fs.expanduser', '0.54.0')
@typed_pos_args('fs.expanduser', str)
- def expanduser(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
- return str(Path(args[0]).expanduser())
+ def expanduser(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+ return os.path.expanduser(args[0])
@noKwargs
@FeatureNew('fs.is_absolute', '0.54.0')
@typed_pos_args('fs.is_absolute', (str, File))
- def is_absolute(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
- if isinstance(args[0], File):
+ def is_absolute(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: T.Dict[str, T.Any]) -> bool:
+ path = args[0]
+ if isinstance(path, File):
FeatureNew('fs.is_absolute with file', '0.59.0').use(state.subproject, location=state.current_node)
- return PurePath(str(args[0])).is_absolute()
+ path = str(path)
+ if is_windows():
+ # os.path.isabs was broken for Windows before Python 3.13, so we implement it ourselves
+ path = path[:3].replace(posixsep, ntsep)
+ return path.startswith(ntsep * 2) or path.startswith(':' + ntsep, 1)
+ return path.startswith(posixsep)
@noKwargs
@FeatureNew('fs.as_posix', '0.54.0')
@typed_pos_args('fs.as_posix', str)
- def as_posix(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+ def as_posix(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
r"""
this function assumes you are passing a Windows path, even if on a Unix-like system
and so ALL '\' are turned to '/', even if you meant to escape a character
"""
- return PureWindowsPath(args[0]).as_posix()
+ return args[0].replace(ntsep, posixsep)
@noKwargs
@typed_pos_args('fs.exists', str)
- def exists(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
- return self._resolve_dir(state, args[0]).exists()
+ def exists(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return os.path.exists(self._resolve_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.is_symlink', (str, File))
- def is_symlink(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+ def is_symlink(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: T.Dict[str, T.Any]) -> bool:
if isinstance(args[0], File):
FeatureNew('fs.is_symlink with file', '0.59.0').use(state.subproject, location=state.current_node)
- return self._absolute_dir(state, args[0]).is_symlink()
+ return os.path.islink(self._absolute_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.is_file', str)
- def is_file(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
- return self._resolve_dir(state, args[0]).is_file()
+ def is_file(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return os.path.isfile(self._resolve_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.is_dir', str)
- def is_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
- return self._resolve_dir(state, args[0]).is_dir()
+ def is_dir(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+ return os.path.isdir(self._resolve_dir(state, args[0]))
@noKwargs
@typed_pos_args('fs.hash', (str, File), str)
- def hash(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+ def hash(self, state: ModuleState, args: T.Tuple[FileOrString, str], kwargs: T.Dict[str, T.Any]) -> str:
if isinstance(args[0], File):
FeatureNew('fs.hash with file', '0.59.0').use(state.subproject, location=state.current_node)
file = self._resolve_dir(state, args[0])
- if not file.is_file():
+ if not os.path.isfile(file):
raise MesonException(f'{file} is not a file and therefore cannot be hashed')
try:
h = hashlib.new(args[1])
except ValueError:
raise MesonException('hash algorithm {} is not available'.format(args[1]))
- mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, file.stat().st_size))
- h.update(file.read_bytes())
+ mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, os.stat(file).st_size))
+ with open(file, mode='rb', buffering=0) as f:
+ h.update(f.read())
return h.hexdigest()
@noKwargs
@typed_pos_args('fs.size', (str, File))
- def size(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> int:
+ def size(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: T.Dict[str, T.Any]) -> int:
if isinstance(args[0], File):
FeatureNew('fs.size with file', '0.59.0').use(state.subproject, location=state.current_node)
file = self._resolve_dir(state, args[0])
- if not file.is_file():
+ if not os.path.isfile(file):
raise MesonException(f'{file} is not a file and therefore cannot be sized')
try:
- return file.stat().st_size
+ return os.stat(file).st_size
except ValueError:
raise MesonException('{} size could not be determined'.format(args[0]))
@noKwargs
@typed_pos_args('fs.is_samepath', (str, File), (str, File))
- def is_samepath(self, state: 'ModuleState', args: T.Tuple['FileOrString', 'FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+ def is_samepath(self, state: ModuleState, args: T.Tuple[FileOrString, FileOrString], kwargs: T.Dict[str, T.Any]) -> bool:
if isinstance(args[0], File) or isinstance(args[1], File):
FeatureNew('fs.is_samepath with file', '0.59.0').use(state.subproject, location=state.current_node)
file1 = self._resolve_dir(state, args[0])
file2 = self._resolve_dir(state, args[1])
- if not file1.exists():
+ if not os.path.exists(file1):
return False
- if not file2.exists():
+ if not os.path.exists(file2):
return False
try:
- return file1.samefile(file2)
+ return os.path.samefile(file1, file2)
except OSError:
return False
@noKwargs
@typed_pos_args('fs.replace_suffix', (str, File, CustomTarget, CustomTargetIndex, BuildTarget), str)
- def replace_suffix(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes], str], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.replace_suffix', args[0], state)
- return str(path.with_suffix(args[1]))
+ def replace_suffix(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes], str], kwargs: T.Dict[str, T.Any]) -> str:
+ if args[1] and not args[1].startswith('.'):
+ raise ValueError(f"Invalid suffix {args[1]!r}")
+ path = self._obj_to_pathstr('fs.replace_suffix', args[0], state)
+ return os.path.splitext(path)[0] + args[1]
@noKwargs
@typed_pos_args('fs.parent', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
- def parent(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.parent', args[0], state)
- return str(path.parent)
+ def parent(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.parent', args[0], state)
+ return os.path.split(path)[0] or '.'
@noKwargs
@typed_pos_args('fs.name', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
- def name(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.name', args[0], state)
- return str(path.name)
+ def name(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.name', args[0], state)
+ return os.path.basename(path)
@noKwargs
@typed_pos_args('fs.stem', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
@FeatureNew('fs.stem', '0.54.0')
- def stem(self, state: 'ModuleState', args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
- path = self._obj_to_path('fs.stem', args[0], state)
- return str(path.stem)
+ def stem(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.stem', args[0], state)
+ return os.path.splitext(os.path.basename(path))[0]
+
+ @noKwargs
+ @typed_pos_args('fs.suffix', (str, File, CustomTarget, CustomTargetIndex, BuildTarget))
+ @FeatureNew('fs.suffix', '1.9.0')
+ def suffix(self, state: ModuleState, args: T.Tuple[T.Union[FileOrString, BuildTargetTypes]], kwargs: T.Dict[str, T.Any]) -> str:
+ path = self._obj_to_pathstr('fs.suffix', args[0], state)
+ return os.path.splitext(path)[1]
@FeatureNew('fs.read', '0.57.0')
@typed_pos_args('fs.read', (str, File))
@typed_kwargs('fs.read', KwargInfo('encoding', str, default='utf-8'))
- def read(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: 'ReadKwArgs') -> str:
+ def read(self, state: ModuleState, args: T.Tuple[FileOrString], kwargs: ReadKwArgs) -> str:
"""Read a file from the source tree and return its value as a decoded
string.
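
# Editor's sketch (not part of the diff): the fs module above now builds its
# suffix handling on os.path.splitext instead of pathlib. A minimal model of
# the replace_suffix/stem/suffix behavior, with hypothetical demo_* names:
import os.path

def demo_replace_suffix(path: str, suffix: str) -> str:
    # As in fs.replace_suffix above: the suffix must be empty or dot-prefixed.
    if suffix and not suffix.startswith('.'):
        raise ValueError(f'Invalid suffix {suffix!r}')
    return os.path.splitext(path)[0] + suffix

assert demo_replace_suffix('foo/bar.c', '.o') == 'foo/bar.o'
assert os.path.splitext(os.path.basename('a/b.tar.gz'))[0] == 'b.tar'  # fs.stem
assert os.path.splitext('a/b.tar.gz')[1] == '.gz'                      # fs.suffix
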
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 9f955ae..53919bc 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -22,7 +22,7 @@ from .. import build
from .. import interpreter
from .. import mesonlib
from .. import mlog
-from ..build import CustomTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments
+from ..build import CustomTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments, OverrideExecutable
from ..dependencies import Dependency, InternalDependency
from ..dependencies.pkgconfig import PkgConfigDependency, PkgConfigInterface
from ..interpreter.type_checking import DEPENDS_KW, DEPEND_FILES_KW, ENV_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, DEPENDENCY_SOURCES_KW, in_set_validator
@@ -33,11 +33,11 @@ from ..mesonlib import (
MachineChoice, MesonException, OrderedSet, Popen_safe, join_args, quote_arg
)
from ..options import OptionKey
-from ..programs import OverrideProgram
+from ..programs import ExternalProgram, OverrideProgram
from ..scripts.gettext import read_linguas
if T.TYPE_CHECKING:
- from typing_extensions import Literal, TypedDict
+ from typing_extensions import Literal, TypeAlias, TypedDict
from . import ModuleState
from ..build import BuildTarget
@@ -45,7 +45,6 @@ if T.TYPE_CHECKING:
from ..interpreter import Interpreter
from ..interpreterbase import TYPE_var, TYPE_kwargs
from ..mesonlib import FileOrString
- from ..programs import ExternalProgram
class PostInstall(TypedDict):
glib_compile_schemas: bool
@@ -198,7 +197,7 @@ if T.TYPE_CHECKING:
vtail: T.Optional[str]
depends: T.List[T.Union[BuildTarget, CustomTarget, CustomTargetIndex]]
- ToolType = T.Union[Executable, ExternalProgram, OverrideProgram]
+ ToolType: TypeAlias = T.Union[OverrideExecutable, ExternalProgram, OverrideProgram]
# Differs from the CustomTarget version in that it straight defaults to True
@@ -255,9 +254,8 @@ class GnomeModule(ExtensionModule):
def __init__(self, interpreter: 'Interpreter') -> None:
super().__init__(interpreter)
- self.gir_dep: T.Optional[Dependency] = None
- self.giscanner: T.Optional[T.Union[ExternalProgram, Executable, OverrideProgram]] = None
- self.gicompiler: T.Optional[T.Union[ExternalProgram, Executable, OverrideProgram]] = None
+ self.giscanner: T.Optional[ToolType] = None
+ self.gicompiler: T.Optional[ToolType] = None
self.install_glib_compile_schemas = False
self.install_gio_querymodules: T.List[str] = []
self.install_gtk_update_icon_cache = False
@@ -309,7 +307,7 @@ class GnomeModule(ExtensionModule):
once=True, fatal=False)
@staticmethod
- def _find_tool(state: 'ModuleState', tool: str) -> 'ToolType':
+ def _find_tool(state: 'ModuleState', tool: str, for_machine: MachineChoice = MachineChoice.HOST) -> 'ToolType':
tool_map = {
'gio-querymodules': 'gio-2.0',
'glib-compile-schemas': 'gio-2.0',
@@ -322,7 +320,7 @@ class GnomeModule(ExtensionModule):
}
depname = tool_map[tool]
varname = tool.replace('-', '_')
- return state.find_tool(tool, depname, varname)
+ return state.find_tool(tool, depname, varname, for_machine=for_machine)
@typed_kwargs(
'gnome.post_install',
@@ -636,7 +634,7 @@ class GnomeModule(ExtensionModule):
# https://github.com/mesonbuild/meson/issues/1911
# However, g-ir-scanner does not understand -Wl,-rpath
# so we need to use -L instead
- for d in state.backend.determine_rpath_dirs(lib):
+ for d in lib.determine_rpath_dirs():
d = os.path.join(state.environment.get_build_dir(), d)
link_command.append('-L' + d)
if include_rpath:
@@ -775,9 +773,7 @@ class GnomeModule(ExtensionModule):
STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1"
if isinstance(girtarget, (build.StaticLibrary)) and \
- not mesonlib.version_compare(
- self._get_gir_dep(state)[0].get_version(),
- STATIC_BUILD_REQUIRED_VERSION):
+ not self._giscanner_version_compare(state, STATIC_BUILD_REQUIRED_VERSION):
raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION)
return girtarget
@@ -791,18 +787,26 @@ class GnomeModule(ExtensionModule):
if self.devenv is not None:
b.devenv.append(self.devenv)
- def _get_gir_dep(self, state: 'ModuleState') -> T.Tuple[Dependency, T.Union[Executable, 'ExternalProgram', 'OverrideProgram'],
- T.Union[Executable, 'ExternalProgram', 'OverrideProgram']]:
- if not self.gir_dep:
- self.gir_dep = state.dependency('gobject-introspection-1.0')
- self.giscanner = self._find_tool(state, 'g-ir-scanner')
- self.gicompiler = self._find_tool(state, 'g-ir-compiler')
- return self.gir_dep, self.giscanner, self.gicompiler
+ def _get_gi(self, state: 'ModuleState') -> T.Tuple[ToolType, ToolType]:
+ if not self.giscanner:
+ self.giscanner = self._find_tool(state, 'g-ir-scanner', for_machine=MachineChoice.BUILD)
+ self.gicompiler = self._find_tool(state, 'g-ir-compiler', for_machine=MachineChoice.HOST)
+ return self.giscanner, self.gicompiler
+
+ def _giscanner_version_compare(self, state: 'ModuleState', cmp: str) -> bool:
+ # Support for --version was introduced in g-i 1.58, but Ubuntu
+ # Bionic shipped 1.56.1. As all our version checks are greater
+ # than 1.58, we can just return False if get_version fails.
+ try:
+ giscanner, _ = self._get_gi(state)
+ return mesonlib.version_compare(giscanner.get_version(), cmp)
+ except MesonException:
+ return False
@functools.lru_cache(maxsize=None)
def _gir_has_option(self, option: str) -> bool:
exe = self.giscanner
- if isinstance(exe, OverrideProgram):
+ if isinstance(exe, (Executable, OverrideProgram)):
# Handle overridden g-ir-scanner
assert option in {'--extra-library', '--sources-top-dirs'}
return True
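
# Editor's sketch (not meson code): the fallback pattern behind
# _giscanner_version_compare above. g-ir-scanner only gained --version in
# 1.58, so a failed version query is simply treated as "requirement not met".
import subprocess

def tool_version_at_least(cmd: str, minimum: tuple) -> bool:
    try:
        out = subprocess.run([cmd, '--version'], capture_output=True,
                             text=True, check=True).stdout.split()[-1]
        version = tuple(int(part) for part in out.split('.'))
    except (OSError, subprocess.CalledProcessError, ValueError, IndexError):
        return False  # no usable --version output: assume it is too old
    return version >= minimum
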
@@ -867,7 +871,7 @@ class GnomeModule(ExtensionModule):
# https://github.com/mesonbuild/meson/issues/1911
# However, g-ir-scanner does not understand -Wl,-rpath
# so we need to use -L instead
- for d in state.backend.determine_rpath_dirs(girtarget):
+ for d in girtarget.determine_rpath_dirs():
d = os.path.join(state.environment.get_build_dir(), d)
ret.append('-L' + d)
@@ -990,10 +994,10 @@ class GnomeModule(ExtensionModule):
run_env.set('CFLAGS', [quote_arg(x) for x in env_flags], ' ')
run_env.merge(kwargs['env'])
- gir_dep, _, _ = self._get_gir_dep(state)
+ giscanner, _ = self._get_gi(state)
# response file supported?
- rspable = mesonlib.version_compare(gir_dep.get_version(), '>= 1.85.0')
+ rspable = self._giscanner_version_compare(state, '>= 1.85.0')
return GirTarget(
girfile,
@@ -1145,7 +1149,7 @@ class GnomeModule(ExtensionModule):
if len(girtargets) > 1 and any(isinstance(el, Executable) for el in girtargets):
raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
- gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
+ giscanner, gicompiler = self._get_gi(state)
ns = kwargs['namespace']
nsversion = kwargs['nsversion']
@@ -1156,14 +1160,13 @@ class GnomeModule(ExtensionModule):
builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
depends: T.List[T.Union['FileOrString', 'build.GeneratedTypes', build.BuildTarget, build.StructuredSources]] = []
- depends.extend(gir_dep.sources)
depends.extend(girtargets)
langs_compilers = self._get_girtargets_langs_compilers(girtargets)
cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
deps = self._get_gir_targets_deps(girtargets)
deps += kwargs['dependencies']
- deps += [gir_dep]
+ deps += [state.dependency('glib-2.0'), state.dependency('gobject-2.0'), state.dependency('gmodule-2.0'), state.dependency('gio-2.0')]
typelib_includes, depends = self._gather_typelib_includes_and_update_depends(state, deps, depends)
# ldflags will be misinterpreted by gir scanner (showing
# spurious dependencies) but building GStreamer fails if they
@@ -1190,6 +1193,32 @@ class GnomeModule(ExtensionModule):
scan_command: T.List[T.Union[str, Executable, 'ExternalProgram', 'OverrideProgram']] = [giscanner]
scan_command += ['--quiet']
+
+ if state.environment.is_cross_build() and state.environment.need_exe_wrapper():
+ if not state.environment.has_exe_wrapper():
+ mlog.error('generate_gir requires exe_wrapper')
+
+ binary_wrapper = state.environment.get_exe_wrapper().get_command()
+ ldd = state.environment.lookup_binary_entry(MachineChoice.HOST, 'ldd')
+ if ldd is None:
+ ldd_wrapper = ['ldd']
+ else:
+ ldd_wrapper = ExternalProgram.from_bin_list(state.environment, MachineChoice.HOST, 'ldd').get_command()
+
+ WRAPPER_ARGS_REQUIRED_VERSION = ">=1.85.0"
+ if not self._giscanner_version_compare(state, WRAPPER_ARGS_REQUIRED_VERSION):
+ msg = ('Use of gnome.generate_gir during cross compilation requires '
+ f'g-ir-scanner {WRAPPER_ARGS_REQUIRED_VERSION}')
+ raise MesonException(msg)
+ else:
+ scan_command += ['--use-binary-wrapper', binary_wrapper[0]]
+ if len(binary_wrapper) > 1:
+ scan_command += ['--binary-wrapper-args-begin', *binary_wrapper[1:], '--binary-wrapper-args-end']
+
+ scan_command += ['--use-ldd-wrapper', ldd_wrapper[0]]
+ if len(ldd_wrapper) > 1:
+ scan_command += ['--ldd-wrapper-args-begin', *ldd_wrapper[1:], '--ldd-wrapper-args-end']
+
scan_command += ['--no-libtool']
scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
scan_command += ['--warn-all']
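
# Editor's sketch: how the wrapper flags assembled above land on the
# g-ir-scanner command line; the qemu/ldd values are invented examples.
binary_wrapper = ['qemu-aarch64', '-L', '/usr/aarch64-linux-gnu']
ldd_wrapper = ['aarch64-linux-gnu-ldd']

scan_command = ['--use-binary-wrapper', binary_wrapper[0]]
if len(binary_wrapper) > 1:
    scan_command += ['--binary-wrapper-args-begin', *binary_wrapper[1:],
                     '--binary-wrapper-args-end']
scan_command += ['--use-ldd-wrapper', ldd_wrapper[0]]
if len(ldd_wrapper) > 1:
    scan_command += ['--ldd-wrapper-args-begin', *ldd_wrapper[1:],
                     '--ldd-wrapper-args-end']
# -> --use-binary-wrapper qemu-aarch64 --binary-wrapper-args-begin -L
#    /usr/aarch64-linux-gnu --binary-wrapper-args-end --use-ldd-wrapper ...
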
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index e3f7a97..bef14e9 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -156,6 +156,14 @@ class DependenciesHelper:
pass
elif isinstance(obj, dependencies.ExternalDependency) and obj.name == 'threads':
pass
+ elif isinstance(obj, dependencies.InternalDependency) and all(lib.get_id() in self.metadata for lib in obj.libraries):
+ # Ensure BothLibraries are resolved:
+ if self.pub_libs and isinstance(self.pub_libs[0], build.StaticLibrary):
+ obj = obj.get_as_static(recursive=True)
+ else:
+ obj = obj.get_as_shared(recursive=True)
+ for lib in obj.libraries:
+ processed_reqs.append(self.metadata[lib.get_id()].filebase)
else:
raise mesonlib.MesonException('requires argument not a string, '
'library with pkgconfig-generated file '
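
# Editor's sketch: a minimal model (not meson code) of the rule added above.
# A BothLibraries-style dependency is narrowed to its static or shared form
# depending on what the generated .pc file itself describes.
class BothDep:
    def __init__(self, static, shared):
        self.static, self.shared = static, shared
    def get_as_static(self, recursive=True): return self.static
    def get_as_shared(self, recursive=True): return self.shared

def narrow(dep: BothDep, pub_lib_is_static: bool):
    return dep.get_as_static(True) if pub_lib_is_static else dep.get_as_shared(True)

assert narrow(BothDep('libfoo.a', 'libfoo.so'), pub_lib_is_static=True) == 'libfoo.a'
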
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index 8d82a33..3c07960 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -16,7 +16,7 @@ from ..dependencies.detect import get_dep_identifier, find_external_dependency
from ..dependencies.python import BasicPythonExternalProgram, python_factory, _PythonDependencyBase
from ..interpreter import extract_required_kwarg, permitted_dependency_kwargs, primitives as P_OBJ
from ..interpreter.interpreterobjects import _ExternalProgramHolder
-from ..interpreter.type_checking import NoneType, PRESERVE_PATH_KW, SHARED_MOD_KWS
+from ..interpreter.type_checking import NoneType, DEPENDENCY_KWS, PRESERVE_PATH_KW, SHARED_MOD_KWS
from ..interpreterbase import (
noPosargs, noKwargs, permittedKwargs, ContainerTypeInfo,
InvalidArguments, typed_pos_args, typed_kwargs, KwargInfo,
@@ -256,6 +256,7 @@ class PythonInstallation(_ExternalProgramHolder['PythonExternalProgram']):
@permittedKwargs(permitted_dependency_kwargs | {'embed'})
@FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed'])
@noPosargs
+ @typed_kwargs('python_installation.dependency', *DEPENDENCY_KWS, allow_unknown=True)
@InterpreterObject.method('dependency')
def dependency_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'Dependency':
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py
index c5f18e8..d0e8091 100644
--- a/mesonbuild/modules/rust.py
+++ b/mesonbuild/modules/rust.py
@@ -242,6 +242,10 @@ class RustModule(ExtensionModule):
def doctest(self, state: ModuleState, args: T.Tuple[str, T.Union[SharedLibrary, StaticLibrary]], kwargs: FuncDoctest) -> ModuleReturnValue:
name, base_target = args
+ if not base_target.uses_rust():
+ raise MesonException('doc tests are only supported for Rust targets')
+ if not base_target.uses_rust_abi():
+ raise MesonException("doc tests are not supported for rust_abi: 'c'")
if state.environment.is_cross_build() and state.environment.need_exe_wrapper(base_target.for_machine):
mlog.notice('skipping Rust doctests due to cross compilation', once=True)
return ModuleReturnValue(None, [])
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index b08d5e8..8d7dd0b 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -27,7 +27,6 @@ if T.TYPE_CHECKING:
builddir: str
sourcedir: str
pager: bool
- unset_opts: T.List[str]
git_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
*
@@ -194,22 +193,25 @@ class MesonApp:
return self._generate(env, capture, vslite_ctx)
def check_unused_options(self, coredata: 'coredata.CoreData', cmd_line_options: T.Dict[OptionKey, str], all_subprojects: T.Mapping[str, object]) -> None:
- pending = coredata.optstore.pending_options
errlist: T.List[str] = []
known_subprojects = all_subprojects.keys()
- for opt in pending:
- # It is not an error to set wrong option for unknown subprojects
- # because they might be used in future reconfigurations
- if coredata.optstore.accept_as_pending_option(opt, known_subprojects):
+ for opt in cmd_line_options:
+ # Accept options that exist or could appear in subsequent reconfigurations,
+ # including options for subprojects that were not used
+ if opt in coredata.optstore or \
+ opt.evolve(subproject=None) in coredata.optstore or \
+ coredata.optstore.accept_as_pending_option(opt):
continue
- if opt in cmd_line_options:
- errlist.append(f'"{opt}"')
+ if opt.subproject and opt.subproject not in known_subprojects:
+ continue
+ # "foo=true" may also refer to toplevel project option ":foo"
+ if opt.subproject is None and coredata.optstore.is_project_option(opt.as_root()):
+ continue
+ errlist.append(f'"{opt}"')
if errlist:
errstr = ', '.join(errlist)
raise MesonException(f'Unknown options: {errstr}')
- coredata.optstore.clear_pending()
-
def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.Optional[dict]) -> T.Optional[dict]:
# Get all user defined options, including options that have been defined
# during a previous invocation or using meson configure.
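
# Editor's sketch: the acceptance rules of check_unused_options above,
# condensed to plain data. OptionKey is modeled as a (name, subproject)
# tuple, the option store as a set, and pending_ok stands in for
# optstore.accept_as_pending_option.
def is_unknown_option(opt, optstore, pending_ok, known_subprojects, project_options):
    name, subproject = opt
    if opt in optstore or (name, None) in optstore or pending_ok(opt):
        return False  # exists now, or may appear on reconfiguration
    if subproject is not None and subproject not in known_subprojects:
        return False  # unused subproject: kept for future reconfigurations
    if subproject is None and (name, '') in project_options:
        return False  # "foo=bar" may mean the toplevel project option ":foo"
    return True       # collected into the "Unknown options:" error
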
diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py
index c74283c..d4549c0 100755
--- a/mesonbuild/msubprojects.py
+++ b/mesonbuild/msubprojects.py
@@ -4,6 +4,7 @@ from dataclasses import dataclass, InitVar
import os, subprocess
import argparse
import asyncio
+import fnmatch
import threading
import copy
import shutil
@@ -640,9 +641,14 @@ def add_common_arguments(p: argparse.ArgumentParser) -> None:
p.add_argument('--allow-insecure', default=False, action='store_true',
help='Allow insecure server connections.')
-def add_subprojects_argument(p: argparse.ArgumentParser) -> None:
- p.add_argument('subprojects', nargs='*',
- help='List of subprojects (default: all)')
+def add_subprojects_argument(p: argparse.ArgumentParser, name: T.Optional[str] = None) -> None:
+ helpstr = 'Patterns of subprojects to operate on (default: all)'
+ if name:
+ p.add_argument(name, dest='subprojects', metavar='pattern', action='append',
+ default=[], help=helpstr)
+ else:
+ p.add_argument('subprojects', metavar='pattern', nargs='*', default=[],
+ help=helpstr)
def add_wrap_update_parser(subparsers: 'SubParsers') -> argparse.ArgumentParser:
p = subparsers.add_parser('update', help='Update wrap files from WrapDB (Since 0.63.0)')
@@ -692,7 +698,7 @@ def add_arguments(parser: argparse.ArgumentParser) -> None:
p.add_argument('args', nargs=argparse.REMAINDER,
help=argparse.SUPPRESS)
add_common_arguments(p)
- p.set_defaults(subprojects=[])
+ add_subprojects_argument(p, '--filter')
p.set_defaults(subprojects_func=Runner.foreach)
p = subparsers.add_parser('purge', help='Remove all wrap-based subproject artifacts')
@@ -724,7 +730,8 @@ def run(options: 'Arguments') -> int:
return 0
r = Resolver(source_dir, subproject_dir, wrap_frontend=True, allow_insecure=options.allow_insecure, silent=True)
if options.subprojects:
- wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects]
+ wraps = [wrap for name, wrap in r.wraps.items()
+ if any(fnmatch.fnmatch(name, pat) for pat in options.subprojects)]
else:
wraps = list(r.wraps.values())
types = [t.strip() for t in options.types.split(',')] if options.types else []
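
# Editor's sketch of the new pattern semantics; subproject names are made up.
# Equivalent CLI: meson subprojects foreach --filter 'g*' --filter zlib ...
import fnmatch

wraps = ['glib', 'gstreamer', 'zlib', 'sqlite3']
patterns = ['g*', 'zlib']
selected = [name for name in wraps
            if any(fnmatch.fnmatch(name, pat) for pat in patterns)]
assert selected == ['glib', 'gstreamer', 'zlib']
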
diff --git a/mesonbuild/options.py b/mesonbuild/options.py
index bc4d79f..988b4f3 100644
--- a/mesonbuild/options.py
+++ b/mesonbuild/options.py
@@ -321,6 +321,7 @@ class UserOption(T.Generic[_T], HoldableObject):
yielding: bool = DEFAULT_YIELDING
deprecated: DeprecatedType = False
readonly: bool = dataclasses.field(default=False)
+ parent: T.Optional[UserOption] = None
def __post_init__(self, value_: _T) -> None:
self.value = self.validate_value(value_)
@@ -805,6 +806,7 @@ class OptionStore:
def __init__(self, is_cross: bool) -> None:
self.options: T.Dict['OptionKey', 'AnyOptionType'] = {}
+ self.subprojects: T.Set[str] = set()
self.project_options: T.Set[OptionKey] = set()
self.module_options: T.Set[OptionKey] = set()
from .compilers import all_languages
@@ -812,13 +814,11 @@ class OptionStore:
self.augments: OptionDict = {}
self.is_cross = is_cross
- # Pending options are options that need to be initialized later, either
- # configuration dependent options like compiler options, or options for
- # a different subproject
+ # Pending options are configuration dependent options that could be
+ # initialized later, such as compiler options
self.pending_options: OptionDict = {}
-
- def clear_pending(self) -> None:
- self.pending_options = {}
+ # Subproject options from toplevel project()
+ self.pending_subproject_options: OptionDict = {}
def ensure_and_validate_key(self, key: T.Union[OptionKey, str]) -> OptionKey:
if isinstance(key, str):
@@ -854,40 +854,33 @@ class OptionStore:
potential = self.options.get(key, None)
if self.is_project_option(key):
assert key.subproject is not None
- if potential is not None and potential.yielding:
- parent_key = key.as_root()
- try:
- parent_option = self.options[parent_key]
- except KeyError:
- # Subproject is set to yield, but top level
- # project does not have an option of the same
- # name. Return the subproject option.
- return potential
- # If parent object has different type, do not yield.
- # This should probably be an error.
- if type(parent_option) is type(potential):
- return parent_option
- return potential
if potential is None:
raise KeyError(f'Tried to access nonexistant project option {key}.')
- return potential
else:
if potential is None:
parent_key = OptionKey(key.name, subproject=None, machine=key.machine)
if parent_key not in self.options:
raise KeyError(f'Tried to access nonexistant project parent option {parent_key}.')
+ # This is a global option, but it can still have a per-project
+ # augment, so return the parent option object.
return self.options[parent_key]
- return potential
+ return potential
def get_value_object_and_value_for(self, key: OptionKey) -> T.Tuple[AnyOptionType, ElementaryOptionValues]:
assert isinstance(key, OptionKey)
vobject = self.get_value_object_for(key)
computed_value = vobject.value
- if key.subproject is not None:
- if key in self.augments:
- computed_value = vobject.validate_value(self.augments[key])
+ if key in self.augments:
+ assert key.subproject is not None
+ computed_value = self.augments[key]
+ elif vobject.yielding:
+ computed_value = vobject.parent.value
return (vobject, computed_value)
+ def option_has_value(self, key: OptionKey, value: ElementaryOptionValues) -> bool:
+ vobject, current_value = self.get_value_object_and_value_for(key)
+ return vobject.validate_value(value) == current_value
+
def get_value_for(self, name: 'T.Union[OptionKey, str]', subproject: T.Optional[str] = None) -> ElementaryOptionValues:
if isinstance(name, str):
key = OptionKey(name, subproject)
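
# Editor's sketch: the lookup order now implemented by
# get_value_object_and_value_for above. A per-subproject augment wins,
# then a yielding option defers to its toplevel parent, otherwise the
# option's own value applies.
def effective_value(vobject, key, augments):
    if key in augments:           # e.g. -Dsub:b_ndebug=true
        return augments[key]
    if vobject.yielding:          # linked to a same-typed toplevel option
        return vobject.parent.value
    return vobject.value
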
@@ -932,6 +925,19 @@ class OptionStore:
assert key.subproject is not None
if key in self.options:
raise MesonException(f'Internal error: tried to add a project option {key} that already exists.')
+ if valobj.yielding and key.subproject:
+ parent_key = key.as_root()
+ try:
+ parent_option = self.options[parent_key]
+ # If parent object has different type, do not yield.
+ # This should probably be an error.
+ if type(parent_option) is type(valobj):
+ valobj.parent = parent_option
+ except KeyError:
+ # Subproject is set to yield, but top level
+ # project does not have an option of the same
+ pass
+ valobj.yielding = bool(valobj.parent)
self.options[key] = valobj
self.project_options.add(key)
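
# Editor's sketch of the parent-linking rule in add_project_option above;
# Opt is a stand-in for UserOption and the option names are invented.
class Opt:
    def __init__(self, value, yielding=True):
        self.value, self.yielding, self.parent = value, yielding, None

toplevel = {'warnlevel': Opt('3')}           # hypothetical toplevel option
sub = Opt('1', yielding=True)                # same-named subproject option
parent = toplevel.get('warnlevel')
if sub.yielding and parent is not None and type(parent) is type(sub):
    sub.parent = parent
sub.yielding = bool(sub.parent)  # only yield when a usable parent exists
assert sub.parent.value == '3'
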
@@ -998,6 +1004,7 @@ class OptionStore:
return value.as_posix()
def set_option(self, key: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
+ changed = False
error_key = key
if error_key.subproject == '':
error_key = error_key.evolve(subproject=None)
@@ -1034,13 +1041,19 @@ class OptionStore:
elif isinstance(opt.deprecated, str):
mlog.deprecation(f'Option "{error_key}" is replaced by {opt.deprecated!r}')
# Change both this aption and the new one pointed to.
- dirty = self.set_option(key.evolve(name=opt.deprecated), new_value)
- dirty |= opt.set_value(new_value)
- return dirty
+ changed |= self.set_option(key.evolve(name=opt.deprecated), new_value, first_invocation)
- old_value = opt.value
- changed = opt.set_value(new_value)
+ new_value = opt.validate_value(new_value)
+ if key in self.options:
+ old_value = opt.value
+ opt.set_value(new_value)
+ opt.yielding = False
+ else:
+ assert key.subproject is not None
+ old_value = self.augments.get(key, opt.value)
+ self.augments[key] = new_value
+ changed |= old_value != new_value
if opt.readonly and changed and not first_invocation:
raise MesonException(f'Tried to modify read only option "{error_key}"')
@@ -1054,12 +1067,12 @@ class OptionStore:
optimization, debug = self.DEFAULT_DEPENDENTS[new_value]
dkey = key.evolve(name='debug')
optkey = key.evolve(name='optimization')
- self.options[dkey].set_value(debug)
- self.options[optkey].set_value(optimization)
+ self.set_option(dkey, debug, first_invocation)
+ self.set_option(optkey, optimization, first_invocation)
return changed
- def set_option_maybe_root(self, o: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
+ def set_user_option(self, o: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
if not self.is_cross and o.is_for_build():
return False
@@ -1070,42 +1083,51 @@ class OptionStore:
# can be either
#
# A) a system option in which case the subproject is None
- # B) a project option, in which case the subproject is '' (this method is only called from top level)
+ # B) a project option, in which case the subproject is ''
#
# The key parsing function can not handle the difference between the two
# and defaults to A.
if o in self.options:
return self.set_option(o, new_value, first_invocation)
+
+ # could also be an augment...
+ global_option = o.evolve(subproject=None)
+ if o.subproject is not None and global_option in self.options:
+ return self.set_option(o, new_value, first_invocation)
+
if self.accept_as_pending_option(o, first_invocation=first_invocation):
old_value = self.pending_options.get(o, None)
self.pending_options[o] = new_value
- return old_value is None or str(old_value) == new_value
- else:
+ return old_value is None or str(old_value) != new_value
+ elif o.subproject is None:
o = o.as_root()
return self.set_option(o, new_value, first_invocation)
+ else:
+ raise MesonException(f'Unknown option: "{o}".')
- def set_from_configure_command(self, D_args: T.List[str], U_args: T.List[str]) -> bool:
+ def set_from_configure_command(self, D_args: T.Dict[OptionKey, T.Optional[str]]) -> bool:
dirty = False
- D_args = [] if D_args is None else D_args
- (global_options, perproject_global_options, project_options) = self.classify_D_arguments(D_args)
- U_args = [] if U_args is None else U_args
- for key, valstr in global_options:
- dirty |= self.set_option_maybe_root(key, valstr)
- for key, valstr in project_options:
- dirty |= self.set_option_maybe_root(key, valstr)
- for key, valstr in perproject_global_options:
- if key in self.augments:
- if self.augments[key] != valstr:
- self.augments[key] = valstr
- dirty = True
- else:
- self.augments[key] = valstr
- dirty = True
- for keystr in U_args:
- key = OptionKey.from_string(keystr)
+ for key, valstr in D_args.items():
+ if valstr is not None:
+ dirty |= self.set_user_option(key, valstr)
+ continue
+
if key in self.augments:
del self.augments[key]
dirty = True
+ else:
+ # TODO: For project options, "dropping an augment" means going
+ # back to the superproject's value. However, it's confusing
+ # that -U does not simply remove the option from the stored
+ # cmd_line_options. This may cause "meson setup --wipe" to
+ # have surprising behavior. For this to work, UserOption
+ # should only store the default value and the option values
+ # should be stored with their source (project(), subproject(),
+ # machine file, command line). This way the effective value
+ # can be easily recomputed.
+ opt = self.get_value_object(key)
+ dirty |= not opt.yielding and bool(opt.parent)
+ opt.yielding = bool(opt.parent)
return dirty
def reset_prefixed_options(self, old_prefix: str, new_prefix: str) -> None:
@@ -1226,24 +1248,6 @@ class OptionStore:
def is_module_option(self, key: OptionKey) -> bool:
return key in self.module_options
- def classify_D_arguments(self, D: T.List[str]) -> T.Tuple[T.List[T.Tuple[OptionKey, str]],
- T.List[T.Tuple[OptionKey, str]],
- T.List[T.Tuple[OptionKey, str]]]:
- global_options = []
- project_options = []
- perproject_global_options = []
- for setval in D:
- keystr, valstr = setval.split('=', 1)
- key = OptionKey.from_string(keystr)
- valuetuple = (key, valstr)
- if self.is_project_option(key):
- project_options.append(valuetuple)
- elif key.subproject is None:
- global_options.append(valuetuple)
- else:
- perproject_global_options.append(valuetuple)
- return (global_options, perproject_global_options, project_options)
-
def prefix_split_options(self, coll: OptionDict) -> T.Tuple[T.Optional[str], OptionDict]:
prefix = None
others_d: OptionDict = {}
@@ -1305,15 +1309,15 @@ class OptionStore:
if not self.is_cross and key.is_for_build():
continue
if key.subproject:
- # do apply project() default_options for subprojects here, because
- # they have low priority
- self.pending_options[key] = valstr
+ # Subproject options from toplevel project() have low priority
+ # and will be processed when the subproject is found
+ self.pending_subproject_options[key] = valstr
else:
# Setting a project option with default_options
# should arguably be a hard error; the default
# value of project option should be set in the option
# file, not in the project call.
- self.set_option_maybe_root(key, valstr, True)
+ self.set_user_option(key, valstr, True)
# ignore subprojects for now for machine file and command line
# options; they are applied later
@@ -1323,25 +1327,18 @@ class OptionStore:
if not self.is_cross and key.is_for_build():
continue
if not key.subproject:
- self.set_option_maybe_root(key, valstr, True)
+ self.set_user_option(key, valstr, True)
for key, valstr in cmd_line_options.items():
# Due to backwards compatibility we ignore all build-machine options
# when building natively.
if not self.is_cross and key.is_for_build():
continue
if not key.subproject:
- self.set_option_maybe_root(key, valstr, True)
+ self.set_user_option(key, valstr, True)
- def accept_as_pending_option(self, key: OptionKey, known_subprojects: T.Optional[T.Container[str]] = None,
- first_invocation: bool = False) -> bool:
- # Fail on unknown options that we can know must exist at this point in time.
- # Subproject and compiler options are resolved later.
- #
+ def accept_as_pending_option(self, key: OptionKey, first_invocation: bool = False) -> bool:
# Some base options (sanitizers etc) might get added later.
# Permitting them all is not strictly correct.
- if key.subproject:
- if known_subprojects is None or key.subproject not in known_subprojects:
- return True
if self.is_compiler_option(key):
return True
if first_invocation and self.is_backend_option(key):
@@ -1365,23 +1362,40 @@ class OptionStore:
project_default_options: OptionDict,
cmd_line_options: OptionDict,
machine_file_options: OptionDict) -> None:
- # pick up pending per-project settings from the toplevel project() invocation
- options = {k: v for k, v in self.pending_options.items() if k.subproject == subproject}
- # apply project() and subproject() default_options
- for key, valstr in itertools.chain(project_default_options.items(), spcall_default_options.items()):
+ options: OptionDict = {}
+
+ # project() default_options
+ for key, valstr in project_default_options.items():
+ if key.subproject == subproject:
+ without_subp = key.evolve(subproject=None)
+ raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"')
+
if key.subproject is None:
key = key.evolve(subproject=subproject)
- elif key.subproject == subproject:
+ options[key] = valstr
+
+ # augments from the toplevel project() default_options
+ for key, valstr in self.pending_subproject_options.items():
+ if key.subproject == subproject:
+ options[key] = valstr
+
+ # subproject() default_options
+ for key, valstr in spcall_default_options.items():
+ if key.subproject == subproject:
without_subp = key.evolve(subproject=None)
raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"')
+
+ if key.subproject is None:
+ key = key.evolve(subproject=subproject)
options[key] = valstr
# then global settings from machine file and command line
+ # **but not if they are toplevel project options**
for key, valstr in itertools.chain(machine_file_options.items(), cmd_line_options.items()):
- if key.subproject is None:
+ if key.subproject is None and not self.is_project_option(key.as_root()):
subp_key = key.evolve(subproject=subproject)
- self.pending_options.pop(subp_key, None)
+ self.pending_subproject_options.pop(subp_key, None)
options.pop(subp_key, None)
# then finally per project augments from machine file and command line
@@ -1391,12 +1405,21 @@ class OptionStore:
# merge everything that has been computed above, while giving self.augments priority
for key, valstr in options.items():
+ if key.subproject != subproject:
+ if key.subproject in self.subprojects and not self.option_has_value(key, valstr):
+ mlog.warning(f'option {key} is set in subproject {subproject} but has already been processed')
+ continue
+
+ # Subproject options from project() will be processed when the subproject is found
+ self.pending_subproject_options[key] = valstr
+ continue
+
+ self.pending_subproject_options.pop(key, None)
self.pending_options.pop(key, None)
- valstr = self.augments.pop(key, valstr)
- if key in self.project_options:
- self.set_option(key, valstr, True)
- else:
- self.augments[key] = valstr
+ if key not in self.augments:
+ self.set_user_option(key, valstr, True)
+
+ self.subprojects.add(subproject)
def update_project_options(self, project_options: MutableKeyedOptionDictType, subproject: SubProject) -> None:
for key, value in project_options.items():
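
# Editor's sketch: life cycle of a toplevel default for a subproject option,
# condensed from the two initialize_* paths above; keys are again modeled as
# (name, subproject) tuples and the values are invented.
pending_subproject_options = {}

# toplevel: project(..., default_options: ['mysub:opt=val'])
pending_subproject_options[('opt', 'mysub')] = 'val'

# later, when subproject('mysub') is configured, the pending entry is
# consumed; an existing augment (command line/machine file) takes priority.
augments = {}
key = ('opt', 'mysub')
valstr = pending_subproject_options.pop(key)
if key not in augments:
    pass  # applied via set_user_option(key, valstr, True)
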
diff --git a/mesonbuild/scripts/clangtidy.py b/mesonbuild/scripts/clangtidy.py
index 550faee..e5f7024 100644
--- a/mesonbuild/scripts/clangtidy.py
+++ b/mesonbuild/scripts/clangtidy.py
@@ -11,7 +11,7 @@ import os
import shutil
import sys
-from .run_tool import run_clang_tool, run_with_buffered_output
+from .run_tool import run_with_buffered_output, run_clang_tool_on_sources
from ..environment import detect_clangtidy, detect_clangapply
import typing as T
@@ -56,7 +56,7 @@ def run(args: T.List[str]) -> int:
fixesdir.unlink()
fixesdir.mkdir(parents=True)
- tidyret = run_clang_tool('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir)
+ tidyret = run_clang_tool_on_sources('clang-tidy', srcdir, builddir, run_clang_tidy, tidyexe, builddir, fixesdir)
if fixesdir is not None:
print('Applying fix-its...')
applyret = subprocess.run(applyexe + ['-format', '-style=file', '-ignore-insert-conflict', fixesdir]).returncode
diff --git a/mesonbuild/scripts/run_tool.py b/mesonbuild/scripts/run_tool.py
index e206ff7..6181c6d 100644
--- a/mesonbuild/scripts/run_tool.py
+++ b/mesonbuild/scripts/run_tool.py
@@ -128,6 +128,26 @@ def run_clang_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[...,
yield fn(path, *args)
return asyncio.run(_run_workers(all_clike_files(name, srcdir, builddir), wrapper))
+def run_clang_tool_on_sources(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., T.Coroutine[None, None, int]], *args: T.Any) -> int:
+ if sys.platform == 'win32':
+ asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
+
+ source_files = set()
+ with open('meson-info/intro-targets.json', encoding='utf-8') as fp:
+ targets = json.load(fp)
+
+ for target in targets:
+ for target_source in target.get('target_sources') or []:
+ for source in target_source.get('sources') or []:
+ source_files.add(Path(source))
+
+ clike_files = set(all_clike_files(name, srcdir, builddir))
+ source_files = source_files.intersection(clike_files)
+
+ def wrapper(path: Path) -> T.Iterable[T.Coroutine[None, None, int]]:
+ yield fn(path, *args)
+ return asyncio.run(_run_workers(source_files, wrapper))
+
def run_tool_on_targets(fn: T.Callable[[T.Dict[str, T.Any]],
T.Iterable[T.Coroutine[None, None, int]]]) -> int:
if sys.platform == 'win32':
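
# Editor's sketch of the source collection done by run_clang_tool_on_sources
# above, as a standalone helper; the JSON layout matches what meson writes to
# meson-info/intro-targets.json, the surrounding scaffolding is hypothetical.
import json
from pathlib import Path

def introspected_sources(intro_file: str) -> set:
    with open(intro_file, encoding='utf-8') as fp:
        targets = json.load(fp)
    sources = set()
    for target in targets:
        for target_source in target.get('target_sources') or []:
            for source in target_source.get('sources') or []:
                sources.add(Path(source))
    return sources  # then intersected with the C-like files found on disk
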
diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py
index 4b656a6..0628310 100644
--- a/mesonbuild/utils/universal.py
+++ b/mesonbuild/utils/universal.py
@@ -433,7 +433,7 @@ class File(HoldableObject):
absdir = srcdir
if self.is_built:
absdir = builddir
- return os.path.join(absdir, self.relative_name())
+ return os.path.normpath(os.path.join(absdir, self.relative_name()))
@property
def suffix(self) -> str:
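
# Editor's sketch: what the normpath change above buys; '.' and '..' segments
# are collapsed so the same file always yields one canonical absolute path.
import os.path
a = os.path.join('/src', 'sub/../main.c')
assert os.path.normpath(a) == os.path.normpath('/src/main.c')
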
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index c8eff69..1cc2cee 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -57,7 +57,21 @@ WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com'
ALL_TYPES = ['file', 'git', 'hg', 'svn', 'redirect']
-PATCH = shutil.which('patch')
+if mesonlib.is_windows():
+ from ..programs import ExternalProgram
+ from ..mesonlib import version_compare
+ _exclude_paths: T.List[str] = []
+ while True:
+ _patch = ExternalProgram('patch', silent=True, exclude_paths=_exclude_paths)
+ if not _patch.found():
+ break
+ if version_compare(_patch.get_version(), '>=2.6.1'):
+ break
+ _exclude_paths.append(os.path.dirname(_patch.get_path()))
+ PATCH = _patch.get_path() if _patch.found() else None
+else:
+ PATCH = shutil.which('patch')
+
def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
""" raises WrapException if not whitelisted subdomain """
@@ -233,6 +247,15 @@ class PackageDefinition:
wrap.original_filename = filename
wrap.parse_provide_section(config)
+ patch_url = values.get('patch_url')
+ if patch_url and patch_url.startswith('https://wrapdb.mesonbuild.com/v1'):
+ if name == 'sqlite':
+ mlog.deprecation('sqlite wrap has been renamed to sqlite3, update using `meson wrap install sqlite3`')
+ elif name == 'libjpeg':
+ mlog.deprecation('libjpeg wrap has been renamed to libjpeg-turbo, update using `meson wrap install libjpeg-turbo`')
+ else:
+ mlog.deprecation(f'WrapDB v1 is deprecated, update using `meson wrap update {name}`')
+
with open(filename, 'r', encoding='utf-8') as file:
wrap.wrapfile_hash = hashlib.sha256(file.read().encode('utf-8')).hexdigest()
@@ -331,6 +354,7 @@ class Resolver:
self.wrapdb: T.Dict[str, T.Any] = {}
self.wrapdb_provided_deps: T.Dict[str, str] = {}
self.wrapdb_provided_programs: T.Dict[str, str] = {}
+ self.loaded_dirs: T.Set[str] = set()
self.load_wraps()
self.load_netrc()
self.load_wrapdb()
@@ -372,6 +396,7 @@ class Resolver:
# Add provided deps and programs into our lookup tables
for wrap in self.wraps.values():
self.add_wrap(wrap)
+ self.loaded_dirs.add(self.subdir)
def add_wrap(self, wrap: PackageDefinition) -> None:
for k in wrap.provided_deps.keys():
@@ -416,16 +441,25 @@ class Resolver:
def _merge_wraps(self, other_resolver: 'Resolver') -> None:
for k, v in other_resolver.wraps.items():
- self.wraps.setdefault(k, v)
- for k, v in other_resolver.provided_deps.items():
- self.provided_deps.setdefault(k, v)
- for k, v in other_resolver.provided_programs.items():
- self.provided_programs.setdefault(k, v)
+ prev_wrap = self.wraps.get(v.directory)
+ if prev_wrap and prev_wrap.type is None and v.type is not None:
+ # This happens when a subproject has been previously downloaded
+ # using a wrap from another subproject and the wrap-redirect got
+ # deleted. In that case, the main project created a bare wrap
+ # for the download directory, but now we have a proper wrap.
+ # It also happens for wraps coming from Cargo.lock files, which
+ # don't create wrap-redirect.
+ del self.wraps[v.directory]
+ del self.provided_deps[v.directory]
+ if k not in self.wraps:
+ self.wraps[k] = v
+ self.add_wrap(v)
def load_and_merge(self, subdir: str, subproject: SubProject) -> None:
- if self.wrap_mode != WrapMode.nopromote:
+ if self.wrap_mode != WrapMode.nopromote and subdir not in self.loaded_dirs:
other_resolver = Resolver(self.source_dir, subdir, subproject, self.wrap_mode, self.wrap_frontend, self.allow_insecure, self.silent)
self._merge_wraps(other_resolver)
+ self.loaded_dirs.add(subdir)
def find_dep_provider(self, packagename: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
# Python's ini parser converts all key values to lowercase.
@@ -720,6 +754,23 @@ class Resolver:
resp = open_wrapdburl(urlstring, allow_insecure=self.allow_insecure, have_opt=self.wrap_frontend)
elif WHITELIST_SUBDOMAIN in urlstring:
raise WrapException(f'{urlstring} may be a WrapDB-impersonating URL')
+ elif url.scheme == 'sftp':
+ sftp = shutil.which('sftp')
+ if sftp is None:
+ raise WrapException('Scheme sftp is not available. Install sftp to enable it.')
+ with tempfile.TemporaryDirectory() as workdir, \
+ tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False) as tmpfile:
+ args = []
+ # Older versions of the sftp client cannot handle URLs, hence the splitting of url below
+ if url.port:
+ args += ['-P', f'{url.port}']
+ user = f'{url.username}@' if url.username else ''
+ command = [sftp, '-o', 'KbdInteractiveAuthentication=no', *args, f'{user}{url.hostname}:{url.path[1:]}']
+ subprocess.run(command, cwd=workdir, check=True)
+ downloaded = os.path.join(workdir, os.path.basename(url.path))
+ tmpfile.close()
+ shutil.move(downloaded, tmpfile.name)
+ return self.hash_file(tmpfile.name), tmpfile.name
else:
headers = {
'User-Agent': f'mesonbuild/{coredata.version}',
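
# Editor's sketch: the sftp command assembled above, shown for an invented
# URL; older sftp clients cannot parse full URLs, hence the manual splitting.
from urllib.parse import urlparse

url = urlparse('sftp://user@example.com:2222/srv/foo-1.0.tar.gz')
args = ['-P', f'{url.port}'] if url.port else []
user = f'{url.username}@' if url.username else ''
command = ['sftp', '-o', 'KbdInteractiveAuthentication=no', *args,
           f'{user}{url.hostname}:{url.path[1:]}']
assert command == ['sftp', '-o', 'KbdInteractiveAuthentication=no',
                   '-P', '2222', 'user@example.com:srv/foo-1.0.tar.gz']
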
@@ -744,7 +795,7 @@ class Resolver:
resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
except OSError as e:
mlog.log(str(e))
- raise WrapException(f'could not get {urlstring} is the internet available?')
+ raise WrapException(f'could not get {urlstring}; is the internet available?')
with contextlib.closing(resp) as resp, tmpfile as tmpfile:
try:
dlsize = int(resp.info()['Content-Length'])
@@ -775,14 +826,17 @@ class Resolver:
hashvalue = h.hexdigest()
return hashvalue, tmpfile.name
+ def hash_file(self, path: str) -> str:
+ h = hashlib.sha256()
+ with open(path, 'rb') as f:
+ h.update(f.read())
+ return h.hexdigest()
+
def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
if what + '_hash' not in self.wrap.values and not hash_required:
return
expected = self.wrap.get(what + '_hash').lower()
- h = hashlib.sha256()
- with open(path, 'rb') as f:
- h.update(f.read())
- dhash = h.hexdigest()
+ dhash = self.hash_file(path)
if dhash != expected:
raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')