path: root/mesonbuild/backend
Diffstat (limited to 'mesonbuild/backend')
-rw-r--r--  mesonbuild/backend/backends.py       204
-rw-r--r--  mesonbuild/backend/ninjabackend.py   188
-rw-r--r--  mesonbuild/backend/vs2010backend.py  102
-rw-r--r--  mesonbuild/backend/vs2012backend.py    2
-rw-r--r--  mesonbuild/backend/vs2013backend.py    2
-rw-r--r--  mesonbuild/backend/vs2015backend.py    2
-rw-r--r--  mesonbuild/backend/vs2017backend.py    2
-rw-r--r--  mesonbuild/backend/vs2019backend.py    2
-rw-r--r--  mesonbuild/backend/vs2022backend.py    2
-rw-r--r--  mesonbuild/backend/xcodebackend.py    57
10 files changed, 337 insertions(+), 226 deletions(-)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 3dfa2fb..8fe696e 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -24,12 +24,12 @@ from .. import dependencies
from .. import programs
from .. import mesonlib
from .. import mlog
-from ..compilers import LANGUAGES_USING_LDFLAGS, detect, lang_suffixes
+from ..compilers import detect, lang_suffixes
from ..mesonlib import (
File, MachineChoice, MesonException, MesonBugException, OrderedSet,
ExecutableSerialisation, EnvironmentException,
classify_unity_sources, get_compiler_for_source,
- is_parent_path,
+ get_rsp_threshold,
)
from ..options import OptionKey
@@ -61,7 +61,7 @@ if T.TYPE_CHECKING:
# Languages that can mix with C or C++ but don't support unity builds yet
# because the syntax we use for unity builds is specific to C/++/ObjC/++.
# Assembly files cannot be unitified and neither can LLVM IR files
-LANGS_CANT_UNITY = ('d', 'fortran', 'vala')
+LANGS_CANT_UNITY = ('d', 'fortran', 'vala', 'rust')
@dataclass(eq=False)
class RegenInfo:
@@ -177,7 +177,6 @@ class InstallSymlinkData:
install_path: str
subproject: str
tag: T.Optional[str] = None
- allow_missing: bool = False
# cannot use dataclass here because "exclude" is out of order
class SubdirInstallData(InstallDataBase):
@@ -533,6 +532,7 @@ class Backend:
capture: T.Optional[str] = None,
feed: T.Optional[str] = None,
env: T.Optional[mesonlib.EnvironmentVariables] = None,
+ can_use_rsp_file: bool = False,
tag: T.Optional[str] = None,
verbose: bool = False,
installdir_map: T.Optional[T.Dict[str, str]] = None) -> 'ExecutableSerialisation':
@@ -563,9 +563,7 @@ class Backend:
cmd_args: T.List[str] = []
for c in raw_cmd_args:
if isinstance(c, programs.ExternalProgram):
- p = c.get_path()
- assert isinstance(p, str)
- cmd_args.append(p)
+ cmd_args += c.get_command()
elif isinstance(c, (build.BuildTarget, build.CustomTarget)):
cmd_args.append(self.get_target_filename_abs(c))
elif isinstance(c, mesonlib.File):
@@ -594,6 +592,21 @@ class Backend:
exe_wrapper = None
workdir = workdir or self.environment.get_build_dir()
+
+ # Must include separators as well
+ needs_rsp_file = can_use_rsp_file and sum(len(i) + 1 for i in cmd_args) >= get_rsp_threshold()
+
+ if needs_rsp_file:
+ hasher = hashlib.sha1()
+ args = ' '.join(mesonlib.quote_arg(arg) for arg in cmd_args)
+ hasher.update(args.encode(encoding='utf-8', errors='ignore'))
+ digest = hasher.hexdigest()
+ scratch_file = f'meson_rsp_{digest}.rsp'
+ rsp_file = os.path.join(self.environment.get_scratch_dir(), scratch_file)
+ with open(rsp_file, 'w', encoding='utf-8', newline='\n') as f:
+ f.write(args)
+ cmd_args = [f'@{rsp_file}']
+
return ExecutableSerialisation(exe_cmd + cmd_args, env,
exe_wrapper, workdir,
extra_paths, capture, feed, tag, verbose, installdir_map)
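A minimal sketch of the response-file logic added above, assuming a hypothetical RSP_THRESHOLD constant in place of mesonlib.get_rsp_threshold() and shlex.quote in place of mesonlib.quote_arg: when the summed command length (each argument plus its separator) crosses the threshold, the arguments are written to a .rsp file named after a SHA-1 of the quoted command line and replaced by a single @file argument.

    import hashlib
    import os
    import shlex

    RSP_THRESHOLD = 32 * 1024  # hypothetical limit; Meson queries get_rsp_threshold()

    def maybe_use_rsp_file(cmd_args, scratch_dir, can_use_rsp_file=True):
        # Each argument is followed by a separator, hence the "+ 1".
        if not can_use_rsp_file or sum(len(a) + 1 for a in cmd_args) < RSP_THRESHOLD:
            return cmd_args
        quoted = ' '.join(shlex.quote(a) for a in cmd_args)
        digest = hashlib.sha1(quoted.encode('utf-8', errors='ignore')).hexdigest()
        rsp_file = os.path.join(scratch_dir, f'meson_rsp_{digest}.rsp')
        with open(rsp_file, 'w', encoding='utf-8', newline='\n') as f:
            f.write(quoted)
        # The whole command collapses to a single @file argument.
        return [f'@{rsp_file}']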
@@ -606,6 +619,7 @@ class Backend:
feed: T.Optional[str] = None,
force_serialize: bool = False,
env: T.Optional[mesonlib.EnvironmentVariables] = None,
+ can_use_rsp_file: bool = False,
verbose: bool = False) -> T.Tuple[T.List[str], str]:
'''
Serialize an executable for running with a generator or a custom target
@@ -613,7 +627,7 @@ class Backend:
cmd: T.List[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram]] = []
cmd.append(exe)
cmd.extend(cmd_args)
- es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, verbose=verbose)
+ es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, can_use_rsp_file, verbose=verbose)
reasons: T.List[str] = []
if es.extra_paths:
reasons.append('to set PATH')
@@ -653,6 +667,9 @@ class Backend:
envlist.append(f'{k}={v}')
return ['env'] + envlist + es.cmd_args, ', '.join(reasons)
+ if any(a.startswith('@') for a in es.cmd_args):
+ reasons.append('because command is too long')
+
if not force_serialize:
if not capture and not feed:
return es.cmd_args, ''
@@ -715,118 +732,6 @@ class Backend:
return l, stdlib_args
@staticmethod
- def _libdir_is_system(libdir: str, compilers: T.Mapping[str, 'Compiler'], env: 'Environment') -> bool:
- libdir = os.path.normpath(libdir)
- for cc in compilers.values():
- if libdir in cc.get_library_dirs(env):
- return True
- return False
-
- def get_external_rpath_dirs(self, target: build.BuildTarget) -> T.Set[str]:
- args: T.List[str] = []
- for lang in LANGUAGES_USING_LDFLAGS:
- try:
- e = self.environment.coredata.get_external_link_args(target.for_machine, lang)
- if isinstance(e, str):
- args.append(e)
- else:
- args.extend(e)
- except Exception:
- pass
- return self.get_rpath_dirs_from_link_args(args)
-
- @staticmethod
- def get_rpath_dirs_from_link_args(args: T.List[str]) -> T.Set[str]:
- dirs: T.Set[str] = set()
- # Match rpath formats:
- # -Wl,-rpath=
- # -Wl,-rpath,
- rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)')
- # Match solaris style compat runpath formats:
- # -Wl,-R
- # -Wl,-R,
- runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)')
- # Match symbols formats:
- # -Wl,--just-symbols=
- # -Wl,--just-symbols,
- symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)')
- for arg in args:
- rpath_match = rpath_regex.match(arg)
- if rpath_match:
- for dir in rpath_match.group(1).split(':'):
- dirs.add(dir)
- runpath_match = runpath_regex.match(arg)
- if runpath_match:
- for dir in runpath_match.group(1).split(':'):
- # The symbols arg is an rpath if the path is a directory
- if Path(dir).is_dir():
- dirs.add(dir)
- symbols_match = symbols_regex.match(arg)
- if symbols_match:
- for dir in symbols_match.group(1).split(':'):
- # Prevent usage of --just-symbols to specify rpath
- if Path(dir).is_dir():
- raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.')
- return dirs
-
- @lru_cache(maxsize=None)
- def rpaths_for_non_system_absolute_shared_libraries(self, target: build.BuildTarget, exclude_system: bool = True) -> 'ImmutableListProtocol[str]':
- paths: OrderedSet[str] = OrderedSet()
- srcdir = self.environment.get_source_dir()
-
- for dep in target.external_deps:
- if dep.type_name not in {'library', 'pkgconfig', 'cmake'}:
- continue
- for libpath in dep.link_args:
- # For all link args that are absolute paths to a library file, add RPATH args
- if not os.path.isabs(libpath):
- continue
- libdir = os.path.dirname(libpath)
- if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
- # No point in adding system paths.
- continue
- # Don't remove rpaths specified in LDFLAGS.
- if libdir in self.get_external_rpath_dirs(target):
- continue
- # Windows doesn't support rpaths, but we use this function to
- # emulate rpaths by setting PATH
- # .dll is there for mingw gcc
- # .so's may be extended with version information, e.g. libxyz.so.1.2.3
- if not (
- os.path.splitext(libpath)[1] in {'.dll', '.lib', '.so', '.dylib'}
- or re.match(r'.+\.so(\.|$)', os.path.basename(libpath))
- ):
- continue
-
- if is_parent_path(srcdir, libdir):
- rel_to_src = libdir[len(srcdir) + 1:]
- assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute'
- paths.add(os.path.join(self.build_to_src, rel_to_src))
- else:
- paths.add(libdir)
- # Don't remove rpaths specified by the dependency
- paths.difference_update(self.get_rpath_dirs_from_link_args(dep.link_args))
- for i in chain(target.link_targets, target.link_whole_targets):
- if isinstance(i, build.BuildTarget):
- paths.update(self.rpaths_for_non_system_absolute_shared_libraries(i, exclude_system))
- return list(paths)
-
- # This may take other types
- def determine_rpath_dirs(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]
- ) -> T.Tuple[str, ...]:
- result: OrderedSet[str]
- if self.environment.coredata.optstore.get_value_for(OptionKey('layout')) == 'mirror':
- # Need a copy here
- result = OrderedSet(target.get_link_dep_subdirs())
- else:
- result = OrderedSet()
- result.add('meson-out')
- if isinstance(target, build.BuildTarget):
- result.update(self.rpaths_for_non_system_absolute_shared_libraries(target))
- target.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result])
- return tuple(result)
-
- @staticmethod
@lru_cache(maxsize=None)
def canonicalize_filename(fname: str) -> str:
if os.path.altsep is not None:
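For reference, a standalone sketch of the link-argument parsing that the get_rpath_dirs_from_link_args() helper removed from backends.py performed; it only covers the -Wl,-rpath= / -Wl,-rpath, spellings and is not Meson's implementation.

    import re

    RPATH_RE = re.compile(r'-Wl,-rpath[=,]([^,]+)')

    def rpath_dirs_from_link_args(args):
        # Collect every directory named by an rpath flag; several directories
        # may be joined with ':' inside a single flag.
        dirs = set()
        for arg in args:
            m = RPATH_RE.match(arg)
            if m:
                dirs.update(m.group(1).split(':'))
        return dirs

    # rpath_dirs_from_link_args(['-Wl,-rpath=/opt/foo/lib', '-lbar']) -> {'/opt/foo/lib'}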
@@ -1067,11 +972,6 @@ class Backend:
if compiler.language == 'vala':
if dep.type_name == 'pkgconfig':
assert isinstance(dep, dependencies.ExternalDependency)
- if dep.name == 'glib-2.0' and dep.version_reqs is not None:
- for req in dep.version_reqs:
- if req.startswith(('>=', '==')):
- commands += ['--target-glib', req[2:]]
- break
commands += ['--pkg', dep.name]
elif isinstance(dep, dependencies.ExternalLibrary):
commands += dep.get_link_args('vala')
@@ -1083,6 +983,32 @@ class Backend:
commands += dep.get_exe_args(compiler)
# For 'automagic' deps: Boost and GTest. Also dependency('threads').
# pkg-config puts the thread flags itself via `Cflags:`
+ if compiler.language == 'vala':
+ # Vala wants to know the minimum glib version
+ for dep in target.added_deps:
+ if dep.name == 'glib-2.0':
+ if dep.type_name == 'pkgconfig':
+ assert isinstance(dep, dependencies.ExternalDependency)
+ if dep.version_reqs is not None:
+ for req in dep.version_reqs:
+ if req.startswith(('>=', '==')):
+ commands += ['--target-glib', req[2:]]
+ break
+ elif isinstance(dep, dependencies.InternalDependency) and dep.version is not None:
+ glib_version = dep.version.split('.')
+ if len(glib_version) != 3:
+ mlog.warning(f'GLib version has unexpected format: {dep.version}')
+ break
+ try:
+ # If GLib version is a development version, downgrade
+ # --target-glib to the previous version, as valac will
+ # complain about non-even minor versions
+ glib_version[1] = str((int(glib_version[1]) // 2) * 2)
+ except ValueError:
+ mlog.warning(f'GLib version has unexpected format: {dep.version}')
+ break
+ commands += ['--target-glib', f'{glib_version[0]}.{glib_version[1]}']
+
# Fortran requires extra include directives.
if compiler.language == 'fortran':
for lt in chain(target.link_targets, target.link_whole_targets):
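A small sketch of the --target-glib computation added above, assuming an InternalDependency that reports a three-part version string: development GLib releases (odd minor) are rounded down to the previous even minor before being handed to valac.

    def target_glib_from_version(version):
        # '2.79.1' (a development release) becomes '2.78'; '2.78.4' stays '2.78'.
        parts = version.split('.')
        if len(parts) != 3:
            raise ValueError(f'GLib version has unexpected format: {version}')
        major, minor = parts[0], int(parts[1])
        return f'{major}.{(minor // 2) * 2}'

    assert target_glib_from_version('2.79.1') == '2.78'
    assert target_glib_from_version('2.78.4') == '2.78'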
@@ -1258,12 +1184,9 @@ class Backend:
extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]] = []
if isinstance(exe, build.CustomTarget):
extra_bdeps = list(exe.get_transitive_build_target_deps())
+ extra_bdeps.extend(t.depends)
+ extra_bdeps.extend(a for a in t.cmd_args if isinstance(a, build.BuildTarget))
extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
- for a in t.cmd_args:
- if isinstance(a, build.BuildTarget):
- for p in self.determine_windows_extra_paths(a, []):
- if p not in extra_paths:
- extra_paths.append(p)
else:
extra_paths = []
@@ -1289,8 +1212,12 @@ class Backend:
else:
raise MesonException('Bad object in test command.')
+ # set LD_LIBRARY_PATH for
+ # a) dependencies, as relying on rpath is not very safe:
+ # https://github.com/mesonbuild/meson/pull/11119
+ # b) depends and targets passed via args.
t_env = copy.deepcopy(t.env)
- if not machine.is_windows() and not machine.is_cygwin() and not machine.is_darwin():
+ if not machine.is_windows() and not machine.is_cygwin():
ld_lib_path_libs: T.Set[build.SharedLibrary] = set()
for d in depends:
if isinstance(d, build.BuildTarget):
@@ -1303,6 +1230,8 @@ class Backend:
if ld_lib_path:
t_env.prepend('LD_LIBRARY_PATH', list(ld_lib_path), ':')
+ if machine.is_darwin():
+ t_env.prepend('DYLD_LIBRARY_PATH', list(ld_lib_path), ':')
ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
exe_wrapper, self.environment.need_exe_wrapper(),
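A simplified sketch of the test-environment change above, using a plain dict instead of Meson's EnvironmentVariables: shared-library directories are prepended to LD_LIBRARY_PATH, and on macOS additionally to DYLD_LIBRARY_PATH, so tests do not have to rely on rpath alone.

    import os

    def prepend_library_paths(env, lib_dirs, is_darwin):
        # Prepend each directory while keeping whatever the caller already had.
        def prepend(name):
            old = env.get(name, '')
            env[name] = os.pathsep.join(lib_dirs + ([old] if old else []))
        if lib_dirs:
            prepend('LD_LIBRARY_PATH')
            if is_darwin:
                prepend('DYLD_LIBRARY_PATH')
        return env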
@@ -1562,7 +1491,7 @@ class Backend:
def eval_custom_target_command(
self, target: build.CustomTarget, absolute_outputs: bool = False) -> \
- T.Tuple[T.List[str], T.List[str], T.List[str]]:
+ T.Tuple[T.List[str], T.List[str], T.List[str | programs.ExternalProgram]]:
# We want the outputs to be absolute only when using the VS backend
# XXX: Maybe allow the vs backend to use relative paths too?
source_root = self.build_to_src
@@ -1575,7 +1504,7 @@ class Backend:
outputs = [os.path.join(outdir, i) for i in target.get_outputs()]
inputs = self.get_custom_target_sources(target)
# Evaluate the command list
- cmd: T.List[str] = []
+ cmd: T.List[str | programs.ExternalProgram] = []
for i in target.command:
if isinstance(i, build.BuildTarget):
cmd += self.build_target_to_cmd_array(i)
@@ -1611,6 +1540,9 @@ class Backend:
if not target.absolute_paths:
pdir = self.get_target_private_dir(target)
i = i.replace('@PRIVATE_DIR@', pdir)
+ elif isinstance(i, programs.ExternalProgram):
+ # Let it pass and be extended elsewhere
+ pass
else:
raise RuntimeError(f'Argument {i} is of unknown type {type(i)}')
cmd.append(i)
@@ -1635,7 +1567,7 @@ class Backend:
# fixed.
#
# https://github.com/mesonbuild/meson/pull/737
- cmd = [i.replace('\\', '/') for i in cmd]
+ cmd = [i.replace('\\', '/') if isinstance(i, str) else i for i in cmd]
return inputs, outputs, cmd
def get_introspect_command(self) -> str:
@@ -1777,7 +1709,7 @@ class Backend:
for alias, to, tag in t.get_aliases():
alias = os.path.join(first_outdir, alias)
- s = InstallSymlinkData(to, alias, first_outdir, t.subproject, tag, allow_missing=True)
+ s = InstallSymlinkData(to, alias, first_outdir, t.subproject, tag)
d.symlinks.append(s)
if isinstance(t, (build.SharedLibrary, build.SharedModule, build.Executable)):
@@ -1996,6 +1928,8 @@ class Backend:
compiler += [j]
elif isinstance(j, (build.BuildTarget, build.CustomTarget)):
compiler += j.get_outputs()
+ elif isinstance(j, programs.ExternalProgram):
+ compiler += j.get_command()
else:
raise RuntimeError(f'Type "{type(j).__name__}" is not supported in get_introspection_data. This is a bug')
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index d7de987..d93e8c9 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -500,11 +500,6 @@ class NinjaBackend(backends.Backend):
# - https://github.com/mesonbuild/meson/pull/9453
# - https://github.com/mesonbuild/meson/issues/9479#issuecomment-953485040
self.allow_thin_archives = PerMachine[bool](True, True)
- if self.environment:
- for for_machine in MachineChoice:
- if 'cuda' in self.environment.coredata.compilers[for_machine]:
- mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine))
- self.allow_thin_archives[for_machine] = False
def create_phony_target(self, dummy_outfile: str, rulename: str, phony_infilename: str) -> NinjaBuildElement:
'''
@@ -595,6 +590,12 @@ class NinjaBackend(backends.Backend):
# We don't yet have a use case where we'd expect to make use of this,
# so no harm in catching and reporting something unexpected.
raise MesonBugException('We do not expect the ninja backend to be given a valid \'vslite_ctx\'')
+ if self.environment:
+ for for_machine in MachineChoice:
+ if 'cuda' in self.environment.coredata.compilers[for_machine]:
+ mlog.debug('cuda enabled globally, disabling thin archives for {}, since nvcc/nvlink cannot handle thin archives natively'.format(for_machine))
+ self.allow_thin_archives[for_machine] = False
+
ninja = environment.detect_ninja_command_and_version(log=True)
if self.environment.coredata.optstore.get_value_for(OptionKey('vsenv')):
builddir = Path(self.environment.get_build_dir())
@@ -890,14 +891,14 @@ class NinjaBackend(backends.Backend):
self.generate_shlib_aliases(target, self.get_target_dir(target))
+ # Generate rules for GeneratedLists
+ self.generate_generator_list_rules(target)
+
# If target uses a language that cannot link to C objects,
# just generate for that language and return.
if isinstance(target, build.Jar):
self.generate_jar_target(target)
return
- if target.uses_rust():
- self.generate_rust_target(target)
- return
if 'cs' in target.compilers:
self.generate_cs_target(target)
return
@@ -934,8 +935,6 @@ class NinjaBackend(backends.Backend):
generated_sources = self.get_target_generated_sources(target)
transpiled_sources = []
self.scan_fortran_module_outputs(target)
- # Generate rules for GeneratedLists
- self.generate_generator_list_rules(target)
# Generate rules for building the remaining source files in this target
outname = self.get_target_filename(target)
@@ -991,6 +990,8 @@ class NinjaBackend(backends.Backend):
# this target. We create the Ninja build file elements for this here
# because we need `header_deps` to be fully generated in the above loop.
for src in generated_source_files:
+ if not self.environment.is_separate_compile(src):
+ continue
if self.environment.is_llvm_ir(src):
o, s = self.generate_llvm_ir_compile(target, src)
else:
@@ -1049,21 +1050,24 @@ class NinjaBackend(backends.Backend):
# Generate compile targets for all the preexisting sources for this target
for src in target_sources.values():
- if not self.environment.is_header(src) or is_compile_target:
- if self.environment.is_llvm_ir(src):
- o, s = self.generate_llvm_ir_compile(target, src)
- obj_list.append(o)
- elif is_unity and self.get_target_source_can_unity(target, src):
- abs_src = os.path.join(self.environment.get_build_dir(),
- src.rel_to_builddir(self.build_to_src))
- unity_src.append(abs_src)
- else:
- o, s = self.generate_single_compile(target, src, False, [],
- header_deps + d_generated_deps + fortran_order_deps,
- fortran_inc_args)
- obj_list.append(o)
- compiled_sources.append(s)
- source2object[s] = o
+ if not self.environment.is_separate_compile(src):
+ continue
+ if self.environment.is_header(src) and not is_compile_target:
+ continue
+ if self.environment.is_llvm_ir(src):
+ o, s = self.generate_llvm_ir_compile(target, src)
+ obj_list.append(o)
+ elif is_unity and self.get_target_source_can_unity(target, src):
+ abs_src = os.path.join(self.environment.get_build_dir(),
+ src.rel_to_builddir(self.build_to_src))
+ unity_src.append(abs_src)
+ else:
+ o, s = self.generate_single_compile(target, src, False, [],
+ header_deps + d_generated_deps + fortran_order_deps,
+ fortran_inc_args)
+ obj_list.append(o)
+ compiled_sources.append(s)
+ source2object[s] = o
if is_unity:
for src in self.generate_unity_files(target, unity_src):
@@ -1083,8 +1087,14 @@ class NinjaBackend(backends.Backend):
final_obj_list = self.generate_prelink(target, obj_list)
else:
final_obj_list = obj_list
- elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
+
self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps)
+
+ if target.uses_rust():
+ self.generate_rust_target(target, outname, final_obj_list, fortran_order_deps)
+ return
+
+ elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args)
self.add_build(elem)
#In AIX, we archive shared libraries. If the instance is a shared library, we add a command to archive the shared library
#object and create the build element.
@@ -1223,6 +1233,7 @@ class NinjaBackend(backends.Backend):
capture=ofilenames[0] if target.capture else None,
feed=srcs[0] if target.feed else None,
env=target.env,
+ can_use_rsp_file=target.rspable,
verbose=target.console)
if reason:
cmd_type = f' (wrapped by meson {reason})'
@@ -1554,7 +1565,6 @@ class NinjaBackend(backends.Backend):
elem.add_item('ARGS', commands)
self.add_build(elem)
- self.generate_generator_list_rules(target)
self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs)
def determine_java_compile_args(self, target, compiler) -> T.List[str]:
@@ -1765,6 +1775,9 @@ class NinjaBackend(backends.Backend):
girname = os.path.join(self.get_target_dir(target), target.vala_gir)
args += ['--gir', os.path.join('..', target.vala_gir)]
valac_outputs.append(girname)
+ shared_target = target.get('shared')
+ if isinstance(shared_target, build.SharedLibrary):
+ args += ['--shared-library', shared_target.get_filename()]
# Install GIR to default location if requested by user
if len(target.install_dir) > 3 and target.install_dir[3] is True:
target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
@@ -1775,7 +1788,7 @@ class NinjaBackend(backends.Backend):
gres_xml, = self.get_custom_target_sources(gensrc)
args += ['--gresources=' + gres_xml]
for source_dir in gensrc.source_dirs:
- gres_dirs += [os.path.join(self.get_target_dir(gensrc), source_dir)]
+ gres_dirs += [source_dir]
# Ensure that resources are built before vala sources
# This is required since vala code using [GtkTemplate] effectively depends on .ui files
# GResourceHeaderTarget is not suitable due to lacking depfile
@@ -1967,6 +1980,7 @@ class NinjaBackend(backends.Backend):
for s in f.get_outputs()])
self.all_structured_sources.update(_ods)
orderdeps.extend(_ods)
+ return orderdeps, main_rust_file
for i in target.get_sources():
if main_rust_file is None:
@@ -2005,7 +2019,8 @@ class NinjaBackend(backends.Backend):
args += target.get_extra_args('rust')
return args
- def get_rust_compiler_deps_and_args(self, target: build.BuildTarget, rustc: Compiler) -> T.Tuple[T.List[str], T.List[str], T.List[RustDep], T.List[str]]:
+ def get_rust_compiler_deps_and_args(self, target: build.BuildTarget, rustc: Compiler,
+ obj_list: T.List[str]) -> T.Tuple[T.List[str], T.List[RustDep], T.List[str]]:
deps: T.List[str] = []
project_deps: T.List[RustDep] = []
args: T.List[str] = []
@@ -2037,11 +2052,9 @@ class NinjaBackend(backends.Backend):
type_ += ':' + ','.join(modifiers)
args.append(f'-l{type_}={libname}')
- objs, od = self.flatten_object_list(target)
- for o in objs:
+ for o in obj_list:
args.append(f'-Clink-arg={o}')
deps.append(o)
- fortran_order_deps = self.get_fortran_order_deps(od)
linkdirs = mesonlib.OrderedSet()
external_deps = target.external_deps.copy()
@@ -2091,20 +2104,24 @@ class NinjaBackend(backends.Backend):
for a in e.get_link_args():
if a in rustc.native_static_libs:
# Exclude link args that rustc already add by default
- pass
+ continue
elif a.startswith('-L'):
args.append(a)
- elif a.endswith(('.dll', '.so', '.dylib', '.a', '.lib')) and isinstance(target, build.StaticLibrary):
+ continue
+ elif a.endswith(('.dll', '.so', '.dylib', '.a', '.lib')):
dir_, lib = os.path.split(a)
linkdirs.add(dir_)
- if not verbatim:
- lib, ext = os.path.splitext(lib)
- if lib.startswith('lib'):
- lib = lib[3:]
- static = a.endswith(('.a', '.lib'))
- _link_library(lib, static)
- else:
- args.append(f'-Clink-arg={a}')
+
+ if isinstance(target, build.StaticLibrary):
+ if not verbatim:
+ lib, ext = os.path.splitext(lib)
+ if lib.startswith('lib'):
+ lib = lib[3:]
+ static = a.endswith(('.a', '.lib'))
+ _link_library(lib, static)
+ continue
+
+ args.append(f'-Clink-arg={a}')
for d in linkdirs:
d = d or '.'
@@ -2119,40 +2136,44 @@ class NinjaBackend(backends.Backend):
and dep.rust_crate_type == 'dylib'
for dep in target_deps)
- if target.rust_crate_type in {'dylib', 'proc-macro'} or has_rust_shared_deps:
- # add prefer-dynamic if any of the Rust libraries we link
+ if target.rust_crate_type in {'dylib', 'proc-macro'}:
+ # also add prefer-dynamic if any of the Rust libraries we link
# against are dynamic or this is a dynamic library itself,
# otherwise we'll end up with multiple implementations of libstd.
+ has_rust_shared_deps = True
+ elif self.get_target_option(target, 'rust_dynamic_std'):
+ if target.rust_crate_type == 'staticlib':
+ # staticlib crates always include a copy of the Rust libstd,
+ # therefore it is not possible to also link it dynamically.
+ # The options to avoid this (-Z staticlib-allow-rdylib-deps and
+ # -Z staticlib-prefer-dynamic) are not yet stable; alternatively,
+ # one could use "--emit obj" (implemented in the pull request at
+ # https://github.com/mesonbuild/meson/pull/11213) or "--emit rlib"
+ # (officially not recommended for linking with C programs).
+ raise MesonException('rust_dynamic_std does not support staticlib crates yet')
+ # want libstd as a shared dep
+ has_rust_shared_deps = True
+
+ if has_rust_shared_deps:
args += ['-C', 'prefer-dynamic']
-
- if isinstance(target, build.SharedLibrary) or has_shared_deps:
+ if has_shared_deps or has_rust_shared_deps:
args += self.get_build_rpath_args(target, rustc)
- return deps, fortran_order_deps, project_deps, args
-
- def generate_rust_target(self, target: build.BuildTarget) -> None:
- rustc = T.cast('RustCompiler', target.compilers['rust'])
- self.generate_generator_list_rules(target)
-
- for i in target.get_sources():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
- for g in target.get_generated_sources():
- for i in g.get_outputs():
- if not rustc.can_compile(i):
- raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.')
+ return deps, project_deps, args
+ def generate_rust_target(self, target: build.BuildTarget, target_name: str, obj_list: T.List[str],
+ fortran_order_deps: T.List[str]) -> None:
orderdeps, main_rust_file = self.generate_rust_sources(target)
- target_name = self.get_target_filename(target)
if main_rust_file is None:
raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. Please report')
+ rustc = T.cast('RustCompiler', target.compilers['rust'])
args = rustc.compiler_args()
depfile = os.path.join(self.get_target_private_dir(target), target.name + '.d')
args += self.get_rust_compiler_args(target, rustc, target.rust_crate_type, depfile)
- deps, fortran_order_deps, project_deps, deps_args = self.get_rust_compiler_deps_and_args(target, rustc)
+ deps, project_deps, deps_args = self.get_rust_compiler_deps_and_args(target, rustc, obj_list)
args += deps_args
proc_macro_dylib_path = None
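A condensed sketch of the prefer-dynamic decision above, with the target reduced to plain values rather than Meson's build classes; rust_dynamic_std here stands in for the per-target option queried via get_target_option().

    def rust_std_link_args(crate_type, rust_dynamic_std, deps_have_rust_dylib):
        # Decide whether rustc should link the Rust standard library dynamically.
        if crate_type in {'dylib', 'proc-macro'}:
            prefer_dynamic = True
        elif rust_dynamic_std:
            if crate_type == 'staticlib':
                # staticlib crates always bundle libstd, so a dynamic libstd is impossible.
                raise ValueError('rust_dynamic_std does not support staticlib crates yet')
            prefer_dynamic = True
        else:
            prefer_dynamic = deps_have_rust_dylib
        return ['-C', 'prefer-dynamic'] if prefer_dynamic else []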
@@ -2187,7 +2208,9 @@ class NinjaBackend(backends.Backend):
rustdoc = rustc.get_rustdoc(self.environment)
args = rustdoc.get_exe_args()
args += self.get_rust_compiler_args(target.doctests.target, rustdoc, target.rust_crate_type)
- _, _, _, deps_args = self.get_rust_compiler_deps_and_args(target.doctests.target, rustdoc)
+ # Rustc does not add the files in obj_list to Rust rlibs, so Meson
+ # adds them to all of the dependencies, including here.
+ _, _, deps_args = self.get_rust_compiler_deps_and_args(target.doctests.target, rustdoc, obj_list)
args += deps_args
target.doctests.cmd_args = args.to_native() + [main_rust_file] + target.doctests.cmd_args
@@ -2209,10 +2232,7 @@ class NinjaBackend(backends.Backend):
def swift_module_file_name(self, target):
return os.path.join(self.get_target_private_dir(target),
- self.target_swift_modulename(target) + '.swiftmodule')
-
- def target_swift_modulename(self, target):
- return target.name
+ target.swift_module_name + '.swiftmodule')
def determine_swift_dep_modules(self, target):
result = []
@@ -2239,12 +2259,26 @@ class NinjaBackend(backends.Backend):
return srcs, others
def generate_swift_target(self, target) -> None:
- module_name = self.target_swift_modulename(target)
+ module_name = target.swift_module_name
swiftc = target.compilers['swift']
abssrc = []
relsrc = []
abs_headers = []
header_imports = []
+
+ if not target.uses_swift_cpp_interop():
+ cpp_targets = [t for t in target.link_targets if t.uses_swift_cpp_interop()]
+ if cpp_targets != []:
+ target_word = 'targets' if len(cpp_targets) > 1 else 'target'
+ first = ', '.join(repr(t.name) for t in cpp_targets[:-1])
+ and_word = ' and ' if len(cpp_targets) > 1 else ''
+ last = repr(cpp_targets[-1].name)
+ enable_word = 'enable' if len(cpp_targets) > 1 else 'enables'
+ raise MesonException('Swift target {0} links against {1} {2}{3}{4} which {5} C++ interoperability. '
+ 'This requires {0} to also have it enabled. '
+ 'Add "swift_interoperability_mode: \'cpp\'" to the definition of {0}.'
+ .format(repr(target.name), target_word, first, and_word, last, enable_word))
+
for i in target.get_sources():
if swiftc.can_compile(i):
rels = i.rel_to_builddir(self.build_to_src)
@@ -2261,6 +2295,16 @@ class NinjaBackend(backends.Backend):
os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True)
compile_args = self.generate_basic_compiler_args(target, swiftc)
compile_args += swiftc.get_module_args(module_name)
+ compile_args += swiftc.get_cxx_interoperability_args(target)
+ compile_args += self.build.get_project_args(swiftc, target.subproject, target.for_machine)
+ compile_args += self.build.get_global_args(swiftc, target.for_machine)
+ if isinstance(target, (build.StaticLibrary, build.SharedLibrary)):
+ # swiftc treats modules with a single source file, and the main.swift file in multi-source file modules
+ # as top-level code. This is undesirable in library targets since it emits a main function. Add the
+ # -parse-as-library option as necessary to prevent emitting the main function while keeping files explicitly
+ # named main.swift treated as the entrypoint of the module in case this is desired.
+ if len(abssrc) == 1 and os.path.basename(abssrc[0]) != 'main.swift':
+ compile_args += swiftc.get_library_args()
for i in reversed(target.get_include_dirs()):
basedir = i.get_curdir()
for d in i.get_incdirs():
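A sketch of the -parse-as-library rule added above, assuming sources are plain paths: single-source library targets get the flag unless the file is main.swift, which swiftc always treats as the module entry point.

    import os

    def needs_parse_as_library(is_library, sources):
        # swiftc treats single-file modules (and main.swift) as top-level code,
        # which would emit a main function; suppress that for libraries.
        return (is_library
                and len(sources) == 1
                and os.path.basename(sources[0]) != 'main.swift')

    assert needs_parse_as_library(True, ['src/Foo.swift'])
    assert not needs_parse_as_library(True, ['src/main.swift'])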
@@ -3127,9 +3171,9 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
# If TASKING compiler family is used and MIL linking is enabled for the target,
# then compilation rule name is a special one to output MIL files
# instead of object files for .c files
- key = OptionKey('b_lto')
if compiler.get_id() == 'tasking':
- if ((isinstance(target, build.StaticLibrary) and target.prelink) or target.get_option(key)) and src.rsplit('.', 1)[1] in compilers.lang_suffixes['c']:
+ target_lto = self.get_target_option(target, OptionKey('b_lto', machine=target.for_machine, subproject=target.subproject))
+ if ((isinstance(target, build.StaticLibrary) and target.prelink) or target_lto) and src.rsplit('.', 1)[1] in compilers.lang_suffixes['c']:
compiler_name = self.get_compiler_rule_name('tasking_mil_compile', compiler.for_machine)
else:
compiler_name = self.compiler_to_rule_name(compiler)
@@ -3551,9 +3595,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
linker.build_rpath_args(self.environment,
self.environment.get_build_dir(),
target_slashname_workaround_dir,
- self.determine_rpath_dirs(target),
- target.build_rpath,
- target.install_rpath))
+ target))
return rpath_args
def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.Union['Compiler', 'StaticLinker'], extra_args=None, stdlib_args=None):
@@ -3688,7 +3730,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs)
elem.add_dep(dep_targets + custom_target_libraries)
if linker.get_id() == 'tasking':
- if len([x for x in dep_targets + custom_target_libraries if x.endswith('.ma')]) > 0 and not target.get_option(OptionKey('b_lto')):
+ if len([x for x in dep_targets + custom_target_libraries if x.endswith('.ma')]) > 0 and not self.get_target_option(target, OptionKey('b_lto', target.subproject, target.for_machine)):
raise MesonException(f'Tried to link the target named \'{target.name}\' with a MIL archive without LTO enabled! This causes the compiler to ignore the archive.')
# Compiler args must be included in TI C28x linker commands.
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 283f9f0..deb3dfb 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -147,6 +147,9 @@ class Vs2010Backend(backends.Backend):
self.handled_target_deps = {}
self.gen_lite = gen_lite # Synonymous with generating the simpler makefile-style multi-config projects that invoke 'meson compile' builds, avoiding native MSBuild complications
+ def detect_toolset(self) -> None:
+ pass
+
def get_target_private_dir(self, target):
return os.path.join(self.get_target_dir(target), target.get_id())
@@ -227,6 +230,7 @@ class Vs2010Backend(backends.Backend):
# Check for (currently) unexpected capture arg use cases -
if capture:
raise MesonBugException('We do not expect any vs backend to generate with \'capture = True\'')
+ self.detect_toolset()
host_machine = self.environment.machines.host.cpu_family
if host_machine in {'64', 'x86_64'}:
# amd64 or x86_64
@@ -619,7 +623,8 @@ class Vs2010Backend(backends.Backend):
conftype='Utility',
target_ext=None,
target_platform=None,
- gen_manifest=True) -> T.Tuple[ET.Element, ET.Element]:
+ gen_manifest=True,
+ masm_type: T.Optional[T.Literal['masm', 'marmasm']] = None) -> T.Tuple[ET.Element, ET.Element]:
root = ET.Element('Project', {'DefaultTargets': "Build",
'ToolsVersion': '4.0',
'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
@@ -657,6 +662,13 @@ class Vs2010Backend(backends.Backend):
# "The build tools for v142 (Platform Toolset = 'v142') cannot be found. ... please install v142 build tools."
# This is extremely unhelpful and misleading since the v14x build tools ARE installed.
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')
+ ext_settings_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionSettings')
+ if masm_type:
+ ET.SubElement(
+ ext_settings_grp,
+ 'Import',
+ Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm_type}.props',
+ )
# This attribute makes sure project names are displayed as expected in solution files even when their project file names differ
pname = ET.SubElement(globalgroup, 'ProjectName')
@@ -692,9 +704,11 @@ class Vs2010Backend(backends.Backend):
if target_ext:
ET.SubElement(direlem, 'TargetExt').text = target_ext
- ET.SubElement(direlem, 'EmbedManifest').text = 'false'
- if not gen_manifest:
- ET.SubElement(direlem, 'GenerateManifest').text = 'false'
+ # Fix weird mt.exe error:
+ # mt.exe is trying to compile a non-existent .generated.manifest file and link it
+ # with the target. This does not happen without masm props.
+ ET.SubElement(direlem, 'EmbedManifest').text = 'true' if masm_type or gen_manifest == 'embed' else 'false'
+ ET.SubElement(direlem, 'GenerateManifest').text = 'true' if gen_manifest else 'false'
return (root, type_config)
@@ -775,12 +789,19 @@ class Vs2010Backend(backends.Backend):
platform = self.build_platform
else:
platform = self.platform
+
+ masm = self.get_masm_type(target)
+
(root, type_config) = self.create_basic_project(target.name,
temp_dir=target.get_id(),
guid=guid,
target_platform=platform,
- gen_manifest=self.get_gen_manifest(target))
+ gen_manifest=self.get_gen_manifest(target),
+ masm_type=masm)
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ext_tgt_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
+ if masm:
+ ET.SubElement(ext_tgt_grp, 'Import', Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm}.targets')
target.generated = [self.compile_target_to_generator(target)]
target.sources = []
self.generate_custom_generator_commands(target, root)
@@ -795,6 +816,8 @@ class Vs2010Backend(backends.Backend):
return 'c'
if ext in compilers.cpp_suffixes:
return 'cpp'
+ if ext in compilers.lang_suffixes['masm']:
+ return 'masm'
raise MesonException(f'Could not guess language from source file {src}.')
def add_pch(self, pch_sources, lang, inc_cl):
@@ -956,13 +979,13 @@ class Vs2010Backend(backends.Backend):
other.append(arg)
return lpaths, libs, other
- def _get_cl_compiler(self, target):
+ def _get_cl_compiler(self, target: build.BuildTarget):
for lang, c in target.compilers.items():
if lang in {'c', 'cpp'}:
return c
- # No source files, only objects, but we still need a compiler, so
+ # No C/C++ source files, only objects/assembly source, but we still need a compiler, so
# return a found compiler
- if len(target.objects) > 0:
+ if len(target.objects) > 0 or len(target.sources) > 0:
for lang, c in self.environment.coredata.compilers[target.for_machine].items():
if lang in {'c', 'cpp'}:
return c
@@ -1493,8 +1516,9 @@ class Vs2010Backend(backends.Backend):
additional_links.append(self.relpath(lib, self.get_target_dir(target)))
if len(extra_link_args) > 0:
- extra_link_args.append('%(AdditionalOptions)')
- ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args)
+ args = [self.escape_additional_option(arg) for arg in extra_link_args]
+ args.append('%(AdditionalOptions)')
+ ET.SubElement(link, "AdditionalOptions").text = ' '.join(args)
if len(additional_libpaths) > 0:
additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)')
ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths)
@@ -1607,6 +1631,8 @@ class Vs2010Backend(backends.Backend):
else:
platform = self.platform
+ masm = self.get_masm_type(target)
+
tfilename = os.path.splitext(target.get_filename())
(root, type_config) = self.create_basic_project(tfilename[0],
@@ -1615,7 +1641,8 @@ class Vs2010Backend(backends.Backend):
conftype=conftype,
target_ext=tfilename[1],
target_platform=platform,
- gen_manifest=self.get_gen_manifest(target))
+ gen_manifest=self.get_gen_manifest(target),
+ masm_type=masm)
generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(
target, root)
@@ -1719,12 +1746,17 @@ class Vs2010Backend(backends.Backend):
for s in sources:
relpath = os.path.join(proj_to_build_root, s.rel_to_builddir(self.build_to_src))
if path_normalize_add(relpath, previous_sources):
- inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ if lang == 'masm' and masm:
+ inc_cl = ET.SubElement(inc_src, masm.upper(), Include=relpath)
+ else:
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
+
if self.gen_lite:
self.add_project_nmake_defs_incs_and_opts(inc_cl, relpath, defs_paths_opts_per_lang_and_buildtype, platform)
else:
- lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(pch_sources, lang, inc_cl)
+ if lang != 'masm':
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1732,12 +1764,17 @@ class Vs2010Backend(backends.Backend):
self.object_filename_from_source(target, compiler, s)
for s in gen_src:
if path_normalize_add(s, previous_sources):
- inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
+ lang = Vs2010Backend.lang_from_source_file(s)
+ if lang == 'masm' and masm:
+ inc_cl = ET.SubElement(inc_src, masm.upper(), Include=s)
+ else:
+ inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
+
if self.gen_lite:
self.add_project_nmake_defs_incs_and_opts(inc_cl, s, defs_paths_opts_per_lang_and_buildtype, platform)
else:
- lang = Vs2010Backend.lang_from_source_file(s)
- self.add_pch(pch_sources, lang, inc_cl)
+ if lang != 'masm':
+ self.add_pch(pch_sources, lang, inc_cl)
self.add_additional_options(lang, inc_cl, file_args)
self.add_preprocessor_defines(lang, inc_cl, file_defines)
self.add_include_dirs(lang, inc_cl, file_inc_dirs)
@@ -1786,6 +1823,9 @@ class Vs2010Backend(backends.Backend):
ET.SubElement(inc_objs, 'Object', Include=s)
ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+ ext_tgt_grp = ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
+ if masm:
+ ET.SubElement(ext_tgt_grp, 'Import', Project=rf'$(VCTargetsPath)\BuildCustomizations\{masm}.targets')
self.add_regen_dependency(root)
if not self.gen_lite:
# Injecting further target dependencies into this vcxproj implies and forces a Visual Studio BUILD dependency,
@@ -2096,7 +2136,8 @@ class Vs2010Backend(backends.Backend):
pass
# Returns if a target generates a manifest or not.
- def get_gen_manifest(self, target):
+ # Returns 'embed' if the generated manifest is embedded.
+ def get_gen_manifest(self, target: T.Optional[build.BuildTarget]):
if not isinstance(target, build.BuildTarget):
return True
@@ -2113,6 +2154,31 @@ class Vs2010Backend(backends.Backend):
arg = arg.upper()
if arg == '/MANIFEST:NO':
return False
+ if arg.startswith('/MANIFEST:EMBED'):
+ return 'embed'
if arg == '/MANIFEST' or arg.startswith('/MANIFEST:'):
break
return True
+
+ # FIXME: add a way to distinguish between arm64ec+marmasm (written in ARM assembly)
+ # and arm64ec+masm (written in x64 assembly).
+ #
+ # For now, assume it's the native ones. (same behavior as ninja backend)
+ def get_masm_type(self, target: build.BuildTarget):
+ if not isinstance(target, build.BuildTarget):
+ return None
+
+ if 'masm' not in target.compilers:
+ return None
+
+ if target.for_machine == MachineChoice.BUILD:
+ platform = self.build_platform
+ elif target.for_machine == MachineChoice.HOST:
+ platform = self.platform
+ else:
+ return None
+
+ if platform in {'ARM', 'arm64', 'arm64ec'}:
+ return 'marmasm'
+ else:
+ return 'masm'
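The new get_masm_type() boils down to a platform-to-customization mapping; a detached sketch (platform strings assumed to match the ones the VS backend already uses):

    def masm_customization(platform):
        # ARM flavours use the marmasm build customization, everything else plain masm.
        return 'marmasm' if platform in {'ARM', 'arm64', 'arm64ec'} else 'masm'

    # The returned name is spliced into the MSBuild imports, e.g.
    # $(VCTargetsPath)\BuildCustomizations\marmasm.props and ...\marmasm.targets.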
diff --git a/mesonbuild/backend/vs2012backend.py b/mesonbuild/backend/vs2012backend.py
index 307964b..922cd60 100644
--- a/mesonbuild/backend/vs2012backend.py
+++ b/mesonbuild/backend/vs2012backend.py
@@ -21,6 +21,8 @@ class Vs2012Backend(Vs2010Backend):
self.vs_version = '2012'
self.sln_file_version = '12.00'
self.sln_version_comment = '2012'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2013backend.py b/mesonbuild/backend/vs2013backend.py
index ae0b68b..cf5d598 100644
--- a/mesonbuild/backend/vs2013backend.py
+++ b/mesonbuild/backend/vs2013backend.py
@@ -20,6 +20,8 @@ class Vs2013Backend(Vs2010Backend):
self.vs_version = '2013'
self.sln_file_version = '12.00'
self.sln_version_comment = '2013'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2015backend.py b/mesonbuild/backend/vs2015backend.py
index 4c515cc..1862def 100644
--- a/mesonbuild/backend/vs2015backend.py
+++ b/mesonbuild/backend/vs2015backend.py
@@ -21,6 +21,8 @@ class Vs2015Backend(Vs2010Backend):
self.vs_version = '2015'
self.sln_file_version = '12.00'
self.sln_version_comment = '14'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
# TODO: we assume host == build
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2017backend.py b/mesonbuild/backend/vs2017backend.py
index 393544f..372e1ce 100644
--- a/mesonbuild/backend/vs2017backend.py
+++ b/mesonbuild/backend/vs2017backend.py
@@ -24,6 +24,8 @@ class Vs2017Backend(Vs2010Backend):
self.vs_version = '2017'
self.sln_file_version = '12.00'
self.sln_version_comment = '15'
+
+ def detect_toolset(self) -> None:
# We assume that host == build
if self.environment is not None:
comps = self.environment.coredata.compilers.host
diff --git a/mesonbuild/backend/vs2019backend.py b/mesonbuild/backend/vs2019backend.py
index 4d6e226..61ad75d 100644
--- a/mesonbuild/backend/vs2019backend.py
+++ b/mesonbuild/backend/vs2019backend.py
@@ -22,6 +22,8 @@ class Vs2019Backend(Vs2010Backend):
super().__init__(build, interpreter)
self.sln_file_version = '12.00'
self.sln_version_comment = 'Version 16'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
comps = self.environment.coredata.compilers.host
if comps and all(c.id == 'clang-cl' for c in comps.values()):
diff --git a/mesonbuild/backend/vs2022backend.py b/mesonbuild/backend/vs2022backend.py
index 27e0438..ca449a4 100644
--- a/mesonbuild/backend/vs2022backend.py
+++ b/mesonbuild/backend/vs2022backend.py
@@ -22,6 +22,8 @@ class Vs2022Backend(Vs2010Backend):
super().__init__(build, interpreter, gen_lite=gen_lite)
self.sln_file_version = '12.00'
self.sln_version_comment = 'Version 17'
+
+ def detect_toolset(self) -> None:
if self.environment is not None:
comps = self.environment.coredata.compilers.host
if comps and all(c.id == 'clang-cl' for c in comps.values()):
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 587404a..e7bd487 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -176,6 +176,15 @@ class PbxDict:
self.keys.add(key)
self.items.append(item)
+ def get_item(self, key: str) -> PbxDictItem:
+ assert key in self.keys
+ for item in self.items:
+ if not isinstance(item, PbxDictItem):
+ continue
+ if item.key == key:
+ return item
+ return None
+
def has_item(self, key: str) -> bool:
return key in self.keys
@@ -396,10 +405,23 @@ class XCodeBackend(backends.Backend):
def generate_filemap(self) -> None:
self.filemap = {} # Key is source file relative to src root.
+ self.foldermap = {}
self.target_filemap = {}
for name, t in self.build_targets.items():
for s in t.sources:
if isinstance(s, mesonlib.File):
+ if '/' in s.fname:
+ # From the top level down, add the folders containing the source file.
+ folder = os.path.split(os.path.dirname(s.fname))
+ while folder:
+ fpath = os.path.join(*folder)
+ # Multiple targets might use the same folders, so store their targets with them.
+ # Otherwise, folders and their source files will appear in the wrong places in Xcode.
+ if (fpath, t) not in self.foldermap:
+ self.foldermap[(fpath, t)] = self.gen_id()
+ else:
+ break
+ folder = folder[:-1]
s = os.path.join(s.subdir, s.fname)
self.filemap[s] = self.gen_id()
for o in t.objects:
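A sketch of the folder-map idea added above, assuming POSIX-style relative source paths and written as a simplified variant that records every ancestor directory per (folder, target) pair so Xcode can reproduce the on-disk tree; gen_id stands in for the backend's ID generator.

    import os

    def ancestor_dirs(fname):
        # 'gui/widgets/button.c' -> ['gui/widgets', 'gui']
        dirs = []
        folder = os.path.dirname(fname)
        while folder:
            dirs.append(folder)
            folder = os.path.dirname(folder)
        return dirs

    def build_foldermap(target_sources, gen_id):
        # One group id per (folder, target) pair: the same folder may appear in
        # several targets and needs a distinct Xcode group in each of them.
        foldermap = {}
        for target, sources in target_sources.items():
            for fname in sources:
                for d in ancestor_dirs(fname):
                    if (d, target) not in foldermap:
                        foldermap[(d, target)] = gen_id()
        return foldermap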
@@ -1052,6 +1074,24 @@ class XCodeBackend(backends.Backend):
main_children.add_item(frameworks_id, 'Frameworks')
main_dict.add_item('sourceTree', '<group>')
+ # Define each folder as a group in Xcode. That way, it can build the file tree correctly.
+ # This must be done before the project tree group is generated, as source files are added during that phase.
+ for (path, target), id in self.foldermap.items():
+ folder_dict = PbxDict()
+ objects_dict.add_item(id, folder_dict, path)
+ folder_dict.add_item('isa', 'PBXGroup')
+ folder_children = PbxArray()
+ folder_dict.add_item('children', folder_children)
+ folder_dict.add_item('name', '"{}"'.format(path.rsplit('/', 1)[-1]))
+ folder_dict.add_item('path', f'"{path}"')
+ folder_dict.add_item('sourceTree', 'SOURCE_ROOT')
+
+ # Add any detected subdirectories (not declared via subdir()) here, but only one level down.
+ # Example: In "root", add "root/sub", but not "root/sub/subtwo".
+ for path_dep, target_dep in self.foldermap:
+ if path_dep.startswith(path) and path_dep.split('/', 1)[0] == path.split('/', 1)[0] and path_dep != path and path_dep.count('/') == path.count('/') + 1 and target == target_dep:
+ folder_children.add_item(self.foldermap[(path_dep, target)], path_dep)
+
self.add_projecttree(objects_dict, projecttree_id)
resource_dict = PbxDict()
@@ -1121,6 +1161,7 @@ class XCodeBackend(backends.Backend):
tid = t.get_id()
group_id = self.gen_id()
target_dict = PbxDict()
+ folder_ids = set()
objects_dict.add_item(group_id, target_dict, tid)
target_dict.add_item('isa', 'PBXGroup')
target_children = PbxArray()
@@ -1130,6 +1171,18 @@ class XCodeBackend(backends.Backend):
source_files_dict = PbxDict()
for s in t.sources:
if isinstance(s, mesonlib.File):
+ # If the file is in a folder, add it to the group representing that folder.
+ if '/' in s.fname:
+ folder = '/'.join(s.fname.split('/')[:-1])
+ folder_dict = objects_dict.get_item(self.foldermap[(folder, t)]).value.get_item('children').value
+ temp = os.path.join(s.subdir, s.fname)
+ folder_dict.add_item(self.fileref_ids[(tid, temp)], temp)
+ if self.foldermap[(folder, t)] in folder_ids:
+ continue
+ if len(folder.split('/')) == 1:
+ target_children.add_item(self.foldermap[(folder, t)], folder)
+ folder_ids.add(self.foldermap[(folder, t)])
+ continue
s = os.path.join(s.subdir, s.fname)
elif isinstance(s, str):
s = os.path.join(t.subdir, s)
@@ -1596,6 +1649,7 @@ class XCodeBackend(backends.Backend):
headerdirs = []
bridging_header = ""
is_swift = self.is_swift_target(target)
+ langs = set()
for d in target.include_dirs:
for sd in d.incdirs:
cd = os.path.join(d.curdir, sd)
@@ -1715,6 +1769,7 @@ class XCodeBackend(backends.Backend):
lang = 'c'
elif lang == 'objcpp':
lang = 'cpp'
+ langs.add(lang)
langname = LANGNAMEMAP[lang]
langargs.setdefault(langname, [])
langargs[langname] = cargs + cti_args + args
@@ -1776,6 +1831,8 @@ class XCodeBackend(backends.Backend):
settings_dict.add_item('SECTORDER_FLAGS', '')
if is_swift and bridging_header:
settings_dict.add_item('SWIFT_OBJC_BRIDGING_HEADER', bridging_header)
+ if self.objversion >= 60 and target.uses_swift_cpp_interop():
+ settings_dict.add_item('SWIFT_OBJC_INTEROP_MODE', 'objcxx')
settings_dict.add_item('BUILD_DIR', symroot)
settings_dict.add_item('OBJROOT', f'{symroot}/build')
sysheader_arr = PbxArray()