Diffstat:
-rw-r--r--  mesonbuild/ast/introspection.py                       3
-rw-r--r--  mesonbuild/backend/backends.py                       18
-rw-r--r--  mesonbuild/backend/ninjabackend.py                    2
-rw-r--r--  mesonbuild/build.py                                 152
-rw-r--r--  mesonbuild/compilers/mixins/visualstudio.py           2
-rw-r--r--  mesonbuild/interpreter/interpreter.py                 5
-rw-r--r--  mesonbuild/modules/rust.py                            2
-rw-r--r--  test cases/rust/5 polyglot static/meson.build        13
-rw-r--r--  test cases/rust/5 polyglot static/test.json           3
-rw-r--r--  test cases/unit/113 complex link cases/main.c         6
-rw-r--r--  test cases/unit/113 complex link cases/meson.build   40
-rw-r--r--  test cases/unit/113 complex link cases/s1.c           3
-rw-r--r--  test cases/unit/113 complex link cases/s2.c           5
-rw-r--r--  test cases/unit/113 complex link cases/s3.c           5
-rw-r--r--  unittests/allplatformstests.py                        3
-rw-r--r--  unittests/linuxliketests.py                          17
16 files changed, 195 insertions(+), 84 deletions(-)
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 4d6cd0c..d66e73f 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -283,8 +283,7 @@ class IntrospectionInterpreter(AstInterpreter):
kwargs_reduced['_allow_no_sources'] = True
target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, [], objects,
self.environment, self.coredata.compilers[for_machine], kwargs_reduced)
- target.process_compilers()
- target.process_compilers_late([])
+ target.process_compilers_late()
new_target = {
'name': target.get_basename(),
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index d425f06..3b87623 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -858,6 +858,8 @@ class Backend:
def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_build_root: str) -> T.List[str]:
result: T.List[str] = []
+ targetdir = self.get_target_private_dir(extobj.target)
+
# Merge sources and generated sources
raw_sources = list(extobj.srclist)
for gensrc in extobj.genlist:
@@ -874,12 +876,18 @@ class Backend:
elif self.environment.is_object(s):
result.append(s.relative_name())
+ # MSVC generates an object file for the PCH
+ if extobj.pch:
+ for lang, pch in extobj.target.pch.items():
+ compiler = extobj.target.compilers[lang]
+ if compiler.get_argument_syntax() == 'msvc':
+ objname = self.get_msvc_pch_objname(lang, pch)
+ result.append(os.path.join(proj_dir_to_build_root, targetdir, objname))
+
# extobj could contain only objects and no sources
if not sources:
return result
- targetdir = self.get_target_private_dir(extobj.target)
-
# With unity builds, sources don't map directly to objects,
# we only support extracting all the objects in this mode,
# so just return all object files.
@@ -914,6 +922,12 @@ class Backend:
args += compiler.get_pch_use_args(pchpath, p[0])
return includeargs + args
+ def get_msvc_pch_objname(self, lang: str, pch: T.List[str]) -> str:
+ if len(pch) == 1:
+ # Same name as in create_msvc_pch_implementation() below.
+ return f'meson_pch-{lang}.obj'
+ return os.path.splitext(pch[1])[0] + '.obj'
+
def create_msvc_pch_implementation(self, target: build.BuildTarget, lang: str, pch_header: str) -> str:
# We have to include the language in the file name, otherwise
# pch.c and pch.cpp will both end up as pch.obj in VS backends.
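To make the object-name rule above easier to follow: MSVC's /Yc compile of a PCH also emits a regular object file, which is why _determine_ext_objs() now collects it. Below is a minimal standalone sketch of the same naming logic (hypothetical helper msvc_pch_objname(), mirroring get_msvc_pch_objname() and assuming Meson's internal [header, implementation] ordering of the pch list):

import os
import typing as T

def msvc_pch_objname(lang: str, pch: T.List[str]) -> str:
    # pch holds the target's PCH files: just a header, or a header plus an
    # implementation source (assumed order: [header, implementation]).
    if len(pch) == 1:
        # Meson generates the implementation itself; the object name has to
        # match what create_msvc_pch_implementation() produces.
        return f'meson_pch-{lang}.obj'
    # A user-supplied implementation file determines the object name.
    return os.path.splitext(pch[1])[0] + '.obj'

assert msvc_pch_objname('c', ['pch.h']) == 'meson_pch-c.obj'
assert msvc_pch_objname('cpp', ['pch.h', 'pch.cpp']) == 'pch.obj'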
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 27be9a9..108aa72 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -3203,7 +3203,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
extradep = None
pch_objects += objs
rulename = self.compiler_to_pch_rule_name(compiler)
- elem = NinjaBuildElement(self.all_outputs, dst, rulename, src)
+ elem = NinjaBuildElement(self.all_outputs, objs + [dst], rulename, src)
if extradep is not None:
elem.add_dep(extradep)
self.add_header_deps(target, elem, header_deps)
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 1ecab0d..8f85e15 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -17,7 +17,6 @@ from collections import defaultdict, OrderedDict
from dataclasses import dataclass, field, InitVar
from functools import lru_cache
import abc
-import copy
import hashlib
import itertools, pathlib
import os
@@ -413,6 +412,7 @@ class ExtractedObjects(HoldableObject):
genlist: T.List['GeneratedTypes'] = field(default_factory=list)
objlist: T.List[T.Union[str, 'File', 'ExtractedObjects']] = field(default_factory=list)
recursive: bool = True
+ pch: bool = False
def __post_init__(self) -> None:
if self.target.is_unity:
@@ -754,6 +754,9 @@ class BuildTarget(Target):
# 2. Compiled objects created by and extracted from another target
self.process_objectlist(objects)
self.process_kwargs(kwargs)
+ self.missing_languages = self.process_compilers()
+ self.link(extract_as_list(kwargs, 'link_with'))
+ self.link_whole(extract_as_list(kwargs, 'link_whole'))
if not any([self.sources, self.generated, self.objects, self.link_whole_targets, self.structured_sources,
kwargs.pop('_allow_no_sources', False)]):
mlog.warning(f'Build target {name} has no sources. '
@@ -848,14 +851,14 @@ class BuildTarget(Target):
removed = True
return removed
- def process_compilers_late(self, extra_languages: T.List[str]):
+ def process_compilers_late(self):
"""Processes additional compilers after kwargs have been evaluated.
This can add extra compilers that might be required by keyword
arguments, such as link_with or dependencies. It will also try to guess
which compiler to use if one hasn't been selected already.
"""
- for lang in extra_languages:
+ for lang in self.missing_languages:
self.compilers[lang] = self.all_compilers[lang]
# did user override clink_langs for this target?
@@ -1001,18 +1004,6 @@ class BuildTarget(Target):
'Link_depends arguments must be strings, Files, '
'or a Custom Target, or lists thereof.')
- def get_original_kwargs(self):
- return self.kwargs
-
- def copy_kwargs(self, kwargs):
- self.kwargs = copy.copy(kwargs)
- for k, v in self.kwargs.items():
- if isinstance(v, list):
- self.kwargs[k] = listify(v, flatten=True)
- for t in ['dependencies', 'link_with', 'include_directories', 'sources']:
- if t in self.kwargs:
- self.kwargs[t] = listify(self.kwargs[t], flatten=True)
-
def extract_objects(self, srclist: T.List[T.Union['FileOrString', 'GeneratedTypes']]) -> ExtractedObjects:
sources_set = set(self.sources)
generated_set = set(self.generated)
@@ -1040,7 +1031,7 @@ class BuildTarget(Target):
def extract_all_objects(self, recursive: bool = True) -> ExtractedObjects:
return ExtractedObjects(self, self.sources, self.generated, self.objects,
- recursive)
+ recursive, pch=True)
def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]:
return self.get_transitive_link_deps()
@@ -1086,23 +1077,9 @@ class BuildTarget(Target):
def process_kwargs(self, kwargs):
self.process_kwargs_base(kwargs)
- self.copy_kwargs(kwargs)
+ self.original_kwargs = kwargs
kwargs.get('modules', [])
self.need_install = kwargs.get('install', self.need_install)
- llist = extract_as_list(kwargs, 'link_with')
- for linktarget in llist:
- if isinstance(linktarget, dependencies.ExternalLibrary):
- raise MesonException(textwrap.dedent('''\
- An external library was used in link_with keyword argument, which
- is reserved for libraries built as part of this project. External
- libraries must be passed using the dependencies keyword argument
- instead, because they are conceptually "external dependencies",
- just like those detected with the dependency() function.
- '''))
- self.link(linktarget)
- lwhole = extract_as_list(kwargs, 'link_whole')
- for linktarget in lwhole:
- self.link_whole(linktarget)
for lang in all_languages:
lang_args = extract_as_list(kwargs, f'{lang}_args')
@@ -1293,17 +1270,36 @@ class BuildTarget(Target):
def get_extra_args(self, language):
return self.extra_args.get(language, [])
- def get_dependencies(self, exclude=None):
- transitive_deps = []
- if exclude is None:
- exclude = []
+ @lru_cache(maxsize=None)
+ def get_dependencies(self) -> OrderedSet[Target]:
+ # Get all targets needed for linking. This includes all link_with and
+ # link_whole targets, and also all dependencies of static libraries
+ # recursively. The algorithm here is closely related to what we do in
+ # get_internal_static_libraries(): Installed static libraries include
+ # objects from all their dependencies already.
+ result: OrderedSet[Target] = OrderedSet()
for t in itertools.chain(self.link_targets, self.link_whole_targets):
- if t in transitive_deps or t in exclude:
+ if t not in result:
+ result.add(t)
+ if isinstance(t, StaticLibrary):
+ t.get_dependencies_recurse(result)
+ return result
+
+ def get_dependencies_recurse(self, result: OrderedSet[Target], include_internals: bool = True) -> None:
+ # self is always a static library because we don't need to pull dependencies
+ # of shared libraries. If self is installed (not internal) it already
+ # includes objects extracted from all its internal dependencies, so we
+ # can skip them.
+ include_internals = include_internals and self.is_internal()
+ for t in self.link_targets:
+ if t in result:
continue
- transitive_deps.append(t)
+ if include_internals or not t.is_internal():
+ result.add(t)
if isinstance(t, StaticLibrary):
- transitive_deps += t.get_dependencies(transitive_deps + exclude)
- return transitive_deps
+ t.get_dependencies_recurse(result, include_internals)
+ for t in self.link_whole_targets:
+ t.get_dependencies_recurse(result, include_internals)
def get_source_subdir(self):
return self.subdir
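The rewritten get_dependencies()/get_dependencies_recurse() above is easier to see outside diff form. Here is a self-contained toy model (a hypothetical Lib class and deps_recurse() helper, not the real Meson target types) of how link_with/link_whole edges are walked and why installed static libraries stop the descent into internal libraries:

import typing as T

class Lib:
    def __init__(self, name: str, static: bool = True, installed: bool = False,
                 link_with: T.Sequence['Lib'] = (), link_whole: T.Sequence['Lib'] = ()) -> None:
        self.name = name
        self.static = static
        self.installed = installed
        self.link_targets = list(link_with)
        self.link_whole_targets = list(link_whole)

    def is_internal(self) -> bool:
        # A convenience library: static and not installed.
        return self.static and not self.installed

    def deps_recurse(self, result: T.List['Lib'], include_internals: bool = True) -> None:
        # An installed static library already bundles the objects of its
        # internal dependencies, so anything internal below it can be skipped.
        include_internals = include_internals and self.is_internal()
        for t in self.link_targets:
            if t in result:
                continue
            if include_internals or not t.is_internal():
                result.append(t)
            if t.static:
                t.deps_recurse(result, include_internals)
        for t in self.link_whole_targets:
            t.deps_recurse(result, include_internals)

# Case t1 from the new unit test: s3 link_whole s2, s2 link_with s1, all internal.
s1 = Lib('s1')
s2 = Lib('s2', link_with=[s1])
s3 = Lib('s3', link_whole=[s2])
deps: T.List[Lib] = [s3]
s3.deps_recurse(deps)
assert [l.name for l in deps] == ['s3', 's1']  # s2's objects already live inside s3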
@@ -1341,10 +1337,8 @@ class BuildTarget(Target):
self.extra_files.extend(f for f in dep.extra_files if f not in self.extra_files)
self.add_include_dirs(dep.include_directories, dep.get_include_type())
self.objects.extend(dep.objects)
- for l in dep.libraries:
- self.link(l)
- for l in dep.whole_libraries:
- self.link_whole(l)
+ self.link(dep.libraries)
+ self.link_whole(dep.whole_libraries)
if dep.get_compile_args() or dep.get_link_args():
# Those parts that are external.
extpart = dependencies.InternalDependency('undefined',
@@ -1393,27 +1387,27 @@ You probably should put it in link_with instead.''')
def is_internal(self) -> bool:
return False
- def link(self, target):
- for t in listify(target):
+ def link(self, targets):
+ for t in targets:
if isinstance(self, StaticLibrary) and self.need_install:
if isinstance(t, (CustomTarget, CustomTargetIndex)):
if not t.should_install():
mlog.warning(f'Try to link an installed static library target {self.name} with a '
'custom target that is not installed, this might cause problems '
'when you try to use this static library')
- elif t.is_internal() and not t.uses_rust():
+ elif t.is_internal():
# When we're a static library and we link_with to an
# internal/convenience library, promote to link_whole.
- #
- # There are cases we cannot do this, however. In Rust, for
- # example, this can't be done with Rust ABI libraries, though
- # it could be done with C ABI libraries, though there are
- # several meson issues that need to be fixed:
- # https://github.com/mesonbuild/meson/issues/10722
- # https://github.com/mesonbuild/meson/issues/10723
- # https://github.com/mesonbuild/meson/issues/10724
- return self.link_whole(t)
+ return self.link_whole([t])
if not isinstance(t, (Target, CustomTargetIndex)):
+ if isinstance(t, dependencies.ExternalLibrary):
+ raise MesonException(textwrap.dedent('''\
+ An external library was used in link_with keyword argument, which
+ is reserved for libraries built as part of this project. External
+ libraries must be passed using the dependencies keyword argument
+ instead, because they are conceptually "external dependencies",
+ just like those detected with the dependency() function.
+ '''))
raise InvalidArguments(f'{t!r} is not a target.')
if not t.is_linkable_target():
raise InvalidArguments(f"Link target '{t!s}' is not linkable.")
@@ -1429,16 +1423,13 @@ You probably should put it in link_with instead.''')
mlog.warning(msg + ' This will fail in cross build.')
self.link_targets.append(t)
- def link_whole(self, target):
- for t in listify(target):
+ def link_whole(self, targets):
+ for t in targets:
if isinstance(t, (CustomTarget, CustomTargetIndex)):
if not t.is_linkable_target():
raise InvalidArguments(f'Custom target {t!r} is not linkable.')
if t.links_dynamically():
raise InvalidArguments('Can only link_whole custom targets that are static archives.')
- if isinstance(self, StaticLibrary):
- # FIXME: We could extract the .a archive to get object files
- raise InvalidArguments('Cannot link_whole a custom target into a static library')
elif not isinstance(t, StaticLibrary):
raise InvalidArguments(f'{t!r} is not a static library.')
elif isinstance(self, SharedLibrary) and not t.pic:
@@ -1451,18 +1442,41 @@ You probably should put it in link_with instead.''')
raise InvalidArguments(msg + ' This is not possible in a cross build.')
else:
mlog.warning(msg + ' This will fail in cross build.')
- if isinstance(self, StaticLibrary):
+ if isinstance(self, StaticLibrary) and not self.uses_rust():
+ if isinstance(t, (CustomTarget, CustomTargetIndex)) or t.uses_rust():
+ # There are cases where we cannot do this, however. In Rust, for
+ # example, this can't be done with Rust ABI libraries; it could be
+ # done with C ABI libraries, but there are several meson issues
+ # that need to be fixed first:
+ # https://github.com/mesonbuild/meson/issues/10722
+ # https://github.com/mesonbuild/meson/issues/10723
+ # https://github.com/mesonbuild/meson/issues/10724
+ # FIXME: We could extract the .a archive to get object files
+ raise InvalidArguments('Cannot link_whole a custom or Rust target into a static library')
# When we're a static library and we link_whole: to another static
# library, we need to add that target's objects to ourselves.
- self.objects += t.extract_all_objects_recurse()
+ self.objects += [t.extract_all_objects()]
+ # If we install this static library we also need to include objects
+ # from all uninstalled static libraries it depends on.
+ if self.need_install:
+ for lib in t.get_internal_static_libraries():
+ self.objects += [lib.extract_all_objects()]
self.link_whole_targets.append(t)
- def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
- objs = [self.extract_all_objects()]
+ @lru_cache(maxsize=None)
+ def get_internal_static_libraries(self) -> OrderedSet[Target]:
+ result: OrderedSet[Target] = OrderedSet()
+ self.get_internal_static_libraries_recurse(result)
+ return result
+
+ def get_internal_static_libraries_recurse(self, result: OrderedSet[Target]) -> None:
for t in self.link_targets:
+ if t.is_internal() and t not in result:
+ result.add(t)
+ t.get_internal_static_libraries_recurse(result)
+ for t in self.link_whole_targets:
if t.is_internal():
- objs += t.extract_all_objects_recurse()
- return objs
+ t.get_internal_static_libraries_recurse(result)
def add_pch(self, language: str, pchlist: T.List[str]) -> None:
if not pchlist:
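The effect of the new link_whole() path is that an installed static archive ends up containing the objects of every internal static library below it, while get_internal_static_libraries_recurse() only descends into (without re-adding) libraries reached via link_whole, since their objects were already absorbed. A rough sketch of the object bookkeeping (toy StaticLib class with plain object-file lists; the real code tracks ExtractedObjects):

import typing as T

class StaticLib:
    def __init__(self, name: str, objects: T.List[str], installed: bool = False) -> None:
        self.name = name
        self.objects = list(objects)  # object files this archive will contain
        self.installed = installed
        self.link_targets: T.List['StaticLib'] = []
        self.link_whole_targets: T.List['StaticLib'] = []

    def is_internal(self) -> bool:
        return not self.installed

    def internal_static_libs(self, result: T.List['StaticLib']) -> None:
        # Cf. get_internal_static_libraries_recurse() above.
        for t in self.link_targets:
            if t.is_internal() and t not in result:
                result.append(t)
                t.internal_static_libs(result)
        for t in self.link_whole_targets:
            if t.is_internal():
                t.internal_static_libs(result)

    def link_whole(self, t: 'StaticLib') -> None:
        # Absorb the other archive's own objects...
        self.objects += t.objects
        # ...and, if we will be installed, also the objects of every internal
        # static library it depends on, since those will not be installed.
        if self.installed:
            internals: T.List[StaticLib] = []
            t.internal_static_libs(internals)
            for lib in internals:
                self.objects += lib.objects
        self.link_whole_targets.append(t)

# Case t2 from the new unit test: installed s3 link_whole internal s2, which links internal s1.
s1 = StaticLib('s1', ['s1.o'])
s2 = StaticLib('s2', ['s2.o'])
s2.link_targets.append(s1)
s3 = StaticLib('s3', ['s3.o'], installed=True)
s3.link_whole(s2)
assert sorted(s3.objects) == ['s1.o', 's2.o', 's3.o']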
@@ -2713,7 +2727,7 @@ class CustomTarget(Target, CommandBase):
return False
return CustomTargetIndex(self, self.outputs[0]).is_internal()
- def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
+ def extract_all_objects(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
return self.get_outputs()
def type_suffix(self):
@@ -2975,8 +2989,8 @@ class CustomTargetIndex(HoldableObject):
suf = os.path.splitext(self.output)[-1]
return suf in {'.a', '.lib'} and not self.should_install()
- def extract_all_objects_recurse(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
- return self.target.extract_all_objects_recurse()
+ def extract_all_objects(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
+ return self.target.extract_all_objects()
def get_custom_install_dir(self) -> T.List[T.Union[str, Literal[False]]]:
return self.target.get_custom_install_dir()
diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py
index 76d9829..acf475a 100644
--- a/mesonbuild/compilers/mixins/visualstudio.py
+++ b/mesonbuild/compilers/mixins/visualstudio.py
@@ -213,7 +213,7 @@ class VisualStudioLikeCompiler(Compiler, metaclass=abc.ABCMeta):
return ['/DEF:' + defsfile]
def gen_pch_args(self, header: str, source: str, pchname: str) -> T.Tuple[str, T.List[str]]:
- objname = os.path.splitext(pchname)[0] + '.obj'
+ objname = os.path.splitext(source)[0] + '.obj'
return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]
def openmp_flags(self) -> T.List[str]:
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index f6133b7..363de54 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -3157,9 +3157,8 @@ class Interpreter(InterpreterBase, HoldableObject):
raise InvalidCode(f'Tried to create target "{name}", but a target of that name already exists.')
if isinstance(tobj, build.BuildTarget):
- missing_languages = tobj.process_compilers()
- self.add_languages(missing_languages, True, tobj.for_machine)
- tobj.process_compilers_late(missing_languages)
+ self.add_languages(tobj.missing_languages, True, tobj.for_machine)
+ tobj.process_compilers_late()
self.add_stdlib_info(tobj)
self.build.targets[idname] = tobj
diff --git a/mesonbuild/modules/rust.py b/mesonbuild/modules/rust.py
index 92b0470..3514412 100644
--- a/mesonbuild/modules/rust.py
+++ b/mesonbuild/modules/rust.py
@@ -153,7 +153,7 @@ class RustModule(ExtensionModule):
tkwargs['args'] = extra_args + ['--test', '--format', 'pretty']
tkwargs['protocol'] = 'rust'
- new_target_kwargs = base_target.kwargs.copy()
+ new_target_kwargs = base_target.original_kwargs.copy()
# Don't mutate the shallow copied list, instead replace it with a new
# one
new_target_kwargs['rust_args'] = \
diff --git a/test cases/rust/5 polyglot static/meson.build b/test cases/rust/5 polyglot static/meson.build
index 9fbba68..22c0cd0 100644
--- a/test cases/rust/5 polyglot static/meson.build
+++ b/test cases/rust/5 polyglot static/meson.build
@@ -1,7 +1,18 @@
project('static rust and c polyglot executable', 'c', 'rust')
r = static_library('stuff', 'stuff.rs', rust_crate_type : 'staticlib')
-l = static_library('clib', 'clib.c', link_with : r, install : true)
+
+# clib is an installed static library and stuff is not installed. That means
+# that, to be usable, clib must link_whole stuff. Meson automatically promotes
+# it to link_whole, as it would do with C libraries, but then it cannot extract
+# objects from stuff and thus should error out.
+# FIXME: We should support this use-case in the future.
+testcase expect_error('Cannot link_whole a custom or Rust target into a static library')
+ l = static_library('clib', 'clib.c', link_with : r, install : true)
+endtestcase
+
+l = static_library('clib', 'clib.c', link_with : r)
+
e = executable('prog', 'prog.c',
link_with : l,
install : true)
diff --git a/test cases/rust/5 polyglot static/test.json b/test cases/rust/5 polyglot static/test.json
index cc0d2da..135300d 100644
--- a/test cases/rust/5 polyglot static/test.json
+++ b/test cases/rust/5 polyglot static/test.json
@@ -1,7 +1,6 @@
{
"installed": [
{"type": "exe", "file": "usr/bin/prog"},
- {"type": "pdb", "file": "usr/bin/prog"},
- {"type": "file", "file": "usr/lib/libclib.a"}
+ {"type": "pdb", "file": "usr/bin/prog"}
]
}
diff --git a/test cases/unit/113 complex link cases/main.c b/test cases/unit/113 complex link cases/main.c
new file mode 100644
index 0000000..739b413
--- /dev/null
+++ b/test cases/unit/113 complex link cases/main.c
@@ -0,0 +1,6 @@
+int s3(void);
+
+int main(int argc, char *argv[])
+{
+ return s3();
+}
diff --git a/test cases/unit/113 complex link cases/meson.build b/test cases/unit/113 complex link cases/meson.build
new file mode 100644
index 0000000..d3387c2
--- /dev/null
+++ b/test cases/unit/113 complex link cases/meson.build
@@ -0,0 +1,40 @@
+project('complex link cases', 'c')
+
+# In all tests, e1 uses s3 which uses s2 which uses s1.
+
+# The executable links with s3 and s1 but not s2, because s2 is already included in s3.
+s1 = static_library('t1-s1', 's1.c')
+s2 = static_library('t1-s2', 's2.c', link_with: s1)
+s3 = static_library('t1-s3', 's3.c', link_whole: s2)
+e = executable('t1-e1', 'main.c', link_with: s3)
+
+# s3 is installed but s1 is not, so s3 has to include s1's objects too.
+# The executable links only s3 because it already contains s1 and s2.
+s1 = static_library('t2-s1', 's1.c')
+s2 = static_library('t2-s2', 's2.c', link_with: s1)
+s3 = static_library('t2-s3', 's3.c', link_whole: s2, install: true)
+e = executable('t2-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 only
+s1 = static_library('t3-s1', 's1.c')
+s2 = static_library('t3-s2', 's2.c', link_with: s1)
+s3 = shared_library('t3-s3', 's3.c', link_with: s2)
+e = executable('t3-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 and s2
+s1 = static_library('t4-s1', 's1.c')
+s2 = shared_library('t4-s2', 's2.c', link_with: s1)
+s3 = static_library('t4-s3', 's3.c', link_with: s2)
+e = executable('t4-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 and s1
+s1 = shared_library('t5-s1', 's1.c')
+s2 = static_library('t5-s2', 's2.c', link_with: s1)
+s3 = static_library('t5-s3', 's3.c', link_with: s2, install: true)
+e = executable('t5-e1', 'main.c', link_with: s3)
+
+# Executable needs to link with s3 and s2
+s1 = static_library('t6-s1', 's1.c')
+s2 = static_library('t6-s2', 's2.c', link_with: s1, install: true)
+s3 = static_library('t6-s3', 's3.c', link_with: s2, install: true)
+e = executable('t6-e1', 'main.c', link_with: s3)
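Putting the six cases together, this is the set of libraries each executable is expected to end up with on its final link line. It is written as plain Python data for readability and matches the build.ninja assertions added in unittests/linuxliketests.py below:

# Expected link lines for the six cases above (derived from the comments in
# this meson.build and the assertions in the new unit test).
expected_links = {
    't1-e1': ['libt1-s1.a', 'libt1-s3.a'],   # s2 is folded into s3 via link_whole
    't2-e1': ['libt2-s3.a'],                 # installed s3 also absorbed s1's objects
    't3-e1': ['libt3-s3.so'],                # shared s3 carries its static deps itself
    't4-e1': ['libt4-s2.so', 'libt4-s3.a'],  # shared s2 must stay on the link line
    't5-e1': ['libt5-s1.so', 'libt5-s3.a'],  # s3 absorbed s2 (promoted link_whole); shared s1 stays
    't6-e1': ['libt6-s2.a', 'libt6-s3.a'],   # installed s2 is linked normally, not absorbed
}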
diff --git a/test cases/unit/113 complex link cases/s1.c b/test cases/unit/113 complex link cases/s1.c
new file mode 100644
index 0000000..68bba49
--- /dev/null
+++ b/test cases/unit/113 complex link cases/s1.c
@@ -0,0 +1,3 @@
+int s1(void) {
+ return 1;
+}
diff --git a/test cases/unit/113 complex link cases/s2.c b/test cases/unit/113 complex link cases/s2.c
new file mode 100644
index 0000000..835870c
--- /dev/null
+++ b/test cases/unit/113 complex link cases/s2.c
@@ -0,0 +1,5 @@
+int s1(void);
+
+int s2(void) {
+ return s1() + 1;
+}
diff --git a/test cases/unit/113 complex link cases/s3.c b/test cases/unit/113 complex link cases/s3.c
new file mode 100644
index 0000000..08e0620
--- /dev/null
+++ b/test cases/unit/113 complex link cases/s3.c
@@ -0,0 +1,5 @@
+int s2(void);
+
+int s3(void) {
+ return s2() + 1;
+}
diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py
index c484f2d..438e4fe 100644
--- a/unittests/allplatformstests.py
+++ b/unittests/allplatformstests.py
@@ -4407,8 +4407,7 @@ class AllPlatformTests(BasePlatformTests):
structured_sources=None,
objects=[], environment=env, compilers=env.coredata.compilers[MachineChoice.HOST],
kwargs={})
- target.process_compilers()
- target.process_compilers_late([])
+ target.process_compilers_late()
return target.filename
shared_lib_name = lambda name: output_name(name, SharedLibrary)
diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py
index 1911539..4bb6809 100644
--- a/unittests/linuxliketests.py
+++ b/unittests/linuxliketests.py
@@ -1828,3 +1828,20 @@ class LinuxlikeTests(BasePlatformTests):
with self.assertRaises(subprocess.CalledProcessError) as e:
self.run_tests()
self.assertNotIn('Traceback', e.exception.output)
+
+ @skipUnless(is_linux(), "Ninja file differs on different platforms")
+ def test_complex_link_cases(self):
+ testdir = os.path.join(self.unit_test_dir, '113 complex link cases')
+ self.init(testdir)
+ self.build()
+ with open(os.path.join(self.builddir, 'build.ninja'), encoding='utf-8') as f:
+ content = f.read()
+ # Verify link dependencies, see comments in meson.build.
+ self.assertIn('build libt1-s3.a: STATIC_LINKER libt1-s2.a.p/s2.c.o libt1-s3.a.p/s3.c.o\n', content)
+ self.assertIn('build t1-e1: c_LINKER t1-e1.p/main.c.o | libt1-s1.a libt1-s3.a\n', content)
+ self.assertIn('build libt2-s3.a: STATIC_LINKER libt2-s2.a.p/s2.c.o libt2-s1.a.p/s1.c.o libt2-s3.a.p/s3.c.o\n', content)
+ self.assertIn('build t2-e1: c_LINKER t2-e1.p/main.c.o | libt2-s3.a\n', content)
+ self.assertIn('build t3-e1: c_LINKER t3-e1.p/main.c.o | libt3-s3.so.p/libt3-s3.so.symbols\n', content)
+ self.assertIn('build t4-e1: c_LINKER t4-e1.p/main.c.o | libt4-s2.so.p/libt4-s2.so.symbols libt4-s3.a\n', content)
+ self.assertIn('build t5-e1: c_LINKER t5-e1.p/main.c.o | libt5-s1.so.p/libt5-s1.so.symbols libt5-s3.a\n', content)
+ self.assertIn('build t6-e1: c_LINKER t6-e1.p/main.c.o | libt6-s2.a libt6-s3.a\n', content)