-rw-r--r-- | data/macros.meson | 12
-rw-r--r-- | docs/markdown/Builtin-options.md | 2
-rw-r--r-- | docs/markdown/FAQ.md | 34
-rw-r--r-- | docs/markdown/Reference-manual.md | 4
-rw-r--r-- | docs/markdown/Syntax.md | 3
-rw-r--r-- | docs/markdown/snippets/link_language.md | 10
-rw-r--r-- | docs/markdown/snippets/linkcustom.md | 5
-rw-r--r-- | mesonbuild/ast/introspection.py | 8
-rw-r--r-- | mesonbuild/ast/visitor.py | 2
-rw-r--r-- | mesonbuild/backend/backends.py | 8
-rw-r--r-- | mesonbuild/backend/ninjabackend.py | 779
-rw-r--r-- | mesonbuild/backend/vs2010backend.py | 22
-rw-r--r-- | mesonbuild/backend/xcodebackend.py | 4
-rw-r--r-- | mesonbuild/build.py | 59
-rw-r--r-- | mesonbuild/compilers/__init__.py | 4
-rw-r--r-- | mesonbuild/compilers/c.py | 12
-rw-r--r-- | mesonbuild/compilers/compilers.py | 15
-rw-r--r-- | mesonbuild/compilers/cpp.py | 14
-rw-r--r-- | mesonbuild/compilers/cs.py | 4
-rw-r--r-- | mesonbuild/compilers/fortran.py | 8
-rw-r--r-- | mesonbuild/coredata.py | 2
-rw-r--r-- | mesonbuild/dependencies/base.py | 13
-rw-r--r-- | mesonbuild/dependencies/platform.py | 13
-rw-r--r-- | mesonbuild/dependencies/ui.py | 8
-rw-r--r-- | mesonbuild/envconfig.py | 126
-rw-r--r-- | mesonbuild/environment.py | 75
-rw-r--r-- | mesonbuild/interpreter.py | 5
-rw-r--r-- | mesonbuild/interpreterbase.py | 2
-rw-r--r-- | mesonbuild/mesonlib.py | 47
-rw-r--r-- | mesonbuild/mesonmain.py | 30
-rw-r--r-- | mesonbuild/minstall.py | 4
-rw-r--r-- | mesonbuild/mintro.py | 8
-rw-r--r-- | mesonbuild/mlog.py | 127
-rw-r--r-- | mesonbuild/modules/cmake.py | 12
-rw-r--r-- | mesonbuild/modules/python.py | 6
-rw-r--r-- | mesonbuild/modules/windows.py | 4
-rw-r--r-- | mesonbuild/mparser.py | 14
-rw-r--r-- | mesonbuild/mtest.py | 2
-rw-r--r-- | mesonbuild/munstable_coredata.py | 21
-rw-r--r-- | mesonbuild/scripts/depfixer.py | 4
-rw-r--r-- | mesonbuild/wrap/wrap.py | 4
-rw-r--r-- | mesonbuild/wrap/wraptool.py | 2
-rwxr-xr-x | run_project_tests.py | 8
-rwxr-xr-x | run_tests.py | 2
-rwxr-xr-x | run_unittests.py | 50
-rw-r--r-- | setup.cfg | 2
-rw-r--r-- | sideci.yml | 4
-rw-r--r-- | test cases/common/185 escape and unicode/meson.build | 14
-rw-r--r-- | test cases/common/216 link custom/meson.build | 22
-rw-r--r-- | test cases/common/217 link custom_i single from multiple/generate_conflicting_stlibs.py | 90
-rw-r--r-- | test cases/common/217 link custom_i single from multiple/meson.build | 37
-rw-r--r-- | test cases/common/217 link custom_i single from multiple/prog.c | 5
-rw-r--r-- | test cases/common/218 link custom_i multiple from multiple/generate_stlibs.py | 92
-rw-r--r-- | test cases/common/218 link custom_i multiple from multiple/meson.build | 37
-rw-r--r-- | test cases/common/218 link custom_i multiple from multiple/prog.c | 8
-rw-r--r-- | test cases/fortran/14 fortran links c/clib.c | 7
-rw-r--r-- | test cases/fortran/14 fortran links c/f_call_c.f90 | 10
-rw-r--r-- | test cases/fortran/14 fortran links c/meson.build | 13
-rw-r--r-- | test cases/objc/2 nsstring/meson.build | 3
-rw-r--r-- | test cases/unit/4 suite selection/meson.build | 4
-rw-r--r-- | test cases/unit/4 suite selection/subprojects/subprjfail/meson.build | 4
-rw-r--r-- | test cases/unit/4 suite selection/subprojects/subprjsucc/meson.build | 4
-rwxr-xr-x | tools/boost_names.py | 2
63 files changed, 1257 insertions, 699 deletions
diff --git a/data/macros.meson b/data/macros.meson
index 05d21e5..c5b90de 100644
--- a/data/macros.meson
+++ b/data/macros.meson
@@ -2,6 +2,12 @@
%__meson_wrap_mode nodownload
%__meson_auto_features enabled
+%_smp_mesonflags %([ -z "$MESON_BUILD_NCPUS" ] \\\
+ && MESON_BUILD_NCPUS="`/usr/bin/getconf _NPROCESSORS_ONLN`"; \\\
+ ncpus_max=%{?_smp_ncpus_max}; \\\
+ if [ -n "$ncpus_max" ] && [ "$ncpus_max" -gt 0 ] && [ "$MESON_BUILD_NCPUS" -gt "$ncpus_max" ]; then MESON_BUILD_NCPUS="$ncpus_max"; fi; \\\
+ if [ "$MESON_BUILD_NCPUS" -gt 1 ]; then echo "--num-processes $MESON_BUILD_NCPUS"; fi)
+
%meson \
%set_build_flags \
%{shrink:%{__meson} \
@@ -31,4 +37,8 @@
%ninja_install -C %{_vpath_builddir}
%meson_test \
- %ninja_test -C %{_vpath_builddir}
+ %{shrink: %{__meson} test \
+ -C %{_vpath_builddir} \
+ %{?_smp_mesonflags} \
+ --print-errorlogs \
+ %{nil}}
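The new `%_smp_mesonflags` macro above caps the detected CPU count at `%_smp_ncpus_max` and emits `--num-processes` only when more than one job would be used. A minimal Python sketch of that same logic, for illustration only (not part of the patch; `ncpus_max` stands in for `%_smp_ncpus_max`):

```python
import os

def smp_mesonflags(ncpus_max=0):
    """Illustrative mirror of %_smp_mesonflags; ncpus_max=0 means no cap."""
    # MESON_BUILD_NCPUS overrides the detected online-CPU count, as in the macro.
    ncpus = int(os.environ.get('MESON_BUILD_NCPUS', 0)) or os.cpu_count() or 1
    if ncpus_max > 0 and ncpus > ncpus_max:
        ncpus = ncpus_max                  # cap at the configured maximum
    return '--num-processes {}'.format(ncpus) if ncpus > 1 else ''

print(smp_mesonflags())             # e.g. '--num-processes 8'
print(smp_mesonflags(ncpus_max=2))  # '--num-processes 2' on a larger machine
```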
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index 957ce4e..d25d7ab 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -132,7 +132,7 @@ compiler being used:
| c_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against |
| cpp_args | | free-form comma-separated list | C++ compile arguments to use |
| cpp_link_args| | free-form comma-separated list | C++ link arguments to use |
-| cpp_std | none | none, c++98, c++03, c++11, c++14, c++17, <br/>c++1z, gnu++03, gnu++11, gnu++14, gnu++17, gnu++1z | C++ language standard to use |
+| cpp_std | none | none, c++98, c++03, c++11, c++14, c++17, <br/>c++1z, gnu++03, gnu++11, gnu++14, gnu++17, gnu++1z, <br/> vc++14, vc++17, vc++latest | C++ language standard to use |
| cpp_debugstl | false | true, false | C++ STL debug mode |
| cpp_eh | sc | none, a, s, sc | C++ exception handling type |
| cpp_winlibs | see below | free-form comma-separated list | Standard Windows libs to link against |
diff --git a/docs/markdown/FAQ.md b/docs/markdown/FAQ.md
index ff93216..0208c1a 100644
--- a/docs/markdown/FAQ.md
+++ b/docs/markdown/FAQ.md
@@ -331,3 +331,37 @@ that could fullfill these requirements:
Out of these we have chosen Python because it is the best fit for our
needs.
+
+## I have proprietary compiler toolchain X that does not work with Meson, how can I make it work?
+
+Meson needs to know several details about each compiler in order to
+compile code with it. These include things such as which compiler
+flags to use for each option and how to detect the compiler from its
+output. This information cannot be provided via a configuration file;
+instead it requires changes to Meson's source code that need to be
+submitted to the Meson master repository. In theory you can run your
+own forked version with custom patches, but that's not a good use of
+your time. Please submit the code upstream so everyone can use the
+toolchain.
+
+The steps for adding a new compiler for an existing language are
+roughly the following. For simplicity we're going to assume a C
+compiler.
+
+- Create a new class with a proper name in
+ `mesonbuild/compilers/c.py`. Look at the methods that other
+ compilers for the same language have and duplicate what they do.
+
+- If the compiler can only be used for cross compilation, make sure to
+ flag it as such (see existing compiler classes for examples).
+
+- Add detection logic to `mesonbuild/environment.py`; look for a
+  method called `detect_c_compiler`.
+
+- Run the test suite and fix issues until the tests pass.
+
+- Submit a pull request and add the result of the test suite to it
+  (linking to an existing page is fine).
+
+- If the compiler is freely available, consider adding it to the CI
+ system.
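For the first two steps above, a compiler class is in essence a Python class that tells Meson how to invoke the tool and which flags correspond to which options. The skeleton below is a rough, standalone sketch only; the names are hypothetical, and a real class must derive from the appropriate base class in `mesonbuild/compilers/c.py` and implement the same methods its siblings do:

```python
# Hypothetical skeleton; copy the exact base class, constructor signature and
# required methods from an existing compiler in mesonbuild/compilers/c.py.
class AcmeCCompiler:
    id = 'acme'                        # what compiler.get_id() should report

    def __init__(self, exelist, version, is_cross=False):
        self.exelist = exelist         # e.g. ['acme-cc']
        self.version = version
        self.is_cross = is_cross       # True if the tool is cross-only

    def get_exelist(self):
        return self.exelist[:]

    def get_output_args(self, outputname):
        return ['-o', outputname]      # flags that name the output file

    def get_compile_only_args(self):
        return ['-c']                  # compile without linking
```

Detection (the `detect_c_compiler` step) then usually amounts to running each candidate binary with a version flag and choosing the class whose expected output matches.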
diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md
index 056612d..d86d825 100644
--- a/docs/markdown/Reference-manual.md
+++ b/docs/markdown/Reference-manual.md
@@ -518,6 +518,8 @@ be passed to [shared and static libraries](#library).
depends on such as a symbol visibility map. The purpose is to
automatically trigger a re-link (but not a re-compile) of the target
when this file changes.
+- `link_language` since 0.51.0 forces the linker of the specified
+  language to be used for this target. This is helpful for multi-language targets.
- `link_whole` links all contents of the given static libraries
whether they are used by not, equivalent to the
`-Wl,--whole-archive` argument flag of GCC, available since 0.40.0.
@@ -568,7 +570,7 @@ be passed to [shared and static libraries](#library).
the keyword argument for the default behaviour.
- `override_options` takes an array of strings in the same format as
`project`'s `default_options` overriding the values of these options
- for this target only, since 0.40.0
+ for this target only, since 0.40.0.
- `gnu_symbol_visibility` specifies how symbols should be exported, see
e.g [the GCC Wiki](https://gcc.gnu.org/wiki/Visibility) for more
information. This value can either be an empty string or one of
diff --git a/docs/markdown/Syntax.md b/docs/markdown/Syntax.md
index cf56dd3..aadb14a 100644
--- a/docs/markdown/Syntax.md
+++ b/docs/markdown/Syntax.md
@@ -116,6 +116,9 @@ The full list of escape sequences is:
As in python and C, up to three octal digits are accepted in `\ooo`.
+Unrecognized escape sequences are left in the string unchanged, i.e.,
+the backslash is kept as-is.
+
#### String concatenation
Strings can be concatenated to form a new string using the `+` symbol.
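The behaviour documented above, where recognized escapes are decoded and anything else keeps its backslash, can be modelled in a few lines of Python. This is only an illustrative sketch of the rule, not the actual `mesonbuild/mparser.py` implementation:

```python
import codecs
import re

# Match only the escape sequences listed above; anything else is not touched.
ESCAPES = re.compile(r'''\\( [\\'abfnrtv]
                        | [0-7]{1,3}          # octal, up to three digits
                        | x[0-9A-Fa-f]{2}
                        | u[0-9A-Fa-f]{4}
                        | U[0-9A-Fa-f]{8} )''', re.VERBOSE)

def decode(raw):
    # Recognized sequences are decoded; an unknown one such as \m never
    # matches, so its backslash stays in the string unchanged.
    return ESCAPES.sub(lambda m: codecs.decode(m.group(0), 'unicode_escape'), raw)

print(decode(r'tab:\t octal:\101 unknown:\m'))
# the \t is decoded, \101 becomes 'A', and \m keeps its backslash
```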
diff --git a/docs/markdown/snippets/link_language.md b/docs/markdown/snippets/link_language.md
new file mode 100644
index 0000000..28ebe8b
--- /dev/null
+++ b/docs/markdown/snippets/link_language.md
@@ -0,0 +1,10 @@
+## New target keyword argument: `link_language`
+There may be situations where the user wishes to manually specify the linking language.
+For example, a C++ target may link C, Fortran, etc. and perhaps Meson's automatic detection does not pick the desired compiler.
+The user can manually choose the linker per target by language, as in this example of a target that should be linked with the Fortran compiler:
+```meson
+executable(..., link_language : 'fortran')
+```
+
+A specific case this option fixes is where, for example, the main program is Fortran code that calls C and/or C++ code.
+Meson's automatic language detection prioritizes C/C++, so a link-time error such as `undefined reference to main` results because the linker is C or C++ instead of Fortran. This per-target override fixes that.
diff --git a/docs/markdown/snippets/linkcustom.md b/docs/markdown/snippets/linkcustom.md
index d6ee801..0cf45ad 100644
--- a/docs/markdown/snippets/linkcustom.md
+++ b/docs/markdown/snippets/linkcustom.md
@@ -1,6 +1,6 @@
## Can link against custom targets
-The output of `custom_target` can be used in `link_with` and
+The output of `custom_target` and `custom_target[i]` can be used in `link_with` and
`link_whole` keyword arguments. This is useful for integrating custom
code generator steps, but note that there are many limitations:
@@ -10,7 +10,8 @@ code generator steps, but note that there are many limitations:
- The user is responsible for ensuring that the code produced by
different toolchains are compatible.
- - The custom target can only have one output file.
+ - `custom_target` may only be used when it has a single output file.
+ Use `custom_target[i]` when dealing with multiple output files.
- The output file must have the correct file name extension.
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 49d531f..b6ec450 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -122,7 +122,7 @@ class IntrospectionInterpreter(AstInterpreter):
subi.analyze()
subi.project_data['name'] = dirname
self.project_data['subprojects'] += [subi.project_data]
- except:
+ except (mesonlib.MesonException, RuntimeError):
return
def func_add_languages(self, node, args, kwargs):
@@ -173,9 +173,9 @@ class IntrospectionInterpreter(AstInterpreter):
arg_node = curr.args
elif isinstance(curr, IdNode):
# Try to resolve the ID and append the node to the queue
- id = curr.value
- if id in self.assignments and self.assignments[id]:
- tmp_node = self.assignments[id][0]
+ var_name = curr.value
+ if var_name in self.assignments and self.assignments[var_name]:
+ tmp_node = self.assignments[var_name][0]
if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)):
srcqueue += [tmp_node]
elif isinstance(curr, ArithmeticNode):
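The `except:` → `except (mesonlib.MesonException, RuntimeError):` change above narrows the handler to the errors subproject analysis is expected to raise; a bare `except` also swallows `KeyboardInterrupt` and `SystemExit`. A small self-contained sketch of the difference (the helper names here are made up; only the exception tuple mirrors the hunk):

```python
class MesonException(Exception):
    """Stand-in for mesonlib.MesonException, for this sketch only."""

def analyze_subproject(dirname):
    raise KeyboardInterrupt          # pretend the user hit Ctrl-C mid-analysis

def analyze_bare(dirname):
    try:
        analyze_subproject(dirname)
    except:                          # also swallows KeyboardInterrupt/SystemExit
        return

def analyze_narrow(dirname):
    try:
        analyze_subproject(dirname)
    except (MesonException, RuntimeError):   # only the expected failures
        return

analyze_bare('subproject')           # Ctrl-C is silently ignored
try:
    analyze_narrow('subproject')     # Ctrl-C propagates as the user intended
except KeyboardInterrupt:
    print('KeyboardInterrupt reached the caller')
```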
diff --git a/mesonbuild/ast/visitor.py b/mesonbuild/ast/visitor.py
index c8769d4..fab4ed2 100644
--- a/mesonbuild/ast/visitor.py
+++ b/mesonbuild/ast/visitor.py
@@ -134,7 +134,5 @@ class AstVisitor:
self.visit_default_func(node)
for i in node.arguments:
i.accept(self)
- for i in node.commas:
- pass
for val in node.kwargs.values():
val.accept(self)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 4a4f7f4..d0b4bb5 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -171,6 +171,8 @@ class Backend:
mlog.warning('custom_target {!r} has more than one output! '
'Using the first one.'.format(t.name))
filename = t.get_outputs()[0]
+ elif isinstance(t, build.CustomTargetIndex):
+ filename = t.get_outputs()[0]
else:
assert(isinstance(t, build.BuildTarget))
filename = t.get_filename()
@@ -214,7 +216,7 @@ class Backend:
return os.path.join(self.get_target_dir(target), link_lib)
elif isinstance(target, build.StaticLibrary):
return os.path.join(self.get_target_dir(target), target.get_filename())
- elif isinstance(target, build.CustomTarget):
+ elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)):
if not target.is_linkable_target():
raise MesonException('Tried to link against custom target "%s", which is not linkable.' % target.name)
return os.path.join(self.get_target_dir(target), target.get_filename())
@@ -320,12 +322,14 @@ class Backend:
raise MesonException('Unknown data type in object list.')
return obj_list
- def serialize_executable(self, tname, exe, cmd_args, workdir, env={},
+ def serialize_executable(self, tname, exe, cmd_args, workdir, env=None,
extra_paths=None, capture=None):
'''
Serialize an executable for running with a generator or a custom target
'''
import hashlib
+ if env is None:
+ env = {}
if extra_paths is None:
# The callee didn't check if we needed extra paths, so check it here
if mesonlib.is_windows() or mesonlib.is_cygwin():
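The `env={}` → `env=None` change in `serialize_executable` above fixes Python's shared-mutable-default pitfall: the default dict is created once, at function definition time, so state added in one call leaks into the next. A standalone illustration (the function names are invented; only the pattern matches the hunk):

```python
def serialize_bad(name, env={}):        # one dict shared by every call
    env.setdefault('MESON_EXE', name)
    return env

def serialize_good(name, env=None):     # fresh dict per call
    if env is None:
        env = {}
    env.setdefault('MESON_EXE', name)
    return env

print(serialize_bad('first'))    # {'MESON_EXE': 'first'}
print(serialize_bad('second'))   # still {'MESON_EXE': 'first'} - leaked state
print(serialize_good('second'))  # {'MESON_EXE': 'second'}
```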
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 8edaeec..7eafcad 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -40,12 +40,12 @@ FORTRAN_SUBMOD_PAT = r"\s*submodule\s*\((\w+:?\w+)\)\s*(\w+)\s*$"
if mesonlib.is_windows():
quote_func = lambda s: '"{}"'.format(s)
- execute_wrapper = 'cmd /c'
- rmfile_prefix = 'del /f /s /q {} &&'
+ execute_wrapper = ['cmd', '/c']
+ rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&']
else:
quote_func = shlex.quote
- execute_wrapper = ''
- rmfile_prefix = 'rm -f {} &&'
+ execute_wrapper = []
+ rmfile_prefix = ['rm', '-f', '{}', '&&']
def ninja_quote(text, is_build_line=False):
if is_build_line:
@@ -63,6 +63,52 @@ Please report this error with a test case to the Meson bug tracker.''' % text
raise MesonException(errmsg)
return text
+class NinjaComment:
+ def __init__(self, comment):
+ self.comment = comment
+
+ def write(self, outfile):
+ for l in self.comment.split('\n'):
+ outfile.write('# ')
+ outfile.write(l)
+ outfile.write('\n')
+ outfile.write('\n')
+
+class NinjaRule:
+ def __init__(self, rule, command, args, description,
+ rspable = False, deps = None, depfile = None, extra = None):
+ self.name = rule
+ self.command = command # includes args which never go into a rspfile
+ self.args = args # args which will go into a rspfile, if used
+ self.description = description
+ self.deps = deps # depstyle 'gcc' or 'msvc'
+ self.depfile = depfile
+ self.extra = extra
+ self.rspable = rspable # if a rspfile can be used
+ self.refcount = 0
+
+ def write(self, outfile):
+ if not self.refcount:
+ return
+
+ outfile.write('rule %s\n' % self.name)
+ if self.rspable:
+ outfile.write(' command = %s @$out.rsp\n' % ' '.join(self.command))
+ outfile.write(' rspfile = $out.rsp\n')
+ outfile.write(' rspfile_content = %s\n' % ' '.join(self.args))
+ else:
+ outfile.write(' command = %s\n' % ' '.join(self.command + self.args))
+ if self.deps:
+ outfile.write(' deps = %s\n' % self.deps)
+ if self.depfile:
+ outfile.write(' depfile = %s\n' % self.depfile)
+ outfile.write(' description = %s\n' % self.description)
+ if self.extra:
+ for l in self.extra.split('\n'):
+ outfile.write(' ')
+ outfile.write(l)
+ outfile.write('\n')
+ outfile.write('\n')
class NinjaBuildElement:
def __init__(self, all_outputs, outfilenames, rule, infilenames):
@@ -116,7 +162,10 @@ class NinjaBuildElement:
line = line.replace('\\', '/')
outfile.write(line)
- # All the entries that should remain unquoted
+ # ninja variables whose value should remain unquoted. The value of these
+ # ninja variables (or variables we use them in) is interpreted directly
+ # by ninja (e.g. the value of the depfile variable is a pathname that
+ # ninja will read from, etc.), so it must not be shell quoted.
raw_names = {'DEPFILE', 'DESC', 'pool', 'description'}
for e in self.elems:
@@ -154,7 +203,7 @@ class NinjaBackend(backends.Backend):
self.all_outputs = {}
self.introspection_data = {}
- def create_target_alias(self, to_target, outfile):
+ def create_target_alias(self, to_target):
# We need to use aliases for targets that might be used as directory
# names to workaround a Ninja bug that breaks `ninja -t clean`.
# This is used for 'reserved' targets such as 'test', 'install',
@@ -165,7 +214,7 @@ class NinjaBackend(backends.Backend):
raise AssertionError(m.format(to_target))
from_target = to_target[len('meson-'):]
elem = NinjaBuildElement(self.all_outputs, from_target, 'phony', to_target)
- elem.write(outfile)
+ self.add_build(elem)
def detect_vs_dep_prefix(self, tempfilename):
'''VS writes its dependency in a locale dependent format.
@@ -220,24 +269,40 @@ int dummy;
outfile.write('# It is autogenerated by the Meson build system.\n')
outfile.write('# Do not edit by hand.\n\n')
outfile.write('ninja_required_version = 1.5.1\n\n')
+
+ num_pools = self.environment.coredata.backend_options['backend_max_links'].value
+ if num_pools > 0:
+ outfile.write('''pool link_pool
+ depth = %d
+
+''' % num_pools)
+
with self.detect_vs_dep_prefix(tempfilename) as outfile:
- self.generate_rules(outfile)
- self.generate_phony(outfile)
- outfile.write('# Build rules for targets\n\n')
+ self.generate_rules()
+
+ self.build_elements = []
+ self.generate_phony()
+ self.add_build_comment(NinjaComment('Build rules for targets'))
for t in self.build.get_targets().values():
- self.generate_target(t, outfile)
- outfile.write('# Test rules\n\n')
- self.generate_tests(outfile)
- outfile.write('# Install rules\n\n')
- self.generate_install(outfile)
- self.generate_dist(outfile)
+ self.generate_target(t)
+ self.add_build_comment(NinjaComment('Test rules'))
+ self.generate_tests()
+ self.add_build_comment(NinjaComment('Install rules'))
+ self.generate_install()
+ self.generate_dist()
if 'b_coverage' in self.environment.coredata.base_options and \
self.environment.coredata.base_options['b_coverage'].value:
- outfile.write('# Coverage rules\n\n')
- self.generate_coverage_rules(outfile)
- outfile.write('# Suffix\n\n')
- self.generate_utils(outfile)
- self.generate_ending(outfile)
+ self.add_build_comment(NinjaComment('Coverage rules'))
+ self.generate_coverage_rules()
+ self.add_build_comment(NinjaComment('Suffix'))
+ self.generate_utils()
+ self.generate_ending()
+
+ self.write_rules(outfile)
+ self.write_builds(outfile)
+
+ default = 'default all\n\n'
+ outfile.write(default)
# Only overwrite the old build file after the new one has been
# fully created.
os.replace(tempfilename, outfilename)
@@ -341,9 +406,9 @@ int dummy;
}
}
'''
- id = target.get_id()
+ tid = target.get_id()
lang = comp.get_language()
- tgt = self.introspection_data[id]
+ tgt = self.introspection_data[tid]
# Find an existing entry or create a new one
id_hash = (lang, tuple(parameters))
src_block = tgt.get(id_hash, None)
@@ -377,11 +442,11 @@ int dummy;
return True
return False
- def generate_target(self, target, outfile):
+ def generate_target(self, target):
if isinstance(target, build.CustomTarget):
- self.generate_custom_target(target, outfile)
+ self.generate_custom_target(target)
if isinstance(target, build.RunTarget):
- self.generate_run_target(target, outfile)
+ self.generate_run_target(target)
name = target.get_id()
if name in self.processed_targets:
return
@@ -389,20 +454,20 @@ int dummy;
# Initialize an empty introspection source list
self.introspection_data[name] = {}
# Generate rules for all dependency targets
- self.process_target_dependencies(target, outfile)
+ self.process_target_dependencies(target)
# If target uses a language that cannot link to C objects,
# just generate for that language and return.
if isinstance(target, build.Jar):
- self.generate_jar_target(target, outfile)
+ self.generate_jar_target(target)
return
if self.is_rust_target(target):
- self.generate_rust_target(target, outfile)
+ self.generate_rust_target(target)
return
if 'cs' in target.compilers:
- self.generate_cs_target(target, outfile)
+ self.generate_cs_target(target)
return
if 'swift' in target.compilers:
- self.generate_swift_target(target, outfile)
+ self.generate_swift_target(target)
return
# Now we handle the following languages:
@@ -420,14 +485,14 @@ int dummy;
# Sources consumed by valac are filtered out. These only contain
# C/C++ sources, objects, generated libs, and unknown sources now.
target_sources, generated_sources, \
- vala_generated_sources = self.generate_vala_compile(target, outfile)
+ vala_generated_sources = self.generate_vala_compile(target)
else:
target_sources = self.get_target_sources(target)
generated_sources = self.get_target_generated_sources(target)
vala_generated_sources = []
self.scan_fortran_module_outputs(target)
# Generate rules for GeneratedLists
- self.generate_generator_list_rules(target, outfile)
+ self.generate_generator_list_rules(target)
# Generate rules for building the remaining source files in this target
outname = self.get_target_filename(target)
@@ -455,7 +520,7 @@ int dummy;
# This will be set as dependencies of all the target's sources. At the
# same time, also deal with generated sources that need to be compiled.
generated_source_files = []
- for rel_src, gensrc in generated_sources.items():
+ for rel_src in generated_sources.keys():
dirpart, fnamepart = os.path.split(rel_src)
raw_src = File(True, dirpart, fnamepart)
if self.environment.is_source(rel_src) and not self.environment.is_header(rel_src):
@@ -479,15 +544,15 @@ int dummy;
# because we need `header_deps` to be fully generated in the above loop.
for src in generated_source_files:
if self.environment.is_llvm_ir(src):
- o = self.generate_llvm_ir_compile(target, outfile, src)
+ o = self.generate_llvm_ir_compile(target, src)
else:
- o = self.generate_single_compile(target, outfile, src, True,
+ o = self.generate_single_compile(target, src, True,
header_deps=header_deps)
obj_list.append(o)
use_pch = self.environment.coredata.base_options.get('b_pch', False)
if use_pch and target.has_pch():
- pch_objects = self.generate_pch(target, outfile, header_deps=header_deps)
+ pch_objects = self.generate_pch(target, header_deps=header_deps)
else:
pch_objects = []
@@ -517,39 +582,39 @@ int dummy;
# Passing 'vala' here signifies that we want the compile
# arguments to be specialized for C code generated by
# valac. For instance, no warnings should be emitted.
- obj_list.append(self.generate_single_compile(target, outfile, src, 'vala', [], header_deps))
+ obj_list.append(self.generate_single_compile(target, src, 'vala', [], header_deps))
# Generate compile targets for all the pre-existing sources for this target
- for f, src in target_sources.items():
+ for src in target_sources.values():
if not self.environment.is_header(src):
if self.environment.is_llvm_ir(src):
- obj_list.append(self.generate_llvm_ir_compile(target, outfile, src))
+ obj_list.append(self.generate_llvm_ir_compile(target, src))
elif is_unity and self.get_target_source_can_unity(target, src):
abs_src = os.path.join(self.environment.get_build_dir(),
src.rel_to_builddir(self.build_to_src))
unity_src.append(abs_src)
else:
- obj_list.append(self.generate_single_compile(target, outfile, src, False, [], header_deps))
+ obj_list.append(self.generate_single_compile(target, src, False, [], header_deps))
obj_list += self.flatten_object_list(target)
if is_unity:
for src in self.generate_unity_files(target, unity_src):
- obj_list.append(self.generate_single_compile(target, outfile, src, True, unity_deps + header_deps))
+ obj_list.append(self.generate_single_compile(target, src, True, unity_deps + header_deps))
linker, stdlib_args = self.determine_linker_and_stdlib_args(target)
- elem = self.generate_link(target, outfile, outname, obj_list, linker, pch_objects, stdlib_args=stdlib_args)
+ elem = self.generate_link(target, outname, obj_list, linker, pch_objects, stdlib_args=stdlib_args)
self.generate_shlib_aliases(target, self.get_target_dir(target))
- elem.write(outfile)
+ self.add_build(elem)
- def process_target_dependencies(self, target, outfile):
+ def process_target_dependencies(self, target):
for t in target.get_dependencies():
if t.get_id() not in self.processed_targets:
- self.generate_target(t, outfile)
+ self.generate_target(t)
- def custom_target_generator_inputs(self, target, outfile):
+ def custom_target_generator_inputs(self, target):
for s in target.sources:
if hasattr(s, 'held_object'):
s = s.held_object
if isinstance(s, build.GeneratedList):
- self.generate_genlist_for_target(s, target, outfile)
+ self.generate_genlist_for_target(s, target)
def unwrap_dep_list(self, target):
deps = []
@@ -562,8 +627,8 @@ int dummy;
deps.append(os.path.join(self.get_target_dir(i), output))
return deps
- def generate_custom_target(self, target, outfile):
- self.custom_target_generator_inputs(target, outfile)
+ def generate_custom_target(self, target):
+ self.custom_target_generator_inputs(target)
(srcs, ofilenames, cmd) = self.eval_custom_target_command(target)
deps = self.unwrap_dep_list(target)
deps += self.get_custom_target_depend_files(target)
@@ -624,10 +689,10 @@ int dummy;
cmd = self.replace_paths(target, cmd)
elem.add_item('COMMAND', cmd)
elem.add_item('description', desc.format(target.name, cmd_type))
- elem.write(outfile)
+ self.add_build(elem)
self.processed_targets[target.get_id()] = True
- def generate_run_target(self, target, outfile):
+ def generate_run_target(self, target):
cmd = self.environment.get_build_command() + ['--internal', 'commandrunner']
deps = self.unwrap_dep_list(target)
arg_strings = []
@@ -686,9 +751,9 @@ int dummy;
elem.add_item('COMMAND', cmd)
elem.add_item('description', 'Running external command %s.' % target.name)
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
# Alias that runs the target defined above with the name the user specified
- self.create_target_alias(target_name, outfile)
+ self.create_target_alias(target_name)
self.processed_targets[target.get_id()] = True
def generate_coverage_command(self, elem, outputs):
@@ -701,49 +766,49 @@ int dummy;
self.environment.get_build_dir(),
self.environment.get_log_dir()])
- def generate_coverage_rules(self, outfile):
+ def generate_coverage_rules(self):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, [])
e.add_item('description', 'Generates coverage reports.')
- e.write(outfile)
+ self.add_build(e)
# Alias that runs the target defined above
- self.create_target_alias('meson-coverage', outfile)
- self.generate_coverage_legacy_rules(outfile)
+ self.create_target_alias('meson-coverage')
+ self.generate_coverage_legacy_rules()
- def generate_coverage_legacy_rules(self, outfile):
+ def generate_coverage_legacy_rules(self):
e = NinjaBuildElement(self.all_outputs, 'meson-coverage-xml', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--xml'])
e.add_item('description', 'Generates XML coverage report.')
- e.write(outfile)
+ self.add_build(e)
# Alias that runs the target defined above
- self.create_target_alias('meson-coverage-xml', outfile)
+ self.create_target_alias('meson-coverage-xml')
e = NinjaBuildElement(self.all_outputs, 'meson-coverage-text', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--text'])
e.add_item('description', 'Generates text coverage report.')
- e.write(outfile)
+ self.add_build(e)
# Alias that runs the target defined above
- self.create_target_alias('meson-coverage-text', outfile)
+ self.create_target_alias('meson-coverage-text')
e = NinjaBuildElement(self.all_outputs, 'meson-coverage-html', 'CUSTOM_COMMAND', 'PHONY')
self.generate_coverage_command(e, ['--html'])
e.add_item('description', 'Generates HTML coverage report.')
- e.write(outfile)
+ self.add_build(e)
# Alias that runs the target defined above
- self.create_target_alias('meson-coverage-html', outfile)
+ self.create_target_alias('meson-coverage-html')
- def generate_install(self, outfile):
+ def generate_install(self):
self.create_install_data_files()
elem = NinjaBuildElement(self.all_outputs, 'meson-install', 'CUSTOM_COMMAND', 'PHONY')
elem.add_dep('all')
elem.add_item('DESC', 'Installing files.')
elem.add_item('COMMAND', self.environment.get_build_command() + ['install', '--no-rebuild'])
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
# Alias that runs the target defined above
- self.create_target_alias('meson-install', outfile)
+ self.create_target_alias('meson-install')
- def generate_tests(self, outfile):
+ def generate_tests(self):
self.serialize_tests()
cmd = self.environment.get_build_command(True) + ['test', '--no-rebuild']
if not self.environment.coredata.get_builtin_option('stdsplit'):
@@ -754,9 +819,9 @@ int dummy;
elem.add_item('COMMAND', cmd)
elem.add_item('DESC', 'Running all tests.')
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
# Alias that runs the above-defined meson-test target
- self.create_target_alias('meson-test', outfile)
+ self.create_target_alias('meson-test')
# And then benchmarks.
cmd = self.environment.get_build_command(True) + [
@@ -766,54 +831,71 @@ int dummy;
elem.add_item('COMMAND', cmd)
elem.add_item('DESC', 'Running benchmark suite.')
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
# Alias that runs the above-defined meson-benchmark target
- self.create_target_alias('meson-benchmark', outfile)
+ self.create_target_alias('meson-benchmark')
- def generate_rules(self, outfile):
- outfile.write('# Rules for compiling.\n\n')
- self.generate_compile_rules(outfile)
- outfile.write('# Rules for linking.\n\n')
- num_pools = self.environment.coredata.backend_options['backend_max_links'].value
- if num_pools > 0:
- outfile.write('''pool link_pool
- depth = %d
+ def generate_rules(self):
+ self.rules = []
+ self.ruledict = {}
-''' % num_pools)
+ self.add_rule_comment(NinjaComment('Rules for compiling.'))
+ self.generate_compile_rules()
+ self.add_rule_comment(NinjaComment('Rules for linking.'))
if self.environment.is_cross_build():
- self.generate_static_link_rules(True, outfile)
- self.generate_static_link_rules(False, outfile)
- self.generate_dynamic_link_rules(outfile)
- outfile.write('# Other rules\n\n')
- outfile.write('rule CUSTOM_COMMAND\n')
- outfile.write(' command = $COMMAND\n')
- outfile.write(' description = $DESC\n')
- outfile.write(' restat = 1\n\n')
+ self.generate_static_link_rules(True)
+ self.generate_static_link_rules(False)
+ self.generate_dynamic_link_rules()
+ self.add_rule_comment(NinjaComment('Other rules'))
# Ninja errors out if you have deps = gcc but no depfile, so we must
# have two rules for custom commands.
- outfile.write('rule CUSTOM_COMMAND_DEP\n')
- outfile.write(' command = $COMMAND\n')
- outfile.write(' description = $DESC\n')
- outfile.write(' deps = gcc\n')
- outfile.write(' depfile = $DEPFILE\n')
- outfile.write(' restat = 1\n\n')
- outfile.write('rule REGENERATE_BUILD\n')
+ self.add_rule(NinjaRule('CUSTOM_COMMAND', ['$COMMAND'], [], '$DESC',
+ extra='restat = 1'))
+ self.add_rule(NinjaRule('CUSTOM_COMMAND_DEP', ['$COMMAND'], [], '$DESC',
+ deps='gcc', depfile='$DEPFILE',
+ extra='restat = 1'))
+
c = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
['--internal',
'regenerate',
ninja_quote(quote_func(self.environment.get_source_dir())),
ninja_quote(quote_func(self.environment.get_build_dir()))]
- outfile.write(" command = " + ' '.join(c) + ' --backend ninja\n')
- outfile.write(' description = Regenerating build files.\n')
- outfile.write(' generator = 1\n\n')
- outfile.write('\n')
+ self.add_rule(NinjaRule('REGENERATE_BUILD',
+ c + ['--backend', 'ninja'], [],
+ 'Regenerating build files.',
+ extra='generator = 1'))
- def generate_phony(self, outfile):
- outfile.write('# Phony build target, always out of date\n')
- outfile.write('build PHONY: phony\n')
- outfile.write('\n')
+ def add_rule_comment(self, comment):
+ self.rules.append(comment)
+
+ def add_build_comment(self, comment):
+ self.build_elements.append(comment)
+
+ def add_rule(self, rule):
+ self.rules.append(rule)
+ self.ruledict[rule.name] = rule
+
+ def add_build(self, build):
+ self.build_elements.append(build)
+
+ # increment rule refcount
+ if build.rule != 'phony':
+ self.ruledict[build.rule].refcount += 1
+
+ def write_rules(self, outfile):
+ for r in self.rules:
+ r.write(outfile)
+
+ def write_builds(self, outfile):
+ for b in self.build_elements:
+ b.write(outfile)
+
+ def generate_phony(self):
+ self.add_build_comment(NinjaComment('Phony build target, always out of date'))
+ elem = NinjaBuildElement(self.all_outputs, 'PHONY', 'phony', '')
+ self.add_build(elem)
- def generate_jar_target(self, target, outfile):
+ def generate_jar_target(self, target):
fname = target.get_filename()
outname_rel = os.path.join(self.get_target_dir(target), fname)
src_list = target.get_sources()
@@ -830,7 +912,7 @@ int dummy;
# Add possible java generated files to src list
generated_sources = self.get_target_generated_sources(target)
gen_src_list = []
- for rel_src, gensrc in generated_sources.items():
+ for rel_src in generated_sources.keys():
dirpart, fnamepart = os.path.split(rel_src)
raw_src = File(True, dirpart, fnamepart)
if rel_src.endswith('.java'):
@@ -838,7 +920,7 @@ int dummy;
compile_args = self.determine_single_java_compile_args(target, compiler)
for src in src_list + gen_src_list:
- plain_class_path = self.generate_single_java_compile(src, target, compiler, compile_args, outfile)
+ plain_class_path = self.generate_single_java_compile(src, target, compiler, compile_args)
class_list.append(plain_class_path)
class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list]
manifest_path = os.path.join(self.get_target_private_dir(target), 'META-INF', 'MANIFEST.MF')
@@ -863,11 +945,11 @@ int dummy;
elem = NinjaBuildElement(self.all_outputs, outname_rel, jar_rule, [])
elem.add_dep(class_dep_list)
elem.add_item('ARGS', commands)
- elem.write(outfile)
+ self.add_build(elem)
# Create introspection information
self.create_target_source_introspection(target, compiler, compile_args, src_list, gen_src_list)
- def generate_cs_resource_tasks(self, target, outfile):
+ def generate_cs_resource_tasks(self, target):
args = []
deps = []
for r in target.resources:
@@ -880,7 +962,7 @@ int dummy;
elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile)
elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename])
elem.add_item('DESC', 'Compiling resource %s.' % rel_sourcefile)
- elem.write(outfile)
+ self.add_build(elem)
deps.append(ofilename)
a = '-resource:' + ofilename
else:
@@ -888,7 +970,7 @@ int dummy;
args.append(a)
return args, deps
- def generate_cs_target(self, target, outfile):
+ def generate_cs_target(self, target):
buildtype = self.get_option_for_target('buildtype', target)
fname = target.get_filename()
outname_rel = os.path.join(self.get_target_dir(target), fname)
@@ -906,7 +988,7 @@ int dummy;
commands.append('-target:library')
else:
raise MesonException('Unknown C# target type.')
- (resource_args, resource_deps) = self.generate_cs_resource_tasks(target, outfile)
+ (resource_args, resource_deps) = self.generate_cs_resource_tasks(target)
commands += resource_args
deps += resource_deps
commands += compiler.get_output_args(outname_rel)
@@ -934,9 +1016,9 @@ int dummy;
elem = NinjaBuildElement(self.all_outputs, outputs, 'cs_COMPILER', rel_srcs + generated_rel_srcs)
elem.add_dep(deps)
elem.add_item('ARGS', commands)
- elem.write(outfile)
+ self.add_build(elem)
- self.generate_generator_list_rules(target, outfile)
+ self.generate_generator_list_rules(target)
self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs)
def determine_single_java_compile_args(self, target, compiler):
@@ -956,10 +1038,10 @@ int dummy;
args += ['-sourcepath', sourcepath]
return args
- def generate_single_java_compile(self, src, target, compiler, args, outfile):
+ def generate_single_java_compile(self, src, target, compiler, args):
deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets]
generated_sources = self.get_target_generated_sources(target)
- for rel_src, gensrc in generated_sources.items():
+ for rel_src in generated_sources.keys():
if rel_src.endswith('.java'):
deps.append(rel_src)
rel_src = src.rel_to_builddir(self.build_to_src)
@@ -968,17 +1050,14 @@ int dummy;
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler.get_language() + '_COMPILER', rel_src)
element.add_dep(deps)
element.add_item('ARGS', args)
- element.write(outfile)
+ self.add_build(element)
return plain_class_path
- def generate_java_link(self, outfile):
- rule = 'rule java_LINKER\n'
- command = ' command = jar $ARGS\n'
- description = ' description = Creating JAR $out.\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(description)
- outfile.write('\n')
+ def generate_java_link(self):
+ rule = 'java_LINKER'
+ command = ['jar', '$ARGS']
+ description = 'Creating JAR $out.'
+ self.add_rule(NinjaRule(rule, command, [], description))
def determine_dep_vapis(self, target):
"""
@@ -1054,7 +1133,7 @@ int dummy;
srctype[f] = gensrc
return vala, vapi, (others, othersgen)
- def generate_vala_compile(self, target, outfile):
+ def generate_vala_compile(self, target):
"""Vala is compiled into C. Set up all necessary build steps here."""
(vala_src, vapi_src, other_src) = self.split_vala_sources(target)
extra_dep_files = []
@@ -1148,7 +1227,7 @@ int dummy;
if len(target.install_dir) > 3 and target.install_dir[3] is True:
target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0')
# Detect gresources and add --gresources arguments for each
- for (gres, gensrc) in other_src[1].items():
+ for gensrc in other_src[1].values():
if isinstance(gensrc, modules.GResourceTarget):
gres_xml, = self.get_custom_target_sources(gensrc)
args += ['--gresources=' + gres_xml]
@@ -1169,11 +1248,11 @@ int dummy;
all_files + dependency_vapis)
element.add_item('ARGS', args)
element.add_dep(extra_dep_files)
- element.write(outfile)
+ self.add_build(element)
self.create_target_source_introspection(target, valac, args, all_files, [])
return other_src[0], other_src[1], vala_c_src
- def generate_rust_target(self, target, outfile):
+ def generate_rust_target(self, target):
rustc = target.compilers['rust']
# Rust compiler takes only the main file as input and
# figures out what other files are needed via import
@@ -1261,9 +1340,9 @@ int dummy;
element.add_item('ARGS', args)
element.add_item('targetdep', depfile)
element.add_item('cratetype', cratetype)
- element.write(outfile)
+ self.add_build(element)
if isinstance(target, build.SharedLibrary):
- self.generate_shsym(outfile, target)
+ self.generate_shsym(target)
self.create_target_source_introspection(target, rustc, args, [main_rust_file], [])
def swift_module_file_name(self, target):
@@ -1309,7 +1388,7 @@ int dummy;
others.append(i)
return srcs, others
- def generate_swift_target(self, target, outfile):
+ def generate_swift_target(self, target):
module_name = self.target_swift_modulename(target)
swiftc = target.compilers['swift']
abssrc = []
@@ -1383,35 +1462,35 @@ int dummy;
elem.add_dep(abs_headers)
elem.add_item('ARGS', compile_args + header_imports + abs_generated + module_includes)
elem.add_item('RUNDIR', rundir)
- elem.write(outfile)
+ self.add_build(elem)
elem = NinjaBuildElement(self.all_outputs, out_module_name,
'swift_COMPILER',
abssrc)
elem.add_dep(in_module_files + rel_generated)
elem.add_item('ARGS', compile_args + abs_generated + module_includes + swiftc.get_mod_gen_args())
elem.add_item('RUNDIR', rundir)
- elem.write(outfile)
+ self.add_build(elem)
if isinstance(target, build.StaticLibrary):
- elem = self.generate_link(target, outfile, self.get_target_filename(target),
+ elem = self.generate_link(target, self.get_target_filename(target),
rel_objects, self.build.static_linker)
- elem.write(outfile)
+ self.add_build(elem)
elif isinstance(target, build.Executable):
elem = NinjaBuildElement(self.all_outputs, self.get_target_filename(target), 'swift_COMPILER', [])
elem.add_dep(rel_objects)
elem.add_dep(link_deps)
elem.add_item('ARGS', link_args + swiftc.get_std_exe_link_args() + objects + abs_link_deps)
elem.add_item('RUNDIR', rundir)
- elem.write(outfile)
+ self.add_build(elem)
else:
raise MesonException('Swift supports only executable and static library targets.')
# Introspection information
self.create_target_source_introspection(target, swiftc, compile_args + header_imports + module_includes, relsrc, rel_generated)
- def generate_static_link_rules(self, is_cross, outfile):
+ def generate_static_link_rules(self, is_cross):
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
if 'java' in self.build.compilers:
if not is_cross:
- self.generate_java_link(outfile)
+ self.generate_java_link()
if is_cross:
if self.environment.is_cross_build():
static_linker = self.build.static_cross_linker
@@ -1423,15 +1502,9 @@ int dummy;
crstr = ''
if static_linker is None:
return
- rule = 'rule STATIC%s_LINKER\n' % crstr
- if static_linker.can_linker_accept_rsp():
- command_template = ''' command = {executable} $LINK_ARGS {output_args} @$out.rsp
- rspfile = $out.rsp
- rspfile_content = $in
-'''
- else:
- command_template = ' command = {executable} $LINK_ARGS {output_args} $in\n'
+ rule = 'STATIC%s_LINKER' % crstr
cmdlist = []
+ args = ['$in']
# FIXME: Must normalize file names with pathlib.Path before writing
# them out to fix this properly on Windows. See:
# https://github.com/mesonbuild/meson/issues/1517
@@ -1440,19 +1513,20 @@ int dummy;
# `ar` has no options to overwrite archives. It always appends,
# which is never what we want. Delete an existing library first if
# it exists. https://github.com/mesonbuild/meson/issues/1355
- cmdlist = [execute_wrapper, rmfile_prefix.format('$out')]
+ cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix]
cmdlist += static_linker.get_exelist()
- command = command_template.format(
- executable=' '.join(cmdlist),
- output_args=' '.join(static_linker.get_output_args('$out')))
- description = ' description = Linking static target $out.\n\n'
- outfile.write(rule)
- outfile.write(command)
+ cmdlist += ['$LINK_ARGS']
+ cmdlist += static_linker.get_output_args('$out')
+ description = 'Linking static target $out.'
if num_pools > 0:
- outfile.write(' pool = link_pool\n')
- outfile.write(description)
+ pool = 'pool = link_pool'
+ else:
+ pool = None
+ self.add_rule(NinjaRule(rule, cmdlist, args, description,
+ rspable=static_linker.can_linker_accept_rsp(),
+ extra=pool))
- def generate_dynamic_link_rules(self, outfile):
+ def generate_dynamic_link_rules(self):
num_pools = self.environment.coredata.backend_options['backend_max_links'].value
ctypes = [(self.build.compilers, False)]
if self.environment.is_cross_build():
@@ -1469,180 +1543,126 @@ int dummy;
crstr = ''
if is_cross:
crstr = '_CROSS'
- rule = 'rule %s%s_LINKER\n' % (langname, crstr)
- if compiler.can_linker_accept_rsp():
- command_template = ''' command = {executable} @$out.rsp
- rspfile = $out.rsp
- rspfile_content = $ARGS {output_args} $in $LINK_ARGS $aliasing
-'''
- else:
- command_template = ' command = {executable} $ARGS {output_args} $in $LINK_ARGS $aliasing\n'
- command = command_template.format(
- executable=' '.join(compiler.get_linker_exelist()),
- output_args=' '.join(compiler.get_linker_output_args('$out'))
- )
- description = ' description = Linking target $out.\n'
- outfile.write(rule)
- outfile.write(command)
+ rule = '%s%s_LINKER' % (langname, crstr)
+ command = compiler.get_linker_exelist()
+ args = ['$ARGS'] + compiler.get_linker_output_args('$out') + ['$in', '$LINK_ARGS']
+ description = 'Linking target $out.'
if num_pools > 0:
- outfile.write(' pool = link_pool\n')
- outfile.write(description)
- outfile.write('\n')
- outfile.write('\n')
+ pool = 'pool = link_pool'
+ else:
+ pool = None
+ self.add_rule(NinjaRule(rule, command, args, description,
+ rspable=compiler.can_linker_accept_rsp(),
+ extra=pool))
+
args = [ninja_quote(quote_func(x)) for x in self.environment.get_build_command()] + \
['--internal',
'symbolextractor',
'$in',
'$out']
- symrule = 'rule SHSYM\n'
- symcmd = ' command = ' + ' '.join(args) + ' $CROSS\n'
- synstat = ' restat = 1\n'
- syndesc = ' description = Generating symbol file $out.\n'
- outfile.write(symrule)
- outfile.write(symcmd)
- outfile.write(synstat)
- outfile.write(syndesc)
- outfile.write('\n')
-
- def generate_java_compile_rule(self, compiler, outfile):
- rule = 'rule %s_COMPILER\n' % compiler.get_language()
- invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
- command = ' command = %s $ARGS $in\n' % invoc
- description = ' description = Compiling Java object $in.\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(description)
- outfile.write('\n')
-
- def generate_cs_compile_rule(self, compiler, outfile):
- rule = 'rule %s_COMPILER\n' % compiler.get_language()
- invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
-
- if mesonlib.is_windows():
- command = ''' command = {executable} @$out.rsp
- rspfile = $out.rsp
- rspfile_content = $ARGS $in
-'''.format(executable=invoc)
- else:
- command = ' command = %s $ARGS $in\n' % invoc
-
- description = ' description = Compiling C Sharp target $out.\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(description)
- outfile.write('\n')
-
- def generate_vala_compile_rules(self, compiler, outfile):
- rule = 'rule %s_COMPILER\n' % compiler.get_language()
- invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
- command = ' command = %s $ARGS $in\n' % invoc
- description = ' description = Compiling Vala source $in.\n'
- restat = ' restat = 1\n' # ValaC does this always to take advantage of it.
- outfile.write(rule)
- outfile.write(command)
- outfile.write(description)
- outfile.write(restat)
- outfile.write('\n')
-
- def generate_rust_compile_rules(self, compiler, outfile, is_cross):
+ symrule = 'SHSYM'
+ symcmd = args + ['$CROSS']
+ syndesc = 'Generating symbol file $out.'
+ synstat = 'restat = 1'
+ self.add_rule(NinjaRule(symrule, symcmd, [], syndesc, extra=synstat))
+
+ def generate_java_compile_rule(self, compiler):
+ rule = '%s_COMPILER' % compiler.get_language()
+ invoc = [ninja_quote(i) for i in compiler.get_exelist()]
+ command = invoc + ['$ARGS', '$in']
+ description = 'Compiling Java object $in.'
+ self.add_rule(NinjaRule(rule, command, [], description))
+
+ def generate_cs_compile_rule(self, compiler):
+ rule = '%s_COMPILER' % compiler.get_language()
+ invoc = [ninja_quote(i) for i in compiler.get_exelist()]
+ command = invoc
+ args = ['$ARGS', '$in']
+ description = 'Compiling C Sharp target $out.'
+ self.add_rule(NinjaRule(rule, command, args, description,
+ rspable=mesonlib.is_windows()))
+
+ def generate_vala_compile_rules(self, compiler):
+ rule = '%s_COMPILER' % compiler.get_language()
+ invoc = [ninja_quote(i) for i in compiler.get_exelist()]
+ command = invoc + ['$ARGS', '$in']
+ description = 'Compiling Vala source $in.'
+ self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1'))
+
+ def generate_rust_compile_rules(self, compiler, is_cross):
crstr = ''
if is_cross:
crstr = '_CROSS'
- rule = 'rule %s%s_COMPILER\n' % (compiler.get_language(), crstr)
- invoc = ' '.join([ninja_quote(i) for i in compiler.get_exelist()])
- command = ' command = %s $ARGS $in\n' % invoc
- description = ' description = Compiling Rust source $in.\n'
- depfile = ' depfile = $targetdep\n'
-
- depstyle = ' deps = gcc\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(description)
- outfile.write(depfile)
- outfile.write(depstyle)
- outfile.write('\n')
-
- def generate_swift_compile_rules(self, compiler, outfile):
- rule = 'rule %s_COMPILER\n' % compiler.get_language()
+ rule = '%s%s_COMPILER' % (compiler.get_language(), crstr)
+ invoc = [ninja_quote(i) for i in compiler.get_exelist()]
+ command = invoc + ['$ARGS', '$in']
+ description = 'Compiling Rust source $in.'
+ depfile = '$targetdep'
+ depstyle = 'gcc'
+ self.add_rule(NinjaRule(rule, command, [], description, deps=depstyle,
+ depfile=depfile))
+
+ def generate_swift_compile_rules(self, compiler):
+ rule = '%s_COMPILER' % compiler.get_language()
full_exe = [ninja_quote(x) for x in self.environment.get_build_command()] + [
'--internal',
'dirchanger',
'$RUNDIR',
]
- invoc = (' '.join(full_exe) + ' ' +
- ' '.join(ninja_quote(i) for i in compiler.get_exelist()))
- command = ' command = %s $ARGS $in\n' % invoc
- description = ' description = Compiling Swift source $in.\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(description)
- outfile.write('\n')
+ invoc = full_exe + [ninja_quote(i) for i in compiler.get_exelist()]
+ command = invoc + ['$ARGS', '$in']
+ description = 'Compiling Swift source $in.'
+ self.add_rule(NinjaRule(rule, command, [], description))
- def generate_fortran_dep_hack(self, outfile, crstr):
+ def generate_fortran_dep_hack(self, crstr):
+ rule = 'FORTRAN_DEP_HACK%s' % (crstr)
if mesonlib.is_windows():
- cmd = 'cmd /C ""'
+ cmd = ['cmd', '/C']
else:
- cmd = 'true'
- template = '''# Workaround for these issues:
-# https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
-# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485
-rule FORTRAN_DEP_HACK%s
- command = %s
- description = Dep hack
- restat = 1
-
-'''
- outfile.write(template % (crstr, cmd))
-
- def generate_llvm_ir_compile_rule(self, compiler, is_cross, outfile):
+ cmd = ['true']
+ self.add_rule_comment(NinjaComment('''Workaround for these issues:
+https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8
+https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485'''))
+ self.add_rule(NinjaRule(rule, cmd, [], 'Dep hack', extra='restat = 1'))
+
+ def generate_llvm_ir_compile_rule(self, compiler, is_cross):
if getattr(self, 'created_llvm_ir_rule', False):
return
- rule = 'rule llvm_ir{}_COMPILER\n'.format('_CROSS' if is_cross else '')
- if compiler.can_linker_accept_rsp():
- command_template = ' command = {executable} @$out.rsp\n' \
- ' rspfile = $out.rsp\n' \
- ' rspfile_content = $ARGS {output_args} {compile_only_args} $in\n'
- else:
- command_template = ' command = {executable} $ARGS {output_args} {compile_only_args} $in\n'
- command = command_template.format(
- executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),
- output_args=' '.join(compiler.get_output_args('$out')),
- compile_only_args=' '.join(compiler.get_compile_only_args())
- )
- description = ' description = Compiling LLVM IR object $in.\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(description)
- outfile.write('\n')
+ rule = 'llvm_ir{}_COMPILER'.format('_CROSS' if is_cross else '')
+ command = [ninja_quote(i) for i in compiler.get_exelist()]
+ args = ['$ARGS'] + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
+ description = 'Compiling LLVM IR object $in.'
+ self.add_rule(NinjaRule(rule, command, args, description,
+ rspable=compiler.can_linker_accept_rsp()))
self.created_llvm_ir_rule = True
- def generate_compile_rule_for(self, langname, compiler, is_cross, outfile):
+ def generate_compile_rule_for(self, langname, compiler, is_cross):
if langname == 'java':
if not is_cross:
- self.generate_java_compile_rule(compiler, outfile)
+ self.generate_java_compile_rule(compiler)
return
if langname == 'cs':
if not is_cross:
- self.generate_cs_compile_rule(compiler, outfile)
+ self.generate_cs_compile_rule(compiler)
return
if langname == 'vala':
if not is_cross:
- self.generate_vala_compile_rules(compiler, outfile)
+ self.generate_vala_compile_rules(compiler)
return
if langname == 'rust':
- self.generate_rust_compile_rules(compiler, outfile, is_cross)
+ self.generate_rust_compile_rules(compiler, is_cross)
return
if langname == 'swift':
if not is_cross:
- self.generate_swift_compile_rules(compiler, outfile)
+ self.generate_swift_compile_rules(compiler)
return
if is_cross:
crstr = '_CROSS'
else:
crstr = ''
if langname == 'fortran':
- self.generate_fortran_dep_hack(outfile, crstr)
- rule = 'rule %s%s_COMPILER\n' % (langname, crstr)
+ self.generate_fortran_dep_hack(crstr)
+ rule = '%s%s_COMPILER' % (langname, crstr)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
quoted_depargs = []
for d in depargs:
@@ -1650,39 +1670,27 @@ rule FORTRAN_DEP_HACK%s
d = quote_func(d)
quoted_depargs.append(d)
- if compiler.can_linker_accept_rsp():
- command_template = ''' command = {executable} @$out.rsp
- rspfile = $out.rsp
- rspfile_content = $ARGS {dep_args} {output_args} {compile_only_args} $in
-'''
- else:
- command_template = ' command = {executable} $ARGS {dep_args} {output_args} {compile_only_args} $in\n'
- command = command_template.format(
- executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),
- dep_args=' '.join(quoted_depargs),
- output_args=' '.join(compiler.get_output_args('$out')),
- compile_only_args=' '.join(compiler.get_compile_only_args())
- )
- description = ' description = Compiling %s object $out.\n' % compiler.get_display_language()
+ command = [ninja_quote(i) for i in compiler.get_exelist()]
+ args = ['$ARGS'] + quoted_depargs + compiler.get_output_args('$out') + compiler.get_compile_only_args() + ['$in']
+ description = 'Compiling %s object $out.' % compiler.get_display_language()
if isinstance(compiler, VisualStudioCCompiler):
- deps = ' deps = msvc\n'
+ deps = 'msvc'
+ depfile = None
else:
- deps = ' deps = gcc\n'
- deps += ' depfile = $DEPFILE\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(deps)
- outfile.write(description)
- outfile.write('\n')
+ deps = 'gcc'
+ depfile = '$DEPFILE'
+ self.add_rule(NinjaRule(rule, command, args, description,
+ rspable=compiler.can_linker_accept_rsp(),
+ deps=deps, depfile=depfile))
- def generate_pch_rule_for(self, langname, compiler, is_cross, outfile):
+ def generate_pch_rule_for(self, langname, compiler, is_cross):
if langname != 'c' and langname != 'cpp':
return
if is_cross:
crstr = '_CROSS'
else:
crstr = ''
- rule = 'rule %s%s_PCH\n' % (langname, crstr)
+ rule = '%s%s_PCH' % (langname, crstr)
depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE')
quoted_depargs = []
@@ -1691,50 +1699,42 @@ rule FORTRAN_DEP_HACK%s
d = quote_func(d)
quoted_depargs.append(d)
if isinstance(compiler, VisualStudioCCompiler):
- output = ''
+ output = []
else:
- output = ' '.join(compiler.get_output_args('$out'))
- command = " command = {executable} $ARGS {dep_args} {output_args} {compile_only_args} $in\n".format(
- executable=' '.join(compiler.get_exelist()),
- dep_args=' '.join(quoted_depargs),
- output_args=output,
- compile_only_args=' '.join(compiler.get_compile_only_args())
- )
- description = ' description = Precompiling header %s.\n' % '$in'
+ output = compiler.get_output_args('$out')
+ command = compiler.get_exelist() + ['$ARGS'] + quoted_depargs + output + compiler.get_compile_only_args() + ['$in']
+ description = 'Precompiling header $in.'
if isinstance(compiler, VisualStudioCCompiler):
- deps = ' deps = msvc\n'
+ deps = 'msvc'
+ depfile = None
else:
- deps = ' deps = gcc\n'
- deps += ' depfile = $DEPFILE\n'
- outfile.write(rule)
- outfile.write(command)
- outfile.write(deps)
- outfile.write(description)
- outfile.write('\n')
+ deps = 'gcc'
+ depfile = '$DEPFILE'
+ self.add_rule(NinjaRule(rule, command, [], description, deps=deps,
+ depfile=depfile))
- def generate_compile_rules(self, outfile):
+ def generate_compile_rules(self):
for langname, compiler in self.build.compilers.items():
if compiler.get_id() == 'clang':
- self.generate_llvm_ir_compile_rule(compiler, False, outfile)
- self.generate_compile_rule_for(langname, compiler, False, outfile)
- self.generate_pch_rule_for(langname, compiler, False, outfile)
+ self.generate_llvm_ir_compile_rule(compiler, False)
+ self.generate_compile_rule_for(langname, compiler, False)
+ self.generate_pch_rule_for(langname, compiler, False)
if self.environment.is_cross_build():
cclist = self.build.cross_compilers
for langname, compiler in cclist.items():
if compiler.get_id() == 'clang':
- self.generate_llvm_ir_compile_rule(compiler, True, outfile)
- self.generate_compile_rule_for(langname, compiler, True, outfile)
- self.generate_pch_rule_for(langname, compiler, True, outfile)
- outfile.write('\n')
+ self.generate_llvm_ir_compile_rule(compiler, True)
+ self.generate_compile_rule_for(langname, compiler, True)
+ self.generate_pch_rule_for(langname, compiler, True)
- def generate_generator_list_rules(self, target, outfile):
+ def generate_generator_list_rules(self, target):
# CustomTargets have already written their rules and
# CustomTargetIndexes don't actually get generated, so write rules for
# GeneratedLists here
for genlist in target.get_generated_sources():
if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
continue
- self.generate_genlist_for_target(genlist, target, outfile)
+ self.generate_genlist_for_target(genlist, target)
def replace_paths(self, target, args, override_subdir=None):
if override_subdir:
@@ -1750,7 +1750,7 @@ rule FORTRAN_DEP_HACK%s
args = [x.replace('\\', '/') for x in args]
return args
- def generate_genlist_for_target(self, genlist, target, outfile):
+ def generate_genlist_for_target(self, genlist, target):
generator = genlist.get_generator()
subdir = genlist.subdir
exe = generator.get_exe()
@@ -1811,7 +1811,7 @@ rule FORTRAN_DEP_HACK%s
if isinstance(exe, build.BuildTarget):
elem.add_dep(self.get_target_filename(exe))
elem.add_item('COMMAND', cmd)
- elem.write(outfile)
+ self.add_build(elem)
def scan_fortran_module_outputs(self, target):
"""
@@ -1945,7 +1945,7 @@ rule FORTRAN_DEP_HACK%s
def get_link_debugfile_args(self, linker, target, outname):
return linker.get_link_debugfile_args(outname)
- def generate_llvm_ir_compile(self, target, outfile, src):
+ def generate_llvm_ir_compile(self, target, src):
compiler = get_compiler_for_source(target.compilers.values(), src)
commands = CompilerArgs(compiler)
# Compiler args for compiling this target
@@ -1977,7 +1977,7 @@ rule FORTRAN_DEP_HACK%s
# current compiler.
commands = commands.to_native()
element.add_item('ARGS', commands)
- element.write(outfile)
+ self.add_build(element)
return rel_obj
def get_source_dir_include_args(self, target, compiler):
@@ -2098,7 +2098,7 @@ rule FORTRAN_DEP_HACK%s
commands += compiler.get_include_args(self.get_target_private_dir(target), False)
return commands
- def generate_single_compile(self, target, outfile, src, is_generated=False, header_deps=None, order_deps=None):
+ def generate_single_compile(self, target, src, is_generated=False, header_deps=None, order_deps=None):
"""
Compiles C/C++, ObjC/ObjC++, Fortran, and D sources
"""
@@ -2175,7 +2175,7 @@ rule FORTRAN_DEP_HACK%s
if srcfile == src:
depelem = NinjaBuildElement(self.all_outputs, modfile, 'FORTRAN_DEP_HACK' + crstr, rel_obj)
- depelem.write(outfile)
+ self.add_build(depelem)
commands += compiler.get_module_outdir_args(self.get_target_private_dir(target))
element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src)
@@ -2196,7 +2196,7 @@ rule FORTRAN_DEP_HACK%s
element.add_orderdep(i)
element.add_item('DEPFILE', dep_file)
element.add_item('ARGS', commands)
- element.write(outfile)
+ self.add_build(element)
return rel_obj
def add_header_deps(self, target, ninja_element, header_deps):
@@ -2259,7 +2259,7 @@ rule FORTRAN_DEP_HACK%s
dep = dst + '.' + compiler.get_depfile_suffix()
return commands, dep, dst, [] # Gcc does not create an object file during pch generation.
- def generate_pch(self, target, outfile, header_deps=None):
+ def generate_pch(self, target, header_deps=None):
header_deps = header_deps if header_deps is not None else []
cstr = ''
pch_objects = []
@@ -2293,10 +2293,10 @@ rule FORTRAN_DEP_HACK%s
self.add_header_deps(target, elem, header_deps)
elem.add_item('ARGS', commands)
elem.add_item('DEPFILE', dep)
- elem.write(outfile)
+ self.add_build(elem)
return pch_objects
- def generate_shsym(self, outfile, target):
+ def generate_shsym(self, target):
target_name = target.get_filename()
target_file = self.get_target_filename(target)
targetdir = self.get_target_private_dir(target)
@@ -2304,7 +2304,7 @@ rule FORTRAN_DEP_HACK%s
elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file)
if self.environment.is_cross_build():
elem.add_item('CROSS', '--cross-host=' + self.environment.machines.host.system)
- elem.write(outfile)
+ self.add_build(elem)
def get_cross_stdlib_link_args(self, target, linker):
if isinstance(target, build.StaticLibrary) or not target.is_cross:
@@ -2439,7 +2439,7 @@ rule FORTRAN_DEP_HACK%s
return guessed_dependencies + absolute_libs
- def generate_link(self, target, outfile, outname, obj_list, linker, extra_args=None, stdlib_args=None):
+ def generate_link(self, target, outname, obj_list, linker, extra_args=None, stdlib_args=None):
extra_args = extra_args if extra_args is not None else []
stdlib_args = stdlib_args if stdlib_args is not None else []
if isinstance(target, build.StaticLibrary):
@@ -2447,7 +2447,7 @@ rule FORTRAN_DEP_HACK%s
else:
linker_base = linker.get_language() # Fixme.
if isinstance(target, build.SharedLibrary):
- self.generate_shsym(outfile, target)
+ self.generate_shsym(target)
crstr = ''
if target.is_cross:
crstr = '_CROSS'
@@ -2600,38 +2600,38 @@ rule FORTRAN_DEP_HACK%s
except OSError:
mlog.debug("Library versioning disabled because we do not have symlink creation privileges.")
- def generate_custom_target_clean(self, outfile, trees):
+ def generate_custom_target_clean(self, trees):
e = NinjaBuildElement(self.all_outputs, 'meson-clean-ctlist', 'CUSTOM_COMMAND', 'PHONY')
d = CleanTrees(self.environment.get_build_dir(), trees)
d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat')
e.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'cleantrees', d_file])
e.add_item('description', 'Cleaning custom target directories.')
- e.write(outfile)
+ self.add_build(e)
# Alias that runs the target defined above
- self.create_target_alias('meson-clean-ctlist', outfile)
+ self.create_target_alias('meson-clean-ctlist')
# Write out the data file passed to the script
with open(d_file, 'wb') as ofile:
pickle.dump(d, ofile)
return 'clean-ctlist'
- def generate_gcov_clean(self, outfile):
+ def generate_gcov_clean(self):
gcno_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcno', 'CUSTOM_COMMAND', 'PHONY')
script_root = self.environment.get_script_dir()
clean_script = os.path.join(script_root, 'delwithsuffix.py')
gcno_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcno'])
gcno_elem.add_item('description', 'Deleting gcno files.')
- gcno_elem.write(outfile)
+ self.add_build(gcno_elem)
# Alias that runs the target defined above
- self.create_target_alias('meson-clean-gcno', outfile)
+ self.create_target_alias('meson-clean-gcno')
gcda_elem = NinjaBuildElement(self.all_outputs, 'meson-clean-gcda', 'CUSTOM_COMMAND', 'PHONY')
script_root = self.environment.get_script_dir()
clean_script = os.path.join(script_root, 'delwithsuffix.py')
gcda_elem.add_item('COMMAND', mesonlib.python_command + [clean_script, '.', 'gcda'])
gcda_elem.add_item('description', 'Deleting gcda files.')
- gcda_elem.write(outfile)
+ self.add_build(gcda_elem)
# Alias that runs the target defined above
- self.create_target_alias('meson-clean-gcda', outfile)
+ self.create_target_alias('meson-clean-gcda')
def get_user_option_args(self):
cmds = []
@@ -2642,7 +2642,7 @@ rule FORTRAN_DEP_HACK%s
# affect behavior in any other way.
return sorted(cmds)
- def generate_dist(self, outfile):
+ def generate_dist(self):
elem = NinjaBuildElement(self.all_outputs, 'meson-dist', 'CUSTOM_COMMAND', 'PHONY')
elem.add_item('DESC', 'Creating source packages')
elem.add_item('COMMAND', self.environment.get_build_command() + [
@@ -2651,22 +2651,22 @@ rule FORTRAN_DEP_HACK%s
self.environment.build_dir,
] + self.environment.get_build_command())
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
# Alias that runs the target defined above
- self.create_target_alias('meson-dist', outfile)
+ self.create_target_alias('meson-dist')
- def generate_scanbuild(self, outfile):
+ def generate_scanbuild(self):
cmd = self.environment.get_build_command() + \
['--internal', 'scanbuild', self.environment.source_dir, self.environment.build_dir] + \
self.environment.get_build_command() + self.get_user_option_args()
elem = NinjaBuildElement(self.all_outputs, 'meson-scan-build', 'CUSTOM_COMMAND', 'PHONY')
elem.add_item('COMMAND', cmd)
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
# Alias that runs the target defined above
- self.create_target_alias('meson-scan-build', outfile)
+ self.create_target_alias('meson-scan-build')
- def generate_clangformat(self, outfile):
+ def generate_clangformat(self):
import shutil
target_name = 'clang-format'
if shutil.which('clang-format') is None:
@@ -2681,22 +2681,22 @@ rule FORTRAN_DEP_HACK%s
elem = NinjaBuildElement(self.all_outputs, 'meson-' + target_name, 'CUSTOM_COMMAND', 'PHONY')
elem.add_item('COMMAND', cmd)
elem.add_item('pool', 'console')
- elem.write(outfile)
- self.create_target_alias('meson-' + target_name, outfile)
+ self.add_build(elem)
+ self.create_target_alias('meson-' + target_name)
# For things like scan-build and other helper tools we might have.
- def generate_utils(self, outfile):
- self.generate_scanbuild(outfile)
- self.generate_clangformat(outfile)
+ def generate_utils(self):
+ self.generate_scanbuild()
+ self.generate_clangformat()
cmd = self.environment.get_build_command() + ['--internal', 'uninstall']
elem = NinjaBuildElement(self.all_outputs, 'meson-uninstall', 'CUSTOM_COMMAND', 'PHONY')
elem.add_item('COMMAND', cmd)
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
# Alias that runs the target defined above
- self.create_target_alias('meson-uninstall', outfile)
+ self.create_target_alias('meson-uninstall')
- def generate_ending(self, outfile):
+ def generate_ending(self):
targetlist = []
for t in self.get_build_by_default_targets().values():
# Add the first output of each target to the 'all' target so that
@@ -2704,16 +2704,13 @@ rule FORTRAN_DEP_HACK%s
targetlist.append(os.path.join(self.get_target_dir(t), t.get_outputs()[0]))
elem = NinjaBuildElement(self.all_outputs, 'all', 'phony', targetlist)
- elem.write(outfile)
-
- default = 'default all\n\n'
- outfile.write(default)
+ self.add_build(elem)
elem = NinjaBuildElement(self.all_outputs, 'meson-clean', 'CUSTOM_COMMAND', 'PHONY')
elem.add_item('COMMAND', [self.ninja_command, '-t', 'clean'])
elem.add_item('description', 'Cleaning.')
# Alias that runs the above-defined meson-clean target
- self.create_target_alias('meson-clean', outfile)
+ self.create_target_alias('meson-clean')
# If we have custom targets in this project, add all their outputs to
# the list that is passed to the `cleantrees.py` script. The script
@@ -2728,33 +2725,33 @@ rule FORTRAN_DEP_HACK%s
for o in t.get_outputs():
ctlist.append(os.path.join(self.get_target_dir(t), o))
if ctlist:
- elem.add_dep(self.generate_custom_target_clean(outfile, ctlist))
+ elem.add_dep(self.generate_custom_target_clean(ctlist))
if 'b_coverage' in self.environment.coredata.base_options and \
self.environment.coredata.base_options['b_coverage'].value:
- self.generate_gcov_clean(outfile)
+ self.generate_gcov_clean()
elem.add_dep('clean-gcda')
elem.add_dep('clean-gcno')
- elem.write(outfile)
+ self.add_build(elem)
deps = self.get_regen_filelist()
elem = NinjaBuildElement(self.all_outputs, 'build.ninja', 'REGENERATE_BUILD', deps)
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
elem = NinjaBuildElement(self.all_outputs, 'reconfigure', 'REGENERATE_BUILD', 'PHONY')
elem.add_item('pool', 'console')
- elem.write(outfile)
+ self.add_build(elem)
elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '')
- elem.write(outfile)
+ self.add_build(elem)
def get_introspection_data(self, target_id, target):
if target_id not in self.introspection_data or len(self.introspection_data[target_id]) == 0:
return super().get_introspection_data(target_id, target)
result = []
- for _, i in self.introspection_data[target_id].items():
+ for i in self.introspection_data[target_id].values():
result += [i]
return result
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index d1bf1e5..d25798e 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -249,9 +249,15 @@ class Vs2010Backend(backends.Backend):
all_deps[d.get_id()] = d
elif isinstance(target, build.BuildTarget):
for ldep in target.link_targets:
- all_deps[ldep.get_id()] = ldep
+ if isinstance(ldep, build.CustomTargetIndex):
+ all_deps[ldep.get_id()] = ldep.target
+ else:
+ all_deps[ldep.get_id()] = ldep
for ldep in target.link_whole_targets:
- all_deps[ldep.get_id()] = ldep
+ if isinstance(ldep, build.CustomTargetIndex):
+ all_deps[ldep.get_id()] = ldep.target
+ else:
+ all_deps[ldep.get_id()] = ldep
for obj_id, objdep in self.get_obj_target_deps(target.objects):
all_deps[obj_id] = objdep
for gendep in target.get_generated_sources():
@@ -306,7 +312,7 @@ class Vs2010Backend(backends.Backend):
target = self.build.targets[prj[0]]
lang = 'default'
if hasattr(target, 'compilers') and target.compilers:
- for (lang_out, _) in target.compilers.items():
+ for lang_out in target.compilers.keys():
lang = lang_out
break
prj_line = prj_templ % (
@@ -380,7 +386,7 @@ class Vs2010Backend(backends.Backend):
for p in projlist:
if p[1].parent != PurePath('.'):
ofile.write("\t\t{%s} = {%s}\n" % (p[2], self.subdirs[p[1].parent][0]))
- for (_, subdir) in self.subdirs.items():
+ for subdir in self.subdirs.values():
if subdir[1]:
ofile.write("\t\t{%s} = {%s}\n" % (subdir[0], subdir[1]))
ofile.write('\tEndGlobalSection\n')
@@ -457,7 +463,7 @@ class Vs2010Backend(backends.Backend):
def add_target_deps(self, root, target):
target_dict = {target.get_id(): target}
- for name, dep in self.get_target_deps(target_dict).items():
+ for dep in self.get_target_deps(target_dict).values():
if dep.get_id() in self.handled_target_deps[target.get_id()]:
# This dependency was already handled manually.
continue
@@ -1111,7 +1117,11 @@ class Vs2010Backend(backends.Backend):
# Add more libraries to be linked if needed
for t in target.get_dependencies():
- lobj = self.build.targets[t.get_id()]
+ if isinstance(t, build.CustomTargetIndex):
+ # We don't need the actual project here, just the library name
+ lobj = t
+ else:
+ lobj = self.build.targets[t.get_id()]
linkname = os.path.join(down, self.get_target_filename_for_linking(lobj))
if t in target.link_whole_targets:
# /WHOLEARCHIVE:foo must go into AdditionalOptions
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 7dd3674..be22c78 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -345,7 +345,7 @@ class XCodeBackend(backends.Backend):
self.ofile.write('/* End PBXFileReference section */\n')
def generate_pbx_frameworks_buildphase(self):
- for tname, t in self.build.targets.items():
+ for t in self.build.targets.values():
self.ofile.write('\n/* Begin PBXFrameworksBuildPhase section */\n')
self.write_line('%s /* %s */ = {\n' % (t.buildphasemap['Frameworks'], 'Frameworks'))
self.indent_level += 1
@@ -587,7 +587,7 @@ class XCodeBackend(backends.Backend):
def generate_pbx_sources_build_phase(self):
self.ofile.write('\n/* Begin PBXSourcesBuildPhase section */\n')
- for name, phase_id in self.source_phase.items():
+ for name in self.source_phase.keys():
t = self.build.targets[name]
self.write_line('%s /* Sources */ = {' % (t.buildphasemap[name]))
self.indent_level += 1
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 5248d97..603e0d0 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -12,6 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+from typing import List
import copy, os, re
from collections import OrderedDict
import itertools, pathlib
@@ -28,7 +29,7 @@ from .mesonlib import (
get_filenames_templates_dict, substitute_values,
for_windows, for_darwin, for_cygwin, for_android, has_path_sep
)
-from .compilers import is_object, clink_langs, sort_clink, lang_suffixes, get_macos_dylib_install_name
+from .compilers import Compiler, is_object, clink_langs, sort_clink, lang_suffixes, get_macos_dylib_install_name
from .interpreterbase import FeatureNew
pch_kwargs = set(['c_pch', 'cpp_pch'])
@@ -88,7 +89,7 @@ known_build_target_kwargs = (
rust_kwargs |
cs_kwargs)
-known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'}
+known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'link_language', 'pie'}
known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'}
known_shmod_kwargs = known_build_target_kwargs
known_stlib_kwargs = known_build_target_kwargs | {'pic'}
@@ -425,7 +426,7 @@ a hard error in the future.''' % name)
self.option_overrides = self.parse_overrides(kwargs)
- def parse_overrides(self, kwargs):
+ def parse_overrides(self, kwargs) -> dict:
result = {}
overrides = stringlistify(kwargs.get('override_options', []))
for o in overrides:
@@ -437,7 +438,7 @@ a hard error in the future.''' % name)
result[k] = v
return result
- def is_linkable_target(self):
+ def is_linkable_target(self) -> bool:
return False
class BuildTarget(Target):
@@ -450,10 +451,11 @@ class BuildTarget(Target):
self.is_unity = unity_opt == 'on' or (unity_opt == 'subprojects' and subproject != '')
self.environment = environment
self.sources = []
- self.compilers = OrderedDict()
+ self.compilers = OrderedDict() # type: OrderedDict[str, Compiler]
self.objects = []
self.external_deps = []
self.include_dirs = []
+ self.link_language = kwargs.get('link_language')
self.link_targets = []
self.link_whole_targets = []
self.link_depends = []
@@ -571,15 +573,18 @@ class BuildTarget(Target):
else:
compilers = self.environment.coredata.compilers
+ # did user override clink_langs for this target?
+ link_langs = [self.link_language] if self.link_language else clink_langs
+
# If this library is linked against another library we need to consider
# the languages of those libraries as well.
if self.link_targets or self.link_whole_targets:
extra = set()
for t in itertools.chain(self.link_targets, self.link_whole_targets):
- if isinstance(t, CustomTarget):
+ if isinstance(t, CustomTarget) or isinstance(t, CustomTargetIndex):
continue # We can't know anything about these.
for name, compiler in t.compilers.items():
- if name in clink_langs:
+ if name in link_langs:
extra.add((name, compiler))
for name, compiler in sorted(extra, key=lambda p: sort_clink(p[0])):
self.compilers[name] = compiler
@@ -588,7 +593,7 @@ class BuildTarget(Target):
# No source files or parent targets, target consists of only object
# files of unknown origin. Just add the first clink compiler
# that we have and hope that it can link these objects
- for lang in clink_langs:
+ for lang in link_langs:
if lang in compilers:
self.compilers[lang] = compilers[lang]
break
@@ -1066,7 +1071,7 @@ You probably should put it in link_with instead.''')
def link(self, target):
for t in listify(target, unholder=True):
- if not isinstance(t, Target):
+ if not isinstance(t, (Target, CustomTargetIndex)):
raise InvalidArguments('{!r} is not a target.'.format(t))
if not t.is_linkable_target():
raise InvalidArguments('Link target {!r} is not linkable.'.format(t))
@@ -1074,13 +1079,13 @@ You probably should put it in link_with instead.''')
msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name)
msg += "Use the 'pic' option to static_library to build with PIC."
raise InvalidArguments(msg)
- if not isinstance(t, CustomTarget) and self.is_cross != t.is_cross:
+ if not isinstance(t, (CustomTarget, CustomTargetIndex)) and self.is_cross != t.is_cross:
raise InvalidArguments('Tried to mix cross built and native libraries in target {!r}'.format(self.name))
self.link_targets.append(t)
def link_whole(self, target):
for t in listify(target, unholder=True):
- if isinstance(t, CustomTarget):
+ if isinstance(t, (CustomTarget, CustomTargetIndex)):
if not t.is_linkable_target():
raise InvalidArguments('Custom target {!r} is not linkable.'.format(t))
if not t.get_filename().endswith('.a'):
@@ -1091,7 +1096,7 @@ You probably should put it in link_with instead.''')
msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name)
msg += "Use the 'pic' option to static_library to build with PIC."
raise InvalidArguments(msg)
- if not isinstance(t, CustomTarget) and self.is_cross != t.is_cross:
+ if not isinstance(t, (CustomTarget, CustomTargetIndex)) and self.is_cross != t.is_cross:
raise InvalidArguments('Tried to mix cross built and native libraries in target {!r}'.format(self.name))
self.link_whole_targets.append(t)
@@ -1149,7 +1154,7 @@ You probably should put it in link_with instead.''')
def get_aliases(self):
return {}
- def get_langs_used_by_deps(self):
+ def get_langs_used_by_deps(self) -> List[str]:
'''
Sometimes you want to link to a C++ library that exports C API, which
means the linker must link in the C++ stdlib, and we must use a C++
@@ -1159,6 +1164,11 @@ You probably should put it in link_with instead.''')
See: https://github.com/mesonbuild/meson/issues/1653
'''
langs = []
+
+ # User specified link_language of target (for multi-language targets)
+ if self.link_language:
+ return [self.link_language]
+
# Check if any of the external libraries were written in this language
for dep in self.external_deps:
if dep.language is None:
@@ -1168,11 +1178,12 @@ You probably should put it in link_with instead.''')
# Check if any of the internal libraries this target links to were
# written in this language
for link_target in itertools.chain(self.link_targets, self.link_whole_targets):
- if isinstance(link_target, CustomTarget):
+ if isinstance(link_target, (CustomTarget, CustomTargetIndex)):
continue
for language in link_target.compilers:
if language not in langs:
langs.append(language)
+
return langs
def get_clink_dynamic_linker_and_stdlibs(self):
@@ -2259,6 +2270,26 @@ class CustomTargetIndex:
def get_subdir(self):
return self.target.get_subdir()
+ def get_filename(self):
+ return self.output
+
+ def get_id(self):
+ return self.target.get_id()
+
+ def get_all_link_deps(self):
+ return self.target.get_all_link_deps()
+
+ def get_link_deps_mapping(self, prefix, environment):
+ return self.target.get_link_deps_mapping(prefix, environment)
+
+ def get_link_dep_subdirs(self):
+ return self.target.get_link_dep_subdirs()
+
+ def is_linkable_target(self):
+ suf = os.path.splitext(self.output)[-1]
+ if suf == '.a' or suf == '.dll' or suf == '.lib' or suf == '.so':
+ return True
+
class ConfigureFile:
def __init__(self, subdir, sourcename, targetname, configuration_data):
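
The build.py changes above add a 'link_language' keyword that short-circuits the usual clink-language selection. As a rough illustration of the precedence (a user-specified link_language wins outright, otherwise the languages used by dependencies are considered in clink priority order), here is a small standalone sketch; the CLINK_LANGS order and the helper name are assumptions, not Meson's exact definitions.

from typing import List, Optional

# Assumed priority order for illustration; Meson defines the real clink_langs.
CLINK_LANGS = ['d', 'cuda', 'cpp', 'objcpp', 'fortran', 'c', 'objc']

def pick_link_langs(link_language: Optional[str], dep_langs: List[str]) -> List[str]:
    # A target-level link_language overrides everything else, mirroring
    # get_langs_used_by_deps() in the hunk above.
    if link_language:
        return [link_language]
    # Otherwise keep every clink language used by dependencies, in priority order.
    return [lang for lang in CLINK_LANGS if lang in dep_langs]

print(pick_link_langs(None, ['c', 'fortran']))  # ['fortran', 'c'] with the assumed order
print(pick_link_langs('c', ['c', 'fortran']))   # ['c']
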
diff --git a/mesonbuild/compilers/__init__.py b/mesonbuild/compilers/__init__.py
index 5de0e59..4cb7ebf 100644
--- a/mesonbuild/compilers/__init__.py
+++ b/mesonbuild/compilers/__init__.py
@@ -69,6 +69,8 @@ __all__ = [
'IntelCCompiler',
'IntelCPPCompiler',
'IntelFortranCompiler',
+ 'IntelClCCompiler',
+ 'IntelClCPPCompiler',
'JavaCompiler',
'LLVMDCompiler',
'MonoCompiler',
@@ -130,6 +132,7 @@ from .c import (
GnuCCompiler,
ElbrusCCompiler,
IntelCCompiler,
+ IntelClCCompiler,
PGICCompiler,
CcrxCCompiler,
VisualStudioCCompiler,
@@ -143,6 +146,7 @@ from .cpp import (
GnuCPPCompiler,
ElbrusCPPCompiler,
IntelCPPCompiler,
+ IntelClCPPCompiler,
PGICPPCompiler,
CcrxCPPCompiler,
VisualStudioCPPCompiler,
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index da51ce2..1aeb637 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -1043,7 +1043,7 @@ class CCompiler(Compiler):
elf_class = 2
else:
elf_class = 1
- except:
+ except (MesonException, KeyError): # TODO evaluate if catching KeyError is wanted here
elf_class = 0
# Search in the specified dirs, and then in the system libraries
for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)):
@@ -1331,9 +1331,9 @@ class GnuCCompiler(GnuCompiler, CCompiler):
class PGICCompiler(PGICompiler, CCompiler):
- def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs):
+ def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs):
CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
- PGICompiler.__init__(self, CompilerType.PGI_STANDARD)
+ PGICompiler.__init__(self, compiler_type)
class ElbrusCCompiler(GnuCCompiler, ElbrusCompiler):
@@ -1729,6 +1729,12 @@ class ClangClCCompiler(VisualStudioCCompiler):
self.id = 'clang-cl'
+class IntelClCCompiler(VisualStudioCCompiler):
+ def __init__(self, exelist, version, is_cross, exe_wrap, target):
+ super().__init__(exelist, version, is_cross, exe_wrap, target)
+ self.id = 'intel'
+
+
class ArmCCompiler(ArmCompiler, CCompiler):
def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs):
CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index b03458a..2f3c7b7 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -1413,18 +1413,20 @@ class CompilerType(enum.Enum):
CCRX_WIN = 40
PGI_STANDARD = 50
+ PGI_OSX = 51
+ PGI_WIN = 52
@property
def is_standard_compiler(self):
- return self.name in ('GCC_STANDARD', 'CLANG_STANDARD', 'ICC_STANDARD')
+ return self.name in ('GCC_STANDARD', 'CLANG_STANDARD', 'ICC_STANDARD', 'PGI_STANDARD')
@property
def is_osx_compiler(self):
- return self.name in ('GCC_OSX', 'CLANG_OSX', 'ICC_OSX')
+ return self.name in ('GCC_OSX', 'CLANG_OSX', 'ICC_OSX', 'PGI_OSX')
@property
def is_windows_compiler(self):
- return self.name in ('GCC_MINGW', 'GCC_CYGWIN', 'CLANG_MINGW', 'ICC_WIN', 'ARM_WIN', 'CCRX_WIN')
+ return self.name in ('GCC_MINGW', 'GCC_CYGWIN', 'CLANG_MINGW', 'ICC_WIN', 'ARM_WIN', 'CCRX_WIN', 'PGI_WIN')
def get_macos_dylib_install_name(prefix, shlib_name, suffix, soversion):
@@ -1706,7 +1708,7 @@ class GnuCompiler(GnuLikeCompiler):
class PGICompiler:
- def __init__(self, compiler_type=None):
+ def __init__(self, compiler_type):
self.id = 'pgi'
self.compiler_type = compiler_type
@@ -1722,6 +1724,11 @@ class PGICompiler:
def get_no_warn_args(self) -> List[str]:
return ['-silent']
+ def get_pic_args(self) -> List[str]:
+ if self.compiler_type.is_osx_compiler or self.compiler_type.is_windows_compiler:
+ return [] # PGI -fPIC is Linux only.
+ return ['-fPIC']
+
def openmp_flags(self) -> List[str]:
return ['-mp']
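
The new PGI_OSX and PGI_WIN members above let the CompilerType properties drive platform-specific behaviour such as get_pic_args(). A toy sketch of that interaction (simplified enum and helper names, not Meson's classes):

import enum
from typing import List

class CT(enum.Enum):
    PGI_STANDARD = 50
    PGI_OSX = 51
    PGI_WIN = 52

    @property
    def is_osx_compiler(self) -> bool:
        return self.name == 'PGI_OSX'

    @property
    def is_windows_compiler(self) -> bool:
        return self.name == 'PGI_WIN'

def pgi_pic_args(compiler_type: CT) -> List[str]:
    if compiler_type.is_osx_compiler or compiler_type.is_windows_compiler:
        return []          # PGI -fPIC is Linux only, as noted in the hunk above
    return ['-fPIC']

for ct in CT:
    print(ct.name, pgi_pic_args(ct))
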
diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py
index 3e0942d..e2bcaf0 100644
--- a/mesonbuild/compilers/cpp.py
+++ b/mesonbuild/compilers/cpp.py
@@ -19,11 +19,10 @@ from .. import coredata
from .. import mlog
from ..mesonlib import MesonException, version_compare
-from .c import CCompiler, VisualStudioCCompiler, ClangClCCompiler
+from .c import CCompiler, VisualStudioCCompiler, ClangClCCompiler, IntelClCCompiler
from .compilers import (
gnu_winlibs,
msvc_winlibs,
- CompilerType,
ClangCompiler,
GnuCompiler,
ElbrusCompiler,
@@ -239,9 +238,9 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler):
class PGICPPCompiler(PGICompiler, CPPCompiler):
- def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwargs):
+ def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwargs):
CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwargs)
- PGICompiler.__init__(self, CompilerType.PGI_STANDARD)
+ PGICompiler.__init__(self, compiler_type)
class ElbrusCPPCompiler(GnuCPPCompiler, ElbrusCompiler):
@@ -407,6 +406,13 @@ class ClangClCPPCompiler(VisualStudioCPPCompiler, ClangClCCompiler):
VisualStudioCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, target)
self.id = 'clang-cl'
+
+class IntelClCPPCompiler(VisualStudioCPPCompiler, IntelClCCompiler):
+ def __init__(self, exelist, version, is_cross, exe_wrap, target):
+ VisualStudioCPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, target)
+ self.id = 'intel'
+
+
class ArmCPPCompiler(ArmCompiler, CPPCompiler):
def __init__(self, exelist, version, compiler_type, is_cross, exe_wrap=None, **kwargs):
CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap, **kwargs)
diff --git a/mesonbuild/compilers/cs.py b/mesonbuild/compilers/cs.py
index cd67da0..c6355f2 100644
--- a/mesonbuild/compilers/cs.py
+++ b/mesonbuild/compilers/cs.py
@@ -28,10 +28,10 @@ cs_optimization_args = {'0': [],
}
class CsCompiler(Compiler):
- def __init__(self, exelist, version, id, runner=None):
+ def __init__(self, exelist, version, comp_id, runner=None):
self.language = 'cs'
super().__init__(exelist, version)
- self.id = id
+ self.id = comp_id
self.is_cross = False
self.runner = runner
diff --git a/mesonbuild/compilers/fortran.py b/mesonbuild/compilers/fortran.py
index acfb506..b4eb327 100644
--- a/mesonbuild/compilers/fortran.py
+++ b/mesonbuild/compilers/fortran.py
@@ -333,7 +333,6 @@ class GnuFortranCompiler(GnuCompiler, FortranCompiler):
def language_stdlib_only_link_flags(self):
return ['-lgfortran', '-lm']
-
class ElbrusFortranCompiler(GnuFortranCompiler, ElbrusCompiler):
def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, defines=None, **kwargs):
GnuFortranCompiler.__init__(self, exelist, version, compiler_type, is_cross, exe_wrapper, defines, **kwargs)
@@ -423,10 +422,13 @@ class PathScaleFortranCompiler(FortranCompiler):
class PGIFortranCompiler(PGICompiler, FortranCompiler):
- def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags):
+ def __init__(self, exelist, version, compiler_type, is_cross, exe_wrapper=None, **kwags):
FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper, **kwags)
- PGICompiler.__init__(self, CompilerType.PGI_STANDARD)
+ PGICompiler.__init__(self, compiler_type)
+ def language_stdlib_only_link_flags(self) -> List[str]:
+ return ['-lpgf90rtl', '-lpgf90', '-lpgf90_rpm1', '-lpgf902',
+ '-lpgf90rtl', '-lpgftnrtl', '-lrt']
class FlangFortranCompiler(ClangCompiler, FortranCompiler):
def __init__(self, exelist, version, is_cross, exe_wrapper=None, **kwags):
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index a2c5ff9..183b333 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -20,7 +20,7 @@ from pathlib import PurePath
from collections import OrderedDict
from .mesonlib import (
MesonException, MachineChoice, PerMachine,
- default_libdir, default_libexecdir, default_prefix, stringlistify
+ default_libdir, default_libexecdir, default_prefix
)
from .wrap import WrapMode
import ast
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index 6063fd3..9664215 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -14,7 +14,6 @@
# This file contains the detection logic for external dependencies.
# Custom logic for several other packages are in separate files.
-from typing import Dict, Any
import copy
import functools
import os
@@ -26,7 +25,7 @@ import textwrap
import platform
import itertools
import ctypes
-from typing import List, Tuple
+from typing import Any, Dict, List, Tuple
from enum import Enum
from pathlib import Path, PurePath
@@ -924,9 +923,11 @@ class CMakeTraceLine:
return s.format(self.file, self.line, self.func, self.args)
class CMakeTarget:
- def __init__(self, name, type, properies = {}):
+ def __init__(self, name, target_type, properies=None):
+ if properies is None:
+ properies = {}
self.name = name
- self.type = type
+ self.type = target_type
self.properies = properies
def __repr__(self):
@@ -1107,7 +1108,7 @@ class CMakeDependency(ExternalDependency):
for l in lexer1:
if l.func == 'set':
self._cmake_set(l)
- except:
+ except MesonException:
return None
# Extract the variables and sanity check them
@@ -2302,7 +2303,7 @@ class ExtraFrameworkDependency(ExternalDependency):
return 'framework'
-def get_dep_identifier(name, kwargs, want_cross):
+def get_dep_identifier(name, kwargs, want_cross: bool) -> Tuple:
identifier = (name, want_cross)
for key, value in kwargs.items():
# 'version' is irrelevant for caching; the caller must check version matches
diff --git a/mesonbuild/dependencies/platform.py b/mesonbuild/dependencies/platform.py
index 9863fb1..e913ed4 100644
--- a/mesonbuild/dependencies/platform.py
+++ b/mesonbuild/dependencies/platform.py
@@ -16,7 +16,7 @@
# platform-specific (generally speaking).
from .base import ExternalDependency, DependencyException
-
+from ..mesonlib import MesonException
class AppleFrameworks(ExternalDependency):
def __init__(self, env, kwargs):
@@ -31,7 +31,16 @@ class AppleFrameworks(ExternalDependency):
raise DependencyException('No C-like compilers are available, cannot find the framework')
self.is_found = True
for f in self.frameworks:
- args = self.clib_compiler.find_framework(f, env, [])
+ try:
+ args = self.clib_compiler.find_framework(f, env, [])
+ except MesonException as e:
+ if 'non-clang' in str(e):
+ self.is_found = False
+ self.link_args = []
+ self.compile_args = []
+ return
+ raise
+
if args is not None:
# No compile args are needed for system frameworks
self.link_args += args
diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py
index 4b3fb70..b1fa632 100644
--- a/mesonbuild/dependencies/ui.py
+++ b/mesonbuild/dependencies/ui.py
@@ -298,8 +298,8 @@ class QtBaseDependency(ExternalDependency):
# the Qt + m_name there is not a symlink, it's a file
mod_private_dir = qt_inc_dir
mod_private_inc = _qt_get_private_includes(mod_private_dir, m_name, m.version)
- for dir in mod_private_inc:
- self.compile_args.append('-I' + dir)
+ for directory in mod_private_inc:
+ self.compile_args.append('-I' + directory)
self.link_args += m.get_link_args()
if 'Core' in modules:
@@ -402,8 +402,8 @@ class QtBaseDependency(ExternalDependency):
if self.private_headers:
priv_inc = self.get_private_includes(mincdir, module)
- for dir in priv_inc:
- self.compile_args.append('-I' + dir)
+ for directory in priv_inc:
+ self.compile_args.append('-I' + directory)
libfile = self.clib_compiler.find_library(self.qtpkgname + module + modules_lib_suffix,
self.env,
libdir)
diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py
index e211945..70f964e 100644
--- a/mesonbuild/envconfig.py
+++ b/mesonbuild/envconfig.py
@@ -19,6 +19,8 @@ from . import mesonlib
from .mesonlib import EnvironmentException, MachineChoice, PerMachine
from . import mlog
+_T = typing.TypeVar('_T')
+
# These classes contains all the data pulled from configuration files (native
# and cross file currently), and also assists with the reading environment
@@ -69,7 +71,7 @@ CPU_FAMILES_64_BIT = [
class MesonConfigFile:
@classmethod
- def from_config_parser(cls, parser: configparser.ConfigParser):
+ def from_config_parser(cls, parser: configparser.ConfigParser) -> typing.Dict[str, typing.Dict[str, typing.Dict[str, str]]]:
out = {}
# This is a bit hackish at the moment.
for s in parser.sections():
@@ -106,55 +108,58 @@ class HasEnvVarFallback:
that we deal with environment variables will become more structured, and
this can be starting point.
"""
- def __init__(self, fallback = True):
+ def __init__(self, fallback: bool = True):
self.fallback = fallback
class Properties(HasEnvVarFallback):
def __init__(
self,
properties: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None,
- fallback = True):
+ fallback: bool = True):
super().__init__(fallback)
- self.properties = properties or {}
+ self.properties = properties or {} # type: typing.Dict[str, typing.Union[str, typing.List[str]]]
- def has_stdlib(self, language):
+ def has_stdlib(self, language: str) -> bool:
return language + '_stdlib' in self.properties
- def get_stdlib(self, language):
+ # Some of get_stdlib, get_root, get_sys_root are wider than is actually
+ # true, but without heterogeneous dict annotations it's not practical to
+ # narrow them
+ def get_stdlib(self, language: str) -> typing.Union[str, typing.List[str]]:
return self.properties[language + '_stdlib']
- def get_root(self):
+ def get_root(self) -> typing.Optional[typing.Union[str, typing.List[str]]]:
return self.properties.get('root', None)
- def get_sys_root(self):
+ def get_sys_root(self) -> typing.Optional[typing.Union[str, typing.List[str]]]:
return self.properties.get('sys_root', None)
- def __eq__(self, other):
+ def __eq__(self, other: typing.Any) -> 'typing.Union[bool, NotImplemented]':
if isinstance(other, type(self)):
return self.properties == other.properties
return NotImplemented
# TODO consider removing so Properties is less freeform
- def __getitem__(self, key):
+ def __getitem__(self, key: str) -> typing.Any:
return self.properties[key]
# TODO consider removing so Properties is less freeform
- def __contains__(self, item):
+ def __contains__(self, item: typing.Any) -> bool:
return item in self.properties
# TODO consider removing, for same reasons as above
- def get(self, key, default=None):
+ def get(self, key: str, default: typing.Any = None) -> typing.Any:
return self.properties.get(key, default)
class MachineInfo:
- def __init__(self, system, cpu_family, cpu, endian):
+ def __init__(self, system: str, cpu_family: str, cpu: str, endian: str):
self.system = system
self.cpu_family = cpu_family
self.cpu = cpu
self.endian = endian
- self.is_64_bit = cpu_family in CPU_FAMILES_64_BIT
+ self.is_64_bit = cpu_family in CPU_FAMILES_64_BIT # type: bool
- def __eq__(self, other):
+ def __eq__(self, other: typing.Any) -> 'typing.Union[bool, NotImplemented]':
if self.__class__ is not other.__class__:
return NotImplemented
return \
@@ -163,16 +168,16 @@ class MachineInfo:
self.cpu == other.cpu and \
self.endian == other.endian
- def __ne__(self, other):
+ def __ne__(self, other: typing.Any) -> 'typing.Union[bool, NotImplemented]':
if self.__class__ is not other.__class__:
return NotImplemented
return not self.__eq__(other)
- def __repr__(self):
+ def __repr__(self) -> str:
return '<MachineInfo: {} {} ({})>'.format(self.system, self.cpu_family, self.cpu)
- @staticmethod
- def from_literal(literal):
+ @classmethod
+ def from_literal(cls, literal: typing.Dict[str, str]) -> 'MachineInfo':
minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
if set(literal) < minimum_literal:
raise EnvironmentException(
@@ -187,49 +192,45 @@ class MachineInfo:
if endian not in ('little', 'big'):
mlog.warning('Unknown endian %s' % endian)
- return MachineInfo(
- literal['system'],
- cpu_family,
- literal['cpu'],
- endian)
+ return cls(literal['system'], cpu_family, literal['cpu'], endian)
- def is_windows(self):
+ def is_windows(self) -> bool:
"""
Machine is windows?
"""
- return self.system == 'windows'
+ return self.system in {'windows', 'mingw'}
- def is_cygwin(self):
+ def is_cygwin(self) -> bool:
"""
Machine is cygwin?
"""
return self.system == 'cygwin'
- def is_linux(self):
+ def is_linux(self) -> bool:
"""
Machine is linux?
"""
return self.system == 'linux'
- def is_darwin(self):
+ def is_darwin(self) -> bool:
"""
Machine is Darwin (iOS/OS X)?
"""
- return self.system in ('darwin', 'ios')
+ return self.system in {'darwin', 'ios'}
- def is_android(self):
+ def is_android(self) -> bool:
"""
Machine is Android?
"""
return self.system == 'android'
- def is_haiku(self):
+ def is_haiku(self) -> bool:
"""
Machine is Haiku?
"""
return self.system == 'haiku'
- def is_openbsd(self):
+ def is_openbsd(self) -> bool:
"""
Machine is OpenBSD?
"""
@@ -239,29 +240,28 @@ class MachineInfo:
# static libraries, and executables.
# Versioning is added to these names in the backends as-needed.
- def get_exe_suffix(self):
+ def get_exe_suffix(self) -> str:
if self.is_windows() or self.is_cygwin():
return 'exe'
else:
return ''
- def get_object_suffix(self):
+ def get_object_suffix(self) -> str:
if self.is_windows():
return 'obj'
else:
return 'o'
- def libdir_layout_is_win(self):
- return self.is_windows() \
- or self.is_cygwin()
+ def libdir_layout_is_win(self) -> bool:
+ return self.is_windows() or self.is_cygwin()
-class PerMachineDefaultable(PerMachine):
+class PerMachineDefaultable(PerMachine[typing.Optional[_T]]):
"""Extends `PerMachine` with the ability to default from `None`s.
"""
- def __init__(self):
+ def __init__(self) -> None:
super().__init__(None, None, None)
- def default_missing(self):
+ def default_missing(self) -> None:
"""Default host to buid and target to host.
This allows just specifying nothing in the native case, just host in the
@@ -273,7 +273,7 @@ class PerMachineDefaultable(PerMachine):
if self.target is None:
self.target = self.host
- def miss_defaulting(self):
+ def miss_defaulting(self) -> None:
"""Unset definition duplicated from their previous to None
This is the inverse of ''default_missing''. By removing defaulted
@@ -285,18 +285,17 @@ class PerMachineDefaultable(PerMachine):
if self.host == self.build:
self.host = None
-class MachineInfos(PerMachineDefaultable):
- def matches_build_machine(self, machine: MachineChoice):
+class MachineInfos(PerMachineDefaultable[MachineInfo]):
+ def matches_build_machine(self, machine: MachineChoice) -> bool:
return self.build == self[machine]
class BinaryTable(HasEnvVarFallback):
def __init__(
self,
binaries: typing.Optional[typing.Dict[str, typing.Union[str, typing.List[str]]]] = None,
-
- fallback = True):
+ fallback: bool = True):
super().__init__(fallback)
- self.binaries = binaries or {}
+ self.binaries = binaries or {} # type: typing.Dict[str, typing.Union[str, typing.List[str]]]
for name, command in self.binaries.items():
if not isinstance(command, (list, str)):
# TODO generalize message
@@ -325,29 +324,25 @@ class BinaryTable(HasEnvVarFallback):
'cmake': 'CMAKE',
'qmake': 'QMAKE',
'pkgconfig': 'PKG_CONFIG',
- }
+ } # type: typing.Dict[str, str]
- @classmethod
- def detect_ccache(cls):
+ @staticmethod
+ def detect_ccache() -> typing.List[str]:
try:
- has_ccache = subprocess.call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- except OSError:
- has_ccache = 1
- if has_ccache == 0:
- cmdlist = ['ccache']
- else:
- cmdlist = []
- return cmdlist
+ subprocess.check_call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except (OSError, subprocess.CalledProcessError):
+ return []
+ return ['ccache']
@classmethod
- def _warn_about_lang_pointing_to_cross(cls, compiler_exe, evar):
+ def _warn_about_lang_pointing_to_cross(cls, compiler_exe: str, evar: str) -> None:
evar_str = os.environ.get(evar, 'WHO_WOULD_CALL_THEIR_COMPILER_WITH_THIS_NAME')
if evar_str == compiler_exe:
mlog.warning('''Env var %s seems to point to the cross compiler.
This is probably wrong, it should always point to the native compiler.''' % evar)
@classmethod
- def parse_entry(cls, entry):
+ def parse_entry(cls, entry: typing.Union[str, typing.List[str]]) -> typing.Tuple[typing.List[str], typing.List[str]]:
compiler = mesonlib.stringlistify(entry)
# Ensure ccache exists and remove it if it doesn't
if compiler[0] == 'ccache':
@@ -358,8 +353,8 @@ This is probably wrong, it should always point to the native compiler.''' % evar
# Return value has to be a list of compiler 'choices'
return compiler, ccache
- def lookup_entry(self, name):
- """Lookup binary
+ def lookup_entry(self, name: str) -> typing.Optional[typing.List[str]]:
+ """Lookup binaryk
Returns command with args as list if found, Returns `None` if nothing is
found.
@@ -408,11 +403,12 @@ class Directories:
self.sharedstatedir = sharedstatedir
self.sysconfdir = sysconfdir
- def __contains__(self, key: str) -> str:
+ def __contains__(self, key: str) -> bool:
return hasattr(self, key)
- def __getitem__(self, key: str) -> str:
- return getattr(self, key)
+ def __getitem__(self, key: str) -> typing.Optional[str]:
+ # Mypy can't figure out what to do with getattr here, so we'll cast for it
+ return typing.cast(typing.Optional[str], getattr(self, key))
def __setitem__(self, key: str, value: typing.Optional[str]) -> None:
setattr(self, key, value)
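
For context on the PerMachineDefaultable typing above: the class holds build/host/target values and can fill in missing ones (host defaults to build, target to host) or strip such duplicates back out. A simplified, self-contained sketch of that behaviour (not Meson's class):

from typing import Generic, Optional, TypeVar

T = TypeVar('T')

class PerMachineSketch(Generic[T]):
    def __init__(self) -> None:
        self.build: Optional[T] = None
        self.host: Optional[T] = None
        self.target: Optional[T] = None

    def default_missing(self) -> None:
        # Host defaults to build, target defaults to host, so a native
        # configuration only needs the build machine filled in.
        if self.host is None:
            self.host = self.build
        if self.target is None:
            self.target = self.host

    def miss_defaulting(self) -> None:
        # Inverse operation: drop values that merely duplicate their
        # "previous" machine, leaving only what was explicitly set.
        if self.target == self.host:
            self.target = None
        if self.host == self.build:
            self.host = None

m: PerMachineSketch[str] = PerMachineSketch()
m.build = 'x86_64-linux'
m.default_missing()
print(m.build, m.host, m.target)   # all 'x86_64-linux'
m.miss_defaulting()
print(m.host, m.target)            # None None
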
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 0c0f00a..462672e 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -12,14 +12,13 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import configparser, os, platform, re, sys, shlex, shutil, subprocess
-import typing
+import os, platform, re, sys, shlex, shutil, subprocess, typing
from . import coredata
from .linkers import ArLinker, ArmarLinker, VisualStudioLinker, DLinker, CcrxLinker
from . import mesonlib
from .mesonlib import (
- MesonException, EnvironmentException, MachineChoice, PerMachine, Popen_safe,
+ MesonException, EnvironmentException, MachineChoice, Popen_safe,
)
from . import mlog
@@ -29,6 +28,7 @@ from .envconfig import (
)
from . import compilers
from .compilers import (
+ Compiler,
CompilerType,
is_assembly,
is_header,
@@ -38,7 +38,6 @@ from .compilers import (
is_source,
)
from .compilers import (
- Compiler,
ArmCCompiler,
ArmCPPCompiler,
ArmclangCCompiler,
@@ -62,6 +61,8 @@ from .compilers import (
IntelCCompiler,
IntelCPPCompiler,
IntelFortranCompiler,
+ IntelClCCompiler,
+ IntelClCPPCompiler,
JavaCompiler,
MonoCompiler,
CudaCompiler,
@@ -83,6 +84,8 @@ from .compilers import (
build_filename = 'meson.build'
+CompilersDict = typing.Dict[str, Compiler]
+
def detect_gcovr(min_version='3.3', new_rootdir_version='4.2', log=False):
gcovr_exe = 'gcovr'
try:
@@ -150,7 +153,7 @@ def detect_native_windows_arch():
raise EnvironmentException('Unable to detect native OS architecture')
return arch
-def detect_windows_arch(compilers):
+def detect_windows_arch(compilers: CompilersDict) -> str:
"""
Detecting the 'native' architecture of Windows is not a trivial task. We
cannot trust that the architecture that Python is built for is the 'native'
@@ -190,7 +193,7 @@ def detect_windows_arch(compilers):
return 'x86'
return os_arch
-def any_compiler_has_define(compilers, define):
+def any_compiler_has_define(compilers: CompilersDict, define):
for c in compilers.values():
try:
if c.has_builtin_define(define):
@@ -200,7 +203,7 @@ def any_compiler_has_define(compilers, define):
pass
return False
-def detect_cpu_family(compilers):
+def detect_cpu_family(compilers: CompilersDict) -> str:
"""
Python is inconsistent in its platform module.
It returns different values for the same cpu.
@@ -262,7 +265,7 @@ def detect_cpu_family(compilers):
return trial
-def detect_cpu(compilers):
+def detect_cpu(compilers: CompilersDict):
if mesonlib.is_windows():
trial = detect_windows_arch(compilers)
else:
@@ -295,7 +298,7 @@ def detect_msys2_arch():
return os.environ['MSYSTEM_CARCH']
return None
-def detect_machine_info(compilers = None) -> MachineInfo:
+def detect_machine_info(compilers: typing.Optional[CompilersDict] = None) -> MachineInfo:
"""Detect the machine we're running on
If compilers are not provided, we cannot know as much. None out those
@@ -455,11 +458,13 @@ class Environment:
# List of potential compilers.
if mesonlib.is_windows():
- self.default_c = ['cl', 'cc', 'gcc', 'clang', 'clang-cl', 'pgcc']
- self.default_cpp = ['cl', 'c++', 'g++', 'clang++', 'clang-cl', 'pgc++']
+ # The Intel C and C++ compilers are both icl on Windows, but icc and icpc elsewhere.
+ self.default_c = ['cl', 'cc', 'gcc', 'clang', 'clang-cl', 'pgcc', 'icl']
+ # There is currently no pgc++ for Windows, only for Mac and Linux.
+ self.default_cpp = ['cl', 'c++', 'g++', 'clang++', 'clang-cl', 'icl']
else:
- self.default_c = ['cc', 'gcc', 'clang', 'pgcc']
- self.default_cpp = ['c++', 'g++', 'clang++', 'pgc++']
+ self.default_c = ['cc', 'gcc', 'clang', 'pgcc', 'icc']
+ self.default_cpp = ['c++', 'g++', 'clang++', 'pgc++', 'icpc']
if mesonlib.is_windows():
self.default_cs = ['csc', 'mcs']
else:
@@ -467,7 +472,7 @@ class Environment:
self.default_objc = ['cc']
self.default_objcpp = ['c++']
self.default_d = ['ldc2', 'ldc', 'gdc', 'dmd']
- self.default_fortran = ['gfortran', 'g95', 'f95', 'f90', 'f77', 'ifort', 'pgfortran']
+ self.default_fortran = ['gfortran', 'flang', 'pgfortran', 'ifort', 'g95']
self.default_java = ['javac']
self.default_cuda = ['nvcc']
self.default_rust = ['rustc']
@@ -676,6 +681,7 @@ class Environment:
arg = '-v'
else:
arg = '--version'
+
try:
p, out, err = Popen_safe(compiler + [arg])
except OSError as e:
@@ -684,6 +690,11 @@ class Environment:
if 'ccrx' in compiler[0]:
out = err
+ if 'icl' in compiler[0]:
+ # https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-alphabetical-list-of-compiler-options
+ # https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-logo
+ # the most consistent way for ICL is to just let the compiler error out and report its version
+ out = err
full_version = out.split('\n', 1)[0]
version = search_version(out)
@@ -769,19 +780,29 @@ class Environment:
target = 'x86'
cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler
return cls(compiler, version, is_cross, exe_wrap, target)
+
if 'PGI Compilers' in out:
+ if mesonlib.for_darwin(is_cross, self):
+ compiler_type = CompilerType.PGI_OSX
+ elif mesonlib.for_windows(is_cross, self):
+ compiler_type = CompilerType.PGI_WIN
+ else:
+ compiler_type = CompilerType.PGI_STANDARD
cls = PGICCompiler if lang == 'c' else PGICPPCompiler
- return cls(ccache + compiler, version, is_cross, exe_wrap)
+ return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap)
if '(ICC)' in out:
if mesonlib.for_darwin(want_cross, self):
compiler_type = CompilerType.ICC_OSX
elif mesonlib.for_windows(want_cross, self):
- # TODO: fix ICC on Windows
- compiler_type = CompilerType.ICC_WIN
+ raise EnvironmentException('At the time of authoring, there was no ICC for Windows')
else:
compiler_type = CompilerType.ICC_STANDARD
cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler
return cls(ccache + compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version)
+ if out.startswith('Intel(R) C++') and mesonlib.for_windows(want_cross, self):
+ cls = IntelClCCompiler if lang == 'c' else IntelClCPPCompiler
+ target = 'x64' if 'Intel(R) 64 Compiler' in out else 'x86'
+ return cls(compiler, version, is_cross, exe_wrap, target)
if 'ARM' in out:
compiler_type = CompilerType.ARM_WIN
cls = ArmCCompiler if lang == 'c' else ArmCPPCompiler
@@ -846,6 +867,13 @@ class Environment:
popen_exceptions[' '.join(compiler + [arg])] = e
continue
+ if mesonlib.for_windows(is_cross, self):
+ if 'ifort' in compiler[0]:
+ # https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-alphabetical-list-of-compiler-options
+ # https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-logo
+ # the most consistent way for ICL is to just let the compiler error out and report its version
+ out = err
+
version = search_version(out)
full_version = out.split('\n', 1)[0]
@@ -876,14 +904,20 @@ class Environment:
version = search_version(err)
return SunFortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
- if 'ifort (IFORT)' in out:
+ if 'ifort (IFORT)' in out or out.startswith('Intel(R) Visual Fortran'):
return IntelFortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
if 'PathScale EKOPath(tm)' in err:
return PathScaleFortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
if 'PGI Compilers' in out:
- return PGIFortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
+ if mesonlib.for_darwin(is_cross, self):
+ compiler_type = CompilerType.PGI_OSX
+ elif mesonlib.for_windows(is_cross, self):
+ compiler_type = CompilerType.PGI_WIN
+ else:
+ compiler_type = CompilerType.PGI_STANDARD
+ return PGIFortranCompiler(compiler, version, compiler_type, is_cross, exe_wrap, full_version=full_version)
if 'flang' in out or 'clang' in out:
return FlangFortranCompiler(compiler, version, is_cross, exe_wrap, full_version=full_version)
@@ -1039,7 +1073,8 @@ class Environment:
# up to date language version at time (2016).
if exelist is not None:
if os.path.basename(exelist[-1]).startswith(('ldmd', 'gdmd')):
- raise EnvironmentException('Meson doesn\'t support %s as it\'s only a DMD frontend for another compiler. Please provide a valid value for DC or unset it so that Meson can resolve the compiler by itself.' % exelist[-1])
+ raise EnvironmentException('Meson does not support {} as it is only a DMD frontend for another compiler. '
+ 'Please provide a valid value for DC or unset it so that Meson can resolve the compiler by itself.'.format(exelist[-1]))
else:
for d in self.default_d:
if shutil.which(d):
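
The environment.py detection changes above key off the compilers' version banners: the Windows Intel drivers (icl, ifort) print their banner on stderr, ICC identifies itself with '(ICC)', and PGI with 'PGI Compilers'. The sketch below shows the sniffing idea in isolation; the probe helper and the --version invocation are illustrative assumptions, and only the matched strings come from the diff.

import subprocess
from typing import List, Tuple

def probe(compiler: List[str]) -> Tuple[str, str]:
    # Illustrative probe; real detection varies the flag per compiler.
    try:
        p = subprocess.run(compiler + ['--version'],
                           capture_output=True, text=True)
    except OSError:
        return '', ''
    return p.stdout, p.stderr

def classify(exe: str, out: str, err: str) -> str:
    if 'icl' in exe or 'ifort' in exe:
        # Intel's Windows drivers report their version on stderr.
        out = err
    if out.startswith('Intel(R) C++') or out.startswith('Intel(R) Visual Fortran'):
        return 'intel-cl'       # the new IntelClCCompiler / IntelClCPPCompiler path
    if '(ICC)' in out:
        return 'intel'
    if 'PGI Compilers' in out:
        return 'pgi'
    return 'unknown'

if __name__ == '__main__':
    stdout, stderr = probe(['cc'])
    print(classify('cc', stdout, stderr))
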
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index a6e34fc..ba97083 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -2206,7 +2206,7 @@ class Interpreter(InterpreterBase):
def check_cross_stdlibs(self):
if self.build.environment.is_cross_build():
props = self.build.environment.properties.host
- for l, c in self.build.cross_compilers.items():
+ for l in self.build.cross_compilers.keys():
try:
di = mesonlib.stringlistify(props.get_stdlib(l))
if len(di) != 2:
@@ -2467,7 +2467,7 @@ external dependencies (including libraries) must go to "dependencies".''')
with mlog.nested():
# Suppress the 'ERROR:' prefix because this exception is not
# fatal and VS CI treat any logs with "ERROR:" as fatal.
- mlog.exception(e, prefix=None)
+ mlog.exception(e, prefix=mlog.yellow('Exception:'))
mlog.log('\nSubproject', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)')
return self.disabled_subproject(dirname)
raise e
@@ -3009,6 +3009,7 @@ external dependencies (including libraries) must go to "dependencies".''')
self._handle_featurenew_dependencies(name)
kwargs['required'] = required and not has_fallback
dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+
kwargs['required'] = required
# Only store found-deps in the cache
# Never add fallback deps to self.coredata.deps since we
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index c148cbd..71a4ef3 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -144,7 +144,7 @@ def stringArgs(f):
return wrapped
def noArgsFlattening(f):
- setattr(f, 'no-args-flattening', True)
+ setattr(f, 'no-args-flattening', True) # noqa: B010
return f
def disablerIfNotFound(f):
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index f233730..f78fa35 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -22,9 +22,13 @@ import platform, subprocess, operator, os, shutil, re
import collections
from enum import Enum
from functools import lru_cache
+import typing
from mesonbuild import mlog
+_T = typing.TypeVar('_T')
+_U = typing.TypeVar('_U')
+
have_fcntl = False
have_msvcrt = False
# {subproject: project_meson_version}
@@ -319,20 +323,20 @@ class MachineChoice(OrderedEnum):
HOST = 1
TARGET = 2
-class PerMachine:
- def __init__(self, build, host, target):
+class PerMachine(typing.Generic[_T]):
+ def __init__(self, build: _T, host: _T, target: _T):
self.build = build
self.host = host
self.target = target
- def __getitem__(self, machine: MachineChoice):
+ def __getitem__(self, machine: MachineChoice) -> _T:
return {
MachineChoice.BUILD: self.build,
MachineChoice.HOST: self.host,
MachineChoice.TARGET: self.target
}[machine]
- def __setitem__(self, machine: MachineChoice, val):
+ def __setitem__(self, machine: MachineChoice, val: _T) -> None:
key = {
MachineChoice.BUILD: 'build',
MachineChoice.HOST: 'host',
@@ -716,11 +720,11 @@ def has_path_sep(name, sep='/\\'):
return True
return False
-def do_replacement(regex, line, format, confdata):
+def do_replacement(regex, line, variable_format, confdata):
missing_variables = set()
start_tag = '@'
backslash_tag = '\\@'
- if format == 'cmake':
+ if variable_format == 'cmake':
start_tag = '${'
backslash_tag = '\\${'
@@ -773,7 +777,7 @@ def do_mesondefine(line, confdata):
raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
-def do_conf_file(src, dst, confdata, format, encoding='utf-8'):
+def do_conf_file(src, dst, confdata, variable_format, encoding='utf-8'):
try:
with open(src, encoding=encoding, newline='') as f:
data = f.readlines()
@@ -781,15 +785,15 @@ def do_conf_file(src, dst, confdata, format, encoding='utf-8'):
raise MesonException('Could not read input file %s: %s' % (src, str(e)))
# Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
# Also allow escaping '@' with '\@'
- if format in ['meson', 'cmake@']:
+ if variable_format in ['meson', 'cmake@']:
regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
- elif format == 'cmake':
+ elif variable_format == 'cmake':
regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
else:
- raise MesonException('Format "{}" not handled'.format(format))
+ raise MesonException('Format "{}" not handled'.format(variable_format))
search_token = '#mesondefine'
- if format != 'meson':
+ if variable_format != 'meson':
search_token = '#cmakedefine'
result = []
@@ -802,7 +806,7 @@ def do_conf_file(src, dst, confdata, format, encoding='utf-8'):
confdata_useless = False
line = do_mesondefine(line, confdata)
else:
- line, missing = do_replacement(regex, line, format, confdata)
+ line, missing = do_replacement(regex, line, variable_format, confdata)
missing_variables.update(missing)
if missing:
confdata_useless = False
@@ -916,14 +920,14 @@ def extract_as_list(dict_object, *keys, pop=False, **kwargs):
result.append(listify(fetch(key, []), **kwargs))
return result
-
-def typeslistify(item, types):
+def typeslistify(item: 'typing.Union[_T, typing.List[_T]]',
+ types: 'typing.Union[typing.Type[_T], typing.Tuple[typing.Type[_T]]]') -> typing.List[_T]:
'''
Ensure that type(@item) is one of @types or a
list of items all of which are of type @types
'''
if isinstance(item, types):
- item = [item]
+ item = typing.cast(typing.List[_T], [item])
if not isinstance(item, list):
raise MesonException('Item must be a list or one of {!r}'.format(types))
for i in item:
@@ -931,7 +935,7 @@ def typeslistify(item, types):
raise MesonException('List item must be one of {!r}'.format(types))
return item
-def stringlistify(item):
+def stringlistify(item: typing.Union[str, typing.List[str]]) -> typing.List[str]:
return typeslistify(item, str)
def expand_arguments(args):
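(Tiny illustration, not in the patch, of what the now-annotated helpers do at runtime.)

from mesonbuild.mesonlib import stringlistify, typeslistify

assert stringlistify('foo') == ['foo']          # a lone string is wrapped in a list
assert stringlistify(['a', 'b']) == ['a', 'b']  # lists pass through unchanged
assert typeslistify(3, int) == [3]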
@@ -963,7 +967,7 @@ def Popen_safe(args, write=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
return p, o, e
def Popen_safe_legacy(args, write=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs):
- p = subprocess.Popen(args, universal_newlines=False,
+ p = subprocess.Popen(args, universal_newlines=False, close_fds=False,
stdout=stdout, stderr=stderr, **kwargs)
if write is not None:
write = write.encode('utf-8')
@@ -1202,7 +1206,14 @@ def detect_subprojects(spdir_name, current_dir='', result=None):
result[basename] = [trial]
return result
-def get_error_location_string(fname, lineno):
+# This isn't strictly correct. What we really want here is something like:
+# class StringProtocol(typing_extensions.Protocol):
+#
+# def __str__(self) -> str: ...
+#
+# This would more accurately embody what this function can handle, but we
+# don't have that yet, so instead we'll do some casting to work around it
+def get_error_location_string(fname: str, lineno: str) -> str:
return '{}:{}:'.format(fname, lineno)
def substring_is_in_list(substr, strlist):
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index 4326c20..c94f1bf 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -41,41 +41,41 @@ class CommandLineParser:
self.subparsers = self.parser.add_subparsers(title='Commands',
description='If no command is specified it defaults to setup command.')
self.add_command('setup', msetup.add_arguments, msetup.run,
- help='Configure the project')
+ help_msg='Configure the project')
self.add_command('configure', mconf.add_arguments, mconf.run,
- help='Change project options',)
+ help_msg='Change project options',)
self.add_command('install', minstall.add_arguments, minstall.run,
- help='Install the project')
+ help_msg='Install the project')
self.add_command('introspect', mintro.add_arguments, mintro.run,
- help='Introspect project')
+ help_msg='Introspect project')
self.add_command('init', minit.add_arguments, minit.run,
- help='Create a new project')
+ help_msg='Create a new project')
self.add_command('test', mtest.add_arguments, mtest.run,
- help='Run tests')
+ help_msg='Run tests')
self.add_command('wrap', wraptool.add_arguments, wraptool.run,
- help='Wrap tools')
+ help_msg='Wrap tools')
self.add_command('subprojects', msubprojects.add_arguments, msubprojects.run,
- help='Manage subprojects')
+ help_msg='Manage subprojects')
self.add_command('help', self.add_help_arguments, self.run_help_command,
- help='Print help of a subcommand')
+ help_msg='Print help of a subcommand')
self.add_command('rewrite', lambda parser: rewriter.add_arguments(parser, self.formater), rewriter.run,
- help='Modify the project definition')
+ help_msg='Modify the project definition')
# Hidden commands
self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command,
- help=argparse.SUPPRESS)
+ help_msg=argparse.SUPPRESS)
self.add_command('unstable-coredata', munstable_coredata.add_arguments, munstable_coredata.run,
- help=argparse.SUPPRESS)
+ help_msg=argparse.SUPPRESS)
- def add_command(self, name, add_arguments_func, run_func, help, aliases=None):
+ def add_command(self, name, add_arguments_func, run_func, help_msg, aliases=None):
aliases = aliases or []
# FIXME: Cannot have hidden subparser:
# https://bugs.python.org/issue22848
- if help == argparse.SUPPRESS:
+ if help_msg == argparse.SUPPRESS:
p = argparse.ArgumentParser(prog='meson ' + name, formatter_class=self.formater)
self.hidden_commands.append(name)
else:
- p = self.subparsers.add_parser(name, help=help, aliases=aliases, formatter_class=self.formater)
+ p = self.subparsers.add_parser(name, help=help_msg, aliases=aliases, formatter_class=self.formater)
add_arguments_func(p)
p.set_defaults(run_func=run_func)
for i in [name] + aliases:
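(Aside, a minimal runnable sketch of the hidden-command workaround that the FIXME above refers to; the command names here are placeholders, not the full Meson set.)

import argparse

root = argparse.ArgumentParser(prog='meson')
subparsers = root.add_subparsers(title='Commands')

def add_command(name, help_msg):
    # Suppressed commands cannot be hidden subparsers (bpo-22848), so they get
    # a standalone parser instead of an entry in `meson --help`.
    if help_msg == argparse.SUPPRESS:
        return argparse.ArgumentParser(prog='meson ' + name)
    return subparsers.add_parser(name, help=help_msg)

setup_parser = add_command('setup', 'Configure the project')
runpython_parser = add_command('runpython', argparse.SUPPRESS)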
diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py
index c6b6bbf..ed82c37 100644
--- a/mesonbuild/minstall.py
+++ b/mesonbuild/minstall.py
@@ -65,7 +65,7 @@ class DirMaker:
def __enter__(self):
return self
- def __exit__(self, type, value, traceback):
+ def __exit__(self, exception_type, value, traceback):
self.dirs.reverse()
for d in self.dirs:
append_to_log(self.lf, d)
@@ -93,7 +93,7 @@ def set_chown(path, user=None, group=None, dir_fd=None, follow_symlinks=True):
dir_fd=dir_fd,
follow_symlinks=follow_symlinks)
shutil.chown(path, user, group)
- except:
+ except Exception:
raise
finally:
os.chown = real_os_chown
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index a4a6978..8c8aa15 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -27,7 +27,7 @@ from . import mlog
from .backend import backends
from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
from typing import List, Optional
-import sys, os
+import os
import pathlib
def get_meson_info_file(info_dir: str):
@@ -117,11 +117,11 @@ def list_installed(installdata):
for t in installdata.targets:
res[os.path.join(installdata.build_dir, t.fname)] = \
os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))
- for path, installpath, unused_prefix in installdata.data:
+ for path, installpath, _ in installdata.data:
res[path] = os.path.join(installdata.prefix, installpath)
- for path, installdir, unused_custom_install_mode in installdata.headers:
+ for path, installdir, _ in installdata.headers:
res[path] = os.path.join(installdata.prefix, installdir, os.path.basename(path))
- for path, installpath, unused_custom_install_mode in installdata.man:
+ for path, installpath, _ in installdata.man:
res[path] = os.path.join(installdata.prefix, installpath)
return res
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index 0434274..79dee47 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -18,13 +18,16 @@ import sys
import time
import platform
from contextlib import contextmanager
+import typing
+from typing import Any, Generator, List, Optional, Sequence, TextIO, Union
"""This is (mostly) a standalone module used to write logging
information about Meson runs. Some output goes to screen,
some to logging dir and some goes to both."""
-def _windows_ansi():
- from ctypes import windll, byref
+def _windows_ansi() -> bool:
+ # windll only exists on windows, so mypy will get mad
+ from ctypes import windll, byref # type: ignore
from ctypes.wintypes import DWORD
kernel = windll.kernel32
@@ -35,48 +38,48 @@ def _windows_ansi():
# ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0x4
# If the call to enable VT processing fails (returns 0), we fallback to
# original behavior
- return kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON')
+ return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
if platform.system().lower() == 'windows':
- colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
+ colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi() # type: bool
else:
colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM') != 'dumb'
-log_dir = None
-log_file = None
-log_fname = 'meson-log.txt'
-log_depth = 0
-log_timestamp_start = None
-log_fatal_warnings = False
-log_disable_stdout = False
-log_errors_only = False
-
-def disable():
+log_dir = None # type: Optional[str]
+log_file = None # type: Optional[TextIO]
+log_fname = 'meson-log.txt' # type: str
+log_depth = 0 # type: int
+log_timestamp_start = None # type: Optional[float]
+log_fatal_warnings = False # type: bool
+log_disable_stdout = False # type: bool
+log_errors_only = False # type: bool
+
+def disable() -> None:
global log_disable_stdout
log_disable_stdout = True
-def enable():
+def enable() -> None:
global log_disable_stdout
log_disable_stdout = False
-def set_quiet():
+def set_quiet() -> None:
global log_errors_only
log_errors_only = True
-def set_verbose():
+def set_verbose() -> None:
global log_errors_only
log_errors_only = False
-def initialize(logdir, fatal_warnings=False):
+def initialize(logdir: str, fatal_warnings: bool = False) -> None:
global log_dir, log_file, log_fatal_warnings
log_dir = logdir
log_file = open(os.path.join(logdir, log_fname), 'w', encoding='utf8')
log_fatal_warnings = fatal_warnings
-def set_timestamp_start(start):
+def set_timestamp_start(start: float) -> None:
global log_timestamp_start
log_timestamp_start = start
-def shutdown():
+def shutdown() -> Optional[str]:
global log_file
if log_file is not None:
path = log_file.name
@@ -89,12 +92,12 @@ def shutdown():
class AnsiDecorator:
plain_code = "\033[0m"
- def __init__(self, text, code, quoted=False):
+ def __init__(self, text: str, code: str, quoted: bool = False):
self.text = text
self.code = code
self.quoted = quoted
- def get_text(self, with_codes):
+ def get_text(self, with_codes: bool) -> str:
text = self.text
if with_codes:
text = self.code + self.text + AnsiDecorator.plain_code
@@ -102,26 +105,28 @@ class AnsiDecorator:
text = '"{}"'.format(text)
return text
-def bold(text, quoted=False):
+def bold(text: str, quoted: bool = False) -> AnsiDecorator:
return AnsiDecorator(text, "\033[1m", quoted=quoted)
-def red(text):
+def red(text: str) -> AnsiDecorator:
return AnsiDecorator(text, "\033[1;31m")
-def green(text):
+def green(text: str) -> AnsiDecorator:
return AnsiDecorator(text, "\033[1;32m")
-def yellow(text):
+def yellow(text: str) -> AnsiDecorator:
return AnsiDecorator(text, "\033[1;33m")
-def blue(text):
+def blue(text: str) -> AnsiDecorator:
return AnsiDecorator(text, "\033[1;34m")
-def cyan(text):
+def cyan(text: str) -> AnsiDecorator:
return AnsiDecorator(text, "\033[1;36m")
-def process_markup(args, keep):
- arr = []
+# This really should be AnsiDecorator or anything that implements
+# __str__(), but that requires protocols from typing_extensions
+def process_markup(args: Sequence[Union[AnsiDecorator, str]], keep: bool) -> List[str]:
+ arr = [] # type: List[str]
if log_timestamp_start is not None:
arr = ['[{:.3f}]'.format(time.monotonic() - log_timestamp_start)]
for arg in args:
@@ -135,7 +140,7 @@ def process_markup(args, keep):
arr.append(str(arg))
return arr
-def force_print(*args, **kwargs):
+def force_print(*args: str, **kwargs: Any) -> None:
global log_disable_stdout
if log_disable_stdout:
return
@@ -155,41 +160,51 @@ def force_print(*args, **kwargs):
cleaned = raw.encode('ascii', 'replace').decode('ascii')
print(cleaned, end='')
-def debug(*args, **kwargs):
+# We really want a heterogeneous dict for this, but that's in typing_extensions
+def debug(*args: Union[str, AnsiDecorator], **kwargs: Any) -> None:
arr = process_markup(args, False)
if log_file is not None:
- print(*arr, file=log_file, **kwargs) # Log file never gets ANSI codes.
+ print(*arr, file=log_file, **kwargs)
log_file.flush()
-def log(*args, is_error=False, **kwargs):
+def log(*args: Union[str, AnsiDecorator], is_error: bool = False,
+ **kwargs: Any) -> None:
global log_errors_only
arr = process_markup(args, False)
if log_file is not None:
- print(*arr, file=log_file, **kwargs) # Log file never gets ANSI codes.
+ print(*arr, file=log_file, **kwargs)
log_file.flush()
if colorize_console:
arr = process_markup(args, True)
if not log_errors_only or is_error:
force_print(*arr, **kwargs)
-def _log_error(severity, *args, **kwargs):
+def _log_error(severity: str, *rargs: Union[str, AnsiDecorator], **kwargs: Any) -> None:
from .mesonlib import get_error_location_string
from .environment import build_filename
from .mesonlib import MesonException
+
+ # The typing requirements here are non-obvious. Lists are invariant,
+ # therefore List[A] and List[Union[A, B]] are not able to be joined
if severity == 'warning':
- args = (yellow('WARNING:'),) + args
+ label = [yellow('WARNING:')] # type: List[Union[str, AnsiDecorator]]
elif severity == 'error':
- args = (red('ERROR:'),) + args
+ label = [red('ERROR:')]
elif severity == 'deprecation':
- args = (red('DEPRECATION:'),) + args
+ label = [red('DEPRECATION:')]
else:
- assert False, 'Invalid severity ' + severity
+ raise MesonException('Invalid severity ' + severity)
+ # rargs is a tuple, not a list
+ args = label + list(rargs)
location = kwargs.pop('location', None)
if location is not None:
location_file = os.path.join(location.subdir, build_filename)
location_str = get_error_location_string(location_file, location.lineno)
- args = (location_str,) + args
+ # Unions are frankly awful, and we have to cast here to get mypy
+ # to understand that the list concatenation is safe
+ location_list = typing.cast(List[Union[str, AnsiDecorator]], [location_str])
+ args = location_list + args
log(*args, **kwargs)
@@ -197,40 +212,44 @@ def _log_error(severity, *args, **kwargs):
if log_fatal_warnings:
raise MesonException("Fatal warnings enabled, aborting")
-def error(*args, **kwargs):
+def error(*args: Union[str, AnsiDecorator], **kwargs: Any) -> None:
return _log_error('error', *args, **kwargs, is_error=True)
-def warning(*args, **kwargs):
+def warning(*args: Union[str, AnsiDecorator], **kwargs: Any) -> None:
return _log_error('warning', *args, **kwargs, is_error=True)
-def deprecation(*args, **kwargs):
+def deprecation(*args: Union[str, AnsiDecorator], **kwargs: Any) -> None:
return _log_error('deprecation', *args, **kwargs, is_error=True)
-def exception(e, prefix=red('ERROR:')):
+def exception(e: Exception, prefix: Optional[AnsiDecorator] = None) -> None:
+ if prefix is None:
+ prefix = red('ERROR:')
log()
- args = []
+ args = [] # type: List[Union[AnsiDecorator, str]]
if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
- args.append('%s:%d:%d:' % (e.file, e.lineno, e.colno))
+ # Mypy can't figure this out, and it's pretty easy to visually inspect
+ # that this is correct, so we'll just ignore it.
+ args.append('%s:%d:%d:' % (e.file, e.lineno, e.colno)) # type: ignore
if prefix:
args.append(prefix)
- args.append(e)
+ args.append(str(e))
log(*args)
# Format a list for logging purposes as a string. It separates
# all but the last item with commas, and the last with 'and'.
-def format_list(list):
- l = len(list)
+def format_list(input_list: List[str]) -> str:
+ l = len(input_list)
if l > 2:
- return ' and '.join([', '.join(list[:-1]), list[-1]])
+ return ' and '.join([', '.join(input_list[:-1]), input_list[-1]])
elif l == 2:
- return ' and '.join(list)
+ return ' and '.join(input_list)
elif l == 1:
- return list[0]
+ return input_list[0]
else:
return ''
@contextmanager
-def nested():
+def nested() -> Generator[None, None, None]:
global log_depth
log_depth += 1
try:
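(Illustration only: the observable behaviour of the renamed format_list helper is unchanged by the typing work above.)

from mesonbuild import mlog

assert mlog.format_list([]) == ''
assert mlog.format_list(['a']) == 'a'
assert mlog.format_list(['a', 'b']) == 'a and b'
assert mlog.format_list(['a', 'b', 'c']) == 'a, b and c'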
diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py
index 2878134..6af4adb 100644
--- a/mesonbuild/modules/cmake.py
+++ b/mesonbuild/modules/cmake.py
@@ -72,20 +72,12 @@ class CmakeModule(ExtensionModule):
mlog.log('error retrieving cmake information: returnCode={0} stdout={1} stderr={2}'.format(p.returncode, stdout, stderr))
return False
- match = re.search('\n_INCLUDED_FILE \\"([^"]+)"\n', stdout.strip())
+ match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', stdout.strip())
if not match:
mlog.log('unable to determine cmake root')
return False
- # compilerpath is something like '/usr/share/cmake-3.5/Modules/Platform/Linux-GNU-CXX.cmake'
- # or 'C:/Program Files (x86)/CMake 2.8/share/cmake-2.8/Modules/Platform/Windows-MSVC-CXX.cmake' under windows
- compilerpath = match.group(1)
- pos = compilerpath.find('/Modules/Platform/')
- if pos < 0:
- mlog.log('unknown _INCLUDED_FILE path scheme')
- return False
-
- cmakePath = pathlib.PurePath(compilerpath[0:pos])
+ cmakePath = pathlib.PurePath(match.group(1))
self.cmake_root = os.path.join(*cmakePath.parts)
self.cmake_detected = True
return True
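(Sketch under the assumption that a `cmake` binary is on PATH: the module now just greps CMAKE_ROOT out of `cmake --system-information` instead of reverse-engineering the _INCLUDED_FILE path.)

import re, subprocess

out = subprocess.run(['cmake', '--system-information'],
                     capture_output=True, text=True).stdout
match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', out.strip())
if match:
    print('cmake root:', match.group(1))
else:
    print('unable to determine cmake root')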
diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py
index a0ebe0e..04941ea 100644
--- a/mesonbuild/modules/python.py
+++ b/mesonbuild/modules/python.py
@@ -477,9 +477,9 @@ class PythonModule(ExtensionModule):
ver = {'python2': '-2', 'python3': '-3'}[name_or_path]
cmd = ['py', ver, '-c', "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"]
_, stdout, _ = mesonlib.Popen_safe(cmd)
- dir = stdout.strip()
- if os.path.exists(dir):
- return os.path.join(dir, 'python')
+ directory = stdout.strip()
+ if os.path.exists(directory):
+ return os.path.join(directory, 'python')
else:
return None
diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py
index e8d266e..87a83fe 100644
--- a/mesonbuild/modules/windows.py
+++ b/mesonbuild/modules/windows.py
@@ -59,7 +59,7 @@ class WindowsModule(ExtensionModule):
if not rescomp.found():
raise MesonException('Could not find Windows resource compiler')
- for (arg, match, type) in [
+ for (arg, match, rc_type) in [
('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc),
('--version', '^.*GNU windres.*$', ResourceCompilerType.windres),
]:
@@ -67,7 +67,7 @@ class WindowsModule(ExtensionModule):
m = re.search(match, o, re.MULTILINE)
if m:
mlog.log('Windows resource compiler: %s' % m.group())
- self._rescomp = (rescomp, type)
+ self._rescomp = (rescomp, rc_type)
break
else:
raise MesonException('Could not determine type of Windows resource compiler')
diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py
index 17783ce..b4fb032 100644
--- a/mesonbuild/mparser.py
+++ b/mesonbuild/mparser.py
@@ -21,12 +21,12 @@ from . import mlog
# This is the regex for the supported escape sequences of a regular string
# literal, like 'abc\x00'
ESCAPE_SEQUENCE_SINGLE_RE = re.compile(r'''
- ( \\U........ # 8-digit hex escapes
- | \\u.... # 4-digit hex escapes
- | \\x.. # 2-digit hex escapes
- | \\[0-7]{1,3} # Octal escapes
- | \\N\{[^}]+\} # Unicode characters by name
- | \\[\\'abfnrtv] # Single-character escapes
+ ( \\U[A-Fa-f0-9]{8} # 8-digit hex escapes
+ | \\u[A-Fa-f0-9]{4} # 4-digit hex escapes
+ | \\x[A-Fa-f0-9]{2} # 2-digit hex escapes
+ | \\[0-7]{1,3} # Octal escapes
+ | \\N\{[^}]+\} # Unicode characters by name
+ | \\[\\'abfnrtv] # Single-character escapes
)''', re.UNICODE | re.VERBOSE)
class MesonUnicodeDecodeError(MesonException):
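(Illustrative check, not part of the patch: the tightened pattern only matches well-formed hex escapes, so malformed ones such as '\xqr' stay literal, which is what the new "185 escape and unicode" cases below assert.)

import re

ESCAPE_SEQUENCE_SINGLE_RE = re.compile(r'''
    ( \\U[A-Fa-f0-9]{8} # 8-digit hex escapes
    | \\u[A-Fa-f0-9]{4} # 4-digit hex escapes
    | \\x[A-Fa-f0-9]{2} # 2-digit hex escapes
    | \\[0-7]{1,3}      # Octal escapes
    | \\N\{[^}]+\}      # Unicode characters by name
    | \\[\\'abfnrtv]    # Single-character escapes
    )''', re.UNICODE | re.VERBOSE)

assert ESCAPE_SEQUENCE_SINGLE_RE.search(r'\x41') is not None   # valid 2-digit hex escape
assert ESCAPE_SEQUENCE_SINGLE_RE.search(r'\xqr') is None       # malformed, stays literal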
@@ -217,7 +217,7 @@ class BaseNode:
fname = 'visit_{}'.format(type(self).__name__)
if hasattr(visitor, fname):
func = getattr(visitor, fname)
- if hasattr(func, '__call__'):
+ if callable(func):
func(self)
class ElementaryNode(BaseNode):
diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py
index 17af4df..8df8f48 100644
--- a/mesonbuild/mtest.py
+++ b/mesonbuild/mtest.py
@@ -873,7 +873,7 @@ Timeout: %4d
return wrap
def get_pretty_suite(self, test):
- if len(self.suites) > 1:
+ if len(self.suites) > 1 and test.suite:
rv = TestHarness.split_suite_string(test.suite[0])[0]
s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
if len(s):
diff --git a/mesonbuild/munstable_coredata.py b/mesonbuild/munstable_coredata.py
index aaf6523..f16468c 100644
--- a/mesonbuild/munstable_coredata.py
+++ b/mesonbuild/munstable_coredata.py
@@ -31,9 +31,12 @@ def dump_compilers(compilers):
print(' ' + lang + ':')
print(' Id: ' + compiler.id)
print(' Command: ' + ' '.join(compiler.exelist))
- print(' Full version: ' + compiler.full_version)
- print(' Detected version: ' + compiler.version)
- print(' Detected type: ' + repr(compiler.compiler_type))
+ if compiler.full_version:
+ print(' Full version: ' + compiler.full_version)
+ if compiler.version:
+ print(' Detected version: ' + compiler.version)
+ if hasattr(compiler, 'compiler_type'):
+ print(' Detected type: ' + repr(compiler.compiler_type))
#pprint.pprint(compiler.__dict__)
@@ -51,7 +54,7 @@ def run(options):
'change the working directory to it.')
return 1
- all = options.all
+ all_backends = options.all
print('This is a dump of the internal unstable cache of meson. This is for debugging only.')
print('Do NOT parse, this will change from version to version in incompatible ways')
@@ -64,18 +67,18 @@ def run(options):
# use `meson configure` to view these
pass
elif k in ['install_guid', 'test_guid', 'regen_guid']:
- if all or backend.startswith('vs'):
+ if all_backends or backend.startswith('vs'):
print(k + ': ' + v)
elif k == 'target_guids':
- if all or backend.startswith('vs'):
+ if all_backends or backend.startswith('vs'):
print(k + ':')
dump_guids(v)
elif k in ['lang_guids']:
- if all or backend.startswith('vs') or backend == 'xcode':
+ if all_backends or backend.startswith('vs') or backend == 'xcode':
print(k + ':')
dump_guids(v)
elif k == 'meson_command':
- if all or backend.startswith('vs'):
+ if all_backends or backend.startswith('vs'):
print('Meson command used in build file regeneration: ' + ' '.join(v))
elif k == 'pkgconf_envvar':
print('Last seen PKGCONFIG environment variable value: ' + v)
@@ -97,7 +100,7 @@ def run(options):
native = []
cross = []
for dep_key, dep in sorted(v.items()):
- if dep_key[2]:
+ if dep_key[1]:
cross.append((dep_key, dep))
else:
native.append((dep_key, dep))
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
index 7294186..cc4669c 100644
--- a/mesonbuild/scripts/depfixer.py
+++ b/mesonbuild/scripts/depfixer.py
@@ -123,7 +123,7 @@ class Elf(DataSizes):
self.parse_header()
self.parse_sections()
self.parse_dynamic()
- except:
+ except (struct.error, RuntimeError):
self.bf.close()
raise
@@ -180,7 +180,7 @@ class Elf(DataSizes):
def parse_sections(self):
self.bf.seek(self.e_shoff)
self.sections = []
- for i in range(self.e_shnum):
+ for _ in range(self.e_shnum):
self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))
def read_str(self):
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index 4d9d032..3eb68a7 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -84,7 +84,7 @@ class PackageDefinition:
try:
self.config = configparser.ConfigParser(interpolation=None)
self.config.read(fname)
- except:
+ except configparser.Error:
raise WrapException('Failed to parse {}'.format(self.basename))
if len(self.config.sections()) < 1:
raise WrapException('Missing sections in {}'.format(self.basename))
@@ -338,7 +338,7 @@ class Resolver:
"""
Copy directory tree. Overwrites also read only files.
"""
- for src_dir, dirs, files in os.walk(root_src_dir):
+ for src_dir, _, files in os.walk(root_src_dir):
dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py
index 132decf..80cc027 100644
--- a/mesonbuild/wrap/wraptool.py
+++ b/mesonbuild/wrap/wraptool.py
@@ -177,7 +177,7 @@ def promote(options):
# check if the argument is a full path to a subproject directory or wrap file
system_native_path_argument = argument.replace('/', os.sep)
- for _, matches in sprojs.items():
+ for matches in sprojs.values():
if system_native_path_argument in matches:
do_promotion(system_native_path_argument, spdir_name)
return
diff --git a/run_project_tests.py b/run_project_tests.py
index c1d42fc..324d824 100755
--- a/run_project_tests.py
+++ b/run_project_tests.py
@@ -505,6 +505,10 @@ def skippable(suite, test):
if test.endswith('netcdf'):
return True
+ # MSVC doesn't link with GFortran
+ if test.endswith('14 fortran links c'):
+ return True
+
# No frameworks test should be skipped on linux CI, as we expect all
# prerequisites to be installed
if mesonlib.is_linux():
@@ -774,7 +778,7 @@ def detect_system_compiler():
try:
comp = env.compiler_from_language(lang, env.is_cross_build())
details = '%s %s' % (' '.join(comp.get_exelist()), comp.get_version_string())
- except:
+ except mesonlib.MesonException:
comp = None
details = 'not found'
print('%-7s: %s' % (lang, details))
@@ -819,7 +823,7 @@ if __name__ == '__main__':
print(l, '\n')
except UnicodeError:
print(l.encode('ascii', errors='replace').decode(), '\n')
- for name, dirs, skip in all_tests:
+ for name, dirs, _ in all_tests:
dirs = (x.name for x in dirs)
for k, g in itertools.groupby(dirs, key=lambda x: x.split()[0]):
tests = list(g)
diff --git a/run_tests.py b/run_tests.py
index fb3bc28..a4b0fa2 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -136,7 +136,7 @@ def find_vcxproj_with_target(builddir, target):
p = r'<TargetName>{}</TargetName>\s*<TargetExt>\{}</TargetExt>'.format(t, ext)
else:
p = r'<TargetName>{}</TargetName>'.format(t)
- for root, dirs, files in os.walk(builddir):
+ for _, _, files in os.walk(builddir):
for f in fnmatch.filter(files, '*.vcxproj'):
f = os.path.join(builddir, f)
with open(f, 'r', encoding='utf-8') as o:
diff --git a/run_unittests.py b/run_unittests.py
index 2457a50..3a473ea 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -1244,7 +1244,7 @@ class BasePlatformTests(unittest.TestCase):
print('Stderr:\n')
print(err)
raise RuntimeError('Configure failed')
- except:
+ except Exception:
self._print_meson_log()
raise
finally:
@@ -1257,7 +1257,7 @@ class BasePlatformTests(unittest.TestCase):
out = self._run(self.setup_command + args + extra_args)
except unittest.SkipTest:
raise unittest.SkipTest('Project requested skipping: ' + srcdir)
- except:
+ except Exception:
self._print_meson_log()
raise
return out
@@ -1813,48 +1813,48 @@ class AllPlatformTests(BasePlatformTests):
self.init(testdir)
self.build()
- self.assertFailedTestCount(3, self.mtest_command)
+ self.assertFailedTestCount(4, self.mtest_command)
self.assertFailedTestCount(0, self.mtest_command + ['--suite', ':success'])
self.assertFailedTestCount(3, self.mtest_command + ['--suite', ':fail'])
- self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', ':success'])
- self.assertFailedTestCount(0, self.mtest_command + ['--no-suite', ':fail'])
+ self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', ':success'])
+ self.assertFailedTestCount(1, self.mtest_command + ['--no-suite', ':fail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix'])
- self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'mainprj'])
- self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjsucc'])
- self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail'])
- self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjmix'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj'])
+ self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'mainprj:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'mainprj:success'])
- self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'mainprj:fail'])
- self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj:success'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'mainprj:fail'])
+ self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'mainprj:success'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjfail:success'])
- self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail:fail'])
- self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail:success'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjfail:fail'])
+ self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjfail:success'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjsucc:success'])
- self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjsucc:fail'])
- self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjsucc:success'])
+ self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:fail'])
+ self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjsucc:success'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjmix:fail'])
self.assertFailedTestCount(0, self.mtest_command + ['--suite', 'subprjmix:success'])
- self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjmix:fail'])
- self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix:success'])
+ self.assertFailedTestCount(3, self.mtest_command + ['--no-suite', 'subprjmix:fail'])
+ self.assertFailedTestCount(4, self.mtest_command + ['--no-suite', 'subprjmix:success'])
self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix:fail'])
self.assertFailedTestCount(3, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj'])
self.assertFailedTestCount(2, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail'])
self.assertFailedTestCount(1, self.mtest_command + ['--suite', 'subprjfail', '--suite', 'subprjmix', '--suite', 'mainprj', '--no-suite', 'subprjmix:fail', 'mainprj-failing_test'])
- self.assertFailedTestCount(1, self.mtest_command + ['--no-suite', 'subprjfail:fail', '--no-suite', 'subprjmix:fail'])
+ self.assertFailedTestCount(2, self.mtest_command + ['--no-suite', 'subprjfail:fail', '--no-suite', 'subprjmix:fail'])
def test_build_by_default(self):
testdir = os.path.join(self.common_test_dir, '134 build by default')
@@ -2681,9 +2681,9 @@ int main(int argc, char **argv) {
if ninja is None:
raise unittest.SkipTest('This test currently requires ninja. Fix this once "meson build" works.')
for lang in ('c', 'cpp'):
- for type in ('executable', 'library'):
+ for target_type in ('executable', 'library'):
with tempfile.TemporaryDirectory() as tmpdir:
- self._run(self.meson_command + ['init', '--language', lang, '--type', type],
+ self._run(self.meson_command + ['init', '--language', lang, '--type', target_type],
workdir=tmpdir)
self._run(self.setup_command + ['--backend=ninja', 'builddir'],
workdir=tmpdir)
@@ -3642,6 +3642,12 @@ recommended as it is not supported on some platforms''')
self.maxDiff = None
self.assertListEqual(res_nb, expected)
+ def test_unstable_coredata(self):
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ self.init(testdir)
+ # just test that the command does not fail (e.g. because it throws an exception)
+ self._run([*self.meson_command, 'unstable-coredata', self.builddir])
+
class FailureTests(BasePlatformTests):
'''
Tests that test failure conditions. Build files here should be dynamically
@@ -4443,7 +4449,7 @@ class LinuxlikeTests(BasePlatformTests):
self.assertIn(cmd_std, cmd)
try:
self.build()
- except:
+ except Exception:
print('{} was {!r}'.format(lang_std, v))
raise
self.wipe()
@@ -5527,7 +5533,7 @@ class RewriterTests(BasePlatformTests):
out = self.rewrite(self.builddir, os.path.join(self.builddir, 'info.json'))
expected = {'name': 'myExe', 'sources': ['main.cpp']}
self.assertEqual(len(out['target']), 2)
- for _, val in out['target'].items():
+ for val in out['target'].values():
self.assertDictEqual(expected, val)
def test_kwargs_info(self):
diff --git a/setup.cfg b/setup.cfg
index 7a94d85..d818786 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -28,4 +28,6 @@ ignore =
E722
# W504: line break after binary operator
W504
+ # A003: builtin class attribute
+ A003
max-line-length = 120
diff --git a/sideci.yml b/sideci.yml
index 2e95afd..5c56196 100644
--- a/sideci.yml
+++ b/sideci.yml
@@ -1,3 +1,7 @@
linter:
flake8:
version: 3
+ plugins:
+ - flake8-blind-except
+ - flake8-builtins
+ - flake8-bugbear
diff --git a/test cases/common/185 escape and unicode/meson.build b/test cases/common/185 escape and unicode/meson.build
index 65377b6..e4fe628 100644
--- a/test cases/common/185 escape and unicode/meson.build
+++ b/test cases/common/185 escape and unicode/meson.build
@@ -22,3 +22,17 @@ foreach l : find_file_list.stdout().strip('\x00').split('\x00')
endforeach
test('second', executable('second', found_files_hex + [gen_file]))
+
+# Unrecognized and malformed escape sequences are literal
+
+malformed = [
+ [ '\c', 'c' ],
+ [ '\Uabcdefghi', 'Uabcdefghi'],
+ [ '\u123 ', 'u123 '],
+ [ '\xqr', 'xqr'],
+]
+
+foreach m : malformed
+ assert(m[0].endswith(m[1]), 'bad escape sequence had unexpected end')
+ assert(m[0].startswith('\\'), 'bad escape sequence had unexpected start')
+endforeach
diff --git a/test cases/common/216 link custom/meson.build b/test cases/common/216 link custom/meson.build
index 5af27cd..c8d3a6d 100644
--- a/test cases/common/216 link custom/meson.build
+++ b/test cases/common/216 link custom/meson.build
@@ -16,6 +16,8 @@ clib = custom_target('linkcustom',
'-o', '@OUTPUT@',
'--private-dir', '@PRIVATE_DIR@'] + cc.cmd_array())
+# custom_target tests
+
exe = executable('prog', 'prog.c', link_with: clib)
test('linkcustom', exe)
@@ -33,3 +35,23 @@ d2 = declare_dependency(link_whole: clib)
exe4 = executable('prog4', 'prog.c', dependencies: d2)
test('linkwhole2', exe2)
+
+# custom_target[i] tests
+
+exe_i = executable('prog_i', 'prog.c', link_with: clib[0])
+test('linkcustom', exe_i)
+
+d_i = declare_dependency(link_with: clib[0])
+
+exe2_i = executable('prog2_i', 'prog.c', dependencies: d_i)
+test('linkcustom2_i', exe2_i)
+
+# Link whole tests
+
+exe3_i = executable('prog3_i', 'prog.c', link_whole: clib[0])
+test('linkwhole_i', exe3_i)
+
+d2_i = declare_dependency(link_whole: clib[0])
+
+exe4_i = executable('prog4_i', 'prog.c', dependencies: d2_i)
+test('linkwhole2_i', exe4_i)
diff --git a/test cases/common/217 link custom_i single from multiple/generate_conflicting_stlibs.py b/test cases/common/217 link custom_i single from multiple/generate_conflicting_stlibs.py
new file mode 100644
index 0000000..42d6631
--- /dev/null
+++ b/test cases/common/217 link custom_i single from multiple/generate_conflicting_stlibs.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python3
+
+import shutil, sys, subprocess, argparse, pathlib
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--private-dir', required=True)
+parser.add_argument('-o', nargs='+', required=True)
+parser.add_argument('cmparr', nargs='+')
+
+contents = ['''
+int flob() {
+ return 0;
+}
+''', '''
+int flob() {
+ return 1;
+}
+''']
+
+def generate_lib_gnulike(outfile, c_file, private_dir, compiler_array):
+ if shutil.which('ar'):
+ static_linker = 'ar'
+ elif shutil.which('llvm-ar'):
+ static_linker = 'llvm-ar'
+ elif shutil.which('gcc-ar'):
+ static_linker = 'gcc-ar'
+ else:
+ sys.exit('Could not detect a static linker.')
+ o_file = c_file.with_suffix('.o')
+ compile_cmd = compiler_array + ['-c', '-g', '-O2', '-o', str(o_file), str(c_file)]
+ subprocess.check_call(compile_cmd)
+ out_file = pathlib.Path(outfile)
+ if out_file.exists():
+ out_file.unlink()
+ link_cmd = [static_linker, 'csr', outfile, str(o_file)]
+ subprocess.check_call(link_cmd)
+ return 0
+
+
+def generate_lib_msvc(outfile, c_file, private_dir, compiler_array):
+ static_linker = 'lib'
+ o_file = c_file.with_suffix('.obj')
+ compile_cmd = compiler_array + ['/MDd',
+ '/nologo',
+ '/ZI',
+ '/Ob0',
+ '/Od',
+ '/c',
+ '/Fo' + str(o_file),
+ str(c_file)]
+ subprocess.check_call(compile_cmd)
+ out_file = pathlib.Path(outfile)
+ if out_file.exists():
+ out_file.unlink()
+ link_cmd = [static_linker,
+ '/nologo',
+ '/OUT:' + str(outfile),
+ str(o_file)]
+ subprocess.check_call(link_cmd)
+ return 0
+
+def generate_lib(outfiles, private_dir, compiler_array):
+ private_dir = pathlib.Path(private_dir)
+ if not private_dir.exists():
+ private_dir.mkdir()
+
+ for i, content in enumerate(contents):
+ c_file = private_dir / ('flob_' + str(i + 1) + '.c')
+ c_file.write_text(content)
+ outfile = outfiles[i]
+
+ cl_found = False
+ for cl_arg in compiler_array:
+ if (cl_arg.endswith('cl') or cl_arg.endswith('cl.exe')) and 'clang-cl' not in cl_arg:
+ ret = generate_lib_msvc(outfile, c_file, private_dir, compiler_array)
+ if ret > 0:
+ return ret
+ else:
+ cl_found = True
+ break
+ if not cl_found:
+ ret = generate_lib_gnulike(outfile, c_file, private_dir, compiler_array)
+ if ret > 0:
+ return ret
+ return 0
+
+if __name__ == '__main__':
+ options = parser.parse_args()
+ sys.exit(generate_lib(options.o, options.private_dir, options.cmparr))
diff --git a/test cases/common/217 link custom_i single from multiple/meson.build b/test cases/common/217 link custom_i single from multiple/meson.build
new file mode 100644
index 0000000..eee1fe1
--- /dev/null
+++ b/test cases/common/217 link custom_i single from multiple/meson.build
@@ -0,0 +1,37 @@
+project('linkcustom', 'c')
+
+# This would require passing the static linker to the build script or having
+# it detect it by itself. I'm too lazy to implement it now and it is not
+# really needed for testing that custom targets work. It is the responsibility
+# of the custom target to produce things in the correct format.
+assert(not meson.is_cross_build(),
+ 'MESON_SKIP_TEST cross checking not implemented.')
+
+cc = meson.get_compiler('c')
+genprog = find_program('generate_conflicting_stlibs.py')
+
+clib = custom_target('linkcustom',
+ output: ['libflob_1.a', 'libflob_2.a'],
+ command: [genprog,
+ '-o', '@OUTPUT@',
+ '--private-dir', '@PRIVATE_DIR@'] + cc.cmd_array())
+
+clib_2 = clib[1]
+
+exe = executable('prog', 'prog.c', link_with: clib_2)
+test('linkcustom', exe)
+
+d = declare_dependency(link_with: clib_2)
+
+exe2 = executable('prog2', 'prog.c', dependencies: d)
+test('linkcustom2', exe2)
+
+# Link whole tests
+
+exe3 = executable('prog3', 'prog.c', link_whole: clib_2)
+test('linkwhole', exe3)
+
+d2 = declare_dependency(link_whole: clib_2)
+
+exe4 = executable('prog4', 'prog.c', dependencies: d2)
+test('linkwhole2', exe4)
diff --git a/test cases/common/217 link custom_i single from multiple/prog.c b/test cases/common/217 link custom_i single from multiple/prog.c
new file mode 100644
index 0000000..8013034
--- /dev/null
+++ b/test cases/common/217 link custom_i single from multiple/prog.c
@@ -0,0 +1,5 @@
+int flob();
+
+int main(int argc, char **argv) {
+ return (flob() == 1 ? 0 : 1);
+}
diff --git a/test cases/common/218 link custom_i multiple from multiple/generate_stlibs.py b/test cases/common/218 link custom_i multiple from multiple/generate_stlibs.py
new file mode 100644
index 0000000..5292006
--- /dev/null
+++ b/test cases/common/218 link custom_i multiple from multiple/generate_stlibs.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+
+import shutil, sys, subprocess, argparse, pathlib
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--private-dir', required=True)
+parser.add_argument('-o', nargs='+', required=True)
+parser.add_argument('cmparr', nargs='+')
+
+contents = ['''#include<stdio.h>
+
+void flob_1() {
+ printf("Now flobbing #1.\\n");
+}
+''', '''#include<stdio.h>
+
+void flob_2() {
+ printf("Now flobbing #2.\\n");
+}
+''']
+
+def generate_lib_gnulike(outfile, c_file, private_dir, compiler_array):
+ if shutil.which('ar'):
+ static_linker = 'ar'
+ elif shutil.which('llvm-ar'):
+ static_linker = 'llvm-ar'
+ elif shutil.which('gcc-ar'):
+ static_linker = 'gcc-ar'
+ else:
+ sys.exit('Could not detect a static linker.')
+ o_file = c_file.with_suffix('.o')
+ compile_cmd = compiler_array + ['-c', '-g', '-O2', '-o', str(o_file), str(c_file)]
+ subprocess.check_call(compile_cmd)
+ out_file = pathlib.Path(outfile)
+ if out_file.exists():
+ out_file.unlink()
+ link_cmd = [static_linker, 'csr', outfile, str(o_file)]
+ subprocess.check_call(link_cmd)
+ return 0
+
+
+def generate_lib_msvc(outfile, c_file, private_dir, compiler_array):
+ static_linker = 'lib'
+ o_file = c_file.with_suffix('.obj')
+ compile_cmd = compiler_array + ['/MDd',
+ '/nologo',
+ '/ZI',
+ '/Ob0',
+ '/Od',
+ '/c',
+ '/Fo' + str(o_file),
+ str(c_file)]
+ subprocess.check_call(compile_cmd)
+ out_file = pathlib.Path(outfile)
+ if out_file.exists():
+ out_file.unlink()
+ link_cmd = [static_linker,
+ '/nologo',
+ '/OUT:' + str(outfile),
+ str(o_file)]
+ subprocess.check_call(link_cmd)
+ return 0
+
+def generate_lib(outfiles, private_dir, compiler_array):
+ private_dir = pathlib.Path(private_dir)
+ if not private_dir.exists():
+ private_dir.mkdir()
+
+ for i, content in enumerate(contents):
+ c_file = private_dir / ('flob_' + str(i + 1) + '.c')
+ c_file.write_text(content)
+ outfile = outfiles[i]
+
+ cl_found = False
+ for cl_arg in compiler_array:
+ if (cl_arg.endswith('cl') or cl_arg.endswith('cl.exe')) and 'clang-cl' not in cl_arg:
+ ret = generate_lib_msvc(outfile, c_file, private_dir, compiler_array)
+ if ret > 0:
+ return ret
+ else:
+ cl_found = True
+ break
+ if not cl_found:
+ ret = generate_lib_gnulike(outfile, c_file, private_dir, compiler_array)
+ if ret > 0:
+ return ret
+ return 0
+
+if __name__ == '__main__':
+ options = parser.parse_args()
+ sys.exit(generate_lib(options.o, options.private_dir, options.cmparr))
diff --git a/test cases/common/218 link custom_i multiple from multiple/meson.build b/test cases/common/218 link custom_i multiple from multiple/meson.build
new file mode 100644
index 0000000..e5236e5
--- /dev/null
+++ b/test cases/common/218 link custom_i multiple from multiple/meson.build
@@ -0,0 +1,37 @@
+project('linkcustom', 'c')
+
+# This would require passing the static linker to the build script or having
+# it detect it by itself. I'm too lazy to implement it now and it is not
+# really needed for testing that custom targets work. It is the responsibility
+# of the custom target to produce things in the correct format.
+assert(not meson.is_cross_build(),
+ 'MESON_SKIP_TEST cross checking not implemented.')
+
+cc = meson.get_compiler('c')
+genprog = find_program('generate_stlibs.py')
+
+clib = custom_target('linkcustom',
+ output: ['libflob_1.a', 'libflob_2.a'],
+ command: [genprog,
+ '-o', '@OUTPUT@',
+ '--private-dir', '@PRIVATE_DIR@'] + cc.cmd_array())
+
+clibs = [clib[0], clib[1]]
+
+exe = executable('prog', 'prog.c', link_with: clibs)
+test('linkcustom', exe)
+
+d = declare_dependency(link_with: clibs)
+
+exe2 = executable('prog2', 'prog.c', dependencies: d)
+test('linkcustom2', exe2)
+
+# Link whole tests
+
+exe3 = executable('prog3', 'prog.c', link_whole: clibs)
+test('linkwhole', exe3)
+
+d2 = declare_dependency(link_whole: clibs)
+
+exe4 = executable('prog4', 'prog.c', dependencies: d2)
+test('linkwhole2', exe4)
diff --git a/test cases/common/218 link custom_i multiple from multiple/prog.c b/test cases/common/218 link custom_i multiple from multiple/prog.c
new file mode 100644
index 0000000..51effe6
--- /dev/null
+++ b/test cases/common/218 link custom_i multiple from multiple/prog.c
@@ -0,0 +1,8 @@
+void flob_1();
+void flob_2();
+
+int main(int argc, char **argv) {
+ flob_1();
+ flob_2();
+ return 0;
+}
diff --git a/test cases/fortran/14 fortran links c/clib.c b/test cases/fortran/14 fortran links c/clib.c
new file mode 100644
index 0000000..81b2e0c
--- /dev/null
+++ b/test cases/fortran/14 fortran links c/clib.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+
+void hello(void){
+
+ printf("hello from C\n");
+
+}
diff --git a/test cases/fortran/14 fortran links c/f_call_c.f90 b/test cases/fortran/14 fortran links c/f_call_c.f90
new file mode 100644
index 0000000..af1e79c
--- /dev/null
+++ b/test cases/fortran/14 fortran links c/f_call_c.f90
@@ -0,0 +1,10 @@
+implicit none
+
+interface
+subroutine hello() bind (c)
+end subroutine hello
+end interface
+
+call hello()
+
+end program
diff --git a/test cases/fortran/14 fortran links c/meson.build b/test cases/fortran/14 fortran links c/meson.build
new file mode 100644
index 0000000..163aec6
--- /dev/null
+++ b/test cases/fortran/14 fortran links c/meson.build
@@ -0,0 +1,13 @@
+project('Fortran calling C', 'fortran', 'c')
+
+ccid = meson.get_compiler('c').get_id()
+if ccid == 'msvc' or ccid == 'clang-cl'
+ error('MESON_SKIP_TEST: MSVC and GCC do not interoperate like this.')
+endif
+
+c_lib = library('clib', 'clib.c')
+
+f_call_c = executable('f_call_c', 'f_call_c.f90',
+ link_with: c_lib,
+ link_language: 'fortran')
+test('Fortran calling C', f_call_c)
diff --git a/test cases/objc/2 nsstring/meson.build b/test cases/objc/2 nsstring/meson.build
index 7f2483f..94d2cf1 100644
--- a/test cases/objc/2 nsstring/meson.build
+++ b/test cases/objc/2 nsstring/meson.build
@@ -15,3 +15,6 @@ else
endif
exe = executable('stringprog', 'stringprog.m', dependencies : dep)
test('stringtest', exe)
+
+# Ensure that a non-required dep that is not found does not cause an error
+dependency('appleframeworks', modules: 'nonexisting', required: false)
diff --git a/test cases/unit/4 suite selection/meson.build b/test cases/unit/4 suite selection/meson.build
index d3d4e1a..ea6db92 100644
--- a/test cases/unit/4 suite selection/meson.build
+++ b/test cases/unit/4 suite selection/meson.build
@@ -11,3 +11,7 @@ test('mainprj-failing_test',
test('mainprj-successful_test',
executable('successful_test', 'successful_test.c'),
suite : 'success')
+
+test('mainprj-successful_test_no_suite',
+ executable('no_suite_test', 'successful_test.c'),
+ suite : [])
diff --git a/test cases/unit/4 suite selection/subprojects/subprjfail/meson.build b/test cases/unit/4 suite selection/subprojects/subprjfail/meson.build
index d95f271..e6270a8 100644
--- a/test cases/unit/4 suite selection/subprojects/subprjfail/meson.build
+++ b/test cases/unit/4 suite selection/subprojects/subprjfail/meson.build
@@ -3,3 +3,7 @@ project('subprjfail', 'c')
test('subprjfail-failing_test',
executable('failing_test', 'failing_test.c'),
suite : 'fail')
+
+test('subprjfail-failing_test_no_suite',
+ executable('failing_test_no_suite', 'failing_test.c'),
+ suite : [])
diff --git a/test cases/unit/4 suite selection/subprojects/subprjsucc/meson.build b/test cases/unit/4 suite selection/subprojects/subprjsucc/meson.build
index 8dafd65..b5ffaa4 100644
--- a/test cases/unit/4 suite selection/subprojects/subprjsucc/meson.build
+++ b/test cases/unit/4 suite selection/subprojects/subprjsucc/meson.build
@@ -3,3 +3,7 @@ project('subprjsucc', 'c')
test('subprjsucc-successful_test',
executable('successful_test', 'successful_test.c'),
suite : 'success')
+
+test('subprjsucc-successful_test_no_suite',
+ executable('successful_test_no_suite', 'successful_test.c'),
+ suite : [])
diff --git a/tools/boost_names.py b/tools/boost_names.py
index d381162..d0e5444 100755
--- a/tools/boost_names.py
+++ b/tools/boost_names.py
@@ -132,7 +132,7 @@ def get_modules_2():
# The python module uses an older build system format and is not easily parseable.
# We add the python module libraries manually.
modules.append(Module('python', 'Python', ['boost_python', 'boost_python3', 'boost_numpy', 'boost_numpy3']))
- for (root, dirs, files) in os.walk(LIBS):
+ for (root, _, files) in os.walk(LIBS):
for f in files:
if f == "libraries.json":
projectdir = os.path.dirname(root)