author     Jussi Pakkanen <jpakkane@gmail.com>   2020-03-08 14:49:23 +0200
committer  GitHub <noreply@github.com>           2020-03-08 14:49:23 +0200
commit     44ff3e6c7de0db188284cc834b304e7b0e960d00 (patch)
tree       da5c45cecc16083c5120a3977383b42bd9d4cbf2
parent     91976a3489acbe53593e866fdb11951b515fda54 (diff)
parent     06b1a317d26cbe2a1bd7a232dd9726590d0c0a48 (diff)
Merge pull request #6736 from dcbaker/mesonlib-type-annotations
Mesonlib type annotations
-rw-r--r--  .github/workflows/lint_mypy.yml     |   2
-rw-r--r--  mesonbuild/ast/introspection.py     |   4
-rw-r--r--  mesonbuild/backend/backends.py      |  18
-rw-r--r--  mesonbuild/backend/ninjabackend.py  |   7
-rw-r--r--  mesonbuild/build.py                 |  98
-rw-r--r--  mesonbuild/compilers/compilers.py   |   2
-rw-r--r--  mesonbuild/dependencies/base.py     |   4
-rw-r--r--  mesonbuild/dependencies/boost.py    |  18
-rw-r--r--  mesonbuild/interpreter.py           |  28
-rw-r--r--  mesonbuild/interpreterbase.py       |   2
-rw-r--r--  mesonbuild/mesonlib.py              | 380
-rw-r--r--  mesonbuild/mlog.py                  |   2
-rw-r--r--  mesonbuild/modules/__init__.py      |   8
-rw-r--r--  mesonbuild/modules/gnome.py         |  51
-rw-r--r--  mesonbuild/modules/i18n.py          |   5
-rw-r--r--  mesonbuild/modules/pkgconfig.py     |   4
-rw-r--r--  mesonbuild/modules/qt.py            |  10
-rw-r--r--  mesonbuild/modules/windows.py       |   6
-rw-r--r--  mypy.ini                            |   3
-rwxr-xr-x  run_unittests.py                    |  28
20 files changed, 355 insertions, 325 deletions
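
Most of the churn in the diff below is mechanical: call sites that used to unpack interpreter ObjectHolder wrappers by hand via hasattr(x, 'held_object') now go through the new mesonlib.unholder() helper, and the unholder=True keyword is dropped from listify()/extract_as_list(). A minimal sketch of that before/after pattern follows; the Holder class is a made-up stand-in for the interpreter's holders, while the unholder() body is the one this PR adds to mesonlib.py.

import typing as T

class Holder:
    # Hypothetical stand-in for mesonbuild.interpreterbase.ObjectHolder.
    def __init__(self, held_object: T.Any) -> None:
        self.held_object = held_object

def unholder(item):
    """Get the held item of an object holder or list of object holders."""
    if isinstance(item, list):
        return [i.held_object if hasattr(i, 'held_object') else i for i in item]
    if hasattr(item, 'held_object'):
        return item.held_object
    return item

sources = [Holder('a.c'), 'b.c']

# Before this PR: every loop unpacked holders by hand.
old_style = []
for s in sources:
    if hasattr(s, 'held_object'):
        s = s.held_object
    old_style.append(s)

# After: iterate over unholder(...) and the unpacking disappears.
new_style = list(unholder(sources))

assert old_style == new_style == ['a.c', 'b.c']
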
diff --git a/.github/workflows/lint_mypy.yml b/.github/workflows/lint_mypy.yml
index 7c83714..c826729 100644
--- a/.github/workflows/lint_mypy.yml
+++ b/.github/workflows/lint_mypy.yml
@@ -30,4 +30,4 @@ jobs:
with:
python-version: '3.x'
- run: python -m pip install mypy
- - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py
+ - run: mypy --follow-imports=skip mesonbuild/interpreterbase.py mesonbuild/mtest.py mesonbuild/minit.py mesonbuild/mintro.py mesonbuild/mparser.py mesonbuild/msetup.py mesonbuild/ast mesonbuild/wrap tools/ mesonbuild/modules/fs.py mesonbuild/dependencies/boost.py mesonbuild/dependencies/mpi.py mesonbuild/dependencies/hdf5.py mesonbuild/compilers/mixins/intel.py mesonbuild/mlog.py mesonbuild/mcompile.py mesonbuild/mesonlib.py
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index c9a51bf..142c219 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -101,8 +101,8 @@ class IntrospectionInterpreter(AstInterpreter):
self.coredata.merge_user_options(oi.options)
def_opts = self.flatten_args(kwargs.get('default_options', []))
- self.project_default_options = mesonlib.stringlistify(def_opts)
- self.project_default_options = cdata.create_options_dict(self.project_default_options)
+ _project_default_options = mesonlib.stringlistify(def_opts)
+ self.project_default_options = cdata.create_options_dict(_project_default_options)
self.default_options.update(self.project_default_options)
self.coredata.set_default_options(self.default_options, self.subproject, self.environment)
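
The rename above is mypy hygiene rather than a behaviour change: self.project_default_options was first assigned a list of strings and then a dict, giving the attribute two incompatible types, so the intermediate list gets its own name. The same reasoning drives the i-to-j loop-variable renames in dependencies/boost.py further down. A small illustrative sketch, with invented helper names, of the error the rename avoids:

import typing as T

def create_options_dict(opts: T.List[str]) -> T.Dict[str, str]:
    # Invented stand-in for coredata.create_options_dict(): 'key=value' strings -> dict.
    return dict(o.split('=', 1) for o in opts)

# mypy infers List[str] from the first assignment ...
default_options = ['warning_level=2', 'default_library=static']
# ... so reassigning the same name to a Dict[str, str] would be reported as an
# incompatible assignment. Keeping the list under a separate name avoids that:
_default_options_list = default_options
project_default_options = create_options_dict(_default_options_list)

assert project_default_options['warning_level'] == '2'
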
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index a8f4789..e9ab9f4 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -21,7 +21,7 @@ from .. import mlog
import json
import subprocess
from ..mesonlib import MachineChoice, MesonException, OrderedSet, OptionOverrideProxy
-from ..mesonlib import classify_unity_sources
+from ..mesonlib import classify_unity_sources, unholder
from ..mesonlib import File
from ..compilers import CompilerArgs, VisualStudioLikeCompiler
from ..interpreter import Interpreter
@@ -748,9 +748,7 @@ class Backend:
else:
extra_paths = []
cmd_args = []
- for a in t.cmd_args:
- if hasattr(a, 'held_object'):
- a = a.held_object
+ for a in unholder(t.cmd_args):
if isinstance(a, build.BuildTarget):
extra_paths += self.determine_windows_extra_paths(a, [])
if isinstance(a, mesonlib.File):
@@ -868,14 +866,10 @@ class Backend:
# also be built by default. XXX: Sometime in the future these should be
# built only before running tests.
for t in self.build.get_tests():
- exe = t.exe
- if hasattr(exe, 'held_object'):
- exe = exe.held_object
+ exe = unholder(t.exe)
if isinstance(exe, (build.CustomTarget, build.BuildTarget)):
result[exe.get_id()] = exe
- for arg in t.cmd_args:
- if hasattr(arg, 'held_object'):
- arg = arg.held_object
+ for arg in unholder(t.cmd_args):
if not isinstance(arg, (build.CustomTarget, build.BuildTarget)):
continue
result[arg.get_id()] = arg
@@ -915,9 +909,7 @@ class Backend:
Returns the path to them relative to the build root directory.
'''
srcs = []
- for i in target.get_sources():
- if hasattr(i, 'held_object'):
- i = i.held_object
+ for i in unholder(target.get_sources()):
if isinstance(i, str):
fname = [os.path.join(self.build_to_src, target.subdir, i)]
elif isinstance(i, build.BuildTarget):
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 24c91f3..c80d832 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -32,7 +32,8 @@ from ..compilers import (Compiler, CompilerArgs, CCompiler, FortranCompiler,
PGICCompiler, VisualStudioLikeCompiler)
from ..linkers import ArLinker
from ..mesonlib import (
- File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine, ProgressBar, quote_arg
+ File, LibType, MachineChoice, MesonException, OrderedSet, PerMachine,
+ ProgressBar, quote_arg, unholder,
)
from ..mesonlib import get_compiler_for_source, has_path_sep
from .backends import CleanTrees
@@ -648,9 +649,7 @@ int dummy;
self.generate_target(t)
def custom_target_generator_inputs(self, target):
- for s in target.sources:
- if hasattr(s, 'held_object'):
- s = s.held_object
+ for s in unholder(target.sources):
if isinstance(s, build.GeneratedList):
self.generate_genlist_for_target(s, target)
diff --git a/mesonbuild/build.py b/mesonbuild/build.py
index 2b5c0ea..6755dca 100644
--- a/mesonbuild/build.py
+++ b/mesonbuild/build.py
@@ -26,7 +26,7 @@ from . import mlog
from .mesonlib import (
File, MesonException, MachineChoice, PerMachine, OrderedSet, listify,
extract_as_list, typeslistify, stringlistify, classify_unity_sources,
- get_filenames_templates_dict, substitute_values, has_path_sep,
+ get_filenames_templates_dict, substitute_values, has_path_sep, unholder
)
from .compilers import Compiler, is_object, clink_langs, sort_clink, lang_suffixes
from .linkers import StaticLinker
@@ -541,9 +541,7 @@ class BuildTarget(Target):
def process_objectlist(self, objects):
assert(isinstance(objects, list))
- for s in objects:
- if hasattr(s, 'held_object'):
- s = s.held_object
+ for s in unholder(objects):
if isinstance(s, (str, File, ExtractedObjects)):
self.objects.append(s)
elif isinstance(s, (GeneratedList, CustomTarget)):
@@ -559,10 +557,7 @@ class BuildTarget(Target):
def process_sourcelist(self, sources):
sources = listify(sources)
added_sources = {} # If the same source is defined multiple times, use it only once.
- for s in sources:
- # Holder unpacking. Ugly.
- if hasattr(s, 'held_object'):
- s = s.held_object
+ for s in unholder(sources):
if isinstance(s, File):
if s not in added_sources:
self.sources.append(s)
@@ -639,9 +634,7 @@ class BuildTarget(Target):
# which is what we need.
if not is_object(s):
sources.append(s)
- for d in self.external_deps:
- if hasattr(d, 'held_object'):
- d = d.held_object
+ for d in unholder(self.external_deps):
for s in d.sources:
if isinstance(s, (str, File)):
sources.append(s)
@@ -703,10 +696,7 @@ class BuildTarget(Target):
link_depends.
"""
sources = listify(sources)
- for s in sources:
- if hasattr(s, 'held_object'):
- s = s.held_object
-
+ for s in unholder(sources):
if isinstance(s, File):
self.link_depends.append(s)
elif isinstance(s, str):
@@ -817,11 +807,7 @@ class BuildTarget(Target):
kwargs.get('modules', [])
self.need_install = kwargs.get('install', self.need_install)
llist = extract_as_list(kwargs, 'link_with')
- for linktarget in llist:
- # Sorry for this hack. Keyword targets are kept in holders
- # in kwargs. Unpack here without looking at the exact type.
- if hasattr(linktarget, "held_object"):
- linktarget = linktarget.held_object
+ for linktarget in unholder(llist):
if isinstance(linktarget, dependencies.ExternalLibrary):
raise MesonException('''An external library was used in link_with keyword argument, which
is reserved for libraries built as part of this project. External
@@ -834,8 +820,7 @@ just like those detected with the dependency() function.''')
self.link_whole(linktarget)
c_pchlist, cpp_pchlist, clist, cpplist, cudalist, cslist, valalist, objclist, objcpplist, fortranlist, rustlist \
- = extract_as_list(kwargs, 'c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args',
- 'objcpp_args', 'fortran_args', 'rust_args')
+ = [extract_as_list(kwargs, c) for c in ['c_pch', 'cpp_pch', 'c_args', 'cpp_args', 'cuda_args', 'cs_args', 'vala_args', 'objc_args', 'objcpp_args', 'fortran_args', 'rust_args']]
self.add_pch('c', c_pchlist)
self.add_pch('cpp', cpp_pchlist)
@@ -863,7 +848,7 @@ just like those detected with the dependency() function.''')
if dfeature_debug:
dfeatures['debug'] = dfeature_debug
if 'd_import_dirs' in kwargs:
- dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs', unholder=True)
+ dfeature_import_dirs = unholder(extract_as_list(kwargs, 'd_import_dirs'))
for d in dfeature_import_dirs:
if not isinstance(d, IncludeDirs):
raise InvalidArguments('Arguments to d_import_dirs must be include_directories.')
@@ -1041,9 +1026,7 @@ This will become a hard error in a future Meson release.''')
def add_deps(self, deps):
deps = listify(deps)
- for dep in deps:
- if hasattr(dep, 'held_object'):
- dep = dep.held_object
+ for dep in unholder(deps):
if isinstance(dep, dependencies.InternalDependency):
# Those parts that are internal.
self.process_sourcelist(dep.sources)
@@ -1090,7 +1073,7 @@ You probably should put it in link_with instead.''')
return isinstance(self, StaticLibrary) and not self.need_install
def link(self, target):
- for t in listify(target, unholder=True):
+ for t in unholder(listify(target)):
if isinstance(self, StaticLibrary) and self.need_install and t.is_internal():
# When we're a static library and we link_with to an
# internal/convenience library, promote to link_whole.
@@ -1112,7 +1095,7 @@ You probably should put it in link_with instead.''')
self.link_targets.append(t)
def link_whole(self, target):
- for t in listify(target, unholder=True):
+ for t in unholder(listify(target)):
if isinstance(t, (CustomTarget, CustomTargetIndex)):
if not t.is_linkable_target():
raise InvalidArguments('Custom target {!r} is not linkable.'.format(t))
@@ -1178,10 +1161,7 @@ You probably should put it in link_with instead.''')
def add_include_dirs(self, args, set_is_system: T.Optional[str] = None):
ids = []
- for a in args:
- # FIXME same hack, forcibly unpack from holder.
- if hasattr(a, 'held_object'):
- a = a.held_object
+ for a in unholder(args):
if not isinstance(a, IncludeDirs):
raise InvalidArguments('Include directory to be added is not an include directory object.')
ids.append(a)
@@ -1268,7 +1248,7 @@ You probably should put it in link_with instead.''')
if dl != linker.language:
stdlib_args += all_compilers[dl].language_stdlib_only_link_flags()
added_languages.add(dl)
- # Type of var 'linker' is Compiler.
+ # Type of var 'linker' is Compiler.
# Pretty hard to fix because the return value is passed everywhere
return linker, stdlib_args
@@ -1321,9 +1301,7 @@ class Generator:
def __init__(self, args, kwargs):
if len(args) != 1:
raise InvalidArguments('Generator requires exactly one positional argument: the executable')
- exe = args[0]
- if hasattr(exe, 'held_object'):
- exe = exe.held_object
+ exe = unholder(args[0])
if not isinstance(exe, (Executable, dependencies.ExternalProgram)):
raise InvalidArguments('First generator argument must be an executable.')
self.exe = exe
@@ -1379,7 +1357,7 @@ class Generator:
raise InvalidArguments('Capture must be boolean.')
self.capture = capture
if 'depends' in kwargs:
- depends = listify(kwargs['depends'], unholder=True)
+ depends = unholder(listify(kwargs['depends']))
for d in depends:
if not isinstance(d, BuildTarget):
raise InvalidArguments('Depends entries must be build targets.')
@@ -1424,9 +1402,7 @@ class Generator:
class GeneratedList:
def __init__(self, generator, subdir, preserve_path_from=None, extra_args=None):
- if hasattr(generator, 'held_object'):
- generator = generator.held_object
- self.generator = generator
+ self.generator = unholder(generator)
self.name = self.generator.exe
self.subdir = subdir
self.infilelist = []
@@ -1436,10 +1412,10 @@ class GeneratedList:
self.depend_files = []
self.preserve_path_from = preserve_path_from
self.extra_args = extra_args if extra_args is not None else []
- if isinstance(generator.exe, dependencies.ExternalProgram):
- if not generator.exe.found():
+ if isinstance(self.generator.exe, dependencies.ExternalProgram):
+ if not self.generator.exe.found():
raise InvalidArguments('Tried to use not-found external program as generator')
- path = generator.exe.get_path()
+ path = self.generator.exe.get_path()
if os.path.isabs(path):
# Can only add a dependency on an external program which we
# know the absolute path of
@@ -1881,9 +1857,7 @@ class SharedLibrary(BuildTarget):
# Visual Studio module-definitions file
if 'vs_module_defs' in kwargs:
- path = kwargs['vs_module_defs']
- if hasattr(path, 'held_object'):
- path = path.held_object
+ path = unholder(kwargs['vs_module_defs'])
if isinstance(path, str):
if os.path.isabs(path):
self.vs_module_defs = File.from_absolute_file(path)
@@ -2035,9 +2009,7 @@ class CustomTarget(Target):
def get_target_dependencies(self):
deps = self.dependencies[:]
deps += self.extra_depends
- for c in self.sources:
- if hasattr(c, 'held_object'):
- c = c.held_object
+ for c in unholder(self.sources):
if isinstance(c, (BuildTarget, CustomTarget)):
deps.append(c)
return deps
@@ -2061,7 +2033,7 @@ class CustomTarget(Target):
return bdeps
def flatten_command(self, cmd):
- cmd = listify(cmd, unholder=True)
+ cmd = unholder(listify(cmd))
final_cmd = []
for c in cmd:
if isinstance(c, str):
@@ -2089,7 +2061,7 @@ class CustomTarget(Target):
def process_kwargs(self, kwargs, backend):
self.process_kwargs_base(kwargs)
- self.sources = extract_as_list(kwargs, 'input', unholder=True)
+ self.sources = unholder(extract_as_list(kwargs, 'input'))
if 'output' not in kwargs:
raise InvalidArguments('Missing keyword argument "output".')
self.outputs = listify(kwargs['output'])
@@ -2168,10 +2140,8 @@ class CustomTarget(Target):
self.build_always_stale = kwargs['build_always_stale']
if not isinstance(self.build_always_stale, bool):
raise InvalidArguments('Argument build_always_stale must be a boolean.')
- extra_deps, depend_files = extract_as_list(kwargs, 'depends', 'depend_files', pop = False)
- for ed in extra_deps:
- while hasattr(ed, 'held_object'):
- ed = ed.held_object
+ extra_deps, depend_files = [extract_as_list(kwargs, c, pop=False) for c in ['depends', 'depend_files']]
+ for ed in unholder(extra_deps):
if not isinstance(ed, (CustomTarget, BuildTarget)):
raise InvalidArguments('Can only depend on toplevel targets: custom_target or build_target (executable or a library) got: {}({})'
.format(type(ed), ed))
@@ -2206,9 +2176,7 @@ class CustomTarget(Target):
def get_generated_lists(self):
genlists = []
- for c in self.sources:
- if hasattr(c, 'held_object'):
- c = c.held_object
+ for c in unholder(self.sources):
if isinstance(c, GeneratedList):
genlists.append(c)
return genlists
@@ -2416,20 +2384,20 @@ class ConfigureFile:
return self.targetname
class ConfigurationData:
- def __init__(self):
+ def __init__(self) -> None:
super().__init__()
- self.values = {}
+ self.values = {} # T.Dict[str, T.Union[str, int, bool]]
def __repr__(self):
return repr(self.values)
- def __contains__(self, value):
+ def __contains__(self, value: str) -> bool:
return value in self.values
- def get(self, name):
+ def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]:
return self.values[name] # (val, desc)
- def keys(self):
+ def keys(self) -> T.Iterator[str]:
return self.values.keys()
# A bit poorly named, but this represents plain data files to copy
@@ -2471,9 +2439,7 @@ def get_sources_string_names(sources, backend):
get all the output basenames.
'''
names = []
- for s in sources:
- if hasattr(s, 'held_object'):
- s = s.held_object
+ for s in unholder(sources):
if isinstance(s, str):
names.append(s)
elif isinstance(s, (BuildTarget, CustomTarget, CustomTargetIndex, GeneratedList)):
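
Two of the build.py hunks above adapt to the narrowed extract_as_list() signature (one key per call, defined in the mesonlib.py hunk below): the old multi-key destructuring becomes a list comprehension over the key names. A reduced sketch of both call styles, using simplified copies of listify()/extract_as_list() that mirror their new mesonlib.py forms:

import typing as T

def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
    # Simplified copy of mesonlib.listify(): wrap non-lists, flatten nested lists.
    if not isinstance(item, list):
        return [item]
    result = []  # type: T.List[T.Any]
    for i in item:
        if flatten and isinstance(i, list):
            result += listify(i, flatten=True)
        else:
            result.append(i)
    return result

def extract_as_list(kwargs: T.Dict[str, T.Any], key: str, pop: bool = False) -> T.List[T.Any]:
    # New single-key form: exactly one keyword, always returns a flat list.
    fetch = kwargs.pop if pop else kwargs.get
    return listify(fetch(key, []), flatten=True)

kwargs = {'c_args': '-Wall', 'cpp_args': ['-Wall', '-Wextra']}

# Old style (removed): extract_as_list(kwargs, 'c_args', 'cpp_args') returned one list per key.
# New style, as in BuildTarget.process_kwargs(): one call per key, collected by comprehension.
c_args, cpp_args = [extract_as_list(kwargs, k) for k in ['c_args', 'cpp_args']]

assert c_args == ['-Wall'] and cpp_args == ['-Wall', '-Wextra']
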
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 1c0adff..caa8600 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -35,6 +35,8 @@ if T.TYPE_CHECKING:
from ..environment import Environment
from ..linkers import DynamicLinker # noqa: F401
+ CompilerType = T.TypeVar('CompilerType', bound=Compiler)
+
"""This file contains the data files of all compilers Meson knows
about. To support a new compiler, add its information below.
Also add corresponding autodetection code in environment.py."""
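
The CompilerType TypeVar added here (bound to Compiler) is what dependencies/base.py and mesonlib.py annotate against below: a bound TypeVar lets a generic helper accept any Compiler subclass and hand the same concrete subclass back, instead of widening the result to plain Compiler. A minimal sketch with stand-in classes rather than the real compiler hierarchy:

import typing as T

class Compiler:
    # Stand-in for mesonbuild.compilers.Compiler.
    def can_compile(self, src: str) -> bool:
        return False

class CCompiler(Compiler):
    def can_compile(self, src: str) -> bool:
        return src.endswith('.c')

CompilerType = T.TypeVar('CompilerType', bound=Compiler)

def get_compiler_for_source(compilers: T.Iterable[CompilerType], src: str) -> CompilerType:
    # Same shape as the mesonlib.py helper: the return type tracks the input element type.
    for comp in compilers:
        if comp.can_compile(src):
            return comp
    raise RuntimeError('No specified compiler can handle file {!s}'.format(src))

cc = get_compiler_for_source([CCompiler()], 'main.c')  # mypy infers CCompiler, not Compiler
assert isinstance(cc, CCompiler)
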
diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py
index 6f8181d..5f212e6 100644
--- a/mesonbuild/dependencies/base.py
+++ b/mesonbuild/dependencies/base.py
@@ -38,7 +38,7 @@ from ..mesonlib import Popen_safe, version_compare_many, version_compare, listif
from ..mesonlib import Version, LibType
if T.TYPE_CHECKING:
- from ..compilers.compilers import Compiler # noqa: F401
+ from ..compilers.compilers import CompilerType # noqa: F401
DependencyType = T.TypeVar('DependencyType', bound='Dependency')
# These must be defined in this file to avoid cyclical references.
@@ -2488,7 +2488,7 @@ def factory_methods(methods: T.Set[DependencyMethods]) -> 'FactoryType':
def detect_compiler(name: str, env: Environment, for_machine: MachineChoice,
- language: T.Optional[str]) -> T.Optional['Compiler']:
+ language: T.Optional[str]) -> T.Optional['CompilerType']:
"""Given a language and environment find the compiler used."""
compilers = env.coredata.compilers[for_machine]
diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py
index 53a0cfb..d083672 100644
--- a/mesonbuild/dependencies/boost.py
+++ b/mesonbuild/dependencies/boost.py
@@ -272,7 +272,7 @@ class BoostDependency(ExternalDependency):
self.boost_root = None
# Extract and validate modules
- self.modules = mesonlib.extract_as_list(kwargs, 'modules')
+ self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
for i in self.modules:
if not isinstance(i, str):
raise DependencyException('Boost module argument is not a string.')
@@ -299,9 +299,9 @@ class BoostDependency(ExternalDependency):
mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir))
boost_inc_dir = None
- for i in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
- if i.is_file():
- boost_inc_dir = self._include_dir_from_version_header(i)
+ for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+ if j.is_file():
+ boost_inc_dir = self._include_dir_from_version_header(j)
break
if not boost_inc_dir:
self.is_found = False
@@ -317,20 +317,20 @@ class BoostDependency(ExternalDependency):
roots = list(mesonlib.OrderedSet(roots))
# B) Foreach candidate
- for i in roots:
+ for j in roots:
# 1. Look for the boost headers (boost/version.pp)
- mlog.debug('Checking potential boost root {}'.format(i.as_posix()))
- inc_dirs = self.detect_inc_dirs(i)
+ mlog.debug('Checking potential boost root {}'.format(j.as_posix()))
+ inc_dirs = self.detect_inc_dirs(j)
inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions
# Early abort when boost is not found
if not inc_dirs:
continue
- lib_dirs = self.detect_lib_dirs(i)
+ lib_dirs = self.detect_lib_dirs(j)
self.is_found = self.run_check(inc_dirs, lib_dirs)
if self.is_found:
- self.boost_root = i
+ self.boost_root = j
break
def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index 77d64c4..e8284bb 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -21,7 +21,7 @@ from . import optinterpreter
from . import compilers
from .wrap import wrap, WrapMode
from . import mesonlib
-from .mesonlib import FileMode, MachineChoice, Popen_safe, listify, extract_as_list, has_path_sep
+from .mesonlib import FileMode, MachineChoice, Popen_safe, listify, extract_as_list, has_path_sep, unholder
from .dependencies import ExternalProgram
from .dependencies import InternalDependency, Dependency, NotFoundDependency, DependencyException
from .depfile import DepFile
@@ -2484,11 +2484,11 @@ class Interpreter(InterpreterBase):
if not isinstance(version, str):
raise InterpreterException('Version must be a string.')
incs = self.extract_incdirs(kwargs)
- libs = extract_as_list(kwargs, 'link_with', unholder=True)
- libs_whole = extract_as_list(kwargs, 'link_whole', unholder=True)
+ libs = unholder(extract_as_list(kwargs, 'link_with'))
+ libs_whole = unholder(extract_as_list(kwargs, 'link_whole'))
sources = extract_as_list(kwargs, 'sources')
- sources = listify(self.source_strings_to_files(sources), unholder=True)
- deps = extract_as_list(kwargs, 'dependencies', unholder=True)
+ sources = unholder(listify(self.source_strings_to_files(sources)))
+ deps = unholder(extract_as_list(kwargs, 'dependencies'))
compile_args = mesonlib.stringlistify(kwargs.get('compile_args', []))
link_args = mesonlib.stringlistify(kwargs.get('link_args', []))
variables = kwargs.get('variables', {})
@@ -3111,9 +3111,7 @@ external dependencies (including libraries) must go to "dependencies".''')
def program_from_file_for(self, for_machine, prognames, silent):
bins = self.environment.binaries[for_machine]
- for p in prognames:
- if hasattr(p, 'held_object'):
- p = p.held_object
+ for p in unholder(prognames):
if isinstance(p, mesonlib.File):
continue # Always points to a local (i.e. self generated) file.
if not isinstance(p, str):
@@ -3655,12 +3653,12 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if 'command' not in kwargs:
raise InterpreterException('Missing "command" keyword argument')
all_args = extract_as_list(kwargs, 'command')
- deps = extract_as_list(kwargs, 'depends', unholder=True)
+ deps = unholder(extract_as_list(kwargs, 'depends'))
else:
raise InterpreterException('Run_target needs at least one positional argument.')
cleaned_args = []
- for i in listify(all_args, unholder=True):
+ for i in unholder(listify(all_args)):
if not isinstance(i, (str, build.BuildTarget, build.CustomTarget, dependencies.ExternalProgram, mesonlib.File)):
mlog.debug('Wrong type:', str(i))
raise InterpreterException('Invalid argument to run_target.')
@@ -3691,7 +3689,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
name = args[0]
if not isinstance(name, str):
raise InterpreterException('First argument must be a string.')
- deps = listify(args[1:], unholder=True)
+ deps = unholder(listify(args[1:]))
for d in deps:
if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
raise InterpreterException('Depends items must be build targets.')
@@ -3749,7 +3747,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
par = kwargs.get('is_parallel', True)
if not isinstance(par, bool):
raise InterpreterException('Keyword argument is_parallel must be a boolean.')
- cmd_args = extract_as_list(kwargs, 'args', unholder=True)
+ cmd_args = unholder(extract_as_list(kwargs, 'args'))
for i in cmd_args:
if not isinstance(i, (str, mesonlib.File, build.Target)):
raise InterpreterException('Command line arguments must be strings, files or targets.')
@@ -3777,7 +3775,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
if len(s) > 0:
s = ':' + s
suite.append(prj.replace(' ', '_').replace(':', '_') + s)
- depends = extract_as_list(kwargs, 'depends', unholder=True)
+ depends = unholder(extract_as_list(kwargs, 'depends'))
for dep in depends:
if not isinstance(dep, (build.CustomTarget, build.BuildTarget)):
raise InterpreterException('Depends items must be build targets.')
@@ -4140,7 +4138,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self
return mesonlib.File.from_built_file(self.subdir, output)
def extract_incdirs(self, kwargs):
- prospectives = listify(kwargs.get('include_directories', []), unholder=True)
+ prospectives = unholder(extract_as_list(kwargs, 'include_directories'))
result = []
for p in prospectives:
if isinstance(p, build.IncludeDirs):
@@ -4201,7 +4199,7 @@ different subdirectory.
if ":" not in setup_name:
setup_name = (self.subproject if self.subproject else self.build.project_name) + ":" + setup_name
try:
- inp = extract_as_list(kwargs, 'exe_wrapper', unholder=True)
+ inp = unholder(extract_as_list(kwargs, 'exe_wrapper'))
exe_wrapper = []
for i in inp:
if isinstance(i, str):
diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py
index 8071136..b8008b0 100644
--- a/mesonbuild/interpreterbase.py
+++ b/mesonbuild/interpreterbase.py
@@ -446,7 +446,7 @@ class InterpreterBase:
self.current_lineno = cur.lineno
self.evaluate_statement(cur)
except Exception as e:
- if not hasattr(e, 'lineno'):
+ if getattr(e, 'lineno') is None:
# We are doing the equivalent to setattr here and mypy does not like it
e.lineno = cur.lineno # type: ignore
e.colno = cur.colno # type: ignore
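
This hasattr-to-getattr change (and the matching one in mlog.py below) leans on the class-level file/lineno/colno defaults that the mesonlib.py hunk adds to MesonException: once the attributes always exist with Optional types, code can test their values instead of probing for their existence, and mypy can check those tests. A small sketch of that pattern in isolation:

import typing as T

class MesonException(Exception):
    '''Exceptions thrown by Meson (sketch of the class-attribute pattern).'''
    file = None    # type: T.Optional[str]
    lineno = None  # type: T.Optional[int]
    colno = None   # type: T.Optional[int]

def describe(e: MesonException) -> str:
    # The attributes always exist, so no hasattr() probing is needed and mypy
    # sees them as Optional values rather than unknown dynamic attributes.
    if e.file is not None and e.lineno is not None:
        return '{}:{}: {}'.format(e.file, e.lineno, e)
    return str(e)

err = MesonException('Missing keyword argument "output".')
err.file, err.lineno, err.colno = 'meson.build', 12, 3
assert describe(err) == 'meson.build:12: Missing keyword argument "output".'
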
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index 2298c86..e215dcd 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -1,4 +1,4 @@
-# Copyright 2012-2019 The Meson development team
+# Copyright 2012-2020 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,20 +20,27 @@ import time
import platform, subprocess, operator, os, shlex, shutil, re
import collections
from enum import Enum
-from functools import lru_cache, update_wrapper
+from functools import lru_cache, wraps
from itertools import tee, filterfalse
import typing as T
import uuid
+import textwrap
from mesonbuild import mlog
+if T.TYPE_CHECKING:
+ from .build import ConfigurationData
+ from .coredata import OptionDictType, UserOption
+ from .compilers.compilers import CompilerType
+ from .interpreterbase import ObjectHolder
+
_T = T.TypeVar('_T')
_U = T.TypeVar('_U')
have_fcntl = False
have_msvcrt = False
# {subproject: project_meson_version}
-project_meson_versions = {}
+project_meson_versions = {} # type: T.Dict[str, str]
try:
import fcntl
@@ -57,7 +64,7 @@ else:
meson_command = None
GIT = shutil.which('git')
-def git(cmd: T.List[str], workingdir: str, **kwargs) -> subprocess.CompletedProcess:
+def git(cmd: T.List[str], workingdir: str, **kwargs: T.Any) -> subprocess.CompletedProcess:
pc = subprocess.run([GIT, '-C', workingdir] + cmd,
# Redirect stdin to DEVNULL otherwise git messes up the
# console and ANSI colors stop working on Windows.
@@ -69,7 +76,7 @@ def git(cmd: T.List[str], workingdir: str, **kwargs) -> subprocess.CompletedProc
return pc
-def set_meson_command(mainfile):
+def set_meson_command(mainfile: str) -> None:
global python_command
global meson_command
# On UNIX-like systems `meson` is a Python script
@@ -86,7 +93,8 @@ def set_meson_command(mainfile):
if 'MESON_COMMAND_TESTS' in os.environ:
mlog.log('meson_command is {!r}'.format(meson_command))
-def is_ascii_string(astring) -> bool:
+
+def is_ascii_string(astring: T.Union[str, bytes]) -> bool:
try:
if isinstance(astring, str):
astring.encode('ascii')
@@ -96,7 +104,8 @@ def is_ascii_string(astring) -> bool:
return False
return True
-def check_direntry_issues(direntry_array):
+
+def check_direntry_issues(direntry_array: T.Union[T.List[T.Union[str, bytes]], str, bytes]) -> None:
import locale
# Warn if the locale is not UTF-8. This can cause various unfixable issues
# such as os.stat not being able to decode filenames with unicode in them.
@@ -109,22 +118,31 @@ def check_direntry_issues(direntry_array):
for de in direntry_array:
if is_ascii_string(de):
continue
- mlog.warning('''You are using {!r} which is not a Unicode-compatible '
-locale but you are trying to access a file system entry called {!r} which is
-not pure ASCII. This may cause problems.
-'''.format(e, de), file=sys.stderr)
+ mlog.warning(textwrap.dedent('''
+ You are using {!r} which is not a Unicode-compatible
+ locale but you are trying to access a file system entry called {!r} which is
+ not pure ASCII. This may cause problems.
+ '''.format(e, de)), file=sys.stderr)
+
# Put this in objects that should not get dumped to pickle files
# by accident.
import threading
an_unpicklable_object = threading.Lock()
+
class MesonException(Exception):
'''Exceptions thrown by Meson'''
+ file = None # type: T.Optional[str]
+ lineno = None # type: T.Optional[int]
+ colno = None # type: T.Optional[int]
+
+
class EnvironmentException(MesonException):
'''Exceptions thrown while processing and creating the build environment'''
+
class FileMode:
# The first triad is for owner permissions, the second for group permissions,
# and the third for others (everyone else).
@@ -151,18 +169,19 @@ class FileMode:
'[r-][w-][xsS-]' # Group perms
'[r-][w-][xtT-]') # Others perms
- def __init__(self, perms=None, owner=None, group=None):
+ def __init__(self, perms: T.Optional[str] = None, owner: T.Optional[str] = None,
+ group: T.Optional[str] = None):
self.perms_s = perms
self.perms = self.perms_s_to_bits(perms)
self.owner = owner
self.group = group
- def __repr__(self):
+ def __repr__(self) -> str:
ret = '<FileMode: {!r} owner={} group={}'
return ret.format(self.perms_s, self.owner, self.group)
@classmethod
- def perms_s_to_bits(cls, perms_s):
+ def perms_s_to_bits(cls, perms_s: T.Optional[str]) -> int:
'''
Does the opposite of stat.filemode(), converts strings of the form
'rwxr-xr-x' to st_mode enums which can be passed to os.chmod()
@@ -221,8 +240,6 @@ class File:
self.is_built = is_built
self.subdir = subdir
self.fname = fname
- assert(isinstance(self.subdir, str))
- assert(isinstance(self.fname, str))
def __str__(self) -> str:
return self.relative_name()
@@ -236,17 +253,17 @@ class File:
@staticmethod
@lru_cache(maxsize=None)
- def from_source_file(source_root: str, subdir: str, fname: str):
+ def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
if not os.path.isfile(os.path.join(source_root, subdir, fname)):
raise MesonException('File %s does not exist.' % fname)
return File(False, subdir, fname)
@staticmethod
- def from_built_file(subdir: str, fname: str):
+ def from_built_file(subdir: str, fname: str) -> 'File':
return File(True, subdir, fname)
@staticmethod
- def from_absolute_file(fname: str):
+ def from_absolute_file(fname: str) -> 'File':
return File(False, '', fname)
@lru_cache(maxsize=None)
@@ -270,6 +287,8 @@ class File:
return self.fname.split(s)
def __eq__(self, other) -> bool:
+ if not isinstance(other, File):
+ return NotImplemented
return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)
def __hash__(self) -> int:
@@ -280,14 +299,16 @@ class File:
return os.path.join(self.subdir, self.fname)
-def get_compiler_for_source(compilers, src):
+def get_compiler_for_source(compilers: T.Iterable['CompilerType'], src: str) -> 'CompilerType':
+ """Given a set of compilers and a source, find the compiler for that source type."""
for comp in compilers:
if comp.can_compile(src):
return comp
raise MesonException('No specified compiler can handle file {!s}'.format(src))
-def classify_unity_sources(compilers, sources):
- compsrclist = {}
+
+def classify_unity_sources(compilers: T.Iterable['CompilerType'], sources: T.Iterable[str]) -> T.Dict['CompilerType', T.List[str]]:
+ compsrclist = {} # type: T.Dict[CompilerType, T.List[str]]
for src in sources:
comp = get_compiler_for_source(compilers, src)
if comp not in compsrclist:
@@ -296,6 +317,7 @@ def classify_unity_sources(compilers, sources):
compsrclist[comp].append(src)
return compsrclist
+
class OrderedEnum(Enum):
"""
An Enum which additionally offers homogeneous ordered comparison.
@@ -330,10 +352,10 @@ class MachineChoice(OrderedEnum):
BUILD = 0
HOST = 1
- def get_lower_case_name(self):
+ def get_lower_case_name(self) -> str:
return PerMachine('build', 'host')[self]
- def get_prefix(self):
+ def get_prefix(self) -> str:
return PerMachine('build.', '')[self]
@@ -397,6 +419,7 @@ class PerThreeMachine(PerMachine[_T]):
def matches_build_machine(self, machine: MachineChoice) -> bool:
return self.build == self[machine]
+
class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
"""Extends `PerMachine` with the ability to default from `None`s.
"""
@@ -439,40 +462,52 @@ class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Option
def is_sunos() -> bool:
return platform.system().lower() == 'sunos'
+
def is_osx() -> bool:
return platform.system().lower() == 'darwin'
+
def is_linux() -> bool:
return platform.system().lower() == 'linux'
+
def is_android() -> bool:
return platform.system().lower() == 'android'
+
def is_haiku() -> bool:
return platform.system().lower() == 'haiku'
+
def is_openbsd() -> bool:
return platform.system().lower() == 'openbsd'
+
def is_windows() -> bool:
platname = platform.system().lower()
return platname == 'windows' or 'mingw' in platname
+
def is_cygwin() -> bool:
return platform.system().lower().startswith('cygwin')
+
def is_debianlike() -> bool:
return os.path.isfile('/etc/debian_version')
+
def is_dragonflybsd() -> bool:
return platform.system().lower() == 'dragonfly'
+
def is_netbsd() -> bool:
return platform.system().lower() == 'netbsd'
+
def is_freebsd() -> bool:
return platform.system().lower() == 'freebsd'
+
def exe_exists(arglist: T.List[str]) -> bool:
try:
if subprocess.run(arglist, timeout=10).returncode == 0:
@@ -481,8 +516,9 @@ def exe_exists(arglist: T.List[str]) -> bool:
pass
return False
+
@lru_cache(maxsize=None)
-def darwin_get_object_archs(objpath):
+def darwin_get_object_archs(objpath: str) -> T.List[str]:
'''
For a specific object (executable, static library, dylib, etc), run `lipo`
to fetch the list of archs supported by it. Supports both thin objects and
@@ -501,7 +537,8 @@ def darwin_get_object_archs(objpath):
stdo += ' arm'
return stdo.split()
-def detect_vcs(source_dir):
+
+def detect_vcs(source_dir: str) -> T.Optional[T.Dict[str, str]]:
vcs_systems = [
dict(name = 'git', cmd = 'git', repo_dir = '.git', get_rev = 'git describe --dirty=+', rev_regex = '(.*)', dep = '.git/logs/HEAD'),
dict(name = 'mercurial', cmd = 'hg', repo_dir = '.hg', get_rev = 'hg id -i', rev_regex = '(.*)', dep = '.hg/dirstate'),
@@ -520,17 +557,19 @@ def detect_vcs(source_dir):
# a helper class which implements the same version ordering as RPM
class Version:
- def __init__(self, s):
+ def __init__(self, s: str):
self._s = s
# split into numeric, alphabetic and non-alphanumeric sequences
- sequences = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
+ sequences1 = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
+
# non-alphanumeric separators are discarded
- sequences = [m for m in sequences if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
+ sequences2 = [m for m in sequences1 if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
+
# numeric sequences are converted from strings to ints
- sequences = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences]
+ sequences3 = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences2]
- self._v = sequences
+ self._v = sequences3
def __str__(self):
return '%s (V=%s)' % (self._s, str(self._v))
@@ -568,7 +607,7 @@ class Version:
return self._v != other._v
return NotImplemented
- def __cmp(self, other, comparator):
+ def __cmp(self, other: 'Version', comparator: T.Callable[[T.Any, T.Any], bool]) -> bool:
# compare each sequence in order
for ours, theirs in zip(self._v, other._v):
# sort a non-digit sequence before a digit sequence
@@ -584,6 +623,7 @@ class Version:
# otherwise, the version with a suffix remaining is greater
return comparator(len(self._v), len(other._v))
+
def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], bool], str]:
if vstr2.startswith('>='):
cmpop = operator.ge
@@ -611,12 +651,14 @@ def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], boo
return (cmpop, vstr2)
+
def version_compare(vstr1: str, vstr2: str) -> bool:
(cmpop, vstr2) = _version_extract_cmpop(vstr2)
return cmpop(Version(vstr1), Version(vstr2))
-def version_compare_many(vstr1, conditions):
- if not isinstance(conditions, (list, tuple, frozenset)):
+
+def version_compare_many(vstr1: str, conditions: T.Union[str, T.Iterable[str]]) -> T.Tuple[bool, T.List[str], T.List[str]]:
+ if isinstance(conditions, str):
conditions = [conditions]
found = []
not_found = []
@@ -627,6 +669,7 @@ def version_compare_many(vstr1, conditions):
found.append(req)
return not_found == [], not_found, found
+
# determine if the minimum version satisfying the condition |condition| exceeds
# the minimum version for a feature |minimum|
def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
@@ -667,7 +710,8 @@ def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
return cmpop(Version(minimum), Version(condition))
-def default_libdir():
+
+def default_libdir() -> str:
if is_debianlike():
try:
pc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
@@ -685,13 +729,16 @@ def default_libdir():
return 'lib64'
return 'lib'
-def default_libexecdir():
+
+def default_libexecdir() -> str:
# There is no way to auto-detect this, so it must be set at build time
return 'libexec'
-def default_prefix():
+
+def default_prefix() -> str:
return 'c:/' if is_windows() else '/usr/local'
+
def get_library_dirs() -> T.List[str]:
if is_windows():
return ['C:/mingw/lib'] # TODO: get programmatically
@@ -737,7 +784,8 @@ def get_library_dirs() -> T.List[str]:
return unixdirs
-def has_path_sep(name, sep='/\\'):
+
+def has_path_sep(name: str, sep: str = '/\\') -> bool:
'Checks if any of the specified @sep path separators are in @name'
for each in sep:
if each in name:
@@ -754,7 +802,7 @@ if is_windows():
_whitespace = ' \t\n\r'
_find_unsafe_char = re.compile(r'[{}"]'.format(_whitespace)).search
- def quote_arg(arg):
+ def quote_arg(arg: str) -> str:
if arg and not _find_unsafe_char(arg):
return arg
@@ -776,7 +824,7 @@ if is_windows():
result += (num_backslashes * 2) * '\\' + '"'
return result
- def split_args(cmd: T.Sequence[str]) -> T.List[str]:
+ def split_args(cmd: str) -> T.List[str]:
result = []
arg = ''
num_backslashes = 0
@@ -811,26 +859,29 @@ if is_windows():
return result
else:
- def quote_arg(arg):
+ def quote_arg(arg: str) -> str:
return shlex.quote(arg)
- def split_args(cmd):
+ def split_args(cmd: str) -> T.List[str]:
return shlex.split(cmd)
-def join_args(args):
+def join_args(args: T.Iterable[str]) -> str:
return ' '.join([quote_arg(x) for x in args])
-def do_replacement(regex, line, variable_format, confdata):
- missing_variables = set()
- start_tag = '@'
- backslash_tag = '\\@'
+def do_replacement(regex: T.Pattern[str], line: str, variable_format: str,
+ confdata: 'ConfigurationData') -> T.Tuple[str, T.Set[str]]:
+ missing_variables = set() # type: T.Set[str]
if variable_format == 'cmake':
start_tag = '${'
backslash_tag = '\\${'
+ else:
+ assert variable_format == 'meson'
+ start_tag = '@'
+ backslash_tag = '\\@'
- def variable_replace(match):
+ def variable_replace(match: T.Match[str]) -> str:
# Pairs of escape characters before '@' or '\@'
if match.group(0).endswith('\\'):
num_escapes = match.end(0) - match.start(0)
@@ -857,7 +908,8 @@ def do_replacement(regex, line, variable_format, confdata):
return var
return re.sub(regex, variable_replace, line), missing_variables
-def do_mesondefine(line, confdata):
+
+def do_mesondefine(line: str, confdata: 'ConfigurationData') -> str:
arr = line.split()
if len(arr) != 2:
raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
@@ -879,7 +931,8 @@ def do_mesondefine(line, confdata):
raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
-def do_conf_file(src, dst, confdata, variable_format, encoding='utf-8'):
+def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_format: str,
+ encoding: str = 'utf-8') -> T.Tuple[T.Set[str], bool]:
try:
with open(src, encoding=encoding, newline='') as f:
data = f.readlines()
@@ -937,7 +990,7 @@ CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
'''
-def dump_conf_header(ofilename, cdata, output_format):
+def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: str) -> None:
if output_format == 'c':
prelude = CONF_C_PRELUDE
prefix = '#'
@@ -967,7 +1020,8 @@ def dump_conf_header(ofilename, cdata, output_format):
raise MesonException('Unknown data type in configuration file entry: ' + k)
replace_if_different(ofilename, ofilename_tmp)
-def replace_if_different(dst, dst_tmp):
+
+def replace_if_different(dst: str, dst_tmp: str) -> None:
# If contents are identical, don't touch the file to prevent
# unnecessary rebuilds.
different = True
@@ -982,49 +1036,57 @@ def replace_if_different(dst, dst_tmp):
else:
os.unlink(dst_tmp)
-def listify(item: T.Any,
- flatten: bool = True,
- unholder: bool = False) -> T.List[T.Any]:
+
+@T.overload
+def unholder(item: 'ObjectHolder[_T]') -> _T: ...
+
+@T.overload
+def unholder(item: T.List['ObjectHolder[_T]']) -> T.List[_T]: ...
+
+@T.overload
+def unholder(item: T.List[_T]) -> T.List[_T]: ...
+
+@T.overload
+def unholder(item: T.List[T.Union[_T, 'ObjectHolder[_T]']]) -> T.List[_T]: ...
+
+def unholder(item):
+ """Get the held item of an object holder or list of object holders."""
+ if isinstance(item, list):
+ return [i.held_object if hasattr(i, 'held_object') else i for i in item]
+ if hasattr(item, 'held_object'):
+ return item.held_object
+ return item
+
+
+def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
'''
Returns a list with all args embedded in a list if they are not a list.
This function preserves order.
@flatten: Convert lists of lists to a flat list
- @unholder: Replace each item with the object it holds, if required
-
- Note: unholding only works recursively when flattening
'''
if not isinstance(item, list):
- if unholder and hasattr(item, 'held_object'):
- item = item.held_object
return [item]
- result = []
+ result = [] # type: T.List[T.Any]
for i in item:
- if unholder and hasattr(i, 'held_object'):
- i = i.held_object
if flatten and isinstance(i, list):
- result += listify(i, flatten=True, unholder=unholder)
+ result += listify(i, flatten=True)
else:
result.append(i)
return result
-def extract_as_list(dict_object, *keys, pop=False, **kwargs):
+def extract_as_list(dict_object: T.Dict[_T, _U], key: _T, pop: bool = False) -> T.List[_U]:
'''
Extracts all values from given dict_object and listifies them.
'''
- result = []
fetch = dict_object.get
if pop:
fetch = dict_object.pop
# If there's only one key, we don't return a list with one element
- if len(keys) == 1:
- return listify(fetch(keys[0], []), **kwargs)
- # Return a list of values corresponding to *keys
- for key in keys:
- result.append(listify(fetch(key, []), **kwargs))
- return result
+ return listify(fetch(key, []), flatten=True)
+
-def typeslistify(item: 'T.Union[_T, T.List[_T]]',
+def typeslistify(item: 'T.Union[_T, T.Sequence[_T]]',
types: 'T.Union[T.Type[_T], T.Tuple[T.Type[_T]]]') -> T.List[_T]:
'''
Ensure that type(@item) is one of @types or a
@@ -1039,11 +1101,13 @@ def typeslistify(item: 'T.Union[_T, T.List[_T]]',
raise MesonException('List item must be one of {!r}'.format(types))
return item
-def stringlistify(item: T.Union[str, T.List[str]]) -> T.List[str]:
+
+def stringlistify(item: T.Union[T.Any, T.Sequence[T.Any]]) -> T.List[str]:
return typeslistify(item, str)
-def expand_arguments(args):
- expended_args = []
+
+def expand_arguments(args: T.Iterable[str]) -> T.Optional[T.List[str]]:
+ expended_args = [] # type: T.List[str]
for arg in args:
if not arg.startswith('@'):
expended_args.append(arg)
@@ -1055,17 +1119,24 @@ def expand_arguments(args):
extended_args = f.read().split()
expended_args += extended_args
except Exception as e:
- print('Error expanding command line arguments, %s not found' % args_file)
- print(e)
+ mlog.error('Expanding command line arguments:', args_file, 'not found')
+ mlog.exception(e)
return None
return expended_args
-def partition(pred, iterable):
- 'Use a predicate to partition entries into false entries and true entries'
- # partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
+
+def partition(pred: T.Callable[[_T], object], iterable: T.Iterator[_T]) -> T.Tuple[T.Iterator[_T], T.Iterator[_T]]:
+ """Use a predicate to partition entries into false entries and true
+ entries.
+
+ >>> x, y = partition(is_odd, range(10))
+ >>> (list(x), list(y))
+ ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+ """
t1, t2 = tee(iterable)
return filterfalse(pred, t1), filter(pred, t2)
+
def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
stdout: T.Union[T.BinaryIO, int] = subprocess.PIPE,
stderr: T.Union[T.BinaryIO, int] = subprocess.PIPE,
@@ -1088,6 +1159,7 @@ def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
mlog.setup_console()
return p, o, e
+
def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
stdout: T.Union[T.BinaryIO, int] = subprocess.PIPE,
stderr: T.Union[T.BinaryIO, int] = subprocess.PIPE,
@@ -1110,7 +1182,8 @@ def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
e = e.decode(errors='replace').replace('\r\n', '\n')
return p, o, e
-def iter_regexin_iter(regexiter, initer):
+
+def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
'''
Takes each regular expression in @regexiter and tries to search for it in
every item in @initer. If there is a match, returns that match.
@@ -1123,12 +1196,13 @@ def iter_regexin_iter(regexiter, initer):
match = re.search(regex, ii)
if match:
return match.group()
- return False
+ return None
+
-def _substitute_values_check_errors(command, values):
+def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, str]) -> None:
# Error checking
- inregex = ('@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@')
- outregex = ('@OUTPUT([0-9]+)?@', '@OUTDIR@')
+ inregex = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@'] # type: T.List[str]
+ outregex = ['@OUTPUT([0-9]+)?@', '@OUTDIR@'] # type: T.List[str]
if '@INPUT@' not in values:
# Error out if any input-derived templates are present in the command
match = iter_regexin_iter(inregex, command)
@@ -1146,10 +1220,10 @@ def _substitute_values_check_errors(command, values):
for each in command:
if not isinstance(each, str):
continue
- match = re.search(inregex[0], each)
- if match and match.group() not in values:
+ match2 = re.search(inregex[0], each)
+ if match2 and match2.group() not in values:
m = 'Command cannot have {!r} since there are only {!r} inputs'
- raise MesonException(m.format(match.group(), len(values['@INPUT@'])))
+ raise MesonException(m.format(match2.group(), len(values['@INPUT@'])))
if '@OUTPUT@' not in values:
# Error out if any output-derived templates are present in the command
match = iter_regexin_iter(outregex, command)
@@ -1161,12 +1235,13 @@ def _substitute_values_check_errors(command, values):
for each in command:
if not isinstance(each, str):
continue
- match = re.search(outregex[0], each)
- if match and match.group() not in values:
+ match2 = re.search(outregex[0], each)
+ if match2 and match2.group() not in values:
m = 'Command cannot have {!r} since there are only {!r} outputs'
- raise MesonException(m.format(match.group(), len(values['@OUTPUT@'])))
+ raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
-def substitute_values(command, values):
+
+def substitute_values(command: T.List[str], values: T.Dict[str, str]) -> T.List[str]:
'''
Substitute the template strings in the @values dict into the list of
strings @command and return a new list. For a full list of the templates,
@@ -1179,7 +1254,7 @@ def substitute_values(command, values):
# Error checking
_substitute_values_check_errors(command, values)
# Substitution
- outcmd = []
+ outcmd = [] # type: T.List[str]
rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
for vv in command:
@@ -1214,7 +1289,8 @@ def substitute_values(command, values):
outcmd.append(vv)
return outcmd
-def get_filenames_templates_dict(inputs, outputs):
+
+def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T.Dict[str, T.Union[str, T.List[str]]]:
'''
Create a dictionary with template strings as keys and values as values for
the following templates:
@@ -1236,7 +1312,7 @@ def get_filenames_templates_dict(inputs, outputs):
@OUTPUT0@, @OUTPUT1@, ... one for each output file
'''
- values = {}
+ values = {} # type: T.Dict[str, T.Union[str, T.List[str]]]
# Gather values derived from the input
if inputs:
# We want to substitute all the inputs.
@@ -1261,7 +1337,7 @@ def get_filenames_templates_dict(inputs, outputs):
return values
-def _make_tree_writable(topdir):
+def _make_tree_writable(topdir: str) -> None:
# Ensure all files and directories under topdir are writable
# (and readable) by owner.
for d, _, files in os.walk(topdir):
@@ -1272,7 +1348,7 @@ def _make_tree_writable(topdir):
os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
-def windows_proof_rmtree(f):
+def windows_proof_rmtree(f: str) -> None:
# On Windows if anyone is holding a file open you can't
# delete it. As an example an anti virus scanner might
# be scanning files you are trying to delete. The only
@@ -1292,7 +1368,7 @@ def windows_proof_rmtree(f):
shutil.rmtree(f)
-def windows_proof_rm(fpath):
+def windows_proof_rm(fpath: str) -> None:
"""Like windows_proof_rmtree, but for a single file."""
if os.path.isfile(fpath):
os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
@@ -1308,7 +1384,8 @@ def windows_proof_rm(fpath):
os.unlink(fpath)
-def detect_subprojects(spdir_name, current_dir='', result=None):
+def detect_subprojects(spdir_name: str, current_dir: str = '',
+ result: T.Optional[T.Dict[str, T.List[str]]] = None) -> T.Optional[T.Dict[str, T.List[str]]]:
if result is None:
result = {}
spdir = os.path.join(current_dir, spdir_name)
@@ -1332,57 +1409,63 @@ def detect_subprojects(spdir_name, current_dir='', result=None):
result[basename] = [trial]
return result
+
def substring_is_in_list(substr: str, strlist: T.List[str]) -> bool:
for s in strlist:
if substr in s:
return True
return False
-class OrderedSet(collections.abc.MutableSet):
+
+class OrderedSet(T.MutableSet[_T]):
"""A set that preserves the order in which items are added, by first
insertion.
"""
- def __init__(self, iterable=None):
- self.__container = collections.OrderedDict()
+ def __init__(self, iterable: T.Optional[T.Iterable[_T]] = None):
+ # typing.OrderedDict is new in 3.7.2, so we can't use that, but we can
+ # use MutableMapping, which is fine in this case.
+ self.__container = collections.OrderedDict() # type: T.MutableMapping[_T, None]
if iterable:
self.update(iterable)
- def __contains__(self, value):
+ def __contains__(self, value: object) -> bool:
return value in self.__container
- def __iter__(self):
+ def __iter__(self) -> T.Iterator[_T]:
return iter(self.__container.keys())
- def __len__(self):
+ def __len__(self) -> int:
return len(self.__container)
- def __repr__(self):
+ def __repr__(self) -> str:
# Don't print 'OrderedSet("")' for an empty set.
if self.__container:
return 'OrderedSet("{}")'.format(
'", "'.join(repr(e) for e in self.__container.keys()))
return 'OrderedSet()'
- def __reversed__(self):
- return reversed(self.__container)
+ def __reversed__(self) -> T.Iterator[_T]:
+ # Mypy is complaining that sets cant be reversed, which is true for
+ # unordered sets, but this is an ordered, set so reverse() makes sense.
+ return reversed(self.__container.keys()) # type: ignore
- def add(self, value):
+ def add(self, value: _T) -> None:
self.__container[value] = None
- def discard(self, value):
+ def discard(self, value: _T) -> None:
if value in self.__container:
del self.__container[value]
- def update(self, iterable):
+ def update(self, iterable: T.Iterable[_T]) -> None:
for item in iterable:
self.__container[item] = None
- def difference(self, set_):
+ def difference(self, set_: T.Union[T.Set[_T], 'OrderedSet[_T]']) -> 'OrderedSet[_T]':
return type(self)(e for e in self if e not in set_)
class BuildDirLock:
- def __init__(self, builddir):
+ def __init__(self, builddir: str):
self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
def __enter__(self):
@@ -1431,7 +1514,8 @@ class ProgressBarFallback: # lgtm [py/iter-returns-non-self]
fallback, it is safe to ignore the 'Iterator does not return self from
__iter__ method' warning.
'''
- def __init__(self, iterable=None, total=None, bar_type=None, desc=None):
+ def __init__(self, iterable: T.Optional[T.Iterable[str]] = None, total: T.Optional[int] = None,
+ bar_type: T.Optional[str] = None, desc: T.Optional[str] = None):
if iterable is not None:
self.iterable = iter(iterable)
return
@@ -1445,18 +1529,18 @@ class ProgressBarFallback: # lgtm [py/iter-returns-non-self]
# Pretend to be an iterator when called as one and don't print any
# progress
- def __iter__(self):
+ def __iter__(self) -> T.Iterator[str]:
return self.iterable
- def __next__(self):
+ def __next__(self) -> str:
return next(self.iterable)
- def print_dot(self):
+ def print_dot(self) -> None:
print('.', end='')
sys.stdout.flush()
self.printed_dots += 1
- def update(self, progress):
+ def update(self, progress: int) -> None:
self.done += progress
if not self.total:
# Just print one dot per call if we don't have a total length
@@ -1466,32 +1550,33 @@ class ProgressBarFallback: # lgtm [py/iter-returns-non-self]
while self.printed_dots < ratio:
self.print_dot()
- def close(self):
+ def close(self) -> None:
print('')
try:
from tqdm import tqdm
-
- class ProgressBar(tqdm):
- def __init__(self, *args, bar_type=None, **kwargs):
+except ImportError:
+ # ideally we would use a typing.Protocol here, but it's part of typing_extensions until 3.8
+ ProgressBar = ProgressBarFallback # type: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]]
+else:
+ class ProgressBarTqdm(tqdm):
+ def __init__(self, *args, bar_type: T.Optional[str] = None, **kwargs):
if bar_type == 'download':
kwargs.update({'unit': 'bytes', 'leave': True})
else:
kwargs.update({'leave': False})
kwargs['ncols'] = 100
super().__init__(*args, **kwargs)
-except ImportError:
- ProgressBar = ProgressBarFallback
+ ProgressBar = ProgressBarTqdm
-def get_wine_shortpath(winecmd, wine_paths):
- """ Get A short version of @wine_paths to avoid
- reaching WINEPATH number of char limit.
+def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.Sequence[str]) -> str:
+ """Get A short version of @wine_paths to avoid reaching WINEPATH number
+ of char limit.
"""
- seen = set()
- wine_paths = [p for p in wine_paths if not (p in seen or seen.add(p))]
+ wine_paths = list(OrderedSet(wine_paths))
getShortPathScript = '%s.bat' % str(uuid.uuid4()).lower()[:5]
with open(getShortPathScript, mode='w') as f:
@@ -1516,10 +1601,12 @@ def get_wine_shortpath(winecmd, wine_paths):
return wine_path.strip(';')
-def run_once(func):
- ret = []
- def wrapper(*args, **kwargs):
+def run_once(func: T.Callable[..., _T]) -> T.Callable[..., _T]:
+ ret = [] # type: T.List[_T]
+
+ @wraps(func)
+ def wrapper(*args: T.Any, **kwargs: T.Any) -> _T:
if ret:
return ret[0]
@@ -1527,33 +1614,40 @@ def run_once(func):
ret.append(val)
return val
- return update_wrapper(wrapper, func)
+ return wrapper
-class OptionProxy:
- def __init__(self, value):
+class OptionProxy(T.Generic[_T]):
+ def __init__(self, value: _T):
self.value = value
+
class OptionOverrideProxy:
- '''Mimic an option list but transparently override
- selected option values.'''
- def __init__(self, overrides, *options):
+
+ '''Mimic an option list but transparently override selected option
+ values.
+ '''
+
+ # TODO: the typing here could be made more explicit using a TypeDict from
+ # python 3.8 or typing_extensions
+
+ def __init__(self, overrides: T.Dict[str, T.Any], *options: 'OptionDictType'):
self.overrides = overrides
self.options = options
- def __getitem__(self, option_name):
+ def __getitem__(self, option_name: str) -> T.Any:
for opts in self.options:
if option_name in opts:
return self._get_override(option_name, opts[option_name])
raise KeyError('Option not found', option_name)
- def _get_override(self, option_name, base_opt):
+ def _get_override(self, option_name: str, base_opt: 'UserOption[T.Any]') -> T.Union[OptionProxy[T.Any], 'UserOption[T.Any]']:
if option_name in self.overrides:
return OptionProxy(base_opt.validate_value(self.overrides[option_name]))
return base_opt
- def copy(self):
- result = {}
+ def copy(self) -> T.Dict[str, T.Any]:
+ result = {} # type: T.Dict[str, T.Any]
for opts in self.options:
for option_name in opts:
result[option_name] = self._get_override(option_name, opts[option_name])
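OptionProxy is now generic over the wrapped value. A sketch of how the T.Generic[_T] parameter lets mypy track the value type; the validate_value() call and OptionDictType from the hunk above are Meson-specific and omitted here:

import typing as T

_T = T.TypeVar('_T')

class OptionProxy(T.Generic[_T]):
    def __init__(self, value: _T) -> None:
        self.value = value

int_opt = OptionProxy(3)        # inferred as OptionProxy[int]
str_opt = OptionProxy('debug')  # inferred as OptionProxy[str]

def double(o: OptionProxy[int]) -> int:
    return o.value * 2

assert double(int_opt) == 6
# double(str_opt) would be rejected by mypy, but still runs at runtime.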
diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py
index ea75bd0..88a47e7 100644
--- a/mesonbuild/mlog.py
+++ b/mesonbuild/mlog.py
@@ -298,7 +298,7 @@ def exception(e: Exception, prefix: T.Optional[AnsiDecorator] = None) -> None:
prefix = red('ERROR:')
log()
args = [] # type: T.List[T.Union[AnsiDecorator, str]]
- if hasattr(e, 'file') and hasattr(e, 'lineno') and hasattr(e, 'colno'):
+ if getattr(e, 'file') is not None and getattr(e, 'lineno') is not None and getattr(e, 'colno') is not None:
# Mypy doesn't follow hasattr, and it's pretty easy to visually inspect
# that this is correct, so we'll just ignore it.
path = get_relative_path(Path(e.file), Path(os.getcwd())) # type: ignore
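The hunk above swaps hasattr() checks for getattr() comparisons so mypy can follow the attribute access. A minimal sketch of the same idea, written with an explicit None default so that exceptions lacking the attributes never raise; LocatedError is an invented example class, not Meson's:

import typing as T

class LocatedError(Exception):
    def __init__(self, msg: str, file: T.Optional[str] = None,
                 lineno: T.Optional[int] = None) -> None:
        super().__init__(msg)
        self.file = file
        self.lineno = lineno

def describe(e: Exception) -> str:
    # getattr with a None default never raises, even for plain exceptions
    # that do not define the attributes at all.
    file = getattr(e, 'file', None)
    lineno = getattr(e, 'lineno', None)
    if file is not None and lineno is not None:
        return '%s:%s' % (file, lineno)
    return 'unknown location'

assert describe(LocatedError('boom', 'meson.build', 12)) == 'meson.build:12'
assert describe(ValueError('boom')) == 'unknown location'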
diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py
index dc3c786..dc86a1b 100644
--- a/mesonbuild/modules/__init__.py
+++ b/mesonbuild/modules/__init__.py
@@ -18,6 +18,7 @@
import os
from .. import build
+from ..mesonlib import unholder
class ExtensionModule:
@@ -38,12 +39,7 @@ def get_include_args(include_dirs, prefix='-I'):
return []
dirs_str = []
- for incdirs in include_dirs:
- if hasattr(incdirs, "held_object"):
- dirs = incdirs.held_object
- else:
- dirs = incdirs
-
+ for dirs in unholder(include_dirs):
if isinstance(dirs, str):
dirs_str += ['%s%s' % (prefix, dirs)]
continue
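get_include_args() now relies on mesonlib.unholder() instead of repeating the held_object check inline. The helper itself is not shown in this diff, so the sketch below is only an assumption based on the call sites: it unwraps a single holder or a list of them.

import typing as T

class ObjectHolder:
    def __init__(self, obj: T.Any) -> None:
        self.held_object = obj

def unholder(item: T.Any) -> T.Any:
    """Unwrap ObjectHolder-like objects; lists are unwrapped element-wise."""
    if isinstance(item, list):
        return [i.held_object if hasattr(i, 'held_object') else i for i in item]
    return item.held_object if hasattr(item, 'held_object') else item

assert unholder(ObjectHolder(1)) == 1
assert unholder([ObjectHolder(1), 2]) == [1, 2]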
diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py
index 1743b59..4ddde28 100644
--- a/mesonbuild/modules/gnome.py
+++ b/mesonbuild/modules/gnome.py
@@ -29,7 +29,8 @@ from . import get_include_args
from . import ExtensionModule
from . import ModuleReturnValue
from ..mesonlib import (
- MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list, join_args
+ MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list,
+ join_args, unholder,
)
from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
from ..interpreterbase import noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs
@@ -83,11 +84,11 @@ class GnomeModule(ExtensionModule):
mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))
@staticmethod
- @mesonlib.run_once
def _print_gdbus_warning():
mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
' include_directories of targets with GLib < 2.51.3:',
- mlog.bold('https://github.com/mesonbuild/meson/issues/1387'))
+ mlog.bold('https://github.com/mesonbuild/meson/issues/1387'),
+ once=True)
@FeatureNewKwargs('gnome.compile_resources', '0.37.0', ['gresource_bundle', 'export', 'install_header'])
@permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
@@ -98,7 +99,7 @@ class GnomeModule(ExtensionModule):
cmd = ['glib-compile-resources', '@INPUT@']
- source_dirs, dependencies = mesonlib.extract_as_list(kwargs, 'source_dir', 'dependencies', pop=True)
+ source_dirs, dependencies = [mesonlib.extract_as_list(kwargs, c, pop=True) for c in ['source_dir', 'dependencies']]
if len(args) < 2:
raise MesonException('Not enough arguments; the name of the resource '
@@ -238,9 +239,7 @@ class GnomeModule(ExtensionModule):
subdirs = []
for resfile in dep_files[:]:
resbasename = os.path.basename(resfile)
- for dep in dependencies:
- if hasattr(dep, 'held_object'):
- dep = dep.held_object
+ for dep in unholder(dependencies):
if isinstance(dep, mesonlib.File):
if dep.fname != resbasename:
continue
@@ -316,15 +315,13 @@ class GnomeModule(ExtensionModule):
# require two args in order, such as -framework AVFoundation
external_ldflags_nodedup = []
gi_includes = OrderedSet()
- deps = mesonlib.listify(deps, unholder=True)
+ deps = mesonlib.unholder(mesonlib.listify(deps))
for dep in deps:
if isinstance(dep, InternalDependency):
cflags.update(dep.get_compile_args())
cflags.update(get_include_args(dep.include_directories))
- for lib in dep.libraries:
- if hasattr(lib, 'held_object'):
- lib = lib.held_object
+ for lib in unholder(dep.libraries):
if isinstance(lib, build.SharedLibrary):
internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
@@ -341,9 +338,7 @@ class GnomeModule(ExtensionModule):
external_ldflags.update(extdepflags[2])
external_ldflags_nodedup += extdepflags[3]
gi_includes.update(extdepflags[4])
- for source in dep.sources:
- if hasattr(source, 'held_object'):
- source = source.held_object
+ for source in unholder(dep.sources):
if isinstance(source, GirTarget):
gi_includes.update([os.path.join(state.environment.get_build_dir(),
source.get_subdir())])
@@ -457,9 +452,7 @@ class GnomeModule(ExtensionModule):
if 'includes' in kwargs:
includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True)
- for inc in includes:
- if hasattr(inc, 'held_object'):
- inc = inc.held_object
+ for inc in unholder(includes):
if isinstance(inc, str):
ret += ['--include=%s' % (inc, )]
elif isinstance(inc, GirTarget):
@@ -616,9 +609,7 @@ class GnomeModule(ExtensionModule):
gir_filelist_filename = os.path.join(gir_filelist_dir, '%s_%s_gir_filelist' % (ns, nsversion))
with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
- for s in libsources:
- if hasattr(s, 'held_object'):
- s = s.held_object
+ for s in unholder(libsources):
if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
for custom_output in s.get_outputs():
gir_filelist.write(os.path.join(state.environment.get_build_dir(),
@@ -671,15 +662,11 @@ class GnomeModule(ExtensionModule):
# dependencies and also find the include directories needed for the
# typelib generation custom target below.
typelib_includes = []
- for dep in deps:
- if hasattr(dep, 'held_object'):
- dep = dep.held_object
+ for dep in unholder(deps):
# Add a dependency on each GirTarget listed in dependencies and add
# the directory where it will be generated to the typelib includes
if isinstance(dep, InternalDependency):
- for source in dep.sources:
- if hasattr(source, 'held_object'):
- source = source.held_object
+ for source in unholder(dep.sources):
if isinstance(source, GirTarget) and source not in depends:
depends.append(source)
subdir = os.path.join(state.environment.get_build_dir(),
@@ -776,7 +763,7 @@ class GnomeModule(ExtensionModule):
langs_compilers = self._get_girtargets_langs_compilers(girtargets)
cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
deps = self._get_gir_targets_deps(girtargets)
- deps += extract_as_list(kwargs, 'dependencies', pop=True, unholder=True)
+ deps += mesonlib.unholder(extract_as_list(kwargs, 'dependencies', pop=True))
typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends)
# ldflags will be misinterpreted by gir scanner (showing
# spurious dependencies) but building GStreamer fails if they
@@ -1003,9 +990,7 @@ This will become a hard error in the future.''')
depends = []
content_files = []
- for s in mesonlib.extract_as_list(kwargs, 'content_files'):
- if hasattr(s, 'held_object'):
- s = s.held_object
+ for s in unholder(mesonlib.extract_as_list(kwargs, 'content_files')):
if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
depends.append(s)
for o in s.get_outputs():
@@ -1057,7 +1042,7 @@ This will become a hard error in the future.''')
def _get_build_args(self, kwargs, state, depends):
args = []
- deps = extract_as_list(kwargs, 'dependencies', unholder=True)
+ deps = mesonlib.unholder(extract_as_list(kwargs, 'dependencies'))
cflags = []
cflags.extend(mesonlib.stringlistify(kwargs.pop('c_args', [])))
deps_cflags, internal_ldflags, external_ldflags, gi_includes = \
@@ -1601,9 +1586,7 @@ G_END_DECLS'''
vapi_includes = []
ret = []
remaining_args = []
- for arg in arg_list:
- if hasattr(arg, 'held_object'):
- arg = arg.held_object
+ for arg in unholder(arg_list):
if isinstance(arg, InternalDependency):
targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
for target in targets:
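In gnome.py the @mesonlib.run_once decorator on warning helpers is replaced by passing once=True to mlog.warning(). A sketch of one way such de-duplication could work, assuming the message text is used as the key (Meson's mlog may key on something else):

import typing as T

_printed = set()  # type: T.Set[str]

def warn(msg: str, once: bool = False) -> None:
    # With once=True, a message is only emitted the first time it is seen.
    if once:
        if msg in _printed:
            return
        _printed.add(msg)
    print('WARNING:', msg)

warn('gdbus_codegen() needs extra include_directories', once=True)
warn('gdbus_codegen() needs extra include_directories', once=True)  # suppressed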
diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py
index 80ec8bd..2652e7d 100644
--- a/mesonbuild/modules/i18n.py
+++ b/mesonbuild/modules/i18n.py
@@ -16,7 +16,7 @@ import shutil
from os import path
from .. import coredata, mesonlib, build, mlog
-from ..mesonlib import MesonException, run_once
+from ..mesonlib import MesonException
from . import ModuleReturnValue
from . import ExtensionModule
from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs
@@ -59,9 +59,8 @@ PRESET_ARGS = {
class I18nModule(ExtensionModule):
@staticmethod
- @run_once
def nogettext_warning():
- mlog.warning('Gettext not found, all translation targets will be ignored.')
+ mlog.warning('Gettext not found, all translation targets will be ignored.', once=True)
return ModuleReturnValue(None, [])
@staticmethod
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index 8de88c9..1d8e8a9 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -74,7 +74,7 @@ class DependenciesHelper:
def _process_reqs(self, reqs):
'''Returns string names of requirements'''
processed_reqs = []
- for obj in mesonlib.listify(reqs, unholder=True):
+ for obj in mesonlib.unholder(mesonlib.listify(reqs)):
if not isinstance(obj, str):
FeatureNew('pkgconfig.generate requirement from non-string object', '0.46.0').use(self.state.subproject)
if hasattr(obj, 'generated_pc'):
@@ -108,7 +108,7 @@ class DependenciesHelper:
self.cflags += mesonlib.stringlistify(cflags)
def _process_libs(self, libs, public):
- libs = mesonlib.listify(libs, unholder=True)
+ libs = mesonlib.unholder(mesonlib.listify(libs))
processed_libs = []
processed_reqs = []
processed_cflags = []
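pkgconfig.py now composes two helpers, unholder(listify(...)), instead of passing unholder=True to listify(). A sketch of the composition with simplified stand-ins for both helpers; Holder is an invented class:

import typing as T

class Holder:
    def __init__(self, obj: T.Any) -> None:
        self.held_object = obj

def listify(item: T.Any) -> T.List[T.Any]:
    # Wrap scalars in a list and recursively flatten nested lists.
    if not isinstance(item, list):
        return [item]
    out = []  # type: T.List[T.Any]
    for i in item:
        out.extend(listify(i))
    return out

def unholder(items: T.List[T.Any]) -> T.List[T.Any]:
    return [i.held_object if hasattr(i, 'held_object') else i for i in items]

# The boolean keyword is replaced by composing the two helpers:
assert unholder(listify([Holder(1), [2, Holder(3)]])) == [1, 2, 3]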
diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py
index 76edb7e..c7da530 100644
--- a/mesonbuild/modules/qt.py
+++ b/mesonbuild/modules/qt.py
@@ -15,7 +15,7 @@
import os
from .. import mlog
from .. import build
-from ..mesonlib import MesonException, Popen_safe, extract_as_list, File
+from ..mesonlib import MesonException, Popen_safe, extract_as_list, File, unholder
from ..dependencies import Dependency, Qt4Dependency, Qt5Dependency
import xml.etree.ElementTree as ET
from . import ModuleReturnValue, get_include_args, ExtensionModule
@@ -142,7 +142,7 @@ class QtBaseModule(ExtensionModule):
@permittedKwargs({'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'include_directories', 'dependencies', 'ui_files', 'qresources', 'method'})
def preprocess(self, state, args, kwargs):
rcc_files, ui_files, moc_headers, moc_sources, uic_extra_arguments, moc_extra_arguments, rcc_extra_arguments, sources, include_directories, dependencies \
- = extract_as_list(kwargs, 'qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'sources', 'include_directories', 'dependencies', pop = True)
+ = [extract_as_list(kwargs, c, pop=True) for c in ['qresources', 'ui_files', 'moc_headers', 'moc_sources', 'uic_extra_arguments', 'moc_extra_arguments', 'rcc_extra_arguments', 'sources', 'include_directories', 'dependencies']]
sources += args[1:]
method = kwargs.get('method', 'auto')
self._detect_tools(state.environment, method)
@@ -190,9 +190,7 @@ class QtBaseModule(ExtensionModule):
sources.append(ui_output)
inc = get_include_args(include_dirs=include_directories)
compile_args = []
- for dep in dependencies:
- if hasattr(dep, 'held_object'):
- dep = dep.held_object
+ for dep in unholder(dependencies):
if isinstance(dep, Dependency):
for arg in dep.get_compile_args():
if arg.startswith('-I') or arg.startswith('-D'):
@@ -221,7 +219,7 @@ class QtBaseModule(ExtensionModule):
@FeatureNew('qt.compile_translations', '0.44.0')
@permittedKwargs({'ts_files', 'install', 'install_dir', 'build_by_default', 'method'})
def compile_translations(self, state, args, kwargs):
- ts_files, install_dir = extract_as_list(kwargs, 'ts_files', 'install_dir', pop=True)
+ ts_files, install_dir = [extract_as_list(kwargs, c, pop=True) for c in ['ts_files', 'install_dir']]
self._detect_tools(state.environment, kwargs.get('method', 'auto'))
translations = []
for ts in ts_files:
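The multi-key extract_as_list() call is replaced by a per-key list comprehension, so each keyword is popped independently. A sketch with a simplified single-key extract_as_list, a stand-in for the real helper:

import typing as T

def extract_as_list(kwargs: T.Dict[str, T.Any], key: str,
                    pop: bool = False) -> T.List[T.Any]:
    # Simplified: returns a list for one key, flattening one level of nesting.
    val = kwargs.pop(key, []) if pop else kwargs.get(key, [])
    result = []  # type: T.List[T.Any]
    for item in (val if isinstance(val, list) else [val]):
        if isinstance(item, list):
            result.extend(item)
        else:
            result.append(item)
    return result

kwargs = {'ts_files': 'app.ts', 'install_dir': ['share', ['qt']]}
ts_files, install_dir = [extract_as_list(kwargs, c, pop=True)
                         for c in ['ts_files', 'install_dir']]
assert ts_files == ['app.ts']
assert install_dir == ['share', 'qt']
assert kwargs == {}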
diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py
index 7cf46f7..8589adc 100644
--- a/mesonbuild/modules/windows.py
+++ b/mesonbuild/modules/windows.py
@@ -18,7 +18,7 @@ import re
from .. import mlog
from .. import mesonlib, build
-from ..mesonlib import MachineChoice, MesonException, extract_as_list
+from ..mesonlib import MachineChoice, MesonException, extract_as_list, unholder
from . import get_include_args
from . import ModuleReturnValue
from . import ExtensionModule
@@ -116,9 +116,7 @@ class WindowsModule(ExtensionModule):
for subsrc in src:
add_target(subsrc)
return
-
- if hasattr(src, 'held_object'):
- src = src.held_object
+ src = unholder(src)
if isinstance(src, str):
name_format = 'file {!r}'
diff --git a/mypy.ini b/mypy.ini
index aacfb3f..b8dad03 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,4 +1,5 @@
[mypy]
strict_optional = False
show_error_context = False
-show_column_numbers = True
\ No newline at end of file
+show_column_numbers = True
+ignore_missing_imports = True
diff --git a/run_unittests.py b/run_unittests.py
index e254d4a..c9b7563 100755
--- a/run_unittests.py
+++ b/run_unittests.py
@@ -686,12 +686,17 @@ class InternalTests(unittest.TestCase):
self.assertEqual([holder1], listify([holder1]))
self.assertEqual([holder1, 2], listify([holder1, 2]))
self.assertEqual([holder1, 2, 3], listify([holder1, 2, [3]]))
- self.assertEqual([1], listify(holder1, unholder=True))
- self.assertEqual([1], listify([holder1], unholder=True))
- self.assertEqual([1, 2], listify([holder1, 2], unholder=True))
- self.assertEqual([1, 2, 3], listify([holder1, 2, [holder3]], unholder=True))
- # Unholding doesn't work recursively when not flattening
- self.assertEqual([1, [2], [holder3]], listify([holder1, [2], [holder3]], unholder=True, flatten=False))
+
+ def test_unholder(self):
+ unholder = mesonbuild.mesonlib.unholder
+
+ holder1 = ObjectHolder(1)
+ holder3 = ObjectHolder(3)
+ holders = [holder1, holder3]
+
+ self.assertEqual(1, unholder(holder1))
+ self.assertEqual([1], unholder([holder1]))
+ self.assertEqual([1, 3], unholder(holders))
def test_extract_as_list(self):
extract = mesonbuild.mesonlib.extract_as_list
@@ -701,16 +706,15 @@ class InternalTests(unittest.TestCase):
self.assertEqual(kwargs, {'sources': [1, 2, 3]})
self.assertEqual([1, 2, 3], extract(kwargs, 'sources', pop=True))
self.assertEqual(kwargs, {})
+
# Test unholding
holder3 = ObjectHolder(3)
kwargs = {'sources': [1, 2, holder3]}
- self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True))
self.assertEqual(kwargs, {'sources': [1, 2, holder3]})
- self.assertEqual([1, 2, 3], extract(kwargs, 'sources', unholder=True, pop=True))
- self.assertEqual(kwargs, {})
- # Test listification
- kwargs = {'sources': [1, 2, 3], 'pch_sources': [4, 5, 6]}
- self.assertEqual([[1, 2, 3], [4, 5, 6]], extract(kwargs, 'sources', 'pch_sources'))
+
+ # flatten nested lists
+ kwargs = {'sources': [1, [2, [3]]]}
+ self.assertEqual([1, 2, 3], extract(kwargs, 'sources'))
def test_pkgconfig_module(self):