From 6a0fabc6472f49621260de215f128a31ae70219b Mon Sep 17 00:00:00 2001
From: Eli Schwartz
Date: Thu, 4 Mar 2021 17:16:11 -0500
Subject: mass rewrite of string formatting to use f-strings everywhere

performed by running "pyupgrade --py36-plus" and committing the results
---
 docs/genrelnotes.py | 2 +-
 ghwt.py | 6 +-
 mesonbuild/arglist.py | 4 +-
 mesonbuild/ast/interpreter.py | 4 +-
 mesonbuild/backend/backends.py | 26 +--
 mesonbuild/backend/ninjabackend.py | 76 ++++----
 mesonbuild/backend/vs2010backend.py | 8 +-
 mesonbuild/backend/xcodebackend.py | 68 +++----
 mesonbuild/build.py | 72 +++----
 mesonbuild/cmake/client.py | 12 +-
 mesonbuild/cmake/common.py | 16 +-
 mesonbuild/cmake/executor.py | 8 +-
 mesonbuild/cmake/fileapi.py | 2 +-
 mesonbuild/cmake/interpreter.py | 66 +++----
 mesonbuild/cmake/toolchain.py | 2 +-
 mesonbuild/cmake/traceparser.py | 22 +--
 mesonbuild/compilers/c.py | 6 +-
 mesonbuild/compilers/compilers.py | 32 ++--
 mesonbuild/compilers/cpp.py | 10 +-
 mesonbuild/compilers/cuda.py | 4 +-
 mesonbuild/compilers/d.py | 26 +--
 mesonbuild/compilers/mixins/arm.py | 2 +-
 mesonbuild/compilers/mixins/clang.py | 4 +-
 mesonbuild/compilers/mixins/clike.py | 16 +-
 mesonbuild/compilers/mixins/emscripten.py | 2 +-
 mesonbuild/compilers/mixins/gnu.py | 4 +-
 mesonbuild/compilers/mixins/islinker.py | 4 +-
 mesonbuild/compilers/mixins/pgi.py | 2 +-
 mesonbuild/compilers/mixins/visualstudio.py | 2 +-
 mesonbuild/compilers/rust.py | 6 +-
 mesonbuild/compilers/vala.py | 4 +-
 mesonbuild/coredata.py | 32 ++--
 mesonbuild/dependencies/base.py | 116 ++++++------
 mesonbuild/dependencies/boost.py | 32 ++--
 mesonbuild/dependencies/cuda.py | 24 +--
 mesonbuild/dependencies/dev.py | 20 +-
 mesonbuild/dependencies/hdf5.py | 12 +-
 mesonbuild/dependencies/misc.py | 20 +-
 mesonbuild/dependencies/ui.py | 8 +-
 mesonbuild/envconfig.py | 10 +-
 mesonbuild/environment.py | 16 +-
 mesonbuild/interpreter.py | 88 ++++-----
 mesonbuild/interpreterbase.py | 40 ++--
 mesonbuild/linkers.py | 36 ++--
 mesonbuild/mcompile.py | 14 +-
 mesonbuild/mconf.py | 2 +-
 mesonbuild/mdist.py | 14 +-
 mesonbuild/mesonlib/universal.py | 42 ++---
 mesonbuild/mesonmain.py | 4 +-
 mesonbuild/minit.py | 4 +-
 mesonbuild/minstall.py | 36 ++--
 mesonbuild/mintro.py | 4 +-
 mesonbuild/mlog.py | 8 +-
 mesonbuild/modules/__init__.py | 8 +-
 mesonbuild/modules/cmake.py | 14 +-
 mesonbuild/modules/fs.py | 4 +-
 mesonbuild/modules/gnome.py | 34 ++--
 mesonbuild/modules/hotdoc.py | 6 +-
 mesonbuild/modules/i18n.py | 2 +-
 mesonbuild/modules/keyval.py | 2 +-
 mesonbuild/modules/pkgconfig.py | 10 +-
 mesonbuild/modules/python.py | 38 ++--
 mesonbuild/modules/python3.py | 2 +-
 mesonbuild/modules/qt.py | 20 +-
 mesonbuild/modules/sourceset.py | 2 +-
 mesonbuild/modules/unstable_external_project.py | 18 +-
 mesonbuild/modules/windows.py | 2 +-
 mesonbuild/mparser.py | 12 +-
 mesonbuild/msetup.py | 12 +-
 mesonbuild/msubprojects.py | 14 +-
 mesonbuild/mtest.py | 56 +++---
 mesonbuild/rewriter.py | 10 +-
 mesonbuild/scripts/cleantrees.py | 2 +-
 mesonbuild/scripts/cmake_run_ctgt.py | 2 +-
 mesonbuild/scripts/coverage.py | 4 +-
 mesonbuild/scripts/depscan.py | 10 +-
 mesonbuild/scripts/externalproject.py | 6 +-
 mesonbuild/scripts/gettext.py | 4 +-
 mesonbuild/scripts/gtkdochelper.py | 6 +-
 mesonbuild/scripts/meson_exe.py | 2 +-
 mesonbuild/scripts/symbolextractor.py | 2 +-
 mesonbuild/scripts/uninstall.py | 2 +-
 mesonbuild/scripts/yelphelper.py | 6 +-
 mesonbuild/templates/mesontemplates.py | 6 +-
 mesonbuild/wrap/wrap.py | 34 ++--
 mesonbuild/wrap/wraptool.py | 16 +-
 packaging/createmsi.py | 6 +-
run_meson_command_tests.py | 2 +- run_project_tests.py | 50 ++--- run_tests.py | 16 +- run_unittests.py | 210 ++++++++++----------- skip_ci.py | 6 +- test cases/common/106 generatorcustom/gen-resx.py | 2 +- test cases/common/14 configure file/check_file.py | 4 +- .../common/14 configure file/generator-deps.py | 2 +- .../140 mesonintrospect from scripts/check_env.py | 2 +- test cases/common/169 preserve gendir/genprog.py | 2 +- .../com/mesonbuild/genprog.py | 2 +- .../check_arch.py | 2 +- test cases/common/50 custom target/depfile/dep.py | 2 +- test cases/common/96 manygen/subdir/manygen.py | 16 +- test cases/python/2 extmodule/blaster.py | 2 +- .../4 custom target depends extmodule/blaster.py | 2 +- test cases/python3/2 extmodule/blaster.py | 2 +- .../4 custom target depends extmodule/blaster.py | 2 +- test cases/unit/39 python extmodule/blaster.py | 2 +- tools/ac_converter.py | 6 +- tools/cmake2meson.py | 24 +-- tools/dircondenser.py | 2 +- 109 files changed, 917 insertions(+), 917 deletions(-) diff --git a/docs/genrelnotes.py b/docs/genrelnotes.py index 70d8915..5dad924 100755 --- a/docs/genrelnotes.py +++ b/docs/genrelnotes.py @@ -50,7 +50,7 @@ def generate(from_version, to_version): ''' Generate notes for Meson build next release. ''' - ofilename = 'Release-notes-for-{}.md'.format(to_version) + ofilename = f'Release-notes-for-{to_version}.md' with open(ofilename, 'w') as ofile: ofile.write(RELNOTE_TEMPLATE.format(to_version, to_version)) for snippetfile in glob('snippets/*.md'): diff --git a/ghwt.py b/ghwt.py index 5a71a38..6f9373b 100755 --- a/ghwt.py +++ b/ghwt.py @@ -43,7 +43,7 @@ def list_projects(): def unpack(sproj, branch): tmpdir = os.path.join(spdir, sproj + '_ghwt') shutil.rmtree(tmpdir, ignore_errors=True) - subprocess.check_call(['git', 'clone', '-b', branch, 'https://github.com/mesonbuild/{}.git'.format(sproj), tmpdir]) + subprocess.check_call(['git', 'clone', '-b', branch, f'https://github.com/mesonbuild/{sproj}.git', tmpdir]) usfile = os.path.join(tmpdir, 'upstream.wrap') assert(os.path.isfile(usfile)) config = configparser.ConfigParser(interpolation=None) @@ -52,7 +52,7 @@ def unpack(sproj, branch): if 'directory' in config['wrap-file']: outdir = os.path.join(spdir, config['wrap-file']['directory']) if os.path.isdir(outdir): - print('Subproject is already there. To update, nuke the {} dir and reinstall.'.format(outdir)) + print(f'Subproject is already there. 
To update, nuke the {outdir} dir and reinstall.') shutil.rmtree(tmpdir) return 1 us_url = config['wrap-file']['source_url'] @@ -85,7 +85,7 @@ def install(sproj, requested_branch=None): if not os.path.isdir(spdir): print('Run this in your source root and make sure there is a subprojects directory in it.') return 1 - blist = gh_get('https://api.github.com/repos/mesonbuild/{}/branches'.format(sproj)) + blist = gh_get(f'https://api.github.com/repos/mesonbuild/{sproj}/branches') blist = [b['name'] for b in blist] blist = [b for b in blist if b != 'master'] blist.sort() diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py index 23b1924..e150d39 100644 --- a/mesonbuild/arglist.py +++ b/mesonbuild/arglist.py @@ -290,7 +290,7 @@ class CompilerArgs(collections.abc.MutableSequence): ''' tmp_pre = collections.deque() # type: T.Deque[str] if not isinstance(args, collections.abc.Iterable): - raise TypeError('can only concatenate Iterable[str] (not "{}") to CompilerArgs'.format(args)) + raise TypeError(f'can only concatenate Iterable[str] (not "{args}") to CompilerArgs') for arg in args: # If the argument can be de-duped, do it either by removing the # previous occurrence of it and adding a new one, or not adding the @@ -331,4 +331,4 @@ class CompilerArgs(collections.abc.MutableSequence): def __repr__(self) -> str: self.flush_pre_post() - return 'CompilerArgs({!r}, {!r})'.format(self.compiler, self._container) + return f'CompilerArgs({self.compiler!r}, {self._container!r})' diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py index 23c8427..73db15c 100644 --- a/mesonbuild/ast/interpreter.py +++ b/mesonbuild/ast/interpreter.py @@ -141,7 +141,7 @@ class AstInterpreter(interpreterbase.InterpreterBase): def func_subdir(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: args = self.flatten_args(args) if len(args) != 1 or not isinstance(args[0], str): - sys.stderr.write('Unable to evaluate subdir({}) in AstInterpreter --> Skipping\n'.format(args)) + sys.stderr.write(f'Unable to evaluate subdir({args}) in AstInterpreter --> Skipping\n') return prev_subdir = self.subdir @@ -156,7 +156,7 @@ class AstInterpreter(interpreterbase.InterpreterBase): self.visited_subdirs[symlinkless_dir] = True if not os.path.isfile(absname): - sys.stderr.write('Unable to find build file {} --> Skipping\n'.format(buildfilename)) + sys.stderr.write(f'Unable to find build file {buildfilename} --> Skipping\n') return with open(absname, encoding='utf8') as f: code = f.read() diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 2e1f081..96d2f46 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -65,7 +65,7 @@ class TestProtocol(enum.Enum): return cls.GTEST elif string == 'rust': return cls.RUST - raise MesonException('unknown test format {}'.format(string)) + raise MesonException(f'unknown test format {string}') def __str__(self) -> str: cls = type(self) @@ -282,14 +282,14 @@ class Backend: return os.path.join(self.get_target_dir(target), target.get_filename()) elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)): if not target.is_linkable_target(): - raise MesonException('Tried to link against custom target "{}", which is not linkable.'.format(target.name)) + raise MesonException(f'Tried to link against custom target "{target.name}", which is not linkable.') return os.path.join(self.get_target_dir(target), target.get_filename()) elif isinstance(target, build.Executable): if 
target.import_filename: return os.path.join(self.get_target_dir(target), target.get_import_filename()) else: return None - raise AssertionError('BUG: Tried to link to {!r} which is not linkable'.format(target)) + raise AssertionError(f'BUG: Tried to link to {target!r} which is not linkable') @lru_cache(maxsize=None) def get_target_dir(self, target): @@ -335,7 +335,7 @@ class Backend: def get_unity_source_file(self, target, suffix, number): # There is a potential conflict here, but it is unlikely that # anyone both enables unity builds and has a file called foo-unity.cpp. - osrc = '{}-unity{}.{}'.format(target.name, number, suffix) + osrc = f'{target.name}-unity{number}.{suffix}' return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc) def generate_unity_files(self, target, unity_src): @@ -368,7 +368,7 @@ class Backend: ofile = init_language_file(comp.get_default_suffix(), unity_file_number) unity_file_number += 1 files_in_current = 0 - ofile.write('#include<{}>\n'.format(src)) + ofile.write(f'#include<{src}>\n') files_in_current += 1 if ofile: ofile.close() @@ -505,7 +505,7 @@ class Backend: data = bytes(str(es.env) + str(es.cmd_args) + str(es.workdir) + str(capture), encoding='utf-8') digest = hashlib.sha1(data).hexdigest() - scratch_file = 'meson_exe_{}_{}.dat'.format(basename, digest) + scratch_file = f'meson_exe_{basename}_{digest}.dat' exe_data = os.path.join(self.environment.get_scratch_dir(), scratch_file) with open(exe_data, 'wb') as f: pickle.dump(es, f) @@ -575,7 +575,7 @@ class Backend: for dir in symbols_match.group(1).split(':'): # Prevent usage of --just-symbols to specify rpath if Path(dir).is_dir(): - raise MesonException('Invalid arg for --just-symbols, {} is a directory.'.format(dir)) + raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.') return dirs def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True): @@ -601,7 +601,7 @@ class Backend: continue if libdir.startswith(self.environment.get_source_dir()): rel_to_src = libdir[len(self.environment.get_source_dir()) + 1:] - assert not os.path.isabs(rel_to_src), 'rel_to_src: {} is absolute'.format(rel_to_src) + assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute' paths.append(os.path.join(self.build_to_src, rel_to_src)) else: paths.append(libdir) @@ -717,7 +717,7 @@ class Backend: def create_msvc_pch_implementation(self, target, lang, pch_header): # We have to include the language in the file name, otherwise # pch.c and pch.cpp will both end up as pch.obj in VS backends. - impl_name = 'meson_pch-{}.{}'.format(lang, lang) + impl_name = f'meson_pch-{lang}.{lang}' pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name) # Make sure to prepend the build dir, since the working directory is # not defined. Otherwise, we might create the file in the wrong path. @@ -833,7 +833,7 @@ class Backend: args = [] for d in deps: if not (d.is_linkable_target()): - raise RuntimeError('Tried to link with a non-library target "{}".'.format(d.get_basename())) + raise RuntimeError(f'Tried to link with a non-library target "{d.get_basename()}".') arg = self.get_target_filename_for_linking(d) if not arg: continue @@ -1011,7 +1011,7 @@ class Backend: # to the future by a minuscule amount, less than # 0.001 seconds. I don't know why. if delta > 0.001: - raise MesonException('Clock skew detected. File {} has a time stamp {:.4f}s in the future.'.format(absf, delta)) + raise MesonException(f'Clock skew detected. 
File {absf} has a time stamp {delta:.4f}s in the future.') def build_target_to_cmd_array(self, bt): if isinstance(bt, build.BuildTarget): @@ -1036,7 +1036,7 @@ class Backend: m = regex.search(arg) while m is not None: index = int(m.group(1)) - src = '@OUTPUT{}@'.format(index) + src = f'@OUTPUT{index}@' arg = arg.replace(src, os.path.join(private_dir, output_list[index])) m = regex.search(arg) newargs.append(arg) @@ -1239,7 +1239,7 @@ class Backend: for s in self.build.postconf_scripts: name = ' '.join(s.cmd_args) - mlog.log('Running postconf script {!r}'.format(name)) + mlog.log(f'Running postconf script {name!r}') run_exe(s, env) def create_install_data(self) -> InstallData: diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 240cc0a..819d478 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -64,7 +64,7 @@ def cmd_quote(s): # any terminal backslashes likewise need doubling s = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), s) # and double quote - s = '"{}"'.format(s) + s = f'"{s}"' return s @@ -238,7 +238,7 @@ class NinjaRule: yield '_RSP' for rsp in rule_iter(): - outfile.write('rule {}{}\n'.format(self.name, rsp)) + outfile.write(f'rule {self.name}{rsp}\n') if rsp == '_RSP': outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command]))) outfile.write(' rspfile = $out.rsp\n') @@ -246,10 +246,10 @@ class NinjaRule: else: outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in (self.command + self.args)]))) if self.deps: - outfile.write(' deps = {}\n'.format(self.deps)) + outfile.write(f' deps = {self.deps}\n') if self.depfile: - outfile.write(' depfile = {}\n'.format(self.depfile)) - outfile.write(' description = {}\n'.format(self.description)) + outfile.write(f' depfile = {self.depfile}\n') + outfile.write(f' description = {self.description}\n') if self.extra: for l in self.extra.split('\n'): outfile.write(' ') @@ -364,7 +364,7 @@ class NinjaBuildElement: mlog.debug("Command line for building %s is long, using a response file" % self.outfilenames) else: rulename = self.rulename - line = 'build {}{}: {} {}'.format(outs, implicit_outs, rulename, ins) + line = f'build {outs}{implicit_outs}: {rulename} {ins}' if len(self.deps) > 0: line += ' | ' + ' '.join([ninja_quote(x, True) for x in sorted(self.deps)]) if len(self.orderdeps) > 0: @@ -396,7 +396,7 @@ class NinjaBuildElement: for e in self.elems: (name, elems) = e should_quote = name not in raw_names - line = ' {} = '.format(name) + line = f' {name} = ' newelems = [] for i in elems: if not should_quote or i == '&&': # Hackety hack hack @@ -411,7 +411,7 @@ class NinjaBuildElement: def check_outputs(self): for n in self.outfilenames: if n in self.all_outputs: - raise MesonException('Multiple producers for Ninja target "{}". Please rename your targets.'.format(n)) + raise MesonException(f'Multiple producers for Ninja target "{n}". 
Please rename your targets.') self.all_outputs[n] = True class NinjaBackend(backends.Backend): @@ -511,7 +511,7 @@ int dummy; outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename) tempfilename = outfilename + '~' with open(tempfilename, 'w', encoding='utf-8') as outfile: - outfile.write('# This is the build file for project "{}"\n'.format(self.build.get_project())) + outfile.write(f'# This is the build file for project "{self.build.get_project()}"\n') outfile.write('# It is autogenerated by the Meson build system.\n') outfile.write('# Do not edit by hand.\n\n') outfile.write('ninja_required_version = 1.8.2\n\n') @@ -563,9 +563,9 @@ int dummy; # rule store as being wanted in compdb for for_machine in MachineChoice: for lang in self.environment.coredata.compilers[for_machine]: - rules += ["%s%s" % (rule, ext) for rule in [self.get_compiler_rule_name(lang, for_machine)] + rules += [f"{rule}{ext}" for rule in [self.get_compiler_rule_name(lang, for_machine)] for ext in ['', '_RSP']] - rules += ["%s%s" % (rule, ext) for rule in [self.get_pch_rule_name(lang, for_machine)] + rules += [f"{rule}{ext}" for rule in [self.get_pch_rule_name(lang, for_machine)] for ext in ['', '_RSP']] compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else [] ninja_compdb = self.ninja_command + ['-t', 'compdb'] + compdb_options + rules @@ -620,7 +620,7 @@ int dummy; # either in the source root, or generated with configure_file and # in the build root if not isinstance(s, File): - raise InvalidArguments('All sources in target {!r} must be of type mesonlib.File'.format(s)) + raise InvalidArguments(f'All sources in target {s!r} must be of type mesonlib.File') f = s.rel_to_builddir(self.build_to_src) srcs[f] = s return srcs @@ -962,7 +962,7 @@ int dummy; capture=ofilenames[0] if target.capture else None, env=target.env) if reason: - cmd_type = ' (wrapped by meson {})'.format(reason) + cmd_type = f' (wrapped by meson {reason})' else: cmd_type = '' if target.depfile is not None: @@ -980,10 +980,10 @@ int dummy; def build_run_target_name(self, target): if target.subproject != '': - subproject_prefix = '{}@@'.format(target.subproject) + subproject_prefix = f'{target.subproject}@@' else: subproject_prefix = '' - return '{}{}'.format(subproject_prefix, target.name) + return f'{subproject_prefix}{target.name}' def generate_run_target(self, target): target_name = self.build_run_target_name(target) @@ -998,8 +998,8 @@ int dummy; meson_exe_cmd, reason = self.as_meson_exe_cmdline(target_name, target.command[0], cmd[1:], force_serialize=True, env=target_env, verbose=True) - cmd_type = ' (wrapped by meson {})'.format(reason) - internal_target_name = 'meson-{}'.format(target_name) + cmd_type = f' (wrapped by meson {reason})' + internal_target_name = f'meson-{target_name}' elem = NinjaBuildElement(self.all_outputs, internal_target_name, 'CUSTOM_COMMAND', []) elem.add_item('COMMAND', meson_exe_cmd) elem.add_item('description', desc.format(target.name, cmd_type)) @@ -1141,7 +1141,7 @@ int dummy; def add_rule(self, rule): if rule.name in self.ruledict: - raise MesonException('Tried to add rule {} twice.'.format(rule.name)) + raise MesonException(f'Tried to add rule {rule.name} twice.') self.rules.append(rule) self.ruledict[rule.name] = rule @@ -1153,7 +1153,7 @@ int dummy; if build.rulename in self.ruledict: build.rule = self.ruledict[build.rulename] else: - mlog.warning("build statement for {} references non-existent rule {}".format(build.outfilenames, build.rulename)) + 
mlog.warning(f"build statement for {build.outfilenames} references non-existent rule {build.rulename}") def write_rules(self, outfile): for b in self.build_elements: @@ -1238,12 +1238,12 @@ int dummy; ofilename = os.path.join(self.get_target_private_dir(target), ofilebase) elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile) elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename]) - elem.add_item('DESC', 'Compiling resource {}'.format(rel_sourcefile)) + elem.add_item('DESC', f'Compiling resource {rel_sourcefile}') self.add_build(elem) deps.append(ofilename) a = '-resource:' + ofilename else: - raise InvalidArguments('Unknown resource file {}.'.format(r)) + raise InvalidArguments(f'Unknown resource file {r}.') args.append(a) return args, deps @@ -1546,13 +1546,13 @@ int dummy; main_rust_file = None for i in target.get_sources(): if not rustc.can_compile(i): - raise InvalidArguments('Rust target {} contains a non-rust source file.'.format(target.get_basename())) + raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.') if main_rust_file is None: main_rust_file = i.rel_to_builddir(self.build_to_src) for g in target.get_generated_sources(): for i in g.get_outputs(): if not rustc.can_compile(i): - raise InvalidArguments('Rust target {} contains a non-rust source file.'.format(target.get_basename())) + raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.') if isinstance(g, GeneratedList): fname = os.path.join(self.get_target_private_dir(target), i) else: @@ -1581,7 +1581,7 @@ int dummy; # to be -C link-arg=foo if cratetype in {'bin', 'dylib'}: for a in rustc.linker.get_always_args(): - args += ['-C', 'link-arg={}'.format(a)] + args += ['-C', f'link-arg={a}'] opt_proxy = self.get_compiler_options_for_target(target) @@ -1593,7 +1593,7 @@ int dummy; args += self.build.get_global_args(rustc, target.for_machine) args += self.build.get_project_args(rustc, target.subproject, target.for_machine) depfile = os.path.join(target.subdir, target.name + '.d') - args += ['--emit', 'dep-info={}'.format(depfile), '--emit', 'link'] + args += ['--emit', f'dep-info={depfile}', '--emit', 'link'] args += target.get_extra_args('rust') args += rustc.get_output_args(os.path.join(target.subdir, target.get_filename())) args += self.environment.coredata.get_external_args(target.for_machine, rustc.language) @@ -1754,7 +1754,7 @@ int dummy; abs_headers.append(absh) header_imports += swiftc.get_header_import_args(absh) else: - raise InvalidArguments('Swift target {} contains a non-swift source file.'.format(target.get_basename())) + raise InvalidArguments(f'Swift target {target.get_basename()} contains a non-swift source file.') os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True) compile_args = swiftc.get_compile_only_args() compile_args += swiftc.get_optimization_args(self.get_option_for_target(OptionKey('optimization'), target)) @@ -1954,7 +1954,7 @@ int dummy; def generate_fortran_dep_hack(self, crstr: str) -> None: if self.use_dyndeps_for_fortran(): return - rule = 'FORTRAN_DEP_HACK{}'.format(crstr) + rule = f'FORTRAN_DEP_HACK{crstr}' if mesonlib.is_windows(): cmd = ['cmd', '/C'] else: @@ -2001,7 +2001,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none) command = compiler.get_exelist() args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), 
Quoting.none) + compiler.get_compile_only_args() + ['$in'] - description = 'Compiling {} object $out'.format(compiler.get_display_language()) + description = f'Compiling {compiler.get_display_language()} object $out' if isinstance(compiler, VisualStudioLikeCompiler): deps = 'msvc' depfile = None @@ -2094,7 +2094,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) if len(generator.outputs) == 1: sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i]) else: - sole_output = '{}'.format(curfile) + sole_output = f'{curfile}' infilename = curfile.rel_to_builddir(self.build_to_src) base_args = generator.get_arglist(infilename) outfiles = genlist.get_outputs_for(curfile) @@ -2129,13 +2129,13 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) elem.add_dep(extra_dependencies) if len(generator.outputs) == 1: - what = '{!r}'.format(sole_output) + what = f'{sole_output!r}' else: # since there are multiple outputs, we log the source that caused the rebuild - what = 'from {!r}.'.format(sole_output) + what = f'from {sole_output!r}.' if reason: - reason = ' (wrapped by meson {})'.format(reason) - elem.add_item('DESC', 'Generating {}{}.'.format(what, reason)) + reason = f' (wrapped by meson {reason})' + elem.add_item('DESC', f'Generating {what}{reason}.') if isinstance(exe, build.BuildTarget): elem.add_dep(self.get_target_filename(exe)) @@ -2305,7 +2305,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) elif isinstance(src, File): rel_src = src.rel_to_builddir(self.build_to_src) else: - raise InvalidArguments('Invalid source type: {!r}'.format(src)) + raise InvalidArguments(f'Invalid source type: {src!r}') # Write the Ninja build command compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) @@ -2437,7 +2437,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) order_deps = order_deps if order_deps is not None else [] if isinstance(src, str) and src.endswith('.h'): - raise AssertionError('BUG: sources should not contain headers {!r}'.format(src)) + raise AssertionError(f'BUG: sources should not contain headers {src!r}') compiler = get_compiler_for_source(target.compilers.values(), src) commands = self._generate_single_compile(target, compiler, is_generated) @@ -2459,9 +2459,9 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) assert rel_src.startswith(build_dir) rel_src = rel_src[len(build_dir) + 1:] elif is_generated: - raise AssertionError('BUG: broken generated source file handling for {!r}'.format(src)) + raise AssertionError(f'BUG: broken generated source file handling for {src!r}') else: - raise InvalidArguments('Invalid source type: {!r}'.format(src)) + raise InvalidArguments(f'Invalid source type: {src!r}') obj_basename = self.object_filename_from_source(target, src) rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) dep_file = compiler.depfile_for_object(rel_obj) @@ -2808,7 +2808,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) cmd = self.replace_paths(target, cmd) elem.add_item('COMMAND', cmd) - elem.add_item('description', 'Prelinking {}.'.format(prelink_name)) + elem.add_item('description', f'Prelinking {prelink_name}.') self.add_build(elem) return [prelink_name] diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 82891cd..93b7f78 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -233,7 +233,7 @@ class 
Vs2010Backend(backends.Backend): target_arch = os.environ.get('Platform', 'x86') host_arch = target_arch arch = host_arch + '_' + target_arch if host_arch != target_arch else target_arch - return '"%s" %s' % (script_path, arch) + return f'"{script_path}" {arch}' # Otherwise try the VS2017 Developer Command Prompt. if 'VS150COMNTOOLS' in os.environ and has_arch_values: @@ -405,10 +405,10 @@ class Vs2010Backend(backends.Backend): 'preSolution\n') for p in projlist: if p[1].parent != PurePath('.'): - ofile.write("\t\t{%s} = {%s}\n" % (p[2], self.subdirs[p[1].parent][0])) + ofile.write("\t\t{{{}}} = {{{}}}\n".format(p[2], self.subdirs[p[1].parent][0])) for subdir in self.subdirs.values(): if subdir[1]: - ofile.write("\t\t{%s} = {%s}\n" % (subdir[0], subdir[1])) + ofile.write("\t\t{{{}}} = {{{}}}\n".format(subdir[0], subdir[1])) ofile.write('\tEndGlobalSection\n') ofile.write('EndGlobal\n') replace_if_different(sln_filename, sln_filename_tmp) @@ -690,7 +690,7 @@ class Vs2010Backend(backends.Backend): # kidding, this is how escaping works for process args on Windows. if option.endswith('\\'): option += '\\' - return '"{}"'.format(option) + return f'"{option}"' @staticmethod def split_link_args(args): diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index 7ee4e80..d87188e 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -118,7 +118,7 @@ class XCodeBackend(backends.Backend): xcodetype = self.xcodetypemap.get(fname.split('.')[-1].lower()) if not xcodetype: xcodetype = 'sourcecode.unknown' - mlog.warning('Unknown file type "%s" fallbacking to "%s". Xcode project might be malformed.' % (fname, xcodetype)) + mlog.warning(f'Unknown file type "{fname}" fallbacking to "{xcodetype}". Xcode project might be malformed.') return xcodetype def generate_filemap(self): @@ -228,10 +228,10 @@ class XCodeBackend(backends.Backend): buildconf_id = t[2] build_phases = t[3] dependencies = t[4] - self.write_line('%s /* %s */ = {' % (t[0], name)) + self.write_line('{} /* {} */ = {{'.format(t[0], name)) self.indent_level += 1 self.write_line('isa = PBXAggregateTarget;') - self.write_line('buildConfigurationList = %s /* Build configuration list for PBXAggregateTarget "%s" */;' % (buildconf_id, name)) + self.write_line(f'buildConfigurationList = {buildconf_id} /* Build configuration list for PBXAggregateTarget "{name}" */;') self.write_line('buildPhases = (') self.indent_level += 1 for bp in build_phases: @@ -260,7 +260,7 @@ class XCodeBackend(backends.Backend): for dep in t.get_external_deps(): if isinstance(dep, dependencies.AppleFrameworks): for f in dep.frameworks: - self.write_line('%s /* %s.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = %s /* %s.framework */; };\n' % (self.native_frameworks[f], f, self.native_frameworks_fileref[f], f)) + self.write_line('{} /* {}.framework in Frameworks */ = {{isa = PBXBuildFile; fileRef = {} /* {}.framework */; }};\n'.format(self.native_frameworks[f], f, self.native_frameworks_fileref[f], f)) for s in t.sources: if isinstance(s, mesonlib.File): @@ -287,7 +287,7 @@ class XCodeBackend(backends.Backend): # FIXME: Xcode 9 and later does not uses PBXBuildStyle and it gets removed. Maybe we can remove this part. 
self.ofile.write('\n/* Begin PBXBuildStyle section */\n') for name, idval in self.buildstylemap.items(): - self.write_line('%s /* %s */ = {\n' % (idval, name)) + self.write_line(f'{idval} /* {name} */ = {{\n') self.indent_level += 1 self.write_line('isa = PBXBuildStyle;\n') self.write_line('buildSettings = {\n') @@ -320,7 +320,7 @@ class XCodeBackend(backends.Backend): for dep in t.get_external_deps(): if isinstance(dep, dependencies.AppleFrameworks): for f in dep.frameworks: - self.write_line('%s /* %s.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = %s.framework; path = System/Library/Frameworks/%s.framework; sourceTree = SDKROOT; };\n' % (self.native_frameworks_fileref[f], f, f, f)) + self.write_line('{} /* {}.framework */ = {{isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = {}.framework; path = System/Library/Frameworks/{}.framework; sourceTree = SDKROOT; }};\n'.format(self.native_frameworks_fileref[f], f, f, f)) src_templ = '%s /* %s */ = { isa = PBXFileReference; explicitFileType = "%s"; fileEncoding = 4; name = "%s"; path = "%s"; sourceTree = SOURCE_ROOT; };\n' for fname, idval in self.filemap.items(): fullpath = os.path.join(self.environment.get_source_dir(), fname) @@ -348,7 +348,7 @@ class XCodeBackend(backends.Backend): def generate_pbx_frameworks_buildphase(self): for t in self.build.get_build_targets().values(): self.ofile.write('\n/* Begin PBXFrameworksBuildPhase section */\n') - self.write_line('%s /* %s */ = {\n' % (t.buildphasemap['Frameworks'], 'Frameworks')) + self.write_line('{} /* {} */ = {{\n'.format(t.buildphasemap['Frameworks'], 'Frameworks')) self.indent_level += 1 self.write_line('isa = PBXFrameworksBuildPhase;\n') self.write_line('buildActionMask = %s;\n' % (2147483647)) @@ -357,7 +357,7 @@ class XCodeBackend(backends.Backend): for dep in t.get_external_deps(): if isinstance(dep, dependencies.AppleFrameworks): for f in dep.frameworks: - self.write_line('%s /* %s.framework in Frameworks */,\n' % (self.native_frameworks[f], f)) + self.write_line('{} /* {}.framework in Frameworks */,\n'.format(self.native_frameworks[f], f)) self.indent_level -= 1 self.write_line(');\n') self.write_line('runOnlyForDeploymentPostprocessing = 0;\n') @@ -398,7 +398,7 @@ class XCodeBackend(backends.Backend): self.write_line('children = (') self.indent_level += 1 for t in self.build.get_build_targets(): - self.write_line('%s /* %s */,' % (groupmap[t], t)) + self.write_line('{} /* {} */,'.format(groupmap[t], t)) self.indent_level -= 1 self.write_line(');') self.write_line('name = Sources;') @@ -427,7 +427,7 @@ class XCodeBackend(backends.Backend): for dep in t.get_external_deps(): if isinstance(dep, dependencies.AppleFrameworks): for f in dep.frameworks: - self.write_line('%s /* %s.framework */,\n' % (self.native_frameworks_fileref[f], f)) + self.write_line('{} /* {}.framework */,\n'.format(self.native_frameworks_fileref[f], f)) self.indent_level -= 1 self.write_line(');') @@ -438,7 +438,7 @@ class XCodeBackend(backends.Backend): # Targets for t in self.build.get_build_targets(): - self.write_line('%s /* %s */ = {' % (groupmap[t], t)) + self.write_line('{} /* {} */ = {{'.format(groupmap[t], t)) self.indent_level += 1 self.write_line('isa = PBXGroup;') self.write_line('children = (') @@ -458,10 +458,10 @@ class XCodeBackend(backends.Backend): for s in self.build.get_build_targets()[t].sources: s = os.path.join(s.subdir, s.fname) if isinstance(s, str): - self.write_line('%s /* %s */,' % (self.filemap[s], s)) + self.write_line('{} /* 
{} */,'.format(self.filemap[s], s)) for o in self.build.get_build_targets()[t].objects: o = os.path.join(self.build.get_build_targets()[t].subdir, o) - self.write_line('%s /* %s */,' % (self.filemap[o], o)) + self.write_line('{} /* {} */,'.format(self.filemap[o], o)) self.indent_level -= 1 self.write_line(');') self.write_line('name = "Source files";') @@ -476,7 +476,7 @@ class XCodeBackend(backends.Backend): self.write_line('children = (') self.indent_level += 1 for t in self.build.get_build_targets(): - self.write_line('%s /* %s */,' % (self.target_filemap[t], t)) + self.write_line('{} /* {} */,'.format(self.target_filemap[t], t)) self.indent_level -= 1 self.write_line(');') self.write_line('name = Products;') @@ -489,7 +489,7 @@ class XCodeBackend(backends.Backend): self.ofile.write('\n/* Begin PBXNativeTarget section */\n') for tname, idval in self.native_targets.items(): t = self.build.get_build_targets()[tname] - self.write_line('%s /* %s */ = {' % (idval, tname)) + self.write_line(f'{idval} /* {tname} */ = {{') self.indent_level += 1 self.write_line('isa = PBXNativeTarget;') self.write_line('buildConfigurationList = %s /* Build configuration list for PBXNativeTarget "%s" */;' @@ -497,7 +497,7 @@ class XCodeBackend(backends.Backend): self.write_line('buildPhases = (') self.indent_level += 1 for bpname, bpval in t.buildphasemap.items(): - self.write_line('%s /* %s yyy */,' % (bpval, bpname)) + self.write_line(f'{bpval} /* {bpname} yyy */,') self.indent_level -= 1 self.write_line(');') self.write_line('buildRules = (') @@ -513,7 +513,7 @@ class XCodeBackend(backends.Backend): self.write_line(");") self.write_line('name = "%s";' % tname) self.write_line('productName = "%s";' % tname) - self.write_line('productReference = %s /* %s */;' % (self.target_filemap[tname], tname)) + self.write_line('productReference = {} /* {} */;'.format(self.target_filemap[tname], tname)) if isinstance(t, build.Executable): typestr = 'com.apple.product-type.tool' elif isinstance(t, build.StaticLibrary): @@ -544,7 +544,7 @@ class XCodeBackend(backends.Backend): self.write_line('buildStyles = (') self.indent_level += 1 for name, idval in self.buildstylemap.items(): - self.write_line('%s /* %s */,' % (idval, name)) + self.write_line(f'{idval} /* {name} */,') self.indent_level -= 1 self.write_line(');') self.write_line('compatibilityVersion = "Xcode 3.2";') @@ -557,7 +557,7 @@ class XCodeBackend(backends.Backend): self.write_line('%s /* ALL_BUILD */,' % self.all_id) self.write_line('%s /* RUN_TESTS */,' % self.test_id) for t in self.build.get_build_targets(): - self.write_line('%s /* %s */,' % (self.native_targets[t], t)) + self.write_line('{} /* {} */,'.format(self.native_targets[t], t)) self.indent_level -= 1 self.write_line(');') self.indent_level -= 1 @@ -599,7 +599,7 @@ class XCodeBackend(backends.Backend): for s in self.build.get_build_targets()[name].sources: s = os.path.join(s.subdir, s.fname) if not self.environment.is_header(s): - self.write_line('%s /* %s */,' % (self.buildmap[s], os.path.join(self.environment.get_source_dir(), s))) + self.write_line('{} /* {} */,'.format(self.buildmap[s], os.path.join(self.environment.get_source_dir(), s))) self.indent_level -= 1 self.write_line(');') self.write_line('runOnlyForDeploymentPostprocessing = 0;') @@ -620,7 +620,7 @@ class XCodeBackend(backends.Backend): self.write_line('%s /* PBXTargetDependency */ = {' % t[0]) self.indent_level += 1 self.write_line('isa = PBXTargetDependency;') - self.write_line('target = %s /* %s */;' % (t[1], t[2])) + 
self.write_line('target = {} /* {} */;'.format(t[1], t[2])) self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % t[3]) self.indent_level -= 1 self.write_line('};') @@ -630,7 +630,7 @@ class XCodeBackend(backends.Backend): self.ofile.write('\n/* Begin XCBuildConfiguration section */\n') # First the setup for the toplevel project. for buildtype in self.buildtypes: - self.write_line('%s /* %s */ = {' % (self.project_configurations[buildtype], buildtype)) + self.write_line('{} /* {} */ = {{'.format(self.project_configurations[buildtype], buildtype)) self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') @@ -647,7 +647,7 @@ class XCodeBackend(backends.Backend): # Then the all target. for buildtype in self.buildtypes: - self.write_line('%s /* %s */ = {' % (self.buildall_configurations[buildtype], buildtype)) + self.write_line('{} /* {} */ = {{'.format(self.buildall_configurations[buildtype], buildtype)) self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') @@ -675,7 +675,7 @@ class XCodeBackend(backends.Backend): # Then the test target. for buildtype in self.buildtypes: - self.write_line('%s /* %s */ = {' % (self.test_configurations[buildtype], buildtype)) + self.write_line('{} /* {} */ = {{'.format(self.test_configurations[buildtype], buildtype)) self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') @@ -763,7 +763,7 @@ class XCodeBackend(backends.Backend): langargs[langname] = args langargs[langname] += lang_cargs symroot = os.path.join(self.environment.get_build_dir(), target.subdir) - self.write_line('%s /* %s */ = {' % (valid, buildtype)) + self.write_line(f'{valid} /* {buildtype} */ = {{') self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') @@ -789,7 +789,7 @@ class XCodeBackend(backends.Backend): pchs = [pch for pch in pchs if pch.endswith('.h') or pch.endswith('.hh') or pch.endswith('hpp')] if pchs: if len(pchs) > 1: - mlog.warning('Unsupported Xcode configuration: More than 1 precompiled header found "%s". Target "%s" might not compile correctly.' % (str(pchs), target.name)) + mlog.warning('Unsupported Xcode configuration: More than 1 precompiled header found "{}". 
Target "{}" might not compile correctly.'.format(str(pchs), target.name)) relative_pch_path = os.path.join(target.get_subdir(), pchs[0]) # Path relative to target so it can be used with "$(PROJECT_DIR)" self.write_line('GCC_PRECOMPILE_PREFIX_HEADER = YES;') self.write_line('GCC_PREFIX_HEADER = "$(PROJECT_DIR)/%s";' % relative_pch_path) @@ -822,13 +822,13 @@ class XCodeBackend(backends.Backend): def generate_xc_configurationList(self): # FIXME: sort items self.ofile.write('\n/* Begin XCConfigurationList section */\n') - self.write_line('%s /* Build configuration list for PBXProject "%s" */ = {' % (self.project_conflist, self.build.project_name)) + self.write_line(f'{self.project_conflist} /* Build configuration list for PBXProject "{self.build.project_name}" */ = {{') self.indent_level += 1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') self.indent_level += 1 for buildtype in self.buildtypes: - self.write_line('%s /* %s */,' % (self.project_configurations[buildtype], buildtype)) + self.write_line('{} /* {} */,'.format(self.project_configurations[buildtype], buildtype)) self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') @@ -843,7 +843,7 @@ class XCodeBackend(backends.Backend): self.write_line('buildConfigurations = (') self.indent_level += 1 for buildtype in self.buildtypes: - self.write_line('%s /* %s */,' % (self.buildall_configurations[buildtype], buildtype)) + self.write_line('{} /* {} */,'.format(self.buildall_configurations[buildtype], buildtype)) self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') @@ -858,7 +858,7 @@ class XCodeBackend(backends.Backend): self.write_line('buildConfigurations = (') self.indent_level += 1 for buildtype in self.buildtypes: - self.write_line('%s /* %s */,' % (self.test_configurations[buildtype], buildtype)) + self.write_line('{} /* {} */,'.format(self.test_configurations[buildtype], buildtype)) self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') @@ -868,14 +868,14 @@ class XCodeBackend(backends.Backend): for target_name in self.build.get_build_targets(): listid = self.buildconflistmap[target_name] - self.write_line('%s /* Build configuration list for PBXNativeTarget "%s" */ = {' % (listid, target_name)) + self.write_line(f'{listid} /* Build configuration list for PBXNativeTarget "{target_name}" */ = {{') self.indent_level += 1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') self.indent_level += 1 typestr = 'debug' idval = self.buildconfmap[target_name][typestr] - self.write_line('%s /* %s */,' % (idval, typestr)) + self.write_line(f'{idval} /* {typestr} */,') self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') @@ -890,9 +890,9 @@ class XCodeBackend(backends.Backend): value = flag_values[0] if (' ' in value): # If path contains spaces surround it with double colon - self.write_line('%s = "\\"%s\\"";' % (flag_name, value)) + self.write_line(f'{flag_name} = "\\"{value}\\"";') else: - self.write_line('%s = "%s";' % (flag_name, value)) + self.write_line(f'{flag_name} = "{value}";') else: self.write_line('%s = (' % flag_name) self.indent_level += 1 diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 77dfdaa..620f40f 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -628,7 +628,7 @@ class BuildTarget(Target): self.check_unknown_kwargs(kwargs) self.process_compilers() if not 
any([self.sources, self.generated, self.objects, self.link_whole]): - raise InvalidArguments('Build target {} has no sources.'.format(name)) + raise InvalidArguments(f'Build target {name} has no sources.') self.process_compilers_late() self.validate_sources() self.validate_install(environment) @@ -639,7 +639,7 @@ class BuildTarget(Target): return repr_str.format(self.__class__.__name__, self.get_id(), self.filename) def __str__(self): - return "{}".format(self.name) + return f"{self.name}" def validate_install(self, environment): if self.for_machine is MachineChoice.BUILD and self.need_install: @@ -668,7 +668,7 @@ class BuildTarget(Target): self.objects.append(s) elif isinstance(s, (GeneratedList, CustomTarget)): msg = 'Generated files are not allowed in the \'objects\' kwarg ' + \ - 'for target {!r}.\nIt is meant only for '.format(self.name) + \ + f'for target {self.name!r}.\nIt is meant only for ' + \ 'pre-built object files that are shipped with the\nsource ' + \ 'tree. Try adding it in the list of sources.' raise InvalidArguments(msg) @@ -808,7 +808,7 @@ class BuildTarget(Target): check_sources = list(self.sources) compiler = self.compilers[lang] if not self.can_compile_remove_sources(compiler, check_sources): - m = 'No {} sources found in target {!r}'.format(lang, self.name) + m = f'No {lang} sources found in target {self.name!r}' raise InvalidArguments(m) if check_sources: m = '{0} targets can only contain {0} files:\n'.format(lang.capitalize()) @@ -885,7 +885,7 @@ class BuildTarget(Target): raise MesonException('Object extraction arguments must be strings or Files.') # FIXME: It could be a generated source if src not in sources_set: - raise MesonException('Tried to extract unknown source {}.'.format(src)) + raise MesonException(f'Tried to extract unknown source {src}.') obj_src.append(src) return ExtractedObjects(self, obj_src) @@ -1035,7 +1035,7 @@ This will become a hard error in a future Meson release.''') assert(isinstance(i, File)) trial = os.path.join(environment.get_source_dir(), i.subdir, i.fname) if not(os.path.isfile(trial)): - raise InvalidArguments('Tried to add non-existing extra file {}.'.format(i)) + raise InvalidArguments(f'Tried to add non-existing extra file {i}.') self.extra_files = extra_files self.install_rpath: str = kwargs.get('install_rpath', '') if not isinstance(self.install_rpath, str): @@ -1049,7 +1049,7 @@ This will become a hard error in a future Meson release.''') raise InvalidArguments('Resource argument is not a string.') trial = os.path.join(environment.get_source_dir(), self.subdir, r) if not os.path.isfile(trial): - raise InvalidArguments('Tried to add non-existing resource {}.'.format(r)) + raise InvalidArguments(f'Tried to add non-existing resource {r}.') self.resources = resources if 'name_prefix' in kwargs: name_prefix = kwargs['name_prefix'] @@ -1104,7 +1104,7 @@ This will become a hard error in a future Meson release.''') def validate_win_subsystem(self, value: str) -> str: value = value.lower() if re.fullmatch(r'(boot_application|console|efi_application|efi_boot_service_driver|efi_rom|efi_runtime_driver|native|posix|windows)(,\d+(\.\d+)?)?', value) is None: - raise InvalidArguments('Invalid value for win_subsystem: {}.'.format(value)) + raise InvalidArguments(f'Invalid value for win_subsystem: {value}.') return value def _extract_pic_pie(self, kwargs, arg: str, environment, option: str): @@ -1123,7 +1123,7 @@ This will become a hard error in a future Meson release.''') val = False if not isinstance(val, bool): - raise 
InvalidArguments('Argument {} to {!r} must be boolean'.format(arg, self.name)) + raise InvalidArguments(f'Argument {arg} to {self.name!r} must be boolean') return val def get_filename(self): @@ -1238,15 +1238,15 @@ You probably should put it in link_with instead.''') # internal/convenience library, promote to link_whole. return self.link_whole(t) if not isinstance(t, (Target, CustomTargetIndex)): - raise InvalidArguments('{!r} is not a target.'.format(t)) + raise InvalidArguments(f'{t!r} is not a target.') if not t.is_linkable_target(): - raise InvalidArguments("Link target '{!s}' is not linkable.".format(t)) + raise InvalidArguments(f"Link target '{t!s}' is not linkable.") if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic: - msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name) + msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. " msg += "Use the 'pic' option to static_library to build with PIC." raise InvalidArguments(msg) if self.for_machine is not t.for_machine: - msg = 'Tried to mix libraries for machines {} and {} in target {!r}'.format(self.for_machine, t.for_machine, self.name) + msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}' if self.environment.is_cross_build(): raise InvalidArguments(msg + ' This is not possible in a cross build.') else: @@ -1257,20 +1257,20 @@ You probably should put it in link_with instead.''') for t in unholder(listify(target)): if isinstance(t, (CustomTarget, CustomTargetIndex)): if not t.is_linkable_target(): - raise InvalidArguments('Custom target {!r} is not linkable.'.format(t)) + raise InvalidArguments(f'Custom target {t!r} is not linkable.') if not t.get_filename().endswith('.a'): raise InvalidArguments('Can only link_whole custom targets that are .a archives.') if isinstance(self, StaticLibrary): # FIXME: We could extract the .a archive to get object files raise InvalidArguments('Cannot link_whole a custom target into a static library') elif not isinstance(t, StaticLibrary): - raise InvalidArguments('{!r} is not a static library.'.format(t)) + raise InvalidArguments(f'{t!r} is not a static library.') elif isinstance(self, SharedLibrary) and not t.pic: - msg = "Can't link non-PIC static library {!r} into shared library {!r}. ".format(t.name, self.name) + msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. " msg += "Use the 'pic' option to static_library to build with PIC." 
raise InvalidArguments(msg) if self.for_machine is not t.for_machine: - msg = 'Tried to mix libraries for machines {1} and {2} in target {0!r}'.format(self.name, self.for_machine, t.for_machine) + msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}' if self.environment.is_cross_build(): raise InvalidArguments(msg + ' This is not possible in a cross build.') else: @@ -1315,7 +1315,7 @@ You probably should put it in link_with instead.''') if not isinstance(f, str): raise MesonException('PCH arguments must be strings.') if not os.path.isfile(os.path.join(self.environment.source_dir, self.subdir, f)): - raise MesonException('File {} does not exist.'.format(f)) + raise MesonException(f'File {f} does not exist.') self.pch[language] = pchlist def add_include_dirs(self, args, set_is_system: T.Optional[str] = None): @@ -1387,7 +1387,7 @@ You probably should put it in link_with instead.''') 'Requires a compiler for language "{}", but that is not ' 'a project language.'.format(self.name, l)) return prelinker - raise MesonException('Could not determine prelinker for {!r}.'.format(self.name)) + raise MesonException(f'Could not determine prelinker for {self.name!r}.') def get_clink_dynamic_linker_and_stdlibs(self): ''' @@ -1587,7 +1587,7 @@ class Generator: elif isinstance(e, str): fs = [File.from_source_file(state.environment.source_dir, state.subdir, e)] elif not isinstance(e, File): - raise InvalidArguments('{} arguments must be strings, files or CustomTargets, not {!r}.'.format(name, e)) + raise InvalidArguments(f'{name} arguments must be strings, files or CustomTargets, not {e!r}.') for f in fs: if preserve_path_from: @@ -1597,7 +1597,7 @@ class Generator: output.add_file(f, state) if new: FeatureNew.single_use( - 'Calling "{}" with CustomTaget or Index of CustomTarget.'.format(name), + f'Calling "{name}" with CustomTaget or Index of CustomTarget.', '0.57.0', state.subproject) return output @@ -1723,8 +1723,8 @@ class Executable(BuildTarget): if not isinstance(kwargs.get('implib', False), bool): implib_basename = kwargs['implib'] if m.is_windows() or m.is_cygwin(): - self.vs_import_filename = '{}.lib'.format(implib_basename) - self.gcc_import_filename = 'lib{}.a'.format(implib_basename) + self.vs_import_filename = f'{implib_basename}.lib' + self.gcc_import_filename = f'lib{implib_basename}.a' if self.get_using_msvc(): self.import_filename = self.vs_import_filename else: @@ -1787,7 +1787,7 @@ class StaticLibrary(BuildTarget): self.rust_crate_type = 'rlib' # Don't let configuration proceed with a non-static crate type elif self.rust_crate_type not in ['rlib', 'staticlib']: - raise InvalidArguments('Crate type "{}" invalid for static libraries; must be "rlib" or "staticlib"'.format(self.rust_crate_type)) + raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for static libraries; must be "rlib" or "staticlib"') # By default a static library is named libfoo.a even on Windows because # MSVC does not have a consistent convention for what static libraries # are called. 
The MSVC CRT uses libfoo.lib syntax but nothing else uses @@ -1828,7 +1828,7 @@ class StaticLibrary(BuildTarget): if isinstance(rust_crate_type, str): self.rust_crate_type = rust_crate_type else: - raise InvalidArguments('Invalid rust_crate_type "{}": must be a string.'.format(rust_crate_type)) + raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.') def is_linkable_target(self): return True @@ -1859,7 +1859,7 @@ class SharedLibrary(BuildTarget): self.rust_crate_type = 'dylib' # Don't let configuration proceed with a non-dynamic crate type elif self.rust_crate_type not in ['dylib', 'cdylib']: - raise InvalidArguments('Crate type "{}" invalid for dynamic libraries; must be "dylib" or "cdylib"'.format(self.rust_crate_type)) + raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for dynamic libraries; must be "dylib" or "cdylib"') if not hasattr(self, 'prefix'): self.prefix = None if not hasattr(self, 'suffix'): @@ -1925,7 +1925,7 @@ class SharedLibrary(BuildTarget): # Shared library is of the form foo.dll prefix = '' # Import library is called foo.dll.lib - self.import_filename = '{}.dll.lib'.format(self.name) + self.import_filename = f'{self.name}.dll.lib' create_debug_file = True elif self.get_using_msvc(): # Shared library is of the form foo.dll @@ -2045,7 +2045,7 @@ class SharedLibrary(BuildTarget): if not isinstance(self.ltversion, str): raise InvalidArguments('Shared library version needs to be a string, not ' + type(self.ltversion).__name__) if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', self.ltversion): - raise InvalidArguments('Invalid Shared library version "{}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.'.format(self.ltversion)) + raise InvalidArguments(f'Invalid Shared library version "{self.ltversion}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.') # Try to extract/deduce the soversion if 'soversion' in kwargs: self.soversion = kwargs['soversion'] @@ -2092,7 +2092,7 @@ class SharedLibrary(BuildTarget): if isinstance(rust_crate_type, str): self.rust_crate_type = rust_crate_type else: - raise InvalidArguments('Invalid rust_crate_type "{}": must be a string.'.format(rust_crate_type)) + raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.') def get_import_filename(self): """ @@ -2195,7 +2195,7 @@ class CommandBase: elif isinstance(c, list): final_cmd += self.flatten_command(c) else: - raise InvalidArguments('Argument {!r} in "command" is invalid'.format(c)) + raise InvalidArguments(f'Argument {c!r} in "command" is invalid') return final_cmd class CustomTarget(Target, CommandBase): @@ -2287,7 +2287,7 @@ class CustomTarget(Target, CommandBase): if i.strip() == '': raise InvalidArguments('Output must not consist only of whitespace.') if has_path_sep(i): - raise InvalidArguments('Output {!r} must not contain a path segment.'.format(i)) + raise InvalidArguments(f'Output {i!r} must not contain a path segment.') if '@INPUT@' in i or '@INPUT0@' in i: m = 'Output cannot contain @INPUT@ or @INPUT0@, did you ' \ 'mean @PLAINNAME@ or @BASENAME@?' 
@@ -2507,10 +2507,10 @@ class Jar(BuildTarget): super().__init__(name, subdir, subproject, for_machine, sources, objects, environment, kwargs) for s in self.sources: if not s.endswith('.java'): - raise InvalidArguments('Jar source {} is not a java file.'.format(s)) + raise InvalidArguments(f'Jar source {s} is not a java file.') for t in self.link_targets: if not isinstance(t, Jar): - raise InvalidArguments('Link target {} is not a jar target.'.format(t)) + raise InvalidArguments(f'Link target {t} is not a jar target.') self.filename = self.name + '.jar' self.outputs = [self.filename] self.java_args = kwargs.get('java_args', []) @@ -2675,13 +2675,13 @@ def get_sources_string_names(sources, backend): elif isinstance(s, File): names.append(s.fname) else: - raise AssertionError('Unknown source type: {!r}'.format(s)) + raise AssertionError(f'Unknown source type: {s!r}') return names def load(build_dir: str) -> Build: filename = os.path.join(build_dir, 'meson-private', 'build.dat') - load_fail_msg = 'Build data file {!r} is corrupted. Try with a fresh build tree.'.format(filename) - nonexisting_fail_msg = 'No such build data file as "{!r}".'.format(filename) + load_fail_msg = f'Build data file {filename!r} is corrupted. Try with a fresh build tree.' + nonexisting_fail_msg = f'No such build data file as "{filename!r}".' try: with open(filename, 'rb') as f: obj = pickle.load(f) diff --git a/mesonbuild/cmake/client.py b/mesonbuild/cmake/client.py index eeaab57..bcbb52e 100644 --- a/mesonbuild/cmake/client.py +++ b/mesonbuild/cmake/client.py @@ -69,7 +69,7 @@ class RequestBase(MessageBase): @staticmethod def gen_cookie() -> str: RequestBase.cookie_counter += 1 - return 'meson_{}'.format(RequestBase.cookie_counter) + return f'meson_{RequestBase.cookie_counter}' class ReplyBase(MessageBase): def __init__(self, cookie: str, in_reply_to: str) -> None: @@ -214,7 +214,7 @@ class ReplyCodeModel(ReplyBase): def log(self) -> None: mlog.log('CMake code mode:') for idx, i in enumerate(self.configs): - mlog.log('Configuration {}:'.format(idx)) + mlog.log(f'Configuration {idx}:') with mlog.nested(): i.log() @@ -274,10 +274,10 @@ class CMakeClient: msg_type = raw_data['type'] func = self.type_map.get(msg_type, None) if not func: - raise CMakeException('Recieved unknown message type "{}"'.format(msg_type)) + raise CMakeException(f'Recieved unknown message type "{msg_type}"') for i in CMAKE_MESSAGE_TYPES[msg_type]: if i not in raw_data: - raise CMakeException('Key "{}" is missing from CMake server message type {}'.format(i, msg_type)) + raise CMakeException(f'Key "{i}" is missing from CMake server message type {msg_type}') return func(raw_data) def writeMessage(self, msg: MessageBase) -> None: @@ -316,10 +316,10 @@ class CMakeClient: reply_type = data['inReplyTo'] func = self.reply_map.get(reply_type, None) if not func: - raise CMakeException('Recieved unknown reply type "{}"'.format(reply_type)) + raise CMakeException(f'Recieved unknown reply type "{reply_type}"') for i in ['cookie'] + CMAKE_REPLY_TYPES[reply_type]: if i not in data: - raise CMakeException('Key "{}" is missing from CMake server message type {}'.format(i, type)) + raise CMakeException(f'Key "{i}" is missing from CMake server message type {type}') return func(data) def resolve_reply_cmakeInputs(self, data: T.Dict[str, T.Any]) -> ReplyCMakeInputs: diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py index 0ec1698..f0a54b5 100644 --- a/mesonbuild/cmake/common.py +++ b/mesonbuild/cmake/common.py @@ -56,7 +56,7 @@ class CMakeBuildFile: 
         self.is_temp = is_temp
 
     def __repr__(self) -> str:
-        return '<{}: {}; cmake={}; temp={}>'.format(self.__class__.__name__, self.file, self.is_cmake, self.is_temp)
+        return f'<{self.__class__.__name__}: {self.file}; cmake={self.is_cmake}; temp={self.is_temp}>'
 
 def _flags_to_list(raw: str) -> T.List[str]:
     # Convert a raw commandline string into a list of strings
@@ -103,10 +103,10 @@ def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]:
             mlog.warning(' --> Ignoring this option')
             continue
         if isinstance(val, (str, int, float)):
-            res += ['-D{}={}'.format(key, val)]
+            res += [f'-D{key}={val}']
         elif isinstance(val, bool):
             val_str = 'ON' if val else 'OFF'
-            res += ['-D{}={}'.format(key, val_str)]
+            res += [f'-D{key}={val_str}']
         else:
             raise MesonException('Type "{}" of "{}" is not supported as for a CMake define value'.format(type(val).__name__, key))
 
@@ -132,7 +132,7 @@ class CMakeInclude:
         self.isSystem = isSystem
 
     def __repr__(self) -> str:
-        return ''.format(self.path, self.isSystem)
+        return f''
 
 class CMakeFileGroup:
     def __init__(self, data: T.Dict[str, T.Any]) -> None:
@@ -201,7 +201,7 @@ class CMakeTarget:
         mlog.log('type =', mlog.bold(self.type))
         # mlog.log('is_generator_provided =', mlog.bold('true' if self.is_generator_provided else 'false'))
         for idx, i in enumerate(self.files):
-            mlog.log('Files {}:'.format(idx))
+            mlog.log(f'Files {idx}:')
             with mlog.nested():
                 i.log()
 
@@ -220,7 +220,7 @@ class CMakeProject:
         mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix()))
         mlog.log('name =', mlog.bold(self.name))
         for idx, i in enumerate(self.targets):
-            mlog.log('Target {}:'.format(idx))
+            mlog.log(f'Target {idx}:')
             with mlog.nested():
                 i.log()
 
@@ -234,7 +234,7 @@ class CMakeConfiguration:
     def log(self) -> None:
         mlog.log('name =', mlog.bold(self.name))
         for idx, i in enumerate(self.projects):
-            mlog.log('Project {}:'.format(idx))
+            mlog.log(f'Project {idx}:')
             with mlog.nested():
                 i.log()
 
@@ -265,7 +265,7 @@ class SingleTargetOptions:
             opt = i[:i.find('=')]
             if opt not in self.opts:
                 res += [i]
-        res += ['{}={}'.format(k, v) for k, v in self.opts.items()]
+        res += [f'{k}={v}' for k, v in self.opts.items()]
         return res
 
     def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]:
diff --git a/mesonbuild/cmake/executor.py b/mesonbuild/cmake/executor.py
index e4b85de..860d410 100644
--- a/mesonbuild/cmake/executor.py
+++ b/mesonbuild/cmake/executor.py
@@ -87,7 +87,7 @@ class CMakeExecutor:
                 continue
             if not silent:
                 mlog.log('Found CMake:', mlog.bold(potential_cmakebin.get_path()),
-                         '({})'.format(version_if_ok))
+                         f'({version_if_ok})')
             CMakeExecutor.class_cmakebin[self.for_machine] = potential_cmakebin
             CMakeExecutor.class_cmakevers[self.for_machine] = version_if_ok
             break
@@ -104,7 +104,7 @@ class CMakeExecutor:
 
     def check_cmake(self, cmakebin: 'ExternalProgram') -> T.Optional[str]:
         if not cmakebin.found():
-            mlog.log('Did not find CMake {!r}'.format(cmakebin.name))
+            mlog.log(f'Did not find CMake {cmakebin.name!r}')
             return None
         try:
             p, out = Popen_safe(cmakebin.get_command() + ['--version'])[0:2]
@@ -202,9 +202,9 @@ class CMakeExecutor:
         return rc, out, err
 
     def _call_impl(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result:
-        mlog.debug('Calling CMake ({}) in {} with:'.format(self.cmakebin.get_command(), build_dir))
+        mlog.debug(f'Calling CMake ({self.cmakebin.get_command()}) in {build_dir} with:')
         for i in args:
-            mlog.debug(' - "{}"'.format(i))
+            mlog.debug(f' - "{i}"')
         if not self.print_cmout:
             return self._call_quiet(args, build_dir, env)
         else:
diff --git a/mesonbuild/cmake/fileapi.py b/mesonbuild/cmake/fileapi.py
index ce63219..6773e9a 100644
--- a/mesonbuild/cmake/fileapi.py
+++ b/mesonbuild/cmake/fileapi.py
@@ -311,7 +311,7 @@ class CMakeFileAPI:
     def _reply_file_content(self, filename: Path) -> T.Dict[str, T.Any]:
         real_path = self.reply_dir / filename
         if not real_path.exists():
-            raise CMakeException('File "{}" does not exist'.format(real_path))
+            raise CMakeException(f'File "{real_path}" does not exist')
 
         data = json.loads(real_path.read_text())
         assert isinstance(data, dict)
diff --git a/mesonbuild/cmake/interpreter.py b/mesonbuild/cmake/interpreter.py
index abb4983..cccd358 100644
--- a/mesonbuild/cmake/interpreter.py
+++ b/mesonbuild/cmake/interpreter.py
@@ -175,7 +175,7 @@ class OutputTargetMap:
                 continue
             new_name = name[:-len(i) - 1]
            new_name = OutputTargetMap.rm_so_version.sub('', new_name)
-            candidates += ['{}.{}'.format(new_name, i)]
+            candidates += [f'{new_name}.{i}']
         for i in candidates:
             keys += [self._rel_artifact_key(Path(i)), Path(i).name, self._base_artifact_key(Path(i))]
         return self._return_first_valid_key(keys)
@@ -194,21 +194,21 @@ class OutputTargetMap:
         return None
 
     def _target_key(self, tgt_name: str) -> str:
-        return '__tgt_{}__'.format(tgt_name)
+        return f'__tgt_{tgt_name}__'
 
     def _rel_generated_file_key(self, fname: Path) -> T.Optional[str]:
         path = self._rel_path(fname)
-        return '__relgen_{}__'.format(path.as_posix()) if path else None
+        return f'__relgen_{path.as_posix()}__' if path else None
 
     def _base_generated_file_key(self, fname: Path) -> str:
-        return '__gen_{}__'.format(fname.name)
+        return f'__gen_{fname.name}__'
 
     def _rel_artifact_key(self, fname: Path) -> T.Optional[str]:
         path = self._rel_path(fname)
-        return '__relart_{}__'.format(path.as_posix()) if path else None
+        return f'__relart_{path.as_posix()}__' if path else None
 
     def _base_artifact_key(self, fname: Path) -> str:
-        return '__art_{}__'.format(fname.name)
+        return f'__art_{fname.name}__'
 
 class ConverterTarget:
     def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: MachineChoice) -> None:
@@ -281,7 +281,7 @@ class ConverterTarget:
 
             # Add arguments, but avoid duplicates
             args = i.flags
-            args += ['-D{}'.format(x) for x in i.defines]
+            args += [f'-D{x}' for x in i.defines]
             for lang in languages:
                 self.compile_opts[lang] += [x for x in args if x not in self.compile_opts[lang]]
 
@@ -296,7 +296,7 @@ class ConverterTarget:
             self.sources += i.sources
 
     def __repr__(self) -> str:
-        return '<{}: {}>'.format(self.__class__.__name__, self.name)
+        return f'<{self.__class__.__name__}: {self.name}>'
 
     std_regex = re.compile(r'([-]{1,2}std=|/std:v?|[-]{1,2}std:)(.*)')
 
@@ -321,7 +321,7 @@ class ConverterTarget:
                         once=True
                     )
                     continue
-                self.override_options += ['{}_std={}'.format(i, std)]
+                self.override_options += [f'{i}_std={std}']
             elif j in ['-fPIC', '-fpic', '-fPIE', '-fpie']:
                 self.pie = True
             elif isinstance(ctgt, ConverterCustomTarget):
@@ -393,12 +393,12 @@ class ConverterTarget:
             if 'RELEASE' in cfgs:
                 cfg = 'RELEASE'
 
-            if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties:
-                libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x]
+            if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties:
+                libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x]
             elif 'IMPORTED_IMPLIB' in tgt.properties:
                 libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x]
-            elif 'IMPORTED_LOCATION_{}'.format(cfg) in tgt.properties:
-                libraries += [x for x in tgt.properties['IMPORTED_LOCATION_{}'.format(cfg)] if x]
+
elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties: + libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x] elif 'IMPORTED_LOCATION' in tgt.properties: libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] @@ -408,8 +408,8 @@ class ConverterTarget: if 'INTERFACE_LINK_LIBRARIES' in tgt.properties: otherDeps += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x] - if 'IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg) in tgt.properties: - otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg)] if x] + if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties: + otherDeps += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x] elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties: otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x] @@ -445,7 +445,7 @@ class ConverterTarget: supported = list(header_suffixes) + list(obj_suffixes) for i in self.languages: supported += list(lang_suffixes[i]) - supported = ['.{}'.format(x) for x in supported] + supported = [f'.{x}' for x in supported] self.sources = [x for x in self.sources if any([x.name.endswith(y) for y in supported])] self.generated_raw = [x for x in self.generated_raw if any([x.name.endswith(y) for y in supported])] @@ -560,7 +560,7 @@ class ConverterTarget: candidates = [j] # type: T.List[str] if not any([j.endswith('.' + x) for x in exts]): mlog.warning('Object files do not contain source file extensions, thus falling back to guessing them.', once=True) - candidates += ['{}.{}'.format(j, x) for x in exts] + candidates += [f'{j}.{x}' for x in exts] if any([x in source_files for x in candidates]): if linker_workaround: self._append_objlib_sources(i) @@ -632,7 +632,7 @@ class ConverterTarget: return target_type_map.get(self.type.upper()) def log(self) -> None: - mlog.log('Target', mlog.bold(self.name), '({})'.format(self.cmake_name)) + mlog.log('Target', mlog.bold(self.name), f'({self.cmake_name})') mlog.log(' -- artifacts: ', mlog.bold(str(self.artifacts))) mlog.log(' -- full_name: ', mlog.bold(self.full_name)) mlog.log(' -- type: ', mlog.bold(self.type)) @@ -664,7 +664,7 @@ class CustomTargetReference: if self.valid(): return '<{}: {} [{}]>'.format(self.__class__.__name__, self.ctgt.name, self.ctgt.outputs[self.index]) else: - return '<{}: INVALID REFERENCE>'.format(self.__class__.__name__) + return f'<{self.__class__.__name__}: INVALID REFERENCE>' def valid(self) -> bool: return self.ctgt is not None and self.index >= 0 @@ -681,7 +681,7 @@ class ConverterCustomTarget: assert target.current_src_dir is not None self.name = target.name if not self.name: - self.name = 'custom_tgt_{}'.format(ConverterCustomTarget.tgt_counter) + self.name = f'custom_tgt_{ConverterCustomTarget.tgt_counter}' ConverterCustomTarget.tgt_counter += 1 self.cmake_name = str(self.name) self.original_outputs = list(target.outputs) @@ -702,7 +702,7 @@ class ConverterCustomTarget: self.name = _sanitize_cmake_name(self.name) def __repr__(self) -> str: - return '<{}: {} {}>'.format(self.__class__.__name__, self.name, self.outputs) + return f'<{self.__class__.__name__}: {self.name} {self.outputs}>' def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, all_outputs: T.List[str], trace: CMakeTraceParser) -> None: # Default the working directory to ${CMAKE_CURRENT_BINARY_DIR} @@ -730,7 +730,7 @@ class ConverterCustomTarget: for i in self.outputs: if i in all_outputs: old = str(i) - i = 
'c{}_{}'.format(ConverterCustomTarget.out_counter, i) + i = f'c{ConverterCustomTarget.out_counter}_{i}' ConverterCustomTarget.out_counter += 1 self.conflict_map[old] = i all_outputs += [i] @@ -760,7 +760,7 @@ class ConverterCustomTarget: if trace_tgt.type == 'EXECUTABLE' and 'IMPORTED_LOCATION' in trace_tgt.properties: cmd += trace_tgt.properties['IMPORTED_LOCATION'] continue - mlog.debug('CMake: Found invalid CMake target "{}" --> ignoring \n{}'.format(j, trace_tgt)) + mlog.debug(f'CMake: Found invalid CMake target "{j}" --> ignoring \n{trace_tgt}') # Fallthrough on error cmd += [j] @@ -829,7 +829,7 @@ class ConverterCustomTarget: return None def log(self) -> None: - mlog.log('Custom Target', mlog.bold(self.name), '({})'.format(self.cmake_name)) + mlog.log('Custom Target', mlog.bold(self.name), f'({self.cmake_name})') mlog.log(' -- command: ', mlog.bold(str(self.command))) mlog.log(' -- outputs: ', mlog.bold(str(self.outputs))) mlog.log(' -- conflict_map: ', mlog.bold(str(self.conflict_map))) @@ -901,10 +901,10 @@ class CMakeInterpreter: generator = backend_generator_map[self.backend_name] cmake_args = [] cmake_args += ['-G', generator] - cmake_args += ['-DCMAKE_INSTALL_PREFIX={}'.format(self.install_prefix)] + cmake_args += [f'-DCMAKE_INSTALL_PREFIX={self.install_prefix}'] cmake_args += extra_cmake_options trace_args = self.trace.trace_args() - cmcmp_args = ['-DCMAKE_POLICY_WARNING_{}=OFF'.format(x) for x in disable_policy_warnings] + cmcmp_args = [f'-DCMAKE_POLICY_WARNING_{x}=OFF' for x in disable_policy_warnings] if version_compare(cmake_exe.version(), '>=3.14'): self.cmake_api = CMakeAPI.FILE @@ -1217,14 +1217,14 @@ class CMakeInterpreter: # Determine the meson function to use for the build target tgt_func = tgt.meson_func() if not tgt_func: - raise CMakeException('Unknown target type "{}"'.format(tgt.type)) + raise CMakeException(f'Unknown target type "{tgt.type}"') # Determine the variable names - inc_var = '{}_inc'.format(tgt.name) - dir_var = '{}_dir'.format(tgt.name) - sys_var = '{}_sys'.format(tgt.name) - src_var = '{}_src'.format(tgt.name) - dep_var = '{}_dep'.format(tgt.name) + inc_var = f'{tgt.name}_inc' + dir_var = f'{tgt.name}_dir' + sys_var = f'{tgt.name}_sys' + src_var = f'{tgt.name}_src' + dep_var = f'{tgt.name}_dep' tgt_var = tgt.name install_tgt = options.get_install(tgt.cmake_name, tgt.install) @@ -1246,7 +1246,7 @@ class CMakeInterpreter: # Handle compiler args for key, val in tgt.compile_opts.items(): - tgt_kwargs['{}_args'.format(key)] = options.get_compile_args(tgt.cmake_name, key, val) + tgt_kwargs[f'{key}_args'] = options.get_compile_args(tgt.cmake_name, key, val) # Handle -fPCI, etc if tgt_func == 'executable': diff --git a/mesonbuild/cmake/toolchain.py b/mesonbuild/cmake/toolchain.py index 1870445..c9f821a 100644 --- a/mesonbuild/cmake/toolchain.py +++ b/mesonbuild/cmake/toolchain.py @@ -105,7 +105,7 @@ class CMakeToolchain: for key, value in self.variables.items(): res += 'set(' + key for i in value: - res += ' "{}"'.format(i) + res += f' "{i}"' res += ')\n' res += '\n' diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py index 298c6b8..6294fa1 100644 --- a/mesonbuild/cmake/traceparser.py +++ b/mesonbuild/cmake/traceparser.py @@ -134,7 +134,7 @@ class CMakeTraceParser: base_args = ['--no-warn-unused-cli'] if not self.requires_stderr(): - base_args += ['--trace-redirect={}'.format(self.trace_file)] + base_args += [f'--trace-redirect={self.trace_file}'] return arg_map[self.trace_format] + base_args @@ -157,7 +157,7 @@ class 
CMakeTraceParser: elif self.trace_format == 'json-v1': lexer1 = self._lex_trace_json(trace) else: - raise CMakeException('CMake: Internal error: Invalid trace format {}. Expected [human, json-v1]'.format(self.trace_format)) + raise CMakeException(f'CMake: Internal error: Invalid trace format {self.trace_format}. Expected [human, json-v1]') # Primary pass -- parse everything for l in lexer1: @@ -213,9 +213,9 @@ class CMakeTraceParser: # Generate an exception if the parser is not in permissive mode if self.permissive: - mlog.debug('CMake trace warning: {}() {}\n{}'.format(function, error, tline)) + mlog.debug(f'CMake trace warning: {function}() {error}\n{tline}') return None - raise CMakeException('CMake: {}() {}\n{}'.format(function, error, tline)) + raise CMakeException(f'CMake: {function}() {error}\n{tline}') def _cmake_set(self, tline: CMakeTraceLine) -> None: """Handler for the CMake set() function in all variaties. @@ -439,7 +439,7 @@ class CMakeTraceParser: def do_target(tgt: str) -> None: if i not in self.targets: - return self._gen_exception('set_property', 'TARGET {} not found'.format(i), tline) + return self._gen_exception('set_property', f'TARGET {i} not found', tline) if identifier not in self.targets[i].properties: self.targets[i].properties[identifier] = [] @@ -525,7 +525,7 @@ class CMakeTraceParser: for name, value in arglist: for i in targets: if i not in self.targets: - return self._gen_exception('set_target_properties', 'TARGET {} not found'.format(i), tline) + return self._gen_exception('set_target_properties', f'TARGET {i} not found', tline) self.targets[i].properties[name] = value @@ -574,7 +574,7 @@ class CMakeTraceParser: target = args[0] if target not in self.targets: - return self._gen_exception(func, 'TARGET {} not found'.format(target), tline) + return self._gen_exception(func, f'TARGET {target} not found', tline) interface = [] private = [] @@ -706,13 +706,13 @@ class CMakeTraceParser: path_found = False elif reg_end.match(i): # File detected - curr_str = '{} {}'.format(curr_str, i) + curr_str = f'{curr_str} {i}' fixed_list += [curr_str] curr_str = None path_found = False - elif Path('{} {}'.format(curr_str, i)).exists(): + elif Path(f'{curr_str} {i}').exists(): # Path detected - curr_str = '{} {}'.format(curr_str, i) + curr_str = f'{curr_str} {i}' path_found = True elif path_found: # Add path to fixed_list after ensuring the whole path is in curr_str @@ -720,7 +720,7 @@ class CMakeTraceParser: curr_str = i path_found = False else: - curr_str = '{} {}'.format(curr_str, i) + curr_str = f'{curr_str} {i}' path_found = False if curr_str: diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py index 0a2d478..759b969 100644 --- a/mesonbuild/compilers/c.py +++ b/mesonbuild/compilers/c.py @@ -58,7 +58,7 @@ class CCompiler(CLikeCompiler, Compiler): try: return C_FUNC_ATTRIBUTES[name] except KeyError: - raise MesonException('Unknown function attribute "{}"'.format(name)) + raise MesonException(f'Unknown function attribute "{name}"') language = 'c' @@ -474,7 +474,7 @@ class ClangClCCompiler(_ClangCStds, ClangClCompiler, VisualStudioLikeCCompilerMi key = OptionKey('std', machine=self.for_machine, lang=self.language) std = options[key].value if std != "none": - return ['/clang:-std={}'.format(std)] + return [f'/clang:-std={std}'] return [] @@ -654,7 +654,7 @@ class CompCertCCompiler(CompCertCompiler, CCompiler): return ['-O0'] def get_output_args(self, target: str) -> T.List[str]: - return ['-o{}'.format(target)] + return [f'-o{target}'] def get_werror_args(self) 
-> T.List[str]: return ['-Werror'] diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 56c97f4..15c3805 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -693,7 +693,7 @@ class Compiler(metaclass=abc.ABCMeta): def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], libtype: LibType = LibType.PREFER_SHARED) -> T.Optional[T.List[str]]: - raise EnvironmentException('Language {} does not support library finding.'.format(self.get_display_language())) + raise EnvironmentException(f'Language {self.get_display_language()} does not support library finding.') def get_library_naming(self, env: 'Environment', libtype: LibType, strict: bool = False) -> T.Optional[T.Tuple[str, ...]]: @@ -896,7 +896,7 @@ class Compiler(metaclass=abc.ABCMeta): def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]: raise EnvironmentException( - 'Language {} does not support function attributes.'.format(self.get_display_language())) + f'Language {self.get_display_language()} does not support function attributes.') def get_pic_args(self) -> T.List[str]: m = 'Language {} does not support position-independent code' @@ -971,7 +971,7 @@ class Compiler(metaclass=abc.ABCMeta): return self.linker.bitcode_args() def get_buildtype_args(self, buildtype: str) -> T.List[str]: - raise EnvironmentException('{} does not implement get_buildtype_args'.format(self.id)) + raise EnvironmentException(f'{self.id} does not implement get_buildtype_args') def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: return self.linker.get_buildtype_args(buildtype) @@ -1047,22 +1047,22 @@ class Compiler(metaclass=abc.ABCMeta): return [] def find_framework_paths(self, env: 'Environment') -> T.List[str]: - raise EnvironmentException('{} does not support find_framework_paths'.format(self.id)) + raise EnvironmentException(f'{self.id} does not support find_framework_paths') def attribute_check_func(self, name: str) -> str: - raise EnvironmentException('{} does not support attribute checks'.format(self.id)) + raise EnvironmentException(f'{self.id} does not support attribute checks') def get_pch_suffix(self) -> str: - raise EnvironmentException('{} does not support pre compiled headers'.format(self.id)) + raise EnvironmentException(f'{self.id} does not support pre compiled headers') def get_pch_name(self, name: str) -> str: - raise EnvironmentException('{} does not support pre compiled headers'.format(self.id)) + raise EnvironmentException(f'{self.id} does not support pre compiled headers') def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: - raise EnvironmentException('{} does not support pre compiled headers'.format(self.id)) + raise EnvironmentException(f'{self.id} does not support pre compiled headers') def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]: - raise EnvironmentException('{} does not support function attributes'.format(self.id)) + raise EnvironmentException(f'{self.id} does not support function attributes') def name_string(self) -> str: return ' '.join(self.exelist) @@ -1095,7 +1095,7 @@ class Compiler(metaclass=abc.ABCMeta): return objfile + '.' 
+ self.get_depfile_suffix() def get_depfile_suffix(self) -> str: - raise OSError('{} does not implement get_depfile_suffix'.format(self.id)) + raise OSError(f'{self.id} does not implement get_depfile_suffix') def get_no_stdinc_args(self) -> T.List[str]: """Arguments to turn off default inclusion of standard libraries.""" @@ -1112,13 +1112,13 @@ class Compiler(metaclass=abc.ABCMeta): pass def get_module_incdir_args(self) -> T.Tuple[str, ...]: - raise OSError('{} does not implement get_module_incdir_args'.format(self.id)) + raise OSError(f'{self.id} does not implement get_module_incdir_args') def get_module_outdir_args(self, path: str) -> T.List[str]: - raise OSError('{} does not implement get_module_outdir_args'.format(self.id)) + raise OSError(f'{self.id} does not implement get_module_outdir_args') def module_name_to_filename(self, module_name: str) -> str: - raise OSError('{} does not implement module_name_to_filename'.format(self.id)) + raise OSError(f'{self.id} does not implement module_name_to_filename') def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: """Arguments to pass the compiler and/or linker for checks. @@ -1212,10 +1212,10 @@ class Compiler(metaclass=abc.ABCMeta): def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: """Used by D for extra language features.""" # TODO: using a TypeDict here would improve this - raise OSError('{} does not implement get_feature_args'.format(self.id)) + raise OSError(f'{self.id} does not implement get_feature_args') def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]: - raise EnvironmentException('{} does not know how to do prelinking.'.format(self.id)) + raise EnvironmentException(f'{self.id} does not know how to do prelinking.') def get_global_options(lang: str, @@ -1223,7 +1223,7 @@ def get_global_options(lang: str, for_machine: MachineChoice, env: 'Environment') -> 'KeyedOptionDictType': """Retrieve options that apply to all compilers for a given language.""" - description = 'Extra arguments passed to the {}'.format(lang) + description = f'Extra arguments passed to the {lang}' argkey = OptionKey('args', lang=lang, machine=for_machine) largkey = argkey.evolve('link_args') diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index ebe3a4f..4d55ccd 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -65,7 +65,7 @@ class CPPCompiler(CLikeCompiler, Compiler): try: return CXX_FUNC_ATTRIBUTES.get(name, C_FUNC_ATTRIBUTES[name]) except KeyError: - raise MesonException('Unknown function attribute "{}"'.format(name)) + raise MesonException(f'Unknown function attribute "{name}"') language = 'cpp' @@ -129,10 +129,10 @@ class CPPCompiler(CLikeCompiler, Compiler): CPP_TEST = 'int i = static_cast(0);' with self.compile(CPP_TEST, extra_args=[cpp_std_value], mode='compile') as p: if p.returncode == 0: - mlog.debug('Compiler accepts {}:'.format(cpp_std_value), 'YES') + mlog.debug(f'Compiler accepts {cpp_std_value}:', 'YES') return True else: - mlog.debug('Compiler accepts {}:'.format(cpp_std_value), 'NO') + mlog.debug(f'Compiler accepts {cpp_std_value}:', 'NO') return False @functools.lru_cache() @@ -166,7 +166,7 @@ class CPPCompiler(CLikeCompiler, Compiler): if self._test_cpp_std_arg(cpp_std_value): return cpp_std_value - raise MesonException('C++ Compiler does not support -std={}'.format(cpp_std)) + raise MesonException(f'C++ Compiler does not support -std={cpp_std}') def get_options(self) -> 'KeyedOptionDictType': opts = 
super().get_options() @@ -615,7 +615,7 @@ class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase): permissive, ver = self.VC_VERSION_MAP[options[key].value] if ver is not None: - args.append('/std:c++{}'.format(ver)) + args.append(f'/std:c++{ver}') if not permissive: args.append('/permissive-') diff --git a/mesonbuild/compilers/cuda.py b/mesonbuild/compilers/cuda.py index a55975a..b7dc0f5 100644 --- a/mesonbuild/compilers/cuda.py +++ b/mesonbuild/compilers/cuda.py @@ -514,7 +514,7 @@ class CudaCompiler(Compiler): mlog.debug(stde) mlog.debug('-----') if pc.returncode != 0: - raise EnvironmentException('Compiler {} can not compile programs.'.format(self.name_string())) + raise EnvironmentException(f'Compiler {self.name_string()} can not compile programs.') # Run sanity check (if possible) if self.is_cross: @@ -533,7 +533,7 @@ class CudaCompiler(Compiler): mlog.debug('-----') pe.wait() if pe.returncode != 0: - raise EnvironmentException('Executables created by {} compiler {} are not runnable.'.format(self.language, self.name_string())) + raise EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.') # Interpret the result of the sanity test. # As mentioned above, it is not only a sanity test but also a GPU diff --git a/mesonbuild/compilers/d.py b/mesonbuild/compilers/d.py index b16aca8..92d54e9 100644 --- a/mesonbuild/compilers/d.py +++ b/mesonbuild/compilers/d.py @@ -183,10 +183,10 @@ class DmdLikeCompilerMixin(CompilerMixinBase): if int(d) > debug_level: debug_level = int(d) else: - res.append('{}={}'.format(debug_arg, d)) + res.append(f'{debug_arg}={d}') if debug_level >= 0: - res.append('{}={}'.format(debug_arg, debug_level)) + res.append(f'{debug_arg}={debug_level}') if 'versions' in kwargs: version_level = -1 @@ -207,10 +207,10 @@ class DmdLikeCompilerMixin(CompilerMixinBase): if int(v) > version_level: version_level = int(v) else: - res.append('{}={}'.format(version_arg, v)) + res.append(f'{version_arg}={v}') if version_level >= 0: - res.append('{}={}'.format(version_arg, version_level)) + res.append(f'{version_arg}={version_level}') if 'import_dirs' in kwargs: import_dirs = kwargs.pop('import_dirs') @@ -230,8 +230,8 @@ class DmdLikeCompilerMixin(CompilerMixinBase): else: expdir = basedir srctreedir = os.path.join(build_to_src, expdir) - res.append('{}{}'.format(import_dir_arg, srctreedir)) - res.append('{}{}'.format(import_dir_arg, bldtreedir)) + res.append(f'{import_dir_arg}{srctreedir}') + res.append(f'{import_dir_arg}{bldtreedir}') if kwargs: raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys())) @@ -560,10 +560,10 @@ class DCompiler(Compiler): if int(d) > debug_level: debug_level = int(d) else: - res.append('{}={}'.format(debug_arg, d)) + res.append(f'{debug_arg}={d}') if debug_level >= 0: - res.append('{}={}'.format(debug_arg, debug_level)) + res.append(f'{debug_arg}={debug_level}') if 'versions' in kwargs: version_level = -1 @@ -584,10 +584,10 @@ class DCompiler(Compiler): if int(v) > version_level: version_level = int(v) else: - res.append('{}={}'.format(version_arg, v)) + res.append(f'{version_arg}={v}') if version_level >= 0: - res.append('{}={}'.format(version_arg, version_level)) + res.append(f'{version_arg}={version_level}') if 'import_dirs' in kwargs: import_dirs = kwargs.pop('import_dirs') @@ -607,8 +607,8 @@ class DCompiler(Compiler): else: expdir = basedir srctreedir = os.path.join(build_to_src, expdir) - res.append('{}{}'.format(import_dir_arg, srctreedir)) - 
res.append('{}{}'.format(import_dir_arg, bldtreedir)) + res.append(f'{import_dir_arg}{srctreedir}') + res.append(f'{import_dir_arg}{bldtreedir}') if kwargs: raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys())) @@ -769,7 +769,7 @@ class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler): @classmethod def use_linker_args(cls, linker: str) -> T.List[str]: - return ['-linker={}'.format(linker)] + return [f'-linker={linker}'] def get_linker_always_args(self) -> T.List[str]: args = super().get_linker_always_args() diff --git a/mesonbuild/compilers/mixins/arm.py b/mesonbuild/compilers/mixins/arm.py index beb5fd5..4e1898a 100644 --- a/mesonbuild/compilers/mixins/arm.py +++ b/mesonbuild/compilers/mixins/arm.py @@ -142,7 +142,7 @@ class ArmclangCompiler(Compiler): raise mesonlib.EnvironmentException('armclang supports only cross-compilation.') # Check whether 'armlink' is available in path if not isinstance(self.linker, ArmClangDynamicLinker): - raise mesonlib.EnvironmentException('Unsupported Linker {}, must be armlink'.format(self.linker.exelist)) + raise mesonlib.EnvironmentException(f'Unsupported Linker {self.linker.exelist}, must be armlink') if not mesonlib.version_compare(self.version, '==' + self.linker.version): raise mesonlib.EnvironmentException('armlink version does not match with compiler version') self.id = 'armclang' diff --git a/mesonbuild/compilers/mixins/clang.py b/mesonbuild/compilers/mixins/clang.py index fd1966f..5e109c0 100644 --- a/mesonbuild/compilers/mixins/clang.py +++ b/mesonbuild/compilers/mixins/clang.py @@ -126,8 +126,8 @@ class ClangCompiler(GnuLikeCompiler): if shutil.which(linker): if not shutil.which(linker): raise mesonlib.MesonException( - 'Cannot find linker {}.'.format(linker)) - return ['-fuse-ld={}'.format(linker)] + f'Cannot find linker {linker}.') + return [f'-fuse-ld={linker}'] return super().use_linker_args(linker) def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]: diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index ad0e257..787c2c1 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -118,7 +118,7 @@ class CLikeCompilerArgs(arglist.CompilerArgs): def __repr__(self) -> str: self.flush_pre_post() - return 'CLikeCompilerArgs({!r}, {!r})'.format(self.compiler, self._container) + return f'CLikeCompilerArgs({self.compiler!r}, {self._container!r})' class CLikeCompiler(Compiler): @@ -321,7 +321,7 @@ class CLikeCompiler(Compiler): mlog.debug(stde) mlog.debug('-----') if pc.returncode != 0: - raise mesonlib.EnvironmentException('Compiler {} can not compile programs.'.format(self.name_string())) + raise mesonlib.EnvironmentException(f'Compiler {self.name_string()} can not compile programs.') # Run sanity check if self.is_cross: if self.exe_wrapper is None: @@ -337,7 +337,7 @@ class CLikeCompiler(Compiler): raise mesonlib.EnvironmentException('Could not invoke sanity test executable: %s.' 
% str(e)) pe.wait() if pe.returncode != 0: - raise mesonlib.EnvironmentException('Executables created by {} compiler {} are not runnable.'.format(self.language, self.name_string())) + raise mesonlib.EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.') def sanity_check(self, work_dir: str, environment: 'Environment') -> None: code = 'int main(void) { int class=0; return class; }\n' @@ -485,7 +485,7 @@ class CLikeCompiler(Compiler): try: pe, so, se = mesonlib.Popen_safe(cmdlist) except Exception as e: - mlog.debug('Could not run: %s (error: %s)\n' % (cmdlist, e)) + mlog.debug(f'Could not run: {cmdlist} (error: {e})\n') return compilers.RunResult(False) mlog.debug('Program stdout:\n') @@ -692,7 +692,7 @@ class CLikeCompiler(Compiler): with func() as p: cached = p.cached if p.returncode != 0: - raise mesonlib.EnvironmentException('Could not get define {!r}'.format(dname)) + raise mesonlib.EnvironmentException(f'Could not get define {dname!r}') # Get the preprocessed value after the delimiter, # minus the extra newline at the end and # merge string literals. @@ -710,7 +710,7 @@ class CLikeCompiler(Compiler): fmt = '%lli' cast = '(long long int)' else: - raise AssertionError('BUG: Unknown return type {!r}'.format(rtype)) + raise AssertionError(f'BUG: Unknown return type {rtype!r}') fargs = {'prefix': prefix, 'f': fname, 'cast': cast, 'fmt': fmt} code = '''{prefix} #include @@ -810,7 +810,7 @@ class CLikeCompiler(Compiler): if val is not None: if isinstance(val, bool): return val, False - raise mesonlib.EnvironmentException('Cross variable {} is not a boolean.'.format(varname)) + raise mesonlib.EnvironmentException(f'Cross variable {varname} is not a boolean.') # TODO: we really need a protocol for this, # @@ -951,7 +951,7 @@ class CLikeCompiler(Compiler): elif symbol_name in line: mlog.debug("Symbols have underscore prefix: NO") return False - raise RuntimeError('BUG: {!r} check failed unexpectedly'.format(n)) + raise RuntimeError(f'BUG: {n!r} check failed unexpectedly') def _get_patterns(self, env: 'Environment', prefixes: T.List[str], suffixes: T.List[str], shared: bool = False) -> T.List[str]: patterns = [] # type: T.List[str] diff --git a/mesonbuild/compilers/mixins/emscripten.py b/mesonbuild/compilers/mixins/emscripten.py index 57295d0..226cc15 100644 --- a/mesonbuild/compilers/mixins/emscripten.py +++ b/mesonbuild/compilers/mixins/emscripten.py @@ -53,7 +53,7 @@ class EmscriptenMixin(Compiler): args = ['-s', 'USE_PTHREADS=1'] count: int = env.coredata.options[OptionKey('thread_count', lang=self.language, machine=self.for_machine)].value if count: - args.extend(['-s', 'PTHREAD_POOL_SIZE={}'.format(count)]) + args.extend(['-s', f'PTHREAD_POOL_SIZE={count}']) return args def get_options(self) -> 'coredata.KeyedOptionDictType': diff --git a/mesonbuild/compilers/mixins/gnu.py b/mesonbuild/compilers/mixins/gnu.py index 464c664..b007ff0 100644 --- a/mesonbuild/compilers/mixins/gnu.py +++ b/mesonbuild/compilers/mixins/gnu.py @@ -104,7 +104,7 @@ def gnulike_default_include_dirs(compiler: T.Tuple[str], lang: str) -> T.List[st lang = lang_map[lang] env = os.environ.copy() env["LC_ALL"] = 'C' - cmd = list(compiler) + ['-x{}'.format(lang), '-E', '-v', '-'] + cmd = list(compiler) + [f'-x{lang}', '-E', '-v', '-'] p = subprocess.Popen( cmd, stdin=subprocess.DEVNULL, @@ -317,7 +317,7 @@ class GnuLikeCompiler(Compiler, metaclass=abc.ABCMeta): raise mesonlib.MesonException( 'Unsupported linker, only bfd, gold, and lld are supported, ' 'not 
{}.'.format(linker)) - return ['-fuse-ld={}'.format(linker)] + return [f'-fuse-ld={linker}'] def get_coverage_args(self) -> T.List[str]: return ['--coverage'] diff --git a/mesonbuild/compilers/mixins/islinker.py b/mesonbuild/compilers/mixins/islinker.py index 298d47f..eb271f6 100644 --- a/mesonbuild/compilers/mixins/islinker.py +++ b/mesonbuild/compilers/mixins/islinker.py @@ -83,11 +83,11 @@ class BasicLinkerIsCompilerMixin(Compiler): def get_link_whole_for(self, args: T.List[str]) -> T.List[str]: raise mesonlib.EnvironmentException( - 'Linker {} does not support link_whole'.format(self.id)) + f'Linker {self.id} does not support link_whole') def get_allow_undefined_link_args(self) -> T.List[str]: raise mesonlib.EnvironmentException( - 'Linker {} does not support allow undefined'.format(self.id)) + f'Linker {self.id} does not support allow undefined') def get_pie_link_args(self) -> T.List[str]: m = 'Linker {} does not support position-independent executable' diff --git a/mesonbuild/compilers/mixins/pgi.py b/mesonbuild/compilers/mixins/pgi.py index 8461574..51de8af 100644 --- a/mesonbuild/compilers/mixins/pgi.py +++ b/mesonbuild/compilers/mixins/pgi.py @@ -100,7 +100,7 @@ class PGICompiler(Compiler): if self.language == 'cpp': return ['--pch', '--pch_dir', str(hdr.parent), - '-I{}'.format(hdr.parent)] + f'-I{hdr.parent}'] else: return [] diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py index c5d39c3..29b1499 100644 --- a/mesonbuild/compilers/mixins/visualstudio.py +++ b/mesonbuild/compilers/mixins/visualstudio.py @@ -314,7 +314,7 @@ class VisualStudioLikeCompiler(Compiler, metaclass=abc.ABCMeta): return '14.1' # (Visual Studio 2017) elif version < 1930: return '14.2' # (Visual Studio 2019) - mlog.warning('Could not find toolset for version {!r}'.format(self.version)) + mlog.warning(f'Could not find toolset for version {self.version!r}') return None def get_toolset_version(self) -> T.Optional[str]: diff --git a/mesonbuild/compilers/rust.py b/mesonbuild/compilers/rust.py index fd58819..7354d58 100644 --- a/mesonbuild/compilers/rust.py +++ b/mesonbuild/compilers/rust.py @@ -80,7 +80,7 @@ class RustCompiler(Compiler): stdo = _stdo.decode('utf-8', errors='replace') stde = _stde.decode('utf-8', errors='replace') if pc.returncode != 0: - raise EnvironmentException('Rust compiler %s can not compile programs.\n%s\n%s' % ( + raise EnvironmentException('Rust compiler {} can not compile programs.\n{}\n{}'.format( self.name_string(), stdo, stde)) @@ -119,7 +119,7 @@ class RustCompiler(Compiler): if i[:2] == '-L': for j in ['dependency', 'crate', 'native', 'framework', 'all']: combined_len = len(j) + 3 - if i[:combined_len] == '-L{}='.format(j): + if i[:combined_len] == f'-L{j}=': parameter_list[idx] = i[:combined_len] + os.path.normpath(os.path.join(build_dir, i[combined_len:])) break @@ -130,7 +130,7 @@ class RustCompiler(Compiler): @classmethod def use_linker_args(cls, linker: str) -> T.List[str]: - return ['-C', 'linker={}'.format(linker)] + return ['-C', f'linker={linker}'] # Rust does not have a use_linker_args because it dispatches to a gcc-like # C compiler for dynamic linking, as such we invoke the C compiler's diff --git a/mesonbuild/compilers/vala.py b/mesonbuild/compilers/vala.py index 80e91f6..b8144f6 100644 --- a/mesonbuild/compilers/vala.py +++ b/mesonbuild/compilers/vala.py @@ -100,7 +100,7 @@ class ValaCompiler(Compiler): extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language) with 
self.cached_compile(code, environment.coredata, extra_args=extra_flags, mode='compile') as p: if p.returncode != 0: - msg = 'Vala compiler {!r} can not compile programs'.format(self.name_string()) + msg = f'Vala compiler {self.name_string()!r} can not compile programs' raise EnvironmentException(msg) def get_buildtype_args(self, buildtype: str) -> T.List[str]: @@ -128,7 +128,7 @@ class ValaCompiler(Compiler): vapi = os.path.join(d, libname + '.vapi') if os.path.isfile(vapi): return [vapi] - mlog.debug('Searched {!r} and {!r} wasn\'t found'.format(extra_dirs, libname)) + mlog.debug(f'Searched {extra_dirs!r} and {libname!r} wasn\'t found') return None def thread_flags(self, env: 'Environment') -> T.List[str]: diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 99646ca..01a29c2 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -106,7 +106,7 @@ class UserBooleanOption(UserOption[bool]): if isinstance(value, bool): return value if not isinstance(value, str): - raise MesonException('Value {} cannot be converted to a boolean'.format(value)) + raise MesonException(f'Value {value} cannot be converted to a boolean') if value.lower() == 'true': return True if value.lower() == 'false': @@ -170,7 +170,7 @@ class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, OctalInt]]): try: return int(valuestring, 8) except ValueError as e: - raise MesonException('Invalid mode: {}'.format(e)) + raise MesonException(f'Invalid mode: {e}') class UserComboOption(UserOption[str]): def __init__(self, description: str, choices: T.List[str], value: T.Any, yielding: T.Optional[bool] = None): @@ -190,7 +190,7 @@ class UserComboOption(UserOption[str]): _type = 'number' else: _type = 'string' - optionsstring = ', '.join(['"%s"' % (item,) for item in self.choices]) + optionsstring = ', '.join([f'"{item}"' for item in self.choices]) raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.' ' Possible choices are (as string): {}.'.format( value, _type, self.description, optionsstring)) @@ -216,7 +216,7 @@ class UserArrayOption(UserOption[T.List[str]]): try: newvalue = ast.literal_eval(value) except ValueError: - raise MesonException('malformed option {}'.format(value)) + raise MesonException(f'malformed option {value}') elif value == '': newvalue = [] else: @@ -227,7 +227,7 @@ class UserArrayOption(UserOption[T.List[str]]): elif isinstance(value, list): newvalue = value else: - raise MesonException('"{}" should be a string array, but it is not'.format(newvalue)) + raise MesonException(f'"{newvalue}" should be a string array, but it is not') if not self.allow_dups and len(set(newvalue)) != len(newvalue): msg = 'Duplicated values in array option is deprecated. 
' \ @@ -433,7 +433,7 @@ class CoreData: # in this case we've been passed some kind of pipe, copy # the contents of that file into the meson private (scratch) # directory so that it can be re-read when wiping/reconfiguring - copy = os.path.join(scratch_dir, '{}.{}.ini'.format(uuid.uuid4(), ftype)) + copy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini') with open(f) as rf: with open(copy, 'w') as wf: wf.write(rf.read()) @@ -461,7 +461,7 @@ class CoreData: if found_invalid: mlog.log('Found invalid candidates for', ftype, 'file:', *found_invalid) mlog.log('Could not find any valid candidate for', ftype, 'files:', *missing) - raise MesonException('Cannot find specified {} file: {}'.format(ftype, f)) + raise MesonException(f'Cannot find specified {ftype} file: {f}') return real def builtin_options_libdir_cross_fixup(self): @@ -683,7 +683,7 @@ class CoreData: try: value.set_value(oldval.value) except MesonException as e: - mlog.warning('Old value(s) of {} are no longer valid, resetting to default ({}).'.format(key, value.value)) + mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).') def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool: if when_building_for == MachineChoice.BUILD: @@ -726,8 +726,8 @@ class CoreData: self.set_option(k, v) if unknown_options and warn_unknown: unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options)) - sub = 'In subproject {}: '.format(subproject) if subproject else '' - mlog.warning('{}Unknown options: "{}"'.format(sub, unknown_options_str)) + sub = f'In subproject {subproject}: ' if subproject else '' + mlog.warning(f'{sub}Unknown options: "{unknown_options_str}"') mlog.log('The value of new options can be set with:') mlog.log(mlog.bold('meson setup --reconfigure -Dnew_option=new_value ...')) if not self.is_cross_build(): @@ -833,14 +833,14 @@ class MachineFileParser(): section = {} for entry, value in self.parser.items(s): if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry: - raise EnvironmentException('Malformed variable name {!r} in machine file.'.format(entry)) + raise EnvironmentException(f'Malformed variable name {entry!r} in machine file.') # Windows paths... 
value = value.replace('\\', '\\\\') try: ast = mparser.Parser(value, 'machinefile').parse() res = self._evaluate_statement(ast.lines[0]) except MesonException: - raise EnvironmentException('Malformed value in machine file variable {!r}.'.format(entry)) + raise EnvironmentException(f'Malformed value in machine file variable {entry!r}.') except KeyError as e: raise EnvironmentException('Undefined constant {!r} in machine file variable {!r}.'.format(e.args[0], entry)) section[entry] = res @@ -927,9 +927,9 @@ def get_cmd_line_options(build_dir: str, options: argparse.Namespace) -> str: read_cmd_line_file(build_dir, copy) cmdline = ['-D{}={}'.format(str(k), v) for k, v in copy.cmd_line_options.items()] if options.cross_file: - cmdline += ['--cross-file {}'.format(f) for f in options.cross_file] + cmdline += [f'--cross-file {f}' for f in options.cross_file] if options.native_file: - cmdline += ['--native-file {}'.format(f) for f in options.native_file] + cmdline += [f'--native-file {f}' for f in options.native_file] return ' '.join([shlex.quote(x) for x in cmdline]) def major_versions_differ(v1: str, v2: str) -> bool: @@ -937,7 +937,7 @@ def major_versions_differ(v1: str, v2: str) -> bool: def load(build_dir: str) -> CoreData: filename = os.path.join(build_dir, 'meson-private', 'coredata.dat') - load_fail_msg = 'Coredata file {!r} is corrupted. Try with a fresh build tree.'.format(filename) + load_fail_msg = f'Coredata file {filename!r} is corrupted. Try with a fresh build tree.' try: with open(filename, 'rb') as f: obj = pickle.load(f) @@ -986,7 +986,7 @@ def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[Op try: (key, value) = o.split('=', 1) except ValueError: - raise MesonException('Option {!r} must have a value separated by equals sign.'.format(o)) + raise MesonException(f'Option {o!r} must have a value separated by equals sign.') k = OptionKey.from_string(key) if subproject: k = k.evolve(subproject=subproject) diff --git a/mesonbuild/dependencies/base.py b/mesonbuild/dependencies/base.py index bd94648..4410b67 100644 --- a/mesonbuild/dependencies/base.py +++ b/mesonbuild/dependencies/base.py @@ -88,12 +88,12 @@ def find_external_program(env: Environment, for_machine: MachineChoice, name: st # We never fallback if the user-specified option is no good, so # stop returning options. 
return - mlog.debug('{} binary missing from cross or native file, or env var undefined.'.format(display_name)) + mlog.debug(f'{display_name} binary missing from cross or native file, or env var undefined.') # Fallback on hard-coded defaults, if a default binary is allowed for use # with cross targets, or if this is not a cross target if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)): for potential_path in default_names: - mlog.debug('Trying a default {} fallback at'.format(display_name), potential_path) + mlog.debug(f'Trying a default {display_name} fallback at', potential_path) yield ExternalProgram(potential_path, silent=True) else: mlog.debug('Default target is not allowed for cross use') @@ -193,10 +193,10 @@ class Dependency: return [] def get_pkgconfig_variable(self, variable_name: str, kwargs: T.Dict[str, T.Any]) -> str: - raise DependencyException('{!r} is not a pkgconfig dependency'.format(self.name)) + raise DependencyException(f'{self.name!r} is not a pkgconfig dependency') def get_configtool_variable(self, variable_name): - raise DependencyException('{!r} is not a config-tool dependency'.format(self.name)) + raise DependencyException(f'{self.name!r} is not a config-tool dependency') def get_partial_dependency(self, *, compile_args: bool = False, link_args: bool = False, links: bool = False, @@ -238,7 +238,7 @@ class Dependency: pkgconfig_define: T.Optional[T.List[str]] = None) -> T.Union[str, T.List[str]]: if default_value is not None: return default_value - raise DependencyException('No default provided for dependency {!r}, which is not pkg-config, cmake, or config-tool based.'.format(self)) + raise DependencyException(f'No default provided for dependency {self!r}, which is not pkg-config, cmake, or config-tool based.') def generate_system_dependency(self, include_type: str) -> T.Type['Dependency']: new_dep = copy.deepcopy(self) @@ -312,7 +312,7 @@ class InternalDependency(Dependency): val = self.variables.get(internal, default_value) if val is not None: return val - raise DependencyException('Could not get an internal variable and no default provided for {!r}'.format(self)) + raise DependencyException(f'Could not get an internal variable and no default provided for {self!r}') def generate_link_whole_dependency(self) -> T.Type['Dependency']: new_dep = copy.deepcopy(self) @@ -399,10 +399,10 @@ class ExternalDependency(Dependency, HasNativeKwarg): found_msg = ['Dependency', mlog.bold(self.name), 'found:'] found_msg += [mlog.red('NO'), 'found', mlog.normal_cyan(self.version), 'but need:', - mlog.bold(', '.join(["'{}'".format(e) for e in not_found]))] + mlog.bold(', '.join([f"'{e}'" for e in not_found]))] if found: found_msg += ['; matched:', - ', '.join(["'{}'".format(e) for e in found])] + ', '.join([f"'{e}'" for e in found])] mlog.log(*found_msg) if self.required: @@ -518,9 +518,9 @@ class ConfigToolDependency(ExternalDependency): if self.config is None: found_msg.append(mlog.red('NO')) if version is not None and req_version is not None: - found_msg.append('found {!r} but need {!r}'.format(version, req_version)) + found_msg.append(f'found {version!r} but need {req_version!r}') elif req_version: - found_msg.append('need {!r}'.format(req_version)) + found_msg.append(f'need {req_version!r}') else: found_msg += [mlog.green('YES'), '({})'.format(' '.join(self.config)), version] @@ -543,14 +543,14 @@ class ConfigToolDependency(ExternalDependency): return [DependencyMethods.AUTO, DependencyMethods.CONFIG_TOOL] def 
get_configtool_variable(self, variable_name): - p, out, _ = Popen_safe(self.config + ['--{}'.format(variable_name)]) + p, out, _ = Popen_safe(self.config + [f'--{variable_name}']) if p.returncode != 0: if self.required: raise DependencyException( 'Could not get variable "{}" for dependency {}'.format( variable_name, self.name)) variable = out.strip() - mlog.debug('Got config-tool variable {} : {}'.format(variable_name, variable)) + mlog.debug(f'Got config-tool variable {variable_name} : {variable}') return variable def log_tried(self): @@ -575,7 +575,7 @@ class ConfigToolDependency(ExternalDependency): self.required = restore if default_value is not None: return default_value - raise DependencyException('Could not get config-tool variable and no default provided for {!r}'.format(self)) + raise DependencyException(f'Could not get config-tool variable and no default provided for {self!r}') class PkgConfigDependency(ExternalDependency): @@ -644,7 +644,7 @@ class PkgConfigDependency(ExternalDependency): # Fetch the libraries and library paths needed for using this self._set_libs() except DependencyException as e: - mlog.debug("pkg-config error with '%s': %s" % (name, e)) + mlog.debug(f"pkg-config error with '{name}': {e}") if self.required: raise else: @@ -663,7 +663,7 @@ class PkgConfigDependency(ExternalDependency): p, out, err = Popen_safe(cmd, env=env) rc, out, err = p.returncode, out.strip(), err.strip() call = ' '.join(cmd) - mlog.debug("Called `{}` -> {}\n{}".format(call, rc, out)) + mlog.debug(f"Called `{call}` -> {rc}\n{out}") return rc, out, err @staticmethod @@ -937,9 +937,9 @@ class PkgConfigDependency(ExternalDependency): if 'default' in kwargs: variable = kwargs['default'] else: - mlog.warning("pkgconfig variable '%s' not defined for dependency %s." % (variable_name, self.name)) + mlog.warning(f"pkgconfig variable '{variable_name}' not defined for dependency {self.name}.") - mlog.debug('Got pkgconfig variable %s : %s' % (variable_name, variable)) + mlog.debug(f'Got pkgconfig variable {variable_name} : {variable}') return variable @staticmethod @@ -948,7 +948,7 @@ class PkgConfigDependency(ExternalDependency): def check_pkgconfig(self, pkgbin): if not pkgbin.found(): - mlog.log('Did not find pkg-config by name {!r}'.format(pkgbin.name)) + mlog.log(f'Did not find pkg-config by name {pkgbin.name!r}') return None try: p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2] @@ -1022,7 +1022,7 @@ class PkgConfigDependency(ExternalDependency): pass if default_value is not None: return default_value - raise DependencyException('Could not get pkg-config variable and no default provided for {!r}'.format(self)) + raise DependencyException(f'Could not get pkg-config variable and no default provided for {self!r}') class CMakeDependency(ExternalDependency): # The class's copy of the CMake path. Avoids having to search for it @@ -1035,7 +1035,7 @@ class CMakeDependency(ExternalDependency): class_working_generator = None def _gen_exception(self, msg): - return DependencyException('Dependency {} not found: {}'.format(self.name, msg)) + return DependencyException(f'Dependency {self.name} not found: {msg}') def _main_cmake_file(self) -> str: return 'CMakeLists.txt' @@ -1109,7 +1109,7 @@ class CMakeDependency(ExternalDependency): self.cmakebin = CMakeExecutor(environment, CMakeDependency.class_cmake_version, self.for_machine, silent=self.silent) if not self.cmakebin.found(): self.cmakebin = None - msg = 'No CMake binary for machine {} not found. 
Giving up.'.format(self.for_machine) + msg = f'No CMake binary for machine {self.for_machine} not found. Giving up.' if self.required: raise DependencyException(msg) mlog.debug(msg) @@ -1179,8 +1179,8 @@ class CMakeDependency(ExternalDependency): CMakeDependency.class_working_generator = i break - mlog.debug('CMake failed to gather system information for generator {} with error code {}'.format(i, ret1)) - mlog.debug('OUT:\n{}\n\n\nERR:\n{}\n\n'.format(out1, err1)) + mlog.debug(f'CMake failed to gather system information for generator {i} with error code {ret1}') + mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n') # Check if any generator succeeded if ret1 != 0: @@ -1333,7 +1333,7 @@ class CMakeDependency(ExternalDependency): return True # Check the environment path - env_path = os.environ.get('{}_DIR'.format(name)) + env_path = os.environ.get(f'{name}_DIR') if env_path and find_module(env_path): return True @@ -1367,9 +1367,9 @@ class CMakeDependency(ExternalDependency): # Prepare options cmake_opts = [] - cmake_opts += ['-DNAME={}'.format(name)] + cmake_opts += [f'-DNAME={name}'] cmake_opts += ['-DARCHS={}'.format(';'.join(self.cmakeinfo['archs']))] - cmake_opts += ['-DVERSION={}'.format(package_version)] + cmake_opts += [f'-DVERSION={package_version}'] cmake_opts += ['-DCOMPS={}'.format(';'.join([x[0] for x in comp_mapped]))] cmake_opts += args cmake_opts += self.traceparser.trace_args() @@ -1387,8 +1387,8 @@ class CMakeDependency(ExternalDependency): CMakeDependency.class_working_generator = i break - mlog.debug('CMake failed for generator {} and package {} with error code {}'.format(i, name, ret1)) - mlog.debug('OUT:\n{}\n\n\nERR:\n{}\n\n'.format(out1, err1)) + mlog.debug(f'CMake failed for generator {i} and package {name} with error code {ret1}') + mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n') # Check if any generator succeeded if ret1 != 0: @@ -1429,8 +1429,8 @@ class CMakeDependency(ExternalDependency): for i in self.traceparser.targets: tg = i.lower() lname = name.lower() - if '{}::{}'.format(lname, lname) == tg or lname == tg.replace('::', ''): - mlog.debug('Guessed CMake target \'{}\''.format(i)) + if f'{lname}::{lname}' == tg or lname == tg.replace('::', ''): + mlog.debug(f'Guessed CMake target \'{i}\'') modules = [(i, True)] autodetected_module_list = True break @@ -1443,12 +1443,12 @@ class CMakeDependency(ExternalDependency): # Try to use old style variables if no module is specified if len(libs) > 0: - self.compile_args = list(map(lambda x: '-I{}'.format(x), incDirs)) + defs + self.compile_args = list(map(lambda x: f'-I{x}', incDirs)) + defs self.link_args = libs - mlog.debug('using old-style CMake variables for dependency {}'.format(name)) - mlog.debug('Include Dirs: {}'.format(incDirs)) - mlog.debug('Compiler Definitions: {}'.format(defs)) - mlog.debug('Libraries: {}'.format(libs)) + mlog.debug(f'using old-style CMake variables for dependency {name}') + mlog.debug(f'Include Dirs: {incDirs}') + mlog.debug(f'Compiler Definitions: {defs}') + mlog.debug(f'Libraries: {libs}') return # Even the old-style approach failed. 
Nothing else we can do here @@ -1520,20 +1520,20 @@ class CMakeDependency(ExternalDependency): if 'RELEASE' in cfgs: cfg = 'RELEASE' - if 'IMPORTED_IMPLIB_{}'.format(cfg) in tgt.properties: - libraries += [x for x in tgt.properties['IMPORTED_IMPLIB_{}'.format(cfg)] if x] + if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties: + libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x] elif 'IMPORTED_IMPLIB' in tgt.properties: libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x] - elif 'IMPORTED_LOCATION_{}'.format(cfg) in tgt.properties: - libraries += [x for x in tgt.properties['IMPORTED_LOCATION_{}'.format(cfg)] if x] + elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties: + libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x] elif 'IMPORTED_LOCATION' in tgt.properties: libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] if 'INTERFACE_LINK_LIBRARIES' in tgt.properties: otherDeps += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x] - if 'IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg) in tgt.properties: - otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES_{}'.format(cfg)] if x] + if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties: + otherDeps += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x] elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties: otherDeps += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x] @@ -1551,7 +1551,7 @@ class CMakeDependency(ExternalDependency): # as we do not have a compiler environment available to us, we cannot do the # same, but must assume any bare argument passed which is not also a CMake # target must be a system library we should try to link against - libraries += ["{}.lib".format(j)] + libraries += [f"{j}.lib"] else: mlog.warning('CMake: Dependency', mlog.bold(j), 'for', mlog.bold(name), 'target', mlog.bold(self._original_module_name(curr)), 'was not found') @@ -1563,16 +1563,16 @@ class CMakeDependency(ExternalDependency): compileOptions = sorted(set(compileOptions)) libraries = sorted(set(libraries)) - mlog.debug('Include Dirs: {}'.format(incDirs)) - mlog.debug('Compiler Definitions: {}'.format(compileDefinitions)) - mlog.debug('Compiler Options: {}'.format(compileOptions)) - mlog.debug('Libraries: {}'.format(libraries)) + mlog.debug(f'Include Dirs: {incDirs}') + mlog.debug(f'Compiler Definitions: {compileDefinitions}') + mlog.debug(f'Compiler Options: {compileOptions}') + mlog.debug(f'Libraries: {libraries}') - self.compile_args = compileOptions + compileDefinitions + ['-I{}'.format(x) for x in incDirs] + self.compile_args = compileOptions + compileDefinitions + [f'-I{x}' for x in incDirs] self.link_args = libraries def _get_build_dir(self) -> Path: - build_dir = Path(self.cmake_root_dir) / 'cmake_{}'.format(self.name) + build_dir = Path(self.cmake_root_dir) / f'cmake_{self.name}' build_dir.mkdir(parents=True, exist_ok=True) return build_dir @@ -1647,7 +1647,7 @@ class CMakeDependency(ExternalDependency): return v if default_value is not None: return default_value - raise DependencyException('Could not get cmake variable and no default provided for {!r}'.format(self)) + raise DependencyException(f'Could not get cmake variable and no default provided for {self!r}') class DubDependency(ExternalDependency): class_dubbin = None @@ -1759,7 +1759,7 @@ class DubDependency(ExternalDependency): for target in description['targets']: if target['rootPackage'] in packages: add_lib_args('libs', target) - 
add_lib_args('libs-{}'.format(platform.machine()), target) + add_lib_args(f'libs-{platform.machine()}', target) for file in target['buildSettings']['linkerFiles']: lib_path = self._find_right_lib_path(file, comp, description) if lib_path: @@ -2007,7 +2007,7 @@ class ExternalProgram: return commands + [script] except Exception as e: mlog.debug(e) - mlog.debug('Unusable script {!r}'.format(script)) + mlog.debug(f'Unusable script {script!r}') return None def _is_executable(self, path): @@ -2034,7 +2034,7 @@ class ExternalProgram: else: if mesonlib.is_windows(): for ext in self.windows_exts: - trial_ext = '{}.{}'.format(trial, ext) + trial_ext = f'{trial}.{ext}' if os.path.exists(trial_ext): return [trial_ext] return None @@ -2069,7 +2069,7 @@ class ExternalProgram: # but many people do it because it works in the MinGW shell. if os.path.isabs(name): for ext in self.windows_exts: - command = '{}.{}'.format(name, ext) + command = f'{name}.{ext}' if os.path.exists(command): return [command] # On Windows, interpreted scripts must have an extension otherwise they @@ -2217,7 +2217,7 @@ class ExtraFrameworkDependency(ExternalDependency): if not paths: paths = self.system_framework_paths for p in paths: - mlog.debug('Looking for framework {} in {}'.format(name, p)) + mlog.debug(f'Looking for framework {name} in {p}') # We need to know the exact framework path because it's used by the # Qt5 dependency class, and for setting the include path. We also # want to avoid searching in an invalid framework path which wastes @@ -2410,7 +2410,7 @@ def find_external_dependency(name, env, kwargs): raise DependencyException('Keyword "method" must be a string.') lname = name.lower() if lname not in _packages_accept_language and 'language' in kwargs: - raise DependencyException('%s dependency does not accept "language" keyword argument' % (name, )) + raise DependencyException(f'{name} dependency does not accept "language" keyword argument') if not isinstance(kwargs.get('version', ''), (str, list)): raise DependencyException('Keyword "Version" must be string or list.') @@ -2468,7 +2468,7 @@ def find_external_dependency(name, env, kwargs): tried = '' mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.red('NO'), - '(tried {})'.format(tried) if tried else '') + f'(tried {tried})' if tried else '') if required: # if an exception occurred with the first detection method, re-raise it @@ -2572,7 +2572,7 @@ def strip_system_libdirs(environment, for_machine: MachineChoice, link_args): in the system path, and a different version not in the system path if they want to link against the non-system path version. """ - exclude = {'-L{}'.format(p) for p in environment.get_compiler_system_dirs(for_machine)} + exclude = {f'-L{p}' for p in environment.get_compiler_system_dirs(for_machine)} return [l for l in link_args if l not in exclude] @@ -2582,7 +2582,7 @@ def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs) -> T.List return [method] # TODO: try/except? 
if method not in [e.value for e in DependencyMethods]: - raise DependencyException('method {!r} is invalid'.format(method)) + raise DependencyException(f'method {method!r} is invalid') method = DependencyMethods(method) # This sets per-tool config methods which are deprecated to to the new diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 8fb258e..2c735bc 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -91,11 +91,11 @@ class BoostIncludeDir(): major = int(self.version_int / 100000) minor = int((self.version_int / 100) % 1000) patch = int(self.version_int % 100) - self.version = '{}.{}.{}'.format(major, minor, patch) - self.version_lib = '{}_{}'.format(major, minor) + self.version = f'{major}.{minor}.{patch}' + self.version_lib = f'{major}_{minor}' def __repr__(self) -> str: - return ''.format(self.version, self.path) + return f'' def __lt__(self, other: object) -> bool: if isinstance(other, BoostIncludeDir): @@ -152,7 +152,7 @@ class BoostLibraryFile(): elif self.nvsuffix in ['a', 'lib']: self.static = True else: - raise DependencyException('Unable to process library extension "{}" ({})'.format(self.nvsuffix, self.path)) + raise DependencyException(f'Unable to process library extension "{self.nvsuffix}" ({self.path})') # boost_.lib is the dll import library if self.basename.startswith('boost_') and self.nvsuffix == 'lib': @@ -187,7 +187,7 @@ class BoostLibraryFile(): self.toolset = i def __repr__(self) -> str: - return ''.format(self.abitag, self.mod_name, self.path) + return f'' def __lt__(self, other: object) -> bool: if isinstance(other, BoostLibraryFile): @@ -320,7 +320,7 @@ class BoostLibraryFile(): elif vscrt in ['/MTd', '-MTd']: return (self.runtime_static or not self.static) and self.runtime_debug - mlog.warning('Boost: unknow vscrt tag {}. This may cause the compilation to fail. Please consider reporting this as a bug.'.format(vscrt), once=True) + mlog.warning(f'Boost: unknow vscrt tag {vscrt}. This may cause the compilation to fail. Please consider reporting this as a bug.', once=True) return True def get_compiler_args(self) -> T.List[str]: @@ -386,7 +386,7 @@ class BoostDependency(ExternalDependency): roots = list(mesonlib.OrderedSet(roots)) for j in roots: # 1. 
Look for the boost headers (boost/version.hpp) - mlog.debug('Checking potential boost root {}'.format(j.as_posix())) + mlog.debug(f'Checking potential boost root {j.as_posix()}') inc_dirs = self.detect_inc_dirs(j) inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions @@ -419,8 +419,8 @@ class BoostDependency(ExternalDependency): raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute') mlog.debug('Trying to find boost with:') - mlog.debug(' - boost_includedir = {}'.format(inc_dir)) - mlog.debug(' - boost_librarydir = {}'.format(lib_dir)) + mlog.debug(f' - boost_includedir = {inc_dir}') + mlog.debug(f' - boost_librarydir = {lib_dir}') return self.detect_split_root(inc_dir, lib_dir) @@ -447,7 +447,7 @@ class BoostDependency(ExternalDependency): for i in lib_dirs: libs = self.detect_libraries(i) if libs: - mlog.debug(' - found boost library dir: {}'.format(i)) + mlog.debug(f' - found boost library dir: {i}') # mlog.debug(' - raw library list:') # for j in libs: # mlog.debug(' - {}'.format(j)) @@ -456,12 +456,12 @@ class BoostDependency(ExternalDependency): modules = ['boost_' + x for x in self.modules] for inc in inc_dirs: - mlog.debug(' - found boost {} include dir: {}'.format(inc.version, inc.path)) + mlog.debug(f' - found boost {inc.version} include dir: {inc.path}') f_libs = self.filter_libraries(libs, inc.version_lib) mlog.debug(' - filtered library list:') for j in f_libs: - mlog.debug(' - {}'.format(j)) + mlog.debug(f' - {j}') # 3. Select the libraries matching the requested modules not_found = [] # type: T.List[str] @@ -505,14 +505,14 @@ class BoostDependency(ExternalDependency): self.compile_args += self._extra_compile_args() self.compile_args = list(mesonlib.OrderedSet(self.compile_args)) self.link_args = link_args - mlog.debug(' - final compile args: {}'.format(self.compile_args)) - mlog.debug(' - final link args: {}'.format(self.link_args)) + mlog.debug(f' - final compile args: {self.compile_args}') + mlog.debug(f' - final link args: {self.link_args}') return True # in case we missed something log it and try again mlog.debug(' - NOT found:') for mod in not_found: - mlog.debug(' - {}'.format(mod)) + mlog.debug(f' - {mod}') return False @@ -720,7 +720,7 @@ class BoostDependency(ExternalDependency): raw = hfile.read_text() m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw) if not m: - mlog.debug('Failed to extract version information from {}'.format(hfile)) + mlog.debug(f'Failed to extract version information from {hfile}') return BoostIncludeDir(hfile.parents[1], 0) return BoostIncludeDir(hfile.parents[1], int(m.group(1))) diff --git a/mesonbuild/dependencies/cuda.py b/mesonbuild/dependencies/cuda.py index 6d17b90..a8325ff 100644 --- a/mesonbuild/dependencies/cuda.py +++ b/mesonbuild/dependencies/cuda.py @@ -33,7 +33,7 @@ class CudaDependency(ExternalDependency): compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)] language = self._detect_language(compilers) if language not in self.supported_languages: - raise DependencyException('Language \'{}\' is not supported by the CUDA Toolkit. Supported languages are {}.'.format(language, self.supported_languages)) + raise DependencyException(f'Language \'{language}\' is not supported by the CUDA Toolkit. 
Supported languages are {self.supported_languages}.') super().__init__('cuda', environment, kwargs, language=language) self.requested_modules = self.get_requested(kwargs) @@ -45,13 +45,13 @@ class CudaDependency(ExternalDependency): return if not os.path.isabs(self.cuda_path): - raise DependencyException('CUDA Toolkit path must be absolute, got \'{}\'.'.format(self.cuda_path)) + raise DependencyException(f'CUDA Toolkit path must be absolute, got \'{self.cuda_path}\'.') # nvcc already knows where to find the CUDA Toolkit, but if we're compiling # a mixed C/C++/CUDA project, we still need to make the include dir searchable if self.language != 'cuda' or len(compilers) > 1: self.incdir = os.path.join(self.cuda_path, 'include') - self.compile_args += ['-I{}'.format(self.incdir)] + self.compile_args += [f'-I{self.incdir}'] if self.language != 'cuda': arch_libdir = self._detect_arch_libdir() @@ -81,11 +81,11 @@ class CudaDependency(ExternalDependency): # make sure nvcc version satisfies specified version requirements (found_some, not_found, found) = mesonlib.version_compare_many(nvcc_version, version_reqs) if not_found: - msg = 'The current nvcc version {} does not satisfy the specified CUDA Toolkit version requirements {}.'.format(nvcc_version, version_reqs) + msg = f'The current nvcc version {nvcc_version} does not satisfy the specified CUDA Toolkit version requirements {version_reqs}.' return self._report_dependency_error(msg, (None, None, False)) # use nvcc version to find a matching CUDA Toolkit - version_reqs = ['={}'.format(nvcc_version)] + version_reqs = [f'={nvcc_version}'] else: nvcc_version = None @@ -99,7 +99,7 @@ class CudaDependency(ExternalDependency): platform_msg = 'set the CUDA_PATH environment variable' if self._is_windows() \ else 'set the CUDA_PATH environment variable/create the \'/usr/local/cuda\' symbolic link' - msg = 'Please specify the desired CUDA Toolkit version (e.g. dependency(\'cuda\', version : \'>=10.1\')) or {} to point to the location of your desired version.'.format(platform_msg) + msg = f'Please specify the desired CUDA Toolkit version (e.g. dependency(\'cuda\', version : \'>=10.1\')) or {platform_msg} to point to the location of your desired version.' 
return self._report_dependency_error(msg, (None, None, False)) def _find_matching_toolkit(self, paths, version_reqs, nvcc_version): @@ -108,10 +108,10 @@ class CudaDependency(ExternalDependency): defaults, rest = mesonlib.partition(lambda t: not t[2], paths) defaults = list(defaults) paths = defaults + sorted(rest, key=lambda t: mesonlib.Version(t[1]), reverse=True) - mlog.debug('Search paths: {}'.format(paths)) + mlog.debug(f'Search paths: {paths}') if nvcc_version and defaults: - default_src = "the {} environment variable".format(self.env_var) if self.env_var else "the \'/usr/local/cuda\' symbolic link" + default_src = f"the {self.env_var} environment variable" if self.env_var else "the \'/usr/local/cuda\' symbolic link" nvcc_warning = 'The default CUDA Toolkit as designated by {} ({}) doesn\'t match the current nvcc version {} and will be ignored.'.format(default_src, os.path.realpath(defaults[0][0]), nvcc_version) else: nvcc_warning = None @@ -168,7 +168,7 @@ class CudaDependency(ExternalDependency): if m: return m.group(1) else: - mlog.warning('Could not detect CUDA Toolkit version for {}'.format(path)) + mlog.warning(f'Could not detect CUDA Toolkit version for {path}') except Exception as e: mlog.warning('Could not detect CUDA Toolkit version for {}: {}'.format(path, str(e))) @@ -188,7 +188,7 @@ class CudaDependency(ExternalDependency): # use // for floor instead of / which produces a float major = vers_int // 1000 # type: int minor = (vers_int - major * 1000) // 10 # type: int - return '{}.{}'.format(major, minor) + return f'{major}.{minor}' return None def _read_toolkit_version_txt(self, path: str) -> T.Optional[str]: @@ -238,10 +238,10 @@ class CudaDependency(ExternalDependency): for module in self.requested_modules: args = self.clib_compiler.find_library(module, self.env, [self.libdir] if self.libdir else []) if args is None: - self._report_dependency_error('Couldn\'t find requested CUDA module \'{}\''.format(module)) + self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'') all_found = False else: - mlog.debug('Link args for CUDA module \'{}\' are {}'.format(module, args)) + mlog.debug(f'Link args for CUDA module \'{module}\' are {args}') self.lib_modules[module] = args return all_found diff --git a/mesonbuild/dependencies/dev.py b/mesonbuild/dependencies/dev.py index c35022d..2ac91b1 100644 --- a/mesonbuild/dependencies/dev.py +++ b/mesonbuild/dependencies/dev.py @@ -305,7 +305,7 @@ class LLVMDependencyConfigTool(ConfigToolDependency): lib_ext = get_shared_library_suffix(environment, self.for_machine) libdir = self.get_config_value(['--libdir'], 'link_args')[0] # Sort for reproducibility - matches = sorted(glob.iglob(os.path.join(libdir, 'libLLVM*{}'.format(lib_ext)))) + matches = sorted(glob.iglob(os.path.join(libdir, f'libLLVM*{lib_ext}'))) if not matches: if self.required: raise @@ -314,7 +314,7 @@ class LLVMDependencyConfigTool(ConfigToolDependency): self.link_args = self.get_config_value(['--ldflags'], 'link_args') libname = os.path.basename(matches[0]).rstrip(lib_ext).lstrip('lib') - self.link_args.append('-l{}'.format(libname)) + self.link_args.append(f'-l{libname}') return elif self.static and mode == 'shared': # If, however LLVM_BUILD_SHARED_LIBS is true # (*cough* gentoo *cough*) @@ -353,12 +353,12 @@ class LLVMDependencyConfigTool(ConfigToolDependency): # called libLLVM-.(so|dylib|dll) libdir = self.get_config_value(['--libdir'], 'link_args')[0] - expected_name = 'libLLVM-{}'.format(self.version) - re_name = 
re.compile(r'{}.(so|dll|dylib)$'.format(expected_name)) + expected_name = f'libLLVM-{self.version}' + re_name = re.compile(fr'{expected_name}.(so|dll|dylib)$') for file_ in os.listdir(libdir): if re_name.match(file_): - self.link_args = ['-L{}'.format(libdir), + self.link_args = [f'-L{libdir}', '-l{}'.format(os.path.splitext(file_.lstrip('lib'))[0])] break else: @@ -379,7 +379,7 @@ class LLVMDependencyConfigTool(ConfigToolDependency): self.is_found = False if self.required: raise DependencyException( - 'Could not find required LLVM Component: {}'.format(mod)) + f'Could not find required LLVM Component: {mod}') status = '(missing)' else: status = '(missing but optional)' @@ -431,10 +431,10 @@ class LLVMDependencyCMake(CMakeDependency): def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]: res = [] for mod, required in modules: - cm_targets = self.traceparser.get_cmake_var('MESON_LLVM_TARGETS_{}'.format(mod)) + cm_targets = self.traceparser.get_cmake_var(f'MESON_LLVM_TARGETS_{mod}') if not cm_targets: if required: - raise self._gen_exception('LLVM module {} was not found'.format(mod)) + raise self._gen_exception(f'LLVM module {mod} was not found') else: mlog.warning('Optional LLVM module', mlog.bold(mod), 'was not found') continue @@ -443,7 +443,7 @@ class LLVMDependencyCMake(CMakeDependency): return res def _original_module_name(self, module: str) -> str: - orig_name = self.traceparser.get_cmake_var('MESON_TARGET_TO_LLVM_{}'.format(module)) + orig_name = self.traceparser.get_cmake_var(f'MESON_TARGET_TO_LLVM_{module}') if orig_name: return orig_name[0] return module @@ -493,7 +493,7 @@ class ZlibSystemDependency(ExternalDependency): else: return else: - mlog.debug('Unsupported OS {}'.format(m.system)) + mlog.debug(f'Unsupported OS {m.system}') return v, _ = self.clib_compiler.get_define('ZLIB_VERSION', '#include ', self.env, [], [self]) diff --git a/mesonbuild/dependencies/hdf5.py b/mesonbuild/dependencies/hdf5.py index 7c35a02..59c7382 100644 --- a/mesonbuild/dependencies/hdf5.py +++ b/mesonbuild/dependencies/hdf5.py @@ -41,7 +41,7 @@ class HDF5PkgConfigDependency(PkgConfigDependency): def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None: language = language or 'c' if language not in {'c', 'cpp', 'fortran'}: - raise DependencyException('Language {} is not supported with HDF5.'.format(language)) + raise DependencyException(f'Language {language} is not supported with HDF5.') super().__init__(name, environment, kwargs, language) if not self.is_found: @@ -92,7 +92,7 @@ class HDF5ConfigToolDependency(ConfigToolDependency): def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None: language = language or 'c' if language not in {'c', 'cpp', 'fortran'}: - raise DependencyException('Language {} is not supported with HDF5.'.format(language)) + raise DependencyException(f'Language {language} is not supported with HDF5.') if language == 'c': cenv = 'CC' @@ -117,12 +117,12 @@ class HDF5ConfigToolDependency(ConfigToolDependency): # linkers. 
compiler = environment.coredata.compilers[for_machine][language] try: - os.environ['HDF5_{}'.format(cenv)] = join_args(compiler.get_exelist()) - os.environ['HDF5_{}LINKER'.format(cenv)] = join_args(compiler.get_linker_exelist()) + os.environ[f'HDF5_{cenv}'] = join_args(compiler.get_exelist()) + os.environ[f'HDF5_{cenv}LINKER'] = join_args(compiler.get_linker_exelist()) super().__init__(name, environment, nkwargs, language) finally: - del os.environ['HDF5_{}'.format(cenv)] - del os.environ['HDF5_{}LINKER'.format(cenv)] + del os.environ[f'HDF5_{cenv}'] + del os.environ[f'HDF5_{cenv}LINKER'] if not self.is_found: return diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index b0b8f4a..46f2337 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -40,7 +40,7 @@ def netcdf_factory(env: 'Environment', for_machine: 'MachineChoice', kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyType']: language = kwargs.get('language', 'c') if language not in ('c', 'cpp', 'fortran'): - raise DependencyException('Language {} is not supported with NetCDF.'.format(language)) + raise DependencyException(f'Language {language} is not supported with NetCDF.') candidates = [] # type: T.List['DependencyType'] @@ -187,7 +187,7 @@ class Python3DependencySystem(ExternalDependency): return '32' elif pyplat in ('win64', 'win-amd64'): return '64' - mlog.log('Unknown Windows Python platform {!r}'.format(pyplat)) + mlog.log(f'Unknown Windows Python platform {pyplat!r}') return None def get_windows_link_args(self): @@ -195,13 +195,13 @@ class Python3DependencySystem(ExternalDependency): if pyplat.startswith('win'): vernum = sysconfig.get_config_var('py_version_nodot') if self.static: - libpath = Path('libs') / 'libpython{}.a'.format(vernum) + libpath = Path('libs') / f'libpython{vernum}.a' else: comp = self.get_compiler() if comp.id == "gcc": - libpath = 'python{}.dll'.format(vernum) + libpath = f'python{vernum}.dll' else: - libpath = Path('libs') / 'python{}.lib'.format(vernum) + libpath = Path('libs') / f'python{vernum}.lib' lib = Path(sysconfig.get_config_var('base')) / libpath elif pyplat == 'mingw': if self.static: @@ -230,7 +230,7 @@ class Python3DependencySystem(ExternalDependency): arch = '64' else: # We can't cross-compile Python 3 dependencies on Windows yet - mlog.log('Unknown architecture {!r} for'.format(arch), + mlog.log(f'Unknown architecture {arch!r} for', mlog.bold(self.name)) self.is_found = False return @@ -452,12 +452,12 @@ class CursesSystemDependency(ExternalDependency): # implementations. The one in illumos/OpenIndiana # doesn't seem to have a version defined in the header. 
if lib.startswith('ncurses'): - v, _ = self.clib_compiler.get_define('NCURSES_VERSION', '#include <{}>'.format(header), env, [], [self]) + v, _ = self.clib_compiler.get_define('NCURSES_VERSION', f'#include <{header}>', env, [], [self]) self.version = v.strip('"') if lib.startswith('pdcurses'): - v_major, _ = self.clib_compiler.get_define('PDC_VER_MAJOR', '#include <{}>'.format(header), env, [], [self]) - v_minor, _ = self.clib_compiler.get_define('PDC_VER_MINOR', '#include <{}>'.format(header), env, [], [self]) - self.version = '{}.{}'.format(v_major, v_minor) + v_major, _ = self.clib_compiler.get_define('PDC_VER_MAJOR', f'#include <{header}>', env, [], [self]) + v_minor, _ = self.clib_compiler.get_define('PDC_VER_MINOR', f'#include <{header}>', env, [], [self]) + self.version = f'{v_major}.{v_minor}' # Check the version if possible, emit a wraning if we can't req = kwargs.get('version') diff --git a/mesonbuild/dependencies/ui.py b/mesonbuild/dependencies/ui.py index d897d76..e323073 100644 --- a/mesonbuild/dependencies/ui.py +++ b/mesonbuild/dependencies/ui.py @@ -227,9 +227,9 @@ class QtBaseDependency(ExternalDependency): # It is important that this list does not change order as the order of # the returned ExternalPrograms will change as well bins = ['moc', 'uic', 'rcc', 'lrelease'] - found = {b: NonExistingExternalProgram(name='{}-{}'.format(b, self.name)) + found = {b: NonExistingExternalProgram(name=f'{b}-{self.name}') for b in bins} - wanted = '== {}'.format(self.version) + wanted = f'== {self.version}' def gen_bins(): for b in bins: @@ -237,7 +237,7 @@ class QtBaseDependency(ExternalDependency): yield os.path.join(self.bindir, b), b # prefer the -qt of the tool to the plain one, as we # don't know what the unsuffixed one points to without calling it. 
- yield '{}-{}'.format(b, self.name), b + yield f'{b}-{self.name}', b yield b, b for b, name in gen_bins(): @@ -510,7 +510,7 @@ class QtBaseDependency(ExternalDependency): return 'modules: ' + module_str def log_info(self): - return '{}'.format(self.from_text) + return f'{self.from_text}' def log_tried(self): return self.from_text diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index ba35d16..f2792c5 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -188,7 +188,7 @@ class Properties: assert isinstance(raw, str) cmake_toolchain_file = Path(raw) if not cmake_toolchain_file.is_absolute(): - raise EnvironmentException('cmake_toolchain_file ({}) is not absolute'.format(raw)) + raise EnvironmentException(f'cmake_toolchain_file ({raw}) is not absolute') return cmake_toolchain_file def get_cmake_skip_compiler_test(self) -> CMakeSkipCompilerTest: @@ -250,23 +250,23 @@ class MachineInfo: return not self.__eq__(other) def __repr__(self) -> str: - return ''.format(self.system, self.cpu_family, self.cpu) + return f'' @classmethod def from_literal(cls, literal: T.Dict[str, str]) -> 'MachineInfo': minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'} if set(literal) < minimum_literal: raise EnvironmentException( - 'Machine info is currently {}\n'.format(literal) + + f'Machine info is currently {literal}\n' + 'but is missing {}.'.format(minimum_literal - set(literal))) cpu_family = literal['cpu_family'] if cpu_family not in known_cpu_families: - mlog.warning('Unknown CPU family {}, please report this at https://github.com/mesonbuild/meson/issues/new'.format(cpu_family)) + mlog.warning(f'Unknown CPU family {cpu_family}, please report this at https://github.com/mesonbuild/meson/issues/new') endian = literal['endian'] if endian not in ('little', 'big'): - mlog.warning('Unknown endian {}'.format(endian)) + mlog.warning(f'Unknown endian {endian}') return cls(literal['system'], cpu_family, literal['cpu'], endian) diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 4d9d592..fe3eeb3 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -160,10 +160,10 @@ def _get_env_var(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T if value is not None: break else: - formatted = ', '.join(['{!r}'.format(var) for var in candidates]) - mlog.debug('None of {} are defined in the environment, not changing global flags.'.format(formatted)) + formatted = ', '.join([f'{var!r}' for var in candidates]) + mlog.debug(f'None of {formatted} are defined in the environment, not changing global flags.') return None - mlog.debug('Using {!r} from environment with value: {!r}'.format(var, value)) + mlog.debug(f'Using {var!r} from environment with value: {value!r}') return value @@ -759,7 +759,7 @@ class Environment: deprecated_properties.add(lang + '_link_args') for k, v in properties.properties.copy().items(): if k in deprecated_properties: - mlog.deprecation('{} in the [properties] section of the machine file is deprecated, use the [built-in options] section.'.format(k)) + mlog.deprecation(f'{k} in the [properties] section of the machine file is deprecated, use the [built-in options] section.') self.options[OptionKey.from_string(k).evolve(machine=machine)] = v del properties.properties[k] for section, values in config.items(): @@ -1004,7 +1004,7 @@ class Environment: compilers = [compilers] else: if not self.machines.matches_build_machine(for_machine): - raise EnvironmentException('{!r} compiler binary not defined in cross or native 
file'.format(lang)) + raise EnvironmentException(f'{lang!r} compiler binary not defined in cross or native file') compilers = getattr(self, 'default_' + lang) ccache = BinaryTable.detect_ccache() @@ -1016,11 +1016,11 @@ class Environment: return compilers, ccache, exe_wrap def _handle_exceptions(self, exceptions, binaries, bintype='compiler'): - errmsg = 'Unknown {}(s): {}'.format(bintype, binaries) + errmsg = f'Unknown {bintype}(s): {binaries}' if exceptions: errmsg += '\nThe following exception(s) were encountered:' for (c, e) in exceptions.items(): - errmsg += '\nRunning "{}" gave "{}"'.format(c, e) + errmsg += f'\nRunning "{c}" gave "{e}"' raise EnvironmentException(errmsg) @staticmethod @@ -1809,7 +1809,7 @@ class Environment: # Also ensure that we pass any extra arguments to the linker for l in exelist: - compiler.extend(['-C', 'link-arg={}'.format(l)]) + compiler.extend(['-C', f'link-arg={l}']) # This trickery with type() gets us the class of the linker # so we can initialize a new copy for the Rust Compiler diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 1a861fb..c8cf078 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -65,7 +65,7 @@ def stringifyUserArguments(args, quote=False): if isinstance(args, list): return '[%s]' % ', '.join([stringifyUserArguments(x, True) for x in args]) elif isinstance(args, dict): - return '{%s}' % ', '.join(['%s : %s' % (stringifyUserArguments(k, True), stringifyUserArguments(v, True)) for k, v in args.items()]) + return '{%s}' % ', '.join(['{} : {}'.format(stringifyUserArguments(k, True), stringifyUserArguments(v, True)) for k, v in args.items()]) elif isinstance(args, int): return str(args) elif isinstance(args, str): @@ -140,7 +140,7 @@ def extract_search_dirs(kwargs): # discard without failing for end-user ease of cross-platform directory arrays continue if not d.is_absolute(): - raise InvalidCode('Search directory {} is not an absolute path.'.format(d)) + raise InvalidCode(f'Search directory {d} is not an absolute path.') return list(map(str, search_dirs)) class TryRunResultHolder(InterpreterObject): @@ -1039,7 +1039,7 @@ class SubprojectHolder(InterpreterObject, ObjectHolder[T.Optional['Interpreter'] if len(args) == 2: return args[1] - raise InvalidArguments('Requested variable "{}" not found.'.format(varname)) + raise InvalidArguments(f'Requested variable "{varname}" not found.') header_permitted_kwargs = { 'required', @@ -1246,7 +1246,7 @@ class CompilerHolder(InterpreterObject): It can be removed in a future version of Meson. 
''' if not hasattr(self.compiler, 'get_feature_args'): - raise InterpreterException('This {} compiler has no feature arguments.'.format(self.compiler.get_display_language())) + raise InterpreterException(f'This {self.compiler.get_display_language()} compiler has no feature arguments.') build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir()) return self.compiler.get_feature_args({'unittest': 'true'}, build_to_src) @@ -1306,7 +1306,7 @@ class CompilerHolder(InterpreterObject): hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') - members = mlog.bold(', '.join(['"{}"'.format(m) for m in membernames])) + members = mlog.bold(', '.join([f'"{m}"' for m in membernames])) mlog.log('Checking whether type', mlog.bold(typename, True), 'has members', members, msg, hadtxt, cached) return had @@ -1537,7 +1537,7 @@ class CompilerHolder(InterpreterObject): dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if required and not haz: - raise InterpreterException('{} header {!r} not usable'.format(self.compiler.get_display_language(), hname)) + raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not usable') elif haz: h = mlog.green('YES') else: @@ -1565,7 +1565,7 @@ class CompilerHolder(InterpreterObject): extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if required and not haz: - raise InterpreterException('{} header {!r} not found'.format(self.compiler.get_display_language(), hname)) + raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not found') elif haz: h = mlog.green('YES') else: @@ -1585,7 +1585,7 @@ class CompilerHolder(InterpreterObject): raise InterpreterException('Prefix argument of has_header_symbol must be a string.') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) if disabled: - mlog.log('Header <{}> has symbol'.format(hname), mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled') + mlog.log(f'Header <{hname}> has symbol', mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled') return False extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) @@ -1593,13 +1593,13 @@ class CompilerHolder(InterpreterObject): extra_args=extra_args, dependencies=deps) if required and not haz: - raise InterpreterException('{} symbol {} not found in header {}'.format(self.compiler.get_display_language(), symbol, hname)) + raise InterpreterException(f'{self.compiler.get_display_language()} symbol {symbol} not found in header {hname}') elif haz: h = mlog.green('YES') else: h = mlog.red('NO') cached = mlog.blue('(cached)') if cached else '' - mlog.log('Header <{}> has symbol'.format(hname), mlog.bold(symbol, True), msg, h, cached) + mlog.log(f'Header <{hname}> has symbol', mlog.bold(symbol, True), msg, h, cached) return haz def notfound_library(self, libname): @@ -1777,7 +1777,7 @@ class ModuleObjectHolder(InterpreterObject, ObjectHolder['ModuleObject']): # FIXME: Port all modules to use the methods dict. method = getattr(modobj, method_name, None) if method_name.startswith('_'): - raise InvalidArguments('Method {!r} is private.'.format(method_name)) + raise InvalidArguments(f'Method {method_name!r} is private.') if not method: raise InvalidCode('Unknown method "%s" in object.' 
% method_name) if not getattr(method, 'no-args-flattening', False): @@ -1819,7 +1819,7 @@ class Summary: raise InterpreterException('list_sep keyword argument must be string') for k, v in values.items(): if k in self.sections[section]: - raise InterpreterException('Summary section {!r} already have key {!r}'.format(section, k)) + raise InterpreterException(f'Summary section {section!r} already have key {k!r}') formatted_values = [] for i in listify(v): i = unholder(i) @@ -1948,7 +1948,7 @@ class MesonMain(InterpreterObject): script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir)) elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)): if not allow_built: - raise InterpreterException('Arguments to {} cannot be built'.format(name)) + raise InterpreterException(f'Arguments to {name} cannot be built') new = True script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()]) @@ -2111,7 +2111,7 @@ class MesonMain(InterpreterObject): clist = self.interpreter.coredata.compilers[for_machine] if cname in clist: return CompilerHolder(clist[cname], self.build.environment, self.interpreter.subproject) - raise InterpreterException('Tried to access compiler for language "%s", not specified for %s machine.' % (cname, for_machine.get_lower_case_name())) + raise InterpreterException(f'Tried to access compiler for language "{cname}", not specified for {for_machine.get_lower_case_name()} machine.') @noPosargs @permittedKwargs({}) @@ -2618,7 +2618,7 @@ class Interpreter(InterpreterBase): try: module = importlib.import_module('mesonbuild.modules.' + modname) except ImportError: - raise InvalidArguments('Module "%s" does not exist' % (modname, )) + raise InvalidArguments(f'Module "{modname}" does not exist') ext_module = module.initialize(self) assert isinstance(ext_module, ModuleObject) self.modules[modname] = ext_module @@ -2634,7 +2634,7 @@ class Interpreter(InterpreterBase): try: # check if stable module exists self.import_module(plainname) - mlog.warning('Module %s is now stable, please use the %s module instead.' % (modname, plainname)) + mlog.warning(f'Module {modname} is now stable, please use the {plainname} module instead.') modname = plainname except InvalidArguments: mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' 
% modname, location=node) @@ -2662,13 +2662,13 @@ class Interpreter(InterpreterBase): try: (key, value) = v.split('=', 1) except ValueError: - raise InterpreterException('Variable {!r} must have a value separated by equals sign.'.format(v)) + raise InterpreterException(f'Variable {v!r} must have a value separated by equals sign.') variables[key.strip()] = value.strip() for k, v in variables.items(): if not k or not v: raise InterpreterException('Empty variable name or value') if any(c.isspace() for c in k): - raise InterpreterException('Invalid whitespace in variable name "{}"'.format(k)) + raise InterpreterException(f'Invalid whitespace in variable name "{k}"') if not isinstance(v, str): raise InterpreterException('variables values must be strings.') return variables @@ -2771,7 +2771,7 @@ external dependencies (including libraries) must go to "dependencies".''') ' and therefore cannot be used during configuration' raise InterpreterException(msg.format(progname, cmd.description())) if not cmd.found(): - raise InterpreterException('command {!r} not found or not executable'.format(cmd.get_name())) + raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable') elif isinstance(cmd, CompilerHolder): exelist = cmd.compiler.get_exelist() cmd = exelist[0] @@ -2894,7 +2894,7 @@ external dependencies (including libraries) must go to "dependencies".''') elif method == 'cmake': return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs) else: - raise InterpreterException('The method {} is invalid for the subproject {}'.format(method, subp_name)) + raise InterpreterException(f'The method {method} is invalid for the subproject {subp_name}') # Invalid code is always an error except InvalidCode: raise @@ -2934,7 +2934,7 @@ external dependencies (including libraries) must go to "dependencies".''') pv = subi.project_version wanted = kwargs['version'] if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]: - raise InterpreterException('Subproject %s version is %s but %s required.' % (subp_name, pv, wanted)) + raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.') self.active_projectname = current_active self.subprojects.update(subi.subprojects) self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings) @@ -3086,7 +3086,7 @@ external dependencies (including libraries) must go to "dependencies".''') raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.') proj_name, *proj_langs = args if ':' in proj_name: - raise InvalidArguments("Project name {!r} must not contain ':'".format(proj_name)) + raise InvalidArguments(f"Project name {proj_name!r} must not contain ':'") # This needs to be evaluated as early as possible, as meson uses this # for things like deprecation testing. 
@@ -3094,7 +3094,7 @@ external dependencies (including libraries) must go to "dependencies".''') cv = coredata.version pv = kwargs['meson_version'] if not mesonlib.version_compare(cv, pv): - raise InterpreterException('Meson version is %s but project requires %s' % (cv, pv)) + raise InterpreterException(f'Meson version is {cv} but project requires {pv}') mesonlib.project_meson_versions[self.subproject] = kwargs['meson_version'] if os.path.exists(self.option_file): @@ -3258,11 +3258,11 @@ external dependencies (including libraries) must go to "dependencies".''') for name, subp in sorted(self.subprojects.items()): value = subp.found() if subp.disabled_feature: - value = [value, 'Feature {!r} disabled'.format(subp.disabled_feature)] + value = [value, f'Feature {subp.disabled_feature!r} disabled'] elif subp.exception: value = [value, str(subp.exception)] elif subp.warnings > 0: - value = [value, '{} warnings'.format(subp.warnings)] + value = [value, f'{subp.warnings} warnings'] all_subprojects[name] = value if all_subprojects: self.summary_impl('Subprojects', all_subprojects, @@ -3457,7 +3457,7 @@ external dependencies (including libraries) must go to "dependencies".''') if not is_found: mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'), 'found', mlog.normal_cyan(version), 'but need:', - mlog.bold(', '.join(["'{}'".format(e) for e in not_found])), *extra_info) + mlog.bold(', '.join([f"'{e}'" for e in not_found])), *extra_info) if required: m = 'Invalid version of program, need {!r} {!r} found {!r}.' raise InterpreterException(m.format(progobj.get_name(), not_found, version)) @@ -3549,7 +3549,7 @@ external dependencies (including libraries) must go to "dependencies".''') mlog.log('Dependency', mlog.bold(name), 'found:', mlog.red('NO'), 'found', mlog.normal_cyan(found_vers), 'but need:', - mlog.bold(', '.join(["'{}'".format(e) for e in wanted_vers])), + mlog.bold(', '.join([f"'{e}'" for e in wanted_vers])), *info) return identifier, NotFoundDependency(self.environment) else: @@ -3631,7 +3631,7 @@ external dependencies (including libraries) must go to "dependencies".''') # This is raised by get_variable_method() if varname does no exist # in the subproject. Just add the reason in the not-found message # that will be printed later. - extra_info.append(mlog.blue('(Variable {!r} not found)'.format(varname))) + extra_info.append(mlog.blue(f'(Variable {varname!r} not found)')) if not isinstance(dep, DependencyHolder): raise InvalidCode('Fetched variable {!r} in the subproject {!r} is ' @@ -3650,7 +3650,7 @@ external dependencies (including libraries) must go to "dependencies".''') mlog.log('Dependency', mlog.bold(display_name), 'from subproject', mlog.bold(subproject.subdir), 'found:', mlog.red('NO'), 'found', mlog.normal_cyan(found), 'but need:', - mlog.bold(', '.join(["'{}'".format(e) for e in wanted]))) + mlog.bold(', '.join([f"'{e}'" for e in wanted]))) if required: raise DependencyException('Version {} of subproject dependency {} already ' 'cached, requested incompatible version {} for ' @@ -3709,7 +3709,7 @@ external dependencies (including libraries) must go to "dependencies".''') wanted = kwargs['include_type'] actual = d.include_type_method([], {}) if wanted != actual: - mlog.debug('Current include type of {} is {}. Converting to requested {}'.format(name, actual, wanted)) + mlog.debug(f'Current include type of {name} is {actual}. 
Converting to requested {wanted}') d = d.as_system_method([wanted], {}) # Override this dependency to have consistent results in subsequent # dependency lookups. @@ -3934,7 +3934,7 @@ external dependencies (including libraries) must go to "dependencies".''') else: vcs = mesonlib.detect_vcs(source_dir) if vcs: - mlog.log('Found %s repository at %s' % (vcs['name'], vcs['wc_dir'])) + mlog.log('Found {} repository at {}'.format(vcs['name'], vcs['wc_dir'])) vcs_cmd = vcs['get_rev'].split() regex_selector = vcs['rev_regex'] else: @@ -4006,7 +4006,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self mlog.debug('Wrong type:', str(i)) raise InterpreterException('Invalid argument to run_target.') if isinstance(i, dependencies.ExternalProgram) and not i.found(): - raise InterpreterException('Tried to use non-existing executable {!r}'.format(i.name)) + raise InterpreterException(f'Tried to use non-existing executable {i.name!r}') cleaned_args.append(i) name = args[0] if not isinstance(name, str): @@ -4085,7 +4085,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self if not isinstance(name, str): raise InterpreterException('First argument of test must be a string.') if ':' in name: - mlog.deprecation('":" is not allowed in test name "{}", it has been replaced with "_"'.format(name), + mlog.deprecation(f'":" is not allowed in test name "{name}", it has been replaced with "_"', location=node) name = name.replace(':', '_') exe = args[1] @@ -4223,7 +4223,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self absname = os.path.join(self.environment.get_source_dir(), buildfilename) if not os.path.isfile(absname): self.subdir = prev_subdir - raise InterpreterException("Non-existent build file '{!s}'".format(buildfilename)) + raise InterpreterException(f"Non-existent build file '{buildfilename!s}'") with open(absname, encoding='utf8') as f: code = f.read() assert(isinstance(code, str)) @@ -4415,7 +4415,7 @@ This will become a hard error in the future.''' % kwargs['input'], location=self ofile_rpath = os.path.join(self.subdir, output) if ofile_rpath in self.configure_file_outputs: mesonbuildfile = os.path.join(self.subdir, 'meson.build') - current_call = "{}:{}".format(mesonbuildfile, self.current_lineno) + current_call = f"{mesonbuildfile}:{self.current_lineno}" first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath]) mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call) else: @@ -4655,25 +4655,25 @@ different subdirectory. 
optargs = ('-O0', '-O2', '-O3', '-Os', '/O1', '/O2', '/Os') for arg in args: if arg in warnargs: - mlog.warning('Consider using the built-in warning_level option instead of using "{}".'.format(arg), + mlog.warning(f'Consider using the built-in warning_level option instead of using "{arg}".', location=self.current_node) elif arg in optargs: - mlog.warning('Consider using the built-in optimization level instead of using "{}".'.format(arg), + mlog.warning(f'Consider using the built-in optimization level instead of using "{arg}".', location=self.current_node) elif arg == '-Werror': - mlog.warning('Consider using the built-in werror option instead of using "{}".'.format(arg), + mlog.warning(f'Consider using the built-in werror option instead of using "{arg}".', location=self.current_node) elif arg == '-g': - mlog.warning('Consider using the built-in debug option instead of using "{}".'.format(arg), + mlog.warning(f'Consider using the built-in debug option instead of using "{arg}".', location=self.current_node) elif arg == '-pipe': mlog.warning("You don't need to add -pipe, Meson will use it automatically when it is available.", location=self.current_node) elif arg.startswith('-fsanitize'): - mlog.warning('Consider using the built-in option for sanitizers instead of using "{}".'.format(arg), + mlog.warning(f'Consider using the built-in option for sanitizers instead of using "{arg}".', location=self.current_node) elif arg.startswith('-std=') or arg.startswith('/std:'): - mlog.warning('Consider using the built-in option for language standard version instead of using "{}".'.format(arg), + mlog.warning(f'Consider using the built-in option for language standard version instead of using "{arg}".', location=self.current_node) def add_global_arguments(self, node, argsdict, args, kwargs): @@ -4702,7 +4702,7 @@ different subdirectory. 
raise InvalidCode(msg) if 'language' not in kwargs: - raise InvalidCode('Missing language definition in {}'.format(node.func_name)) + raise InvalidCode(f'Missing language definition in {node.func_name}') self.warn_about_builtin_args(args) @@ -4779,9 +4779,9 @@ Try setting b_lundef to false instead.'''.format(self.coredata.options[OptionKey return project_root = Path(srcdir, self.root_subdir) if project_root not in norm.parents: - raise InterpreterException('Sandbox violation: Tried to grab file {} outside current (sub)project.'.format(norm.name)) + raise InterpreterException(f'Sandbox violation: Tried to grab file {norm.name} outside current (sub)project.') if project_root / self.subproject_dir in norm.parents: - raise InterpreterException('Sandbox violation: Tried to grab file {} from a nested subproject.'.format(norm.name)) + raise InterpreterException(f'Sandbox violation: Tried to grab file {norm.name} from a nested subproject.') def source_strings_to_files(self, sources: T.List[str]) -> T.List[mesonlib.File]: mesonlib.check_direntry_issues(sources) diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index 376f8b0..aba9ed4 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -65,7 +65,7 @@ class ObjectHolder(T.Generic[TV_InterpreterObject]): self.subproject = subproject def __repr__(self) -> str: - return ''.format(self.held_object) + return f'' class MesonVersionString(str): pass @@ -101,7 +101,7 @@ def _get_callee_args(wrapped_args: T.Sequence[T.Any], want_subproject: bool = Fa kwargs = None subproject = wrapped_args[1].subproject else: - raise AssertionError('Unknown args: {!r}'.format(wrapped_args)) + raise AssertionError(f'Unknown args: {wrapped_args!r}') elif n == 3: # Methods on objects (*Holder, MesonMain, etc) have 3 args: self, args, kwargs node = s.current_node @@ -134,7 +134,7 @@ def _get_callee_args(wrapped_args: T.Sequence[T.Any], want_subproject: bool = Fa if want_subproject: subproject = wrapped_args[2].subproject else: - raise AssertionError('Unknown args: {!r}'.format(wrapped_args)) + raise AssertionError(f'Unknown args: {wrapped_args!r}') # Sometimes interpreter methods are called internally with None instead of # empty list/dict args = args if args is not None else [] @@ -174,7 +174,7 @@ def builtinMethodNoKwargs(f: TV_func) -> TV_func: method_name = wrapped_args[2] kwargs = wrapped_args[4] if kwargs: - mlog.warning('Method {!r} does not take keyword arguments.'.format(method_name), + mlog.warning(f'Method {method_name!r} does not take keyword arguments.', 'This will become a hard error in the future', location=node) return f(*wrapped_args, **wrapped_kwargs) @@ -224,7 +224,7 @@ class permittedKwargs: s, node, args, kwargs, _ = _get_callee_args(wrapped_args) for k in kwargs: if k not in self.permitted: - mlog.warning('''Passed invalid keyword argument "{}".'''.format(k), location=node) + mlog.warning(f'''Passed invalid keyword argument "{k}".''', location=node) mlog.warning('This will become a hard error in the future.') return f(*wrapped_args, **wrapped_kwargs) return T.cast(TV_func, wrapped) @@ -418,7 +418,7 @@ class FeatureCheckBase(metaclass=abc.ABCMeta): def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any: subproject = _get_callee_args(wrapped_args, want_subproject=True)[4] if subproject is None: - raise AssertionError('{!r}'.format(wrapped_args)) + raise AssertionError(f'{wrapped_args!r}') self.use(subproject) return f(*wrapped_args, **wrapped_kwargs) return T.cast(TV_func, wrapped) @@ -444,14 
+444,14 @@ class FeatureNew(FeatureCheckBase): @staticmethod def get_warning_str_prefix(tv: str) -> str: - return 'Project specifies a minimum meson_version \'{}\' but uses features which were added in newer versions:'.format(tv) + return f'Project specifies a minimum meson_version \'{tv}\' but uses features which were added in newer versions:' def log_usage_warning(self, tv: str) -> None: args = [ - 'Project targeting', "'{}'".format(tv), + 'Project targeting', f"'{tv}'", 'but tried to use feature introduced in', - "'{}':".format(self.feature_version), - '{}.'.format(self.feature_name), + f"'{self.feature_version}':", + f'{self.feature_name}.', ] if self.extra_message: args.append(self.extra_message) @@ -476,10 +476,10 @@ class FeatureDeprecated(FeatureCheckBase): def log_usage_warning(self, tv: str) -> None: args = [ - 'Project targeting', "'{}'".format(tv), + 'Project targeting', f"'{tv}'", 'but tried to use feature deprecated since', - "'{}':".format(self.feature_version), - '{}.'.format(self.feature_name), + f"'{self.feature_version}':", + f'{self.feature_name}.', ] if self.extra_message: args.append(self.extra_message) @@ -505,7 +505,7 @@ class FeatureCheckKwargsBase(metaclass=abc.ABCMeta): def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any: kwargs, subproject = _get_callee_args(wrapped_args, want_subproject=True)[3:5] if subproject is None: - raise AssertionError('{!r}'.format(wrapped_args)) + raise AssertionError(f'{wrapped_args!r}') for arg in self.kwargs: if arg not in kwargs: continue @@ -755,7 +755,7 @@ class InterpreterBase: if isinstance(result, Disabler): return result if not(isinstance(result, bool)): - raise InvalidCode('If clause {!r} does not evaluate to true or false.'.format(result)) + raise InvalidCode(f'If clause {result!r} does not evaluate to true or false.') if result: prev_meson_version = mesonlib.project_meson_versions[self.subproject] if self.tmp_meson_version: @@ -1067,7 +1067,7 @@ The result of this is undefined and will become a hard error in a future Meson r return Disabler() if method_name == 'extract_objects': if not isinstance(obj, ObjectHolder): - raise InvalidArguments('Invalid operation "extract_objects" on variable "{}"'.format(object_name)) + raise InvalidArguments(f'Invalid operation "extract_objects" on variable "{object_name}"') self.validate_extraction(obj.held_object) obj.current_node = node return obj.method_call(method_name, args, kwargs) @@ -1161,7 +1161,7 @@ The result of this is undefined and will become a hard error in a future Meson r try: return int(obj) except Exception: - raise InterpreterException('String {!r} cannot be converted to int'.format(obj)) + raise InterpreterException(f'String {obj!r} cannot be converted to int') elif method_name == 'join': if len(posargs) != 1: raise InterpreterException('Join() takes exactly one argument.') @@ -1206,7 +1206,7 @@ The result of this is undefined and will become a hard error in a future Meson r def arg_replace(match: T.Match[str]) -> str: idx = int(match.group(1)) if idx >= len(arg_strings): - raise InterpreterException('Format placeholder @{}@ out of range.'.format(idx)) + raise InterpreterException(f'Format placeholder @{idx}@ out of range.') return arg_strings[idx] return re.sub(r'@(\d+)@', arg_replace, templ) @@ -1283,7 +1283,7 @@ The result of this is undefined and will become a hard error in a future Meson r return self.evaluate_statement(fallback) return fallback - raise InterpreterException('Key {!r} is not in the dictionary.'.format(key)) + raise 
InterpreterException(f'Key {key!r} is not in the dictionary.') if method_name == 'keys': if len(posargs) != 0: @@ -1326,7 +1326,7 @@ The result of this is undefined and will become a hard error in a future Meson r raise InterpreterException('Kwargs argument must not contain a "kwargs" entry. Points for thinking meta, though. :P') for k, v in to_expand.items(): if k in kwargs: - raise InterpreterException('Entry "{}" defined both as a keyword argument and in a "kwarg" entry.'.format(k)) + raise InterpreterException(f'Entry "{k}" defined both as a keyword argument and in a "kwarg" entry.') kwargs[k] = v return kwargs diff --git a/mesonbuild/linkers.py b/mesonbuild/linkers.py index f4c6dca..2807003 100644 --- a/mesonbuild/linkers.py +++ b/mesonbuild/linkers.py @@ -209,7 +209,7 @@ class CcrxLinker(StaticLinker): return False def get_output_args(self, target: str) -> T.List[str]: - return ['-output={}'.format(target)] + return [f'-output={target}'] def get_linker_always_args(self) -> T.List[str]: return ['-nologo', '-form=library'] @@ -225,7 +225,7 @@ class Xc16Linker(StaticLinker): return False def get_output_args(self, target: str) -> T.List[str]: - return ['{}'.format(target)] + return [f'{target}'] def get_linker_always_args(self) -> T.List[str]: return ['rcs'] @@ -240,7 +240,7 @@ class CompCertLinker(StaticLinker): return False def get_output_args(self, target: str) -> T.List[str]: - return ['-o{}'.format(target)] + return [f'-o{target}'] class C2000Linker(StaticLinker): @@ -253,7 +253,7 @@ class C2000Linker(StaticLinker): return False def get_output_args(self, target: str) -> T.List[str]: - return ['{}'.format(target)] + return [f'{target}'] def get_linker_always_args(self) -> T.List[str]: return ['-r'] @@ -345,7 +345,7 @@ class DynamicLinker(metaclass=abc.ABCMeta): return self.id def get_version_string(self) -> str: - return '({} {})'.format(self.id, self.version) + return f'({self.id} {self.version})' def get_exelist(self) -> T.List[str]: return self.exelist.copy() @@ -412,18 +412,18 @@ class DynamicLinker(metaclass=abc.ABCMeta): def get_link_whole_for(self, args: T.List[str]) -> T.List[str]: raise mesonlib.EnvironmentException( - 'Linker {} does not support link_whole'.format(self.id)) + f'Linker {self.id} does not support link_whole') def get_allow_undefined_args(self) -> T.List[str]: raise mesonlib.EnvironmentException( - 'Linker {} does not support allow undefined'.format(self.id)) + f'Linker {self.id} does not support allow undefined') @abc.abstractmethod def get_output_args(self, outname: str) -> T.List[str]: pass def get_coverage_args(self) -> T.List[str]: - m = "Linker {} doesn't implement coverage data generation.".format(self.id) + m = f"Linker {self.id} doesn't implement coverage data generation." raise mesonlib.EnvironmentException(m) @abc.abstractmethod @@ -583,7 +583,7 @@ class GnuLikeDynamicLinkerMixin: # For PE/COFF the soname argument has no effect return [] sostr = '' if soversion is None else '.' 
+ soversion - return self._apply_prefix('-soname,{}{}.{}{}'.format(prefix, shlib_name, suffix, sostr)) + return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}') def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, @@ -803,7 +803,7 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, suffix: str, soversion: str, darwin_versions: T.Tuple[str, str], is_shared_module: bool) -> T.List[str]: - raise mesonlib.MesonException('{} does not support shared libraries.'.format(self.id)) + raise mesonlib.MesonException(f'{self.id} does not support shared libraries.') def get_asneeded_args(self) -> T.List[str]: return [] @@ -835,7 +835,7 @@ class CcrxDynamicLinker(DynamicLinker): return [] def get_output_args(self, outputname: str) -> T.List[str]: - return ['-output={}'.format(outputname)] + return [f'-output={outputname}'] def get_search_args(self, dirname: str) -> 'T.NoReturn': raise OSError('rlink.exe does not have a search dir argument') @@ -875,7 +875,7 @@ class Xc16DynamicLinker(DynamicLinker): return [] def get_output_args(self, outputname: str) -> T.List[str]: - return ['-o{}'.format(outputname)] + return [f'-o{outputname}'] def get_search_args(self, dirname: str) -> 'T.NoReturn': raise OSError('xc16-gcc.exe does not have a search dir argument') @@ -919,10 +919,10 @@ class CompCertDynamicLinker(DynamicLinker): return [] def get_output_args(self, outputname: str) -> T.List[str]: - return ['-o{}'.format(outputname)] + return [f'-o{outputname}'] def get_search_args(self, dirname: str) -> T.List[str]: - return ['-L{}'.format(dirname)] + return [f'-L{dirname}'] def get_allow_undefined_args(self) -> T.List[str]: return [] @@ -930,7 +930,7 @@ class CompCertDynamicLinker(DynamicLinker): def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, suffix: str, soversion: str, darwin_versions: T.Tuple[str, str], is_shared_module: bool) -> T.List[str]: - raise mesonlib.MesonException('{} does not support shared libraries.'.format(self.id)) + raise mesonlib.MesonException(f'{self.id} does not support shared libraries.') def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, rpath_paths: str, build_rpath: str, @@ -963,7 +963,7 @@ class C2000DynamicLinker(DynamicLinker): return [] def get_output_args(self, outputname: str) -> T.List[str]: - return ['-z', '--output_file={}'.format(outputname)] + return ['-z', f'--output_file={outputname}'] def get_search_args(self, dirname: str) -> 'T.NoReturn': raise OSError('cl2000.exe does not have a search dir argument') @@ -1262,13 +1262,13 @@ class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): paths = padding else: paths = paths + ':' + padding - return (self._apply_prefix('-rpath,{}'.format(paths)), rpath_dirs_to_remove) + return (self._apply_prefix(f'-rpath,{paths}'), rpath_dirs_to_remove) def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, suffix: str, soversion: str, darwin_versions: T.Tuple[str, str], is_shared_module: bool) -> T.List[str]: sostr = '' if soversion is None else '.' 
+ soversion - return self._apply_prefix('-soname,{}{}.{}{}'.format(prefix, shlib_name, suffix, sostr)) + return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}') class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker): diff --git a/mesonbuild/mcompile.py b/mesonbuild/mcompile.py index b7653d2..d6f54e3 100644 --- a/mesonbuild/mcompile.py +++ b/mesonbuild/mcompile.py @@ -57,7 +57,7 @@ def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]: """ path_to_intro = builddir / 'meson-info' / 'intro-targets.json' if not path_to_intro.exists(): - raise MesonException('`{}` is missing! Directory is not configured yet?'.format(path_to_intro.name)) + raise MesonException(f'`{path_to_intro.name}` is missing! Directory is not configured yet?') with path_to_intro.open() as f: schema = json.load(f) @@ -78,7 +78,7 @@ class ParsedTargetName: if len(split) > 1: self.type = split[1] if not self._is_valid_type(self.type): - raise MesonException('Can\'t invoke target `{}`: unknown target type: `{}`'.format(target, self.type)) + raise MesonException(f'Can\'t invoke target `{target}`: unknown target type: `{self.type}`') split = split[0].rsplit('/', 1) if len(split) > 1: @@ -103,7 +103,7 @@ class ParsedTargetName: def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]: if target.name not in introspect_data: - raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name)) + raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found') intro_targets = introspect_data[target.name] found_targets = [] # type: T.List[T.Dict[str, T.Any]] @@ -123,9 +123,9 @@ def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introsp found_targets += [intro_target] if not found_targets: - raise MesonException('Can\'t invoke target `{}`: target not found'.format(target.full_name)) + raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found') elif len(found_targets) > 1: - raise MesonException('Can\'t invoke target `{}`: ambigious name. Add target type and/or path: `PATH/NAME:TYPE`'.format(target.full_name)) + raise MesonException(f'Can\'t invoke target `{target.full_name}`: ambigious name. Add target type and/or path: `PATH/NAME:TYPE`') return found_targets[0] @@ -219,7 +219,7 @@ def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.Tuple # In msbuild `-maxCpuCount` with no number means "detect cpus", the default is `-maxCpuCount:1` if options.jobs > 0: - cmd.append('-maxCpuCount:{}'.format(options.jobs)) + cmd.append(f'-maxCpuCount:{options.jobs}') else: cmd.append('-maxCpuCount') @@ -351,7 +351,7 @@ def run(options: 'argparse.Namespace') -> int: cmd, env = get_parsed_args_xcode(options, bdir) else: raise MesonException( - 'Backend `{}` is not yet supported by `compile`. Use generated project files directly instead.'.format(backend)) + f'Backend `{backend}` is not yet supported by `compile`. 
Use generated project files directly instead.') p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer, env=env) diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 13530ec..c458115 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -73,7 +73,7 @@ class Conf: self.coredata = intr.coredata self.default_values_only = True else: - raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir)) + raise ConfException(f'Directory {build_dir} is neither a Meson build directory nor a project source directory.') def clear_cache(self): self.coredata.deps.host.clear() diff --git a/mesonbuild/mdist.py b/mesonbuild/mdist.py index 4547b38..3085f44 100644 --- a/mesonbuild/mdist.py +++ b/mesonbuild/mdist.py @@ -86,13 +86,13 @@ def run_dist_scripts(src_root, bld_root, dist_root, dist_scripts): env['MESON_BUILD_ROOT'] = bld_root for d in dist_scripts: name = ' '.join(d.cmd_args) - print('Running custom dist script {!r}'.format(name)) + print(f'Running custom dist script {name!r}') try: rc = run_exe(d, env) if rc != 0: sys.exit('Dist script errored out') except OSError: - print('Failed to run dist script {!r}'.format(name)) + print(f'Failed to run dist script {name!r}') sys.exit(1) def git_root(src_root): @@ -222,7 +222,7 @@ def run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_ return 0 def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir): - print('Testing distribution package {}'.format(packagename)) + print(f'Testing distribution package {packagename}') unpackdir = os.path.join(privdir, 'dist-unpack') builddir = os.path.join(privdir, 'dist-build') installdir = os.path.join(privdir, 'dist-install') @@ -242,19 +242,19 @@ def check_dist(packagename, meson_command, extra_meson_args, bld_root, privdir): ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args) if ret > 0: - print('Dist check build directory was {}'.format(builddir)) + print(f'Dist check build directory was {builddir}') else: windows_proof_rmtree(unpackdir) windows_proof_rmtree(builddir) windows_proof_rmtree(installdir) - print('Distribution package {} tested'.format(packagename)) + print(f'Distribution package {packagename} tested') return ret def determine_archives_to_generate(options): result = [] for i in options.formats.split(','): if i not in archive_choices: - sys.exit('Value "{}" not one of permitted values {}.'.format(i, archive_choices)) + sys.exit(f'Value "{i}" not one of permitted values {archive_choices}.') result.append(i) if len(i) == 0: sys.exit('No archive types specified.') @@ -264,7 +264,7 @@ def run(options): options.wd = os.path.abspath(options.wd) buildfile = Path(options.wd) / 'meson-private' / 'build.dat' if not buildfile.is_file(): - raise MesonException('Directory {!r} does not seem to be a Meson build directory.'.format(options.wd)) + raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.') b = build.load(options.wd) # This import must be load delayed, otherwise it will get the default # value of None. diff --git a/mesonbuild/mesonlib/universal.py b/mesonbuild/mesonlib/universal.py index 4f1b6e8..b0c50c8 100644 --- a/mesonbuild/mesonlib/universal.py +++ b/mesonbuild/mesonlib/universal.py @@ -214,7 +214,7 @@ def set_meson_command(mainfile: str) -> None: _meson_command = python_command + [mainfile] # We print this value for unit tests. 
if 'MESON_COMMAND_TESTS' in os.environ: - mlog.log('meson_command is {!r}'.format(_meson_command)) + mlog.log(f'meson_command is {_meson_command!r}') def get_meson_command() -> T.Optional[T.List[str]]: @@ -428,7 +428,7 @@ def get_compiler_for_source(compilers: T.Iterable['CompilerType'], src: str) -> for comp in compilers: if comp.can_compile(src): return comp - raise MesonException('No specified compiler can handle file {!s}'.format(src)) + raise MesonException(f'No specified compiler can handle file {src!s}') def classify_unity_sources(compilers: T.Iterable['CompilerType'], sources: T.Iterable[str]) -> T.Dict['CompilerType', T.List[str]]: @@ -487,7 +487,7 @@ class PerMachine(T.Generic[_T]): return unfreeze def __repr__(self) -> str: - return 'PerMachine({!r}, {!r})'.format(self.build, self.host) + return f'PerMachine({self.build!r}, {self.host!r})' class PerThreeMachine(PerMachine[_T]): @@ -522,7 +522,7 @@ class PerThreeMachine(PerMachine[_T]): return self.build == self[machine] def __repr__(self) -> str: - return 'PerThreeMachine({!r}, {!r}, {!r})'.format(self.build, self.host, self.target) + return f'PerThreeMachine({self.build!r}, {self.host!r}, {self.target!r})' class PerMachineDefaultable(PerMachine[T.Optional[_T]]): @@ -543,7 +543,7 @@ class PerMachineDefaultable(PerMachine[T.Optional[_T]]): return freeze def __repr__(self) -> str: - return 'PerMachineDefaultable({!r}, {!r})'.format(self.build, self.host) + return f'PerMachineDefaultable({self.build!r}, {self.host!r})' class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Optional[_T]]): @@ -567,7 +567,7 @@ class PerThreeMachineDefaultable(PerMachineDefaultable, PerThreeMachine[T.Option return freeze def __repr__(self) -> str: - return 'PerThreeMachineDefaultable({!r}, {!r}, {!r})'.format(self.build, self.host, self.target) + return f'PerThreeMachineDefaultable({self.build!r}, {self.host!r}, {self.target!r})' def is_sunos() -> bool: @@ -650,7 +650,7 @@ def darwin_get_object_archs(objpath: str) -> T.List[str]: ''' _, stdo, stderr = Popen_safe(['lipo', '-info', objpath]) if not stdo: - mlog.debug('lipo {}: {}'.format(objpath, stderr)) + mlog.debug(f'lipo {objpath}: {stderr}') return None stdo = stdo.rsplit(': ', 1)[1] # Convert from lipo-style archs to meson-style CPUs @@ -704,10 +704,10 @@ class Version: self._v = sequences3 def __str__(self) -> str: - return '%s (V=%s)' % (self._s, str(self._v)) + return '{} (V={})'.format(self._s, str(self._v)) def __repr__(self) -> str: - return '<Version: {}>'.format(self._s) + return f'<Version: {self._s}>' def __lt__(self, other: object) -> bool: if isinstance(other, Version): @@ -932,7 +932,7 @@ if is_windows(): # https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/ _whitespace = ' \t\n\r' - _find_unsafe_char = re.compile(r'[{}"]'.format(_whitespace)).search + _find_unsafe_char = re.compile(fr'[{_whitespace}"]').search def quote_arg(arg: str) -> str: if arg and not _find_unsafe_char(arg): @@ -1073,7 +1073,7 @@ def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData', v result = v else: result = get_cmake_define(line, confdata) - result = '#define %s %s\n' % (varname, result) + result = f'#define {varname} {result}\n' (result, missing_variable) = do_replacement(regex, result, variable_format, confdata) return result else: @@ -1087,7 +1087,7 @@ def get_variable_regex(variable_format: str = 'meson') -> T.Pattern[str]: elif variable_format == 'cmake': regex =
re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}') else: - raise MesonException('Format "{}" not handled'.format(variable_format)) + raise MesonException(f'Format "{variable_format}" not handled') return regex def do_conf_str (data: list, confdata: 'ConfigurationData', variable_format: str, @@ -1118,7 +1118,7 @@ def do_conf_str (data: list, confdata: 'ConfigurationData', variable_format: str line = do_define(regex, line, confdata, variable_format) else: if not line_is_valid(line,variable_format): - raise MesonException('Format "{}" mismatched'.format(variable_format)) + raise MesonException(f'Format "{variable_format}" mismatched') line, missing = do_replacement(regex, line, variable_format, confdata) missing_variables.update(missing) if missing: @@ -1133,7 +1133,7 @@ def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_for with open(src, encoding=encoding, newline='') as f: data = f.readlines() except Exception as e: - raise MesonException('Could not read input file %s: %s' % (src, str(e))) + raise MesonException('Could not read input file {}: {}'.format(src, str(e))) (result, missing_variables, confdata_useless) = do_conf_str(data, confdata, variable_format, encoding) dst_tmp = dst + '~' @@ -1141,7 +1141,7 @@ def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData', variable_for with open(dst_tmp, 'w', encoding=encoding, newline='') as f: f.writelines(result) except Exception as e: - raise MesonException('Could not write output file %s: %s' % (dst, str(e))) + raise MesonException('Could not write output file {}: {}'.format(dst, str(e))) shutil.copymode(src, dst_tmp) replace_if_different(dst, dst_tmp) return missing_variables, confdata_useless @@ -1181,11 +1181,11 @@ def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: ofile.write('; %s\n' % line) if isinstance(v, bool): if v: - ofile.write('%sdefine %s\n\n' % (prefix, k)) + ofile.write(f'{prefix}define {k}\n\n') else: - ofile.write('%sundef %s\n\n' % (prefix, k)) + ofile.write(f'{prefix}undef {k}\n\n') elif isinstance(v, (int, str)): - ofile.write('%sdefine %s %s\n\n' % (prefix, k, v)) + ofile.write(f'{prefix}define {k} {v}\n\n') else: raise MesonException('Unknown data type in configuration file entry: ' + k) replace_if_different(ofilename, ofilename_tmp) @@ -1489,7 +1489,7 @@ def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T values['@INPUT@'] = inputs for (ii, vv) in enumerate(inputs): # Write out @INPUT0@, @INPUT1@, ... - values['@INPUT{}@'.format(ii)] = vv + values[f'@INPUT{ii}@'] = vv if len(inputs) == 1: # Just one value, substitute @PLAINNAME@ and @BASENAME@ values['@PLAINNAME@'] = plain = os.path.basename(inputs[0]) @@ -1498,7 +1498,7 @@ def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T # Gather values derived from the outputs, similar to above. values['@OUTPUT@'] = outputs for (ii, vv) in enumerate(outputs): - values['@OUTPUT{}@'.format(ii)] = vv + values[f'@OUTPUT{ii}@'] = vv # Outdir should be the same for all outputs values['@OUTDIR@'] = os.path.dirname(outputs[0]) # Many external programs fail on empty arguments. 
@@ -1719,7 +1719,7 @@ class ProgressBarFallback: # lgtm [py/iter-returns-non-self] if self.total and bar_type == 'download': print('Download size:', self.total) if desc: - print('{}: '.format(desc), end='') + print(f'{desc}: ', end='') # Pretend to be an iterator when called as one and don't print any # progress diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index 9bf6ef6..173e998 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -165,7 +165,7 @@ def run_script_command(script_name, script_args): try: return module.run(script_args) except MesonException as e: - mlog.error('Error in {} helper script:'.format(script_name)) + mlog.error(f'Error in {script_name} helper script:') mlog.exception(e) return 1 @@ -183,7 +183,7 @@ def ensure_stdout_accepts_unicode(): def run(original_args, mainfile): if sys.version_info < (3, 6): print('Meson works correctly only with python 3.6+.') - print('You have python {}.'.format(sys.version)) + print(f'You have python {sys.version}.') print('Please update your environment') return 1 diff --git a/mesonbuild/minit.py b/mesonbuild/minit.py index 55e716c..8707c06 100644 --- a/mesonbuild/minit.py +++ b/mesonbuild/minit.py @@ -147,8 +147,8 @@ def add_arguments(parser: 'argparse.ArgumentParser') -> None: parser.add_argument("-b", "--build", action='store_true', help="build after generation") parser.add_argument("--builddir", default='build', help="directory for build") parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.") - parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help="project type. default: {} based project".format(DEFAULT_PROJECT)) - parser.add_argument('--version', default=DEFAULT_VERSION, help="project version. default: {}".format(DEFAULT_VERSION)) + parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help=f"project type. default: {DEFAULT_PROJECT} based project") + parser.add_argument('--version', default=DEFAULT_VERSION, help=f"project version. default: {DEFAULT_VERSION}") def run(options: 'argparse.Namespace') -> int: ''' diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index 785ff58..5713335 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -392,7 +392,7 @@ class Installer: raise RuntimeError('Destination {!r} already exists and is not ' 'a file'.format(to_file)) if self.should_preserve_existing_file(from_file, to_file): - append_to_log(self.lf, '# Preserving old file {}\n'.format(to_file)) + append_to_log(self.lf, f'# Preserving old file {to_file}\n') self.preserved_file_count += 1 return False self.remove(to_file) @@ -401,7 +401,7 @@ class Installer: dirmaker, outdir = makedirs # Create dirs if needed dirmaker.makedirs(outdir, exist_ok=True) - self.log('Installing {} to {}'.format(from_file, outdir)) + self.log(f'Installing {from_file} to {outdir}') if os.path.islink(from_file): if not os.path.exists(from_file): # Dangling symlink. Replicate as is. @@ -443,9 +443,9 @@ class Installer: each element of the set is a path relative to src_dir. 
''' if not os.path.isabs(src_dir): - raise ValueError('src_dir must be absolute, got {}'.format(src_dir)) + raise ValueError(f'src_dir must be absolute, got {src_dir}') if not os.path.isabs(dst_dir): - raise ValueError('dst_dir must be absolute, got {}'.format(dst_dir)) + raise ValueError(f'dst_dir must be absolute, got {dst_dir}') if exclude is not None: exclude_files, exclude_dirs = exclude else: @@ -463,7 +463,7 @@ class Installer: if os.path.isdir(abs_dst): continue if os.path.exists(abs_dst): - print('Tried to copy directory {} but a file of that name already exists.'.format(abs_dst)) + print(f'Tried to copy directory {abs_dst} but a file of that name already exists.') sys.exit(1) dm.makedirs(abs_dst) self.copystat(abs_src, abs_dst) @@ -475,7 +475,7 @@ class Installer: continue abs_dst = os.path.join(dst_dir, filepart) if os.path.isdir(abs_dst): - print('Tried to copy file {} but a directory of that name already exists.'.format(abs_dst)) + print(f'Tried to copy file {abs_dst} but a directory of that name already exists.') sys.exit(1) parent_dir = os.path.dirname(abs_dst) if not os.path.isdir(parent_dir): @@ -538,7 +538,7 @@ class Installer: continue self.did_install_something = True full_dst_dir = get_destdir_path(destdir, fullprefix, i.install_path) - self.log('Installing subdir {} to {}'.format(i.path, full_dst_dir)) + self.log(f'Installing subdir {i.path} to {full_dst_dir}') dm.makedirs(full_dst_dir, exist_ok=True) self.do_copydir(d, i.path, full_dst_dir, i.exclude, i.install_mode, dm) @@ -591,18 +591,18 @@ class Installer: continue name = ' '.join(i.cmd_args) if i.skip_if_destdir and destdir: - self.log('Skipping custom install script because DESTDIR is set {!r}'.format(name)) + self.log(f'Skipping custom install script because DESTDIR is set {name!r}') continue self.did_install_something = True # Custom script must report itself if it does nothing. 
- self.log('Running custom install script {!r}'.format(name)) + self.log(f'Running custom install script {name!r}') try: rc = self.run_exe(i, env) except OSError: - print('FAILED: install script \'{}\' could not be run, stopped'.format(name)) + print(f'FAILED: install script \'{name}\' could not be run, stopped') # POSIX shells return 127 when a command could not be found sys.exit(127) if rc != 0: - print('FAILED: install script \'{}\' exit code {}, stopped'.format(name, rc)) + print(f'FAILED: install script \'{name}\' exit code {rc}, stopped') sys.exit(rc) def install_targets(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None: @@ -612,10 +612,10 @@ class Installer: if not os.path.exists(t.fname): # For example, import libraries of shared modules are optional if t.optional: - self.log('File {!r} not found, skipping'.format(t.fname)) + self.log(f'File {t.fname!r} not found, skipping') continue else: - raise RuntimeError('File {!r} could not be found'.format(t.fname)) + raise RuntimeError(f'File {t.fname!r} could not be found') file_copied = False # not set when a directory is copied fname = check_for_stampfile(t.fname) outdir = get_destdir_path(destdir, fullprefix, t.outdir) @@ -627,7 +627,7 @@ class Installer: install_name_mappings = t.install_name_mappings install_mode = t.install_mode if not os.path.exists(fname): - raise RuntimeError('File {!r} could not be found'.format(fname)) + raise RuntimeError(f'File {fname!r} could not be found') elif os.path.isfile(fname): file_copied = self.do_copyfile(fname, outname, makedirs=(dm, outdir)) self.set_mode(outname, install_mode, d.install_umask) @@ -639,8 +639,8 @@ class Installer: returncode, stdo, stde = self.Popen_safe(d.strip_bin + [outname]) if returncode != 0: print('Could not strip file.\n') - print('Stdout:\n{}\n'.format(stdo)) - print('Stderr:\n{}\n'.format(stde)) + print(f'Stdout:\n{stdo}\n') + print(f'Stderr:\n{stde}\n') sys.exit(1) if fname.endswith('.js'): # Emscripten outputs js files and optionally a wasm file. 
@@ -655,7 +655,7 @@ class Installer: dm.makedirs(outdir, exist_ok=True) self.do_copydir(d, fname, outname, None, install_mode, dm) else: - raise RuntimeError('Unknown file type for {!r}'.format(fname)) + raise RuntimeError(f'Unknown file type for {fname!r}') printed_symlink_error = False for alias, to in aliases.items(): try: @@ -695,7 +695,7 @@ def rebuild_all(wd: str) -> bool: ret = subprocess.run(ninja + ['-C', wd]).returncode if ret != 0: - print('Could not rebuild {}'.format(wd)) + print(f'Could not rebuild {wd}') return False return True diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index f4b8e0b..430b2f1 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -391,7 +391,7 @@ def get_infodir(builddir: T.Optional[str] = None) -> str: def get_info_file(infodir: str, kind: T.Optional[str] = None) -> str: return os.path.join(infodir, - 'meson-info.json' if not kind else 'intro-{}.json'.format(kind)) + 'meson-info.json' if not kind else f'intro-{kind}.json') def load_info_file(infodir: str, kind: T.Optional[str] = None) -> T.Any: with open(get_info_file(infodir, kind)) as fp: @@ -509,7 +509,7 @@ def write_meson_info_file(builddata: build.Build, errors: list, build_files_upda if not intro_types[i].func: continue intro_info[i] = { - 'file': 'intro-{}.json'.format(i), + 'file': f'intro-{i}.json', 'updated': i in updated_introspection_files } diff --git a/mesonbuild/mlog.py b/mesonbuild/mlog.py index dbc48a1..15fdb8d 100644 --- a/mesonbuild/mlog.py +++ b/mesonbuild/mlog.py @@ -127,7 +127,7 @@ class AnsiDecorator: if with_codes and self.code: text = self.code + self.text + AnsiDecorator.plain_code if self.quoted: - text = '"{}"'.format(text) + text = f'"{text}"' return text def __len__(self) -> int: @@ -230,7 +230,7 @@ def debug(*args: T.Union[str, AnsiDecorator], **kwargs: T.Any) -> None: def _debug_log_cmd(cmd: str, args: T.List[str]) -> None: if not _in_ci: return - args = ['"{}"'.format(x) for x in args] # Quote all args, just in case + args = [f'"{x}"' for x in args] # Quote all args, just in case debug('!meson_ci!/{} {}'.format(cmd, ' '.join(args))) def cmd_ci_include(file: str) -> None: @@ -276,7 +276,7 @@ def log_once(*args: T.Union[str, AnsiDecorator], is_error: bool = False, # This would more accurately embody what this function can handle, but we # don't have that yet, so instead we'll do some casting to work around it def get_error_location_string(fname: str, lineno: str) -> str: - return '{}:{}:'.format(fname, lineno) + return f'{fname}:{lineno}:' def _log_error(severity: str, *rargs: T.Union[str, AnsiDecorator], once: bool = False, fatal: bool = True, **kwargs: T.Any) -> None: @@ -350,7 +350,7 @@ def exception(e: Exception, prefix: T.Optional[AnsiDecorator] = None) -> None: # Mypy doesn't follow hasattr, and it's pretty easy to visually inspect # that this is correct, so we'll just ignore it. 
path = get_relative_path(Path(e.file), Path(os.getcwd())) # type: ignore - args.append('{}:{}:{}:'.format(path, e.lineno, e.colno)) # type: ignore + args.append(f'{path}:{e.lineno}:{e.colno}:') # type: ignore if prefix: args.append(prefix) args.append(str(e)) diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py index 1cf7c1c..ddb5e3e 100644 --- a/mesonbuild/modules/__init__.py +++ b/mesonbuild/modules/__init__.py @@ -82,7 +82,7 @@ def get_include_args(include_dirs, prefix='-I'): dirs_str = [] for dirs in unholder(include_dirs): if isinstance(dirs, str): - dirs_str += ['%s%s' % (prefix, dirs)] + dirs_str += [f'{prefix}{dirs}'] continue # Should be build.IncludeDirs object. @@ -91,10 +91,10 @@ def get_include_args(include_dirs, prefix='-I'): expdir = os.path.join(basedir, d) srctreedir = os.path.join('@SOURCE_ROOT@', expdir) buildtreedir = os.path.join('@BUILD_ROOT@', expdir) - dirs_str += ['%s%s' % (prefix, buildtreedir), - '%s%s' % (prefix, srctreedir)] + dirs_str += [f'{prefix}{buildtreedir}', + f'{prefix}{srctreedir}'] for d in dirs.get_extra_build_dirs(): - dirs_str += ['%s%s' % (prefix, d)] + dirs_str += [f'{prefix}{d}'] return dirs_str diff --git a/mesonbuild/modules/cmake.py b/mesonbuild/modules/cmake.py index 18dc2f5..fd92ecf 100644 --- a/mesonbuild/modules/cmake.py +++ b/mesonbuild/modules/cmake.py @@ -92,7 +92,7 @@ class CMakeSubprojectHolder(InterpreterObject, ObjectHolder): tgt = args[0] res = self.held_object.cm_interpreter.target_info(tgt) if res is None: - raise InterpreterException('The CMake target {} does not exist\n'.format(tgt) + + raise InterpreterException(f'The CMake target {tgt} does not exist\n' + ' Use the following command in your meson.build to list all available targets:\n\n' + ' message(\'CMaket targets:\\n - \' + \'\\n - \'.join(<cmake_subproject>.target_list()))') @@ -235,7 +235,7 @@ class CmakeModule(ExtensionModule): cmakebin = dependencies.ExternalProgram('cmake', silent=False) p, stdout, stderr = mesonlib.Popen_safe(cmakebin.get_command() + ['--system-information', '-G', 'Ninja'])[0:3] if p.returncode != 0: - mlog.log('error retrieving cmake information: returnCode={} stdout={} stderr={}'.format(p.returncode, stdout, stderr)) + mlog.log(f'error retrieving cmake information: returnCode={p.returncode} stdout={stdout} stderr={stderr}') return False match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', stdout.strip()) @@ -273,11 +273,11 @@ class CmakeModule(ExtensionModule): if not isinstance(pkgroot, str): raise mesonlib.MesonException('Install_dir must be a string.') - template_file = os.path.join(self.cmake_root, 'Modules', 'BasicConfigVersion-{}.cmake.in'.format(compatibility)) + template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in') if not os.path.exists(template_file): - raise mesonlib.MesonException('your cmake installation doesn\'t support the {} compatibility'.format(compatibility)) + raise mesonlib.MesonException(f'your cmake installation doesn\'t support the {compatibility} compatibility') - version_file = os.path.join(state.environment.scratch_dir, '{}ConfigVersion.cmake'.format(name)) + version_file = os.path.join(state.environment.scratch_dir, f'{name}ConfigVersion.cmake') conf = { 'CVF_VERSION': (version, ''), @@ -298,7 +298,7 @@ class CmakeModule(ExtensionModule): with open(infile) as fin: data = fin.readlines() except Exception as e: - raise mesonlib.MesonException('Could not read input file %s: %s' % (infile, str(e))) + raise mesonlib.MesonException('Could not read input file {}: 
{}'.format(infile, str(e))) result = [] regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@') @@ -339,7 +339,7 @@ class CmakeModule(ExtensionModule): raise mesonlib.MesonException('"name" not specified.') name = kwargs['name'] - (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, '{}Config.cmake'.format(name))) + (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, f'{name}Config.cmake')) ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname) install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)) diff --git a/mesonbuild/modules/fs.py b/mesonbuild/modules/fs.py index fd1e99b..9594460 100644 --- a/mesonbuild/modules/fs.py +++ b/mesonbuild/modules/fs.py @@ -113,7 +113,7 @@ class FSModule(ExtensionModule): def hash(self, state: 'ModuleState', args: T.Tuple[str, str], kwargs: T.Dict[str, T.Any]) -> ModuleReturnValue: file = self._resolve_dir(state, args[0]) if not file.is_file(): - raise MesonException('{} is not a file and therefore cannot be hashed'.format(file)) + raise MesonException(f'{file} is not a file and therefore cannot be hashed') try: h = hashlib.new(args[1]) except ValueError: @@ -127,7 +127,7 @@ class FSModule(ExtensionModule): def size(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> ModuleReturnValue: file = self._resolve_dir(state, args[0]) if not file.is_file(): - raise MesonException('{} is not a file and therefore cannot be sized'.format(file)) + raise MesonException(f'{file} is not a file and therefore cannot be sized') try: return ModuleReturnValue(file.stat().st_size, []) except ValueError: diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index a0b0de8..f1df18a 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -200,7 +200,7 @@ class GnomeModule(ExtensionModule): 'to generate it at configure-time.' 
raise MesonException(m) else: - raise MesonException('Invalid file argument: {!r}'.format(ifile)) + raise MesonException(f'Invalid file argument: {ifile!r}') depend_files, depends, subdirs = self._get_gresource_dependencies( state, ifile, source_dirs, dependencies) @@ -424,7 +424,7 @@ class GnomeModule(ExtensionModule): lib_dir = os.path.dirname(lib) external_ldflags.update(["-L%s" % lib_dir]) if include_rpath: - external_ldflags.update(['-Wl,-rpath {}'.format(lib_dir)]) + external_ldflags.update([f'-Wl,-rpath {lib_dir}']) libname = os.path.basename(lib) if libname.startswith("lib"): libname = libname[3:] @@ -443,7 +443,7 @@ class GnomeModule(ExtensionModule): cflags.update(get_include_args(dep.get_include_dirs())) depends.append(dep) else: - mlog.log('dependency {!r} not handled to build gir files'.format(dep)) + mlog.log(f'dependency {dep!r} not handled to build gir files') continue if use_gir_args and self._gir_has_option('--extra-library'): @@ -528,14 +528,14 @@ class GnomeModule(ExtensionModule): includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True) for inc in unholder(includes): if isinstance(inc, str): - ret += ['--include=%s' % (inc, )] + ret += [f'--include={inc}'] elif isinstance(inc, GirTarget): gir_inc_dirs += [ os.path.join(state.environment.get_build_dir(), inc.get_subdir()), ] ret += [ - "--include-uninstalled=%s" % (os.path.join(inc.get_subdir(), inc.get_basename()), ) + "--include-uninstalled={}".format(os.path.join(inc.get_subdir(), inc.get_basename())) ] depends += [inc] else: @@ -615,7 +615,7 @@ class GnomeModule(ExtensionModule): ret += ["-L@BUILD_ROOT@/{}".format(os.path.dirname(libpath))] libname = girtarget.get_basename() else: - libname = os.path.join("@BUILD_ROOT@/{}".format(libpath)) + libname = os.path.join(f"@BUILD_ROOT@/{libpath}") ret += ['--library', libname] # Needed for the following binutils bug: # https://github.com/mesonbuild/meson/issues/1911 @@ -682,7 +682,7 @@ class GnomeModule(ExtensionModule): gir_filelist_dir = state.backend.get_target_private_dir_abs(girtargets[0]) if not os.path.isdir(gir_filelist_dir): os.mkdir(gir_filelist_dir) - gir_filelist_filename = os.path.join(gir_filelist_dir, '%s_%s_gir_filelist' % (ns, nsversion)) + gir_filelist_filename = os.path.join(gir_filelist_dir, f'{ns}_{nsversion}_gir_filelist') with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist: for s in unholder(libsources): @@ -813,7 +813,7 @@ class GnomeModule(ExtensionModule): if not nsversion: raise MesonException('Missing "nsversion" keyword argument') libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True) - girfile = '%s-%s.gir' % (ns, nsversion) + girfile = f'{ns}-{nsversion}.gir' srcdir = os.path.join(state.environment.get_source_dir(), state.subdir) builddir = os.path.join(state.environment.get_build_dir(), state.subdir) depends = gir_dep.sources + girtargets @@ -875,7 +875,7 @@ class GnomeModule(ExtensionModule): scan_target = self._make_gir_target(state, girfile, scan_command, depends, kwargs) - typelib_output = '%s-%s.typelib' % (ns, nsversion) + typelib_output = f'{ns}-{nsversion}.typelib' typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@'] typelib_cmd += get_include_args(gir_inc_dirs, prefix='--includedir=') @@ -1010,7 +1010,7 @@ class GnomeModule(ExtensionModule): mode = kwargs.get('mode', 'auto') VALID_MODES = ('xml', 'sgml', 'none', 'auto') if mode not in VALID_MODES: - raise MesonException('gtkdoc: Mode {} is not a valid mode: {}'.format(mode, VALID_MODES)) + raise MesonException(f'gtkdoc: Mode 
{mode} is not a valid mode: {VALID_MODES}') src_dirs = mesonlib.extract_as_list(kwargs, 'src_dir') header_dirs = [] @@ -1040,7 +1040,7 @@ class GnomeModule(ExtensionModule): program_name = 'gtkdoc-' + tool program = self.interpreter.find_program_impl(program_name) path = program.held_object.get_path() - args.append('--{}={}'.format(program_name, path)) + args.append(f'--{program_name}={path}') if namespace: args.append('--namespace=' + namespace) args += self._unpack_args('--htmlargs=', 'html_args', kwargs) @@ -1075,7 +1075,7 @@ class GnomeModule(ExtensionModule): s)) else: raise MesonException( - 'Invalid object type: {!r}'.format(s.__class__.__name__)) + f'Invalid object type: {s.__class__.__name__!r}') args += ['--content-files=' + '@@'.join(content_files)] args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state) @@ -1363,7 +1363,7 @@ class GnomeModule(ExtensionModule): cmd += ['--' + arg.replace('_', '-'), value] elif arg not in known_custom_target_kwargs: raise MesonException( - 'Mkenums does not take a %s keyword argument.' % (arg, )) + f'Mkenums does not take a {arg} keyword argument.') cmd = [self.interpreter.find_program_impl(['glib-mkenums', 'mkenums'])] + cmd custom_kwargs = {} for arg in known_custom_target_kwargs: @@ -1429,7 +1429,7 @@ class GnomeModule(ExtensionModule): 'fprod', 'ftail', 'vhead', 'vtail', 'comments'] for arg in forbidden_kwargs: if arg in kwargs: - raise MesonException('mkenums_simple() does not take a %s keyword argument' % (arg, )) + raise MesonException(f'mkenums_simple() does not take a {arg} keyword argument') # kwargs to pass as-is from mkenums_simple() to mkenums() shared_kwargs = ['sources', 'install_header', 'install_dir', @@ -1581,8 +1581,8 @@ G_END_DECLS''' cmd += ['--' + arg.replace('_', '-')] elif arg not in known_custom_target_kwargs: raise MesonException( - 'Genmarshal does not take a %s keyword argument.' % ( - arg, )) + 'Genmarshal does not take a {} keyword argument.'.format( + arg)) install_header = kwargs.pop('install_header', False) install_dir = kwargs.pop('install_dir', None) @@ -1628,7 +1628,7 @@ G_END_DECLS''' for arg in arg_list: if not isinstance(arg, str): types = 'strings' + ' or InternalDependencys' if accept_vapi else '' - raise MesonException('All {} must be {}'.format(variable, types)) + raise MesonException(f'All {variable} must be {types}') ret.append(prefix + arg) return ret diff --git a/mesonbuild/modules/hotdoc.py b/mesonbuild/modules/hotdoc.py index eda411c..51da31c 100644 --- a/mesonbuild/modules/hotdoc.py +++ b/mesonbuild/modules/hotdoc.py @@ -100,7 +100,7 @@ class HotdocTargetBuilder: # When an option expects a single value, the unambiguous way # to specify it is with = if isinstance(value, str): - self.cmd.extend(['%s=%s' % (option, value)]) + self.cmd.extend([f'{option}={value}']) else: self.cmd.extend([option, value]) @@ -113,7 +113,7 @@ class HotdocTargetBuilder: valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.BuildTarget) if not isinstance(value, valid_types): - raise InvalidArguments('Argument "%s=%s" should be of type: %s.' % ( + raise InvalidArguments('Argument "{}={}" should be of type: {}.'.format( arg, value, [t.__name__ for t in valid_types])) def process_extra_args(self): @@ -403,7 +403,7 @@ class HotDocModule(ExtensionModule): from hotdoc.run_hotdoc import run # noqa: F401 self.hotdoc.run_hotdoc = run except Exception as e: - raise MesonException('hotdoc %s required but not found. 
(%s)' % ( + raise MesonException('hotdoc {} required but not found. ({})'.format( MIN_HOTDOC_VERSION, e)) @noKwargs diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py index 54faf4c..afa7287 100644 --- a/mesonbuild/modules/i18n.py +++ b/mesonbuild/modules/i18n.py @@ -83,7 +83,7 @@ class I18nModule(ExtensionModule): file_type = kwargs.pop('type', 'xml') VALID_TYPES = ('xml', 'desktop') if file_type not in VALID_TYPES: - raise MesonException('i18n: "{}" is not a valid type {}'.format(file_type, VALID_TYPES)) + raise MesonException(f'i18n: "{file_type}" is not a valid type {VALID_TYPES}') datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.pop('data_dirs', []))) datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None diff --git a/mesonbuild/modules/keyval.py b/mesonbuild/modules/keyval.py index 8e1a89f..8123a57 100644 --- a/mesonbuild/modules/keyval.py +++ b/mesonbuild/modules/keyval.py @@ -43,7 +43,7 @@ class KeyvalModule(ExtensionModule): continue result[name.strip()] = val.strip() except OSError as e: - raise mesonlib.MesonException('Failed to load {}: {}'.format(path_to_config, e)) + raise mesonlib.MesonException(f'Failed to load {path_to_config}: {e}') return result diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 57987ed..fd2c24a 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -356,10 +356,10 @@ class PkgConfigModule(ExtensionModule): ofile.write('Version: %s\n' % version) reqs_str = deps.format_reqs(deps.pub_reqs) if len(reqs_str) > 0: - ofile.write('Requires: {}\n'.format(reqs_str)) + ofile.write(f'Requires: {reqs_str}\n') reqs_str = deps.format_reqs(deps.priv_reqs) if len(reqs_str) > 0: - ofile.write('Requires.private: {}\n'.format(reqs_str)) + ofile.write(f'Requires.private: {reqs_str}\n') if len(conflicts) > 0: ofile.write('Conflicts: {}\n'.format(' '.join(conflicts))) @@ -380,7 +380,7 @@ class PkgConfigModule(ExtensionModule): continue if 'cs' in l.compilers: if isinstance(install_dir, str): - Lflag = '-r${prefix}/%s/%s' % (self._escape(self._make_relative(prefix, install_dir)), l.filename) + Lflag = '-r${{prefix}}/{}/{}'.format(self._escape(self._make_relative(prefix, install_dir)), l.filename) else: # install_dir is True Lflag = '-r${libdir}/%s' % l.filename else: @@ -473,7 +473,7 @@ class PkgConfigModule(ExtensionModule): default_subdirs = [] blocked_vars = ['libraries', 'libraries_private', 'require_private', 'extra_cflags', 'subdirs'] if len(set(kwargs) & set(blocked_vars)) > 0: - raise mesonlib.MesonException('Cannot combine dataonly with any of {}'.format(blocked_vars)) + raise mesonlib.MesonException(f'Cannot combine dataonly with any of {blocked_vars}') subdirs = mesonlib.stringlistify(kwargs.get('subdirs', default_subdirs)) version = kwargs.get('version', default_version) @@ -520,7 +520,7 @@ class PkgConfigModule(ExtensionModule): variables = [] for name, value in vardict.items(): if name in reserved: - raise mesonlib.MesonException('Variable "{}" is reserved'.format(name)) + raise mesonlib.MesonException(f'Variable "{name}" is reserved') variables.append((name, value)) return variables diff --git a/mesonbuild/modules/python.py b/mesonbuild/modules/python.py index d05c72a..122f977 100644 --- a/mesonbuild/modules/python.py +++ b/mesonbuild/modules/python.py @@ -63,10 +63,10 @@ class PythonDependency(ExternalDependency): pkg_version = self.variables.get('LDVERSION') or self.version pkg_libdir = self.variables.get('LIBPC') pkg_embed = '-embed' if self.embed 
and mesonlib.version_compare(self.version, '>=3.8') else '' - pkg_name = 'python-{}{}'.format(pkg_version, pkg_embed) + pkg_name = f'python-{pkg_version}{pkg_embed}' # If python-X.Y.pc exists in LIBPC, we will try to use it - if pkg_libdir is not None and Path(os.path.join(pkg_libdir, '{}.pc'.format(pkg_name))).is_file(): + if pkg_libdir is not None and Path(os.path.join(pkg_libdir, f'{pkg_name}.pc')).is_file(): old_pkg_libdir = os.environ.get('PKG_CONFIG_LIBDIR') old_pkg_path = os.environ.get('PKG_CONFIG_PATH') @@ -77,10 +77,10 @@ class PythonDependency(ExternalDependency): try: self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs) - mlog.debug('Found "{}" via pkgconfig lookup in LIBPC ({})'.format(pkg_name, pkg_libdir)) + mlog.debug(f'Found "{pkg_name}" via pkgconfig lookup in LIBPC ({pkg_libdir})') py_lookup_method = 'pkgconfig' except MesonException as e: - mlog.debug('"{}" could not be found in LIBPC ({})'.format(pkg_name, pkg_libdir)) + mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir})') mlog.debug(e) if old_pkg_path is not None: @@ -91,16 +91,16 @@ class PythonDependency(ExternalDependency): else: os.environ.pop('PKG_CONFIG_LIBDIR', None) else: - mlog.debug('"{}" could not be found in LIBPC ({}), this is likely due to a relocated python installation'.format(pkg_name, pkg_libdir)) + mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir}), this is likely due to a relocated python installation') # If lookup via LIBPC failed, try to use fallback PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH mechanisms if self.pkgdep is None or not self.pkgdep.found(): try: self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs) - mlog.debug('Found "{}" via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH'.format(pkg_name)) + mlog.debug(f'Found "{pkg_name}" via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH') py_lookup_method = 'pkgconfig-fallback' except MesonException as e: - mlog.debug('"{}" could not be found via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH'.format(pkg_name)) + mlog.debug(f'"{pkg_name}" could not be found via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH') mlog.debug(e) if self.pkgdep and self.pkgdep.found(): @@ -118,11 +118,11 @@ class PythonDependency(ExternalDependency): else: self._find_libpy(python_holder, environment) if self.is_found: - mlog.debug('Found "python-{}" via SYSCONFIG module'.format(self.version)) + mlog.debug(f'Found "python-{self.version}" via SYSCONFIG module') py_lookup_method = 'sysconfig' if self.is_found: - mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green('YES ({})'.format(py_lookup_method))) + mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green(f'YES ({py_lookup_method})')) else: mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO')) @@ -135,7 +135,7 @@ class PythonDependency(ExternalDependency): libdir = os.path.join(self.variables.get('base'), 'bin') libdirs = [libdir] else: - libname = 'python{}'.format(self.version) + libname = f'python{self.version}' if 'DEBUG_EXT' in self.variables: libname += self.variables['DEBUG_EXT'] if 'ABIFLAGS' in self.variables: @@ -170,20 +170,20 @@ class PythonDependency(ExternalDependency): return '32' elif self.platform in ('win64', 'win-amd64'): return '64' - mlog.log('Unknown Windows Python platform {!r}'.format(self.platform)) + mlog.log(f'Unknown Windows Python platform {self.platform!r}') return None def get_windows_link_args(self): if self.platform.startswith('win'): 
vernum = self.variables.get('py_version_nodot') if self.static: - libpath = Path('libs') / 'libpython{}.a'.format(vernum) + libpath = Path('libs') / f'libpython{vernum}.a' else: comp = self.get_compiler() if comp.id == "gcc": - libpath = 'python{}.dll'.format(vernum) + libpath = f'python{vernum}.dll' else: - libpath = Path('libs') / 'python{}.lib'.format(vernum) + libpath = Path('libs') / f'python{vernum}.lib' lib = Path(self.variables.get('base')) / libpath elif self.platform == 'mingw': if self.static: @@ -212,7 +212,7 @@ class PythonDependency(ExternalDependency): arch = '64' else: # We can't cross-compile Python 3 dependencies on Windows yet - mlog.log('Unknown architecture {!r} for'.format(arch), + mlog.log(f'Unknown architecture {arch!r} for', mlog.bold(self.name)) self.is_found = False return @@ -429,7 +429,7 @@ class PythonInstallation(ExternalProgramHolder): if len(args) == 2: path = args[1] else: - raise InvalidArguments('{} is not a valid path name'.format(path_name)) + raise InvalidArguments(f'{path_name} is not a valid path name') return path @@ -457,7 +457,7 @@ class PythonInstallation(ExternalProgramHolder): if len(args) == 2: var = args[1] else: - raise InvalidArguments('{} is not a valid variable name'.format(var_name)) + raise InvalidArguments(f'{var_name} is not a valid variable name') return var @@ -544,7 +544,7 @@ class PythonModule(ExtensionModule): for mod in want_modules: p, out, err = mesonlib.Popen_safe( python.command + - ['-c', 'import {}'.format(mod)]) + ['-c', f'import {mod}']) if p.returncode != 0: missing_modules.append(mod) else: @@ -591,7 +591,7 @@ class PythonModule(ExtensionModule): else: res = ExternalProgramHolder(NonExistingExternalProgram(), state.subproject) if required: - raise mesonlib.MesonException('{} is not a valid python or it is missing setuptools'.format(python)) + raise mesonlib.MesonException(f'{python} is not a valid python or it is missing setuptools') return res diff --git a/mesonbuild/modules/python3.py b/mesonbuild/modules/python3.py index 97bd5ec..8815966 100644 --- a/mesonbuild/modules/python3.py +++ b/mesonbuild/modules/python3.py @@ -66,7 +66,7 @@ class Python3Module(ExtensionModule): path_name = args[0] valid_names = sysconfig.get_path_names() if path_name not in valid_names: - raise mesonlib.MesonException('{} is not a valid path name {}.'.format(path_name, valid_names)) + raise mesonlib.MesonException(f'{path_name} is not a valid path name {valid_names}.') # Get a relative path without a prefix, e.g. 
lib/python3.6/site-packages path = sysconfig.get_path(path_name, vars={'base': '', 'platbase': '', 'installed_base': ''})[1:] diff --git a/mesonbuild/modules/qt.py b/mesonbuild/modules/qt.py index 680ee72..32841ff 100644 --- a/mesonbuild/modules/qt.py +++ b/mesonbuild/modules/qt.py @@ -43,7 +43,7 @@ class QtBaseModule(ExtensionModule): if self.tools_detected: return self.tools_detected = True - mlog.log('Detecting Qt{version} tools'.format(version=self.qt_version)) + mlog.log(f'Detecting Qt{self.qt_version} tools') kwargs = {'required': required, 'modules': 'Core', 'method': method} qt = _QT_DEPS_LUT[self.qt_version](env, kwargs) if qt.found(): @@ -55,7 +55,7 @@ class QtBaseModule(ExtensionModule): mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:', mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) else: - suffix = '-qt{}'.format(self.qt_version) + suffix = f'-qt{self.qt_version}' self.moc = NonExistingExternalProgram(name='moc' + suffix) self.uic = NonExistingExternalProgram(name='uic' + suffix) self.rcc = NonExistingExternalProgram(name='rcc' + suffix) @@ -143,10 +143,10 @@ class QtBaseModule(ExtensionModule): err_msg = "{0} sources specified and couldn't find {1}, " \ "please check your qt{2} installation" if (moc_headers or moc_sources) and not self.moc.found(): - raise MesonException(err_msg.format('MOC', 'moc-qt{}'.format(self.qt_version), self.qt_version)) + raise MesonException(err_msg.format('MOC', f'moc-qt{self.qt_version}', self.qt_version)) if rcc_files: if not self.rcc.found(): - raise MesonException(err_msg.format('RCC', 'rcc-qt{}'.format(self.qt_version), self.qt_version)) + raise MesonException(err_msg.format('RCC', f'rcc-qt{self.qt_version}', self.qt_version)) # custom output name set? 
-> one output file, multiple otherwise if args: qrc_deps = [] @@ -178,12 +178,12 @@ class QtBaseModule(ExtensionModule): sources.append(res_target) if ui_files: if not self.uic.found(): - raise MesonException(err_msg.format('UIC', 'uic-qt{}'.format(self.qt_version), self.qt_version)) + raise MesonException(err_msg.format('UIC', f'uic-qt{self.qt_version}', self.qt_version)) arguments = uic_extra_arguments + ['-o', '@OUTPUT@', '@INPUT@'] ui_kwargs = {'output': 'ui_@BASENAME@.h', 'arguments': arguments} ui_gen = build.Generator([self.uic], ui_kwargs) - ui_output = ui_gen.process_files('Qt{} ui'.format(self.qt_version), ui_files, state) + ui_output = ui_gen.process_files(f'Qt{self.qt_version} ui', ui_files, state) sources.append(ui_output) inc = get_include_args(include_dirs=include_directories) compile_args = [] @@ -202,14 +202,14 @@ class QtBaseModule(ExtensionModule): moc_kwargs = {'output': 'moc_@BASENAME@.cpp', 'arguments': arguments} moc_gen = build.Generator([self.moc], moc_kwargs) - moc_output = moc_gen.process_files('Qt{} moc header'.format(self.qt_version), moc_headers, state) + moc_output = moc_gen.process_files(f'Qt{self.qt_version} moc header', moc_headers, state) sources.append(moc_output) if moc_sources: arguments = moc_extra_arguments + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT@'] moc_kwargs = {'output': '@BASENAME@.moc', 'arguments': arguments} moc_gen = build.Generator([self.moc], moc_kwargs) - moc_output = moc_gen.process_files('Qt{} moc source'.format(self.qt_version), moc_sources, state) + moc_output = moc_gen.process_files(f'Qt{self.qt_version} moc source', moc_sources, state) sources.append(moc_output) return ModuleReturnValue(sources, sources) @@ -237,7 +237,7 @@ class QtBaseModule(ExtensionModule): if c.endswith('.qm'): ts_files.append(c.rstrip('.qm')+'.ts') else: - raise MesonException('qt.compile_translations: qresource can only contain qm files, found {}'.format(c)) + raise MesonException(f'qt.compile_translations: qresource can only contain qm files, found {c}') results = self.preprocess(state, [], {'qresources': qresource, 'rcc_extra_arguments': kwargs.get('rcc_extra_arguments', [])}) self._detect_tools(state.environment, kwargs.get('method', 'auto')) translations = [] @@ -258,7 +258,7 @@ class QtBaseModule(ExtensionModule): 'command': cmd} if install_dir is not None: lrelease_kwargs['install_dir'] = install_dir - lrelease_target = build.CustomTarget('qt{}-compile-{}'.format(self.qt_version, ts), outdir, state.subproject, lrelease_kwargs) + lrelease_target = build.CustomTarget(f'qt{self.qt_version}-compile-{ts}', outdir, state.subproject, lrelease_kwargs) translations.append(lrelease_target) if qresource: return ModuleReturnValue(results.return_value[0], [results.new_objects, translations]) diff --git a/mesonbuild/modules/sourceset.py b/mesonbuild/modules/sourceset.py index e49a548..15257e7 100644 --- a/mesonbuild/modules/sourceset.py +++ b/mesonbuild/modules/sourceset.py @@ -150,7 +150,7 @@ class SourceSetHolder(MutableInterpreterObject, ObjectHolder): if isinstance(config_data, dict): def _get_from_config_data(key): if strict and key not in config_data: - raise InterpreterException('Entry {} not in configuration dictionary.'.format(key)) + raise InterpreterException(f'Entry {key} not in configuration dictionary.') return config_data.get(key, False) else: config_cache = dict() diff --git a/mesonbuild/modules/unstable_external_project.py b/mesonbuild/modules/unstable_external_project.py index cb1ea85..8153908 100644 --- 
a/mesonbuild/modules/unstable_external_project.py +++ b/mesonbuild/modules/unstable_external_project.py @@ -140,7 +140,7 @@ class ExternalProject(InterpreterObject): # Ensure the user at least try to pass basic info to the build system, # like the prefix, libdir, etc. for key, default, val in variables: - key_format = '@{}@'.format(key) + key_format = f'@{key}@' for option in self.configure_options: if key_format in option: break @@ -160,13 +160,13 @@ class ExternalProject(InterpreterObject): if missing: var_list = ", ".join(map(repr, sorted(missing))) raise EnvironmentException( - "Variables {} in configure options are missing.".format(var_list)) + f"Variables {var_list} in configure options are missing.") return out def _run(self, step: str, command: T.List[str]): - mlog.log('External project {}:'.format(self.name), mlog.bold(step)) + mlog.log(f'External project {self.name}:', mlog.bold(step)) m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n' - log_filename = Path(mlog.log_dir, '{}-{}.log'.format(self.name, step)) + log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log') output = None if not self.verbose: output = open(log_filename, 'w') @@ -178,7 +178,7 @@ class ExternalProject(InterpreterObject): stderr=subprocess.STDOUT, stdout=output) if p.returncode != 0: - m = '{} step returned error code {}.'.format(step, p.returncode) + m = f'{step} step returned error code {p.returncode}.' if not self.verbose: m += '\nSee logs: ' + str(log_filename) raise MesonException(m) @@ -196,8 +196,8 @@ class ExternalProject(InterpreterObject): if self.verbose: cmd.append('--verbose') - target_kwargs = {'output': '{}.stamp'.format(self.name), - 'depfile': '{}.d'.format(self.name), + target_kwargs = {'output': f'{self.name}.stamp', + 'depfile': f'{self.name}.d', 'command': cmd + ['@OUTPUT@', '@DEPFILE@'], 'console': True, } @@ -237,8 +237,8 @@ class ExternalProject(InterpreterObject): version = self.project_version['version'] incdir = [] - compile_args = ['-I{}'.format(abs_includedir)] - link_args = ['-L{}'.format(abs_libdir), '-l{}'.format(libname)] + compile_args = [f'-I{abs_includedir}'] + link_args = [f'-L{abs_libdir}', f'-l{libname}'] libs = [] libs_whole = [] sources = self.target diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index b8715f2..54bd265 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -133,7 +133,7 @@ class WindowsModule(ExtensionModule): name_formatted = 'windows_compile_resources_' + src.get_filename() name = src.get_id() else: - raise MesonException('Unexpected source type {!r}. windows.compile_resources accepts only strings, files, custom targets, and lists thereof.'.format(src)) + raise MesonException(f'Unexpected source type {src!r}. windows.compile_resources accepts only strings, files, custom targets, and lists thereof.') # Path separators are not allowed in target names name = name.replace('/', '_').replace('\\', '_') diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index a60109d..79f461e 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -72,7 +72,7 @@ class BlockParseException(MesonException): # Followed by a caret to show the block start # Followed by underscores # Followed by a caret to show the block end. 
- super().__init__("%s\n%s\n%s" % (text, line, '%s^%s^' % (' ' * start_colno, '_' * (colno - start_colno - 1)))) + super().__init__("{}\n{}\n{}".format(text, line, '{}^{}^'.format(' ' * start_colno, '_' * (colno - start_colno - 1)))) else: # If block start and end are on different lines, it is formatted as: # Error message @@ -204,7 +204,7 @@ class Lexer: try: value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value) except MesonUnicodeDecodeError as err: - raise MesonException("Failed to parse escape sequence: '{}' in string:\n {}".format(err.match, match_text)) + raise MesonException(f"Failed to parse escape sequence: '{err.match}' in string:\n {match_text}") elif tid == 'multiline_string': tid = 'string' value = match_text[3:-3] @@ -228,7 +228,7 @@ class Lexer: tid = match_text else: if match_text in self.future_keywords: - mlog.warning("Identifier '{}' will become a reserved keyword in a future release. Please rename it.".format(match_text), + mlog.warning(f"Identifier '{match_text}' will become a reserved keyword in a future release. Please rename it.", location=types.SimpleNamespace(filename=filename, lineno=lineno)) value = match_text yield Token(tid, filename, curline_start, curline, col, bytespan, value) @@ -316,7 +316,7 @@ class ArgumentNode(BaseNode): def set_kwarg(self, name: IdNode, value: BaseNode) -> None: if name.value in [x.value for x in self.kwargs.keys() if isinstance(x, IdNode)]: - mlog.warning('Keyword argument "{}" defined multiple times.'.format(name.value), location=self) + mlog.warning(f'Keyword argument "{name.value}" defined multiple times.', location=self) mlog.warning('This will be an error in future Meson releases.') self.kwargs[name] = value @@ -510,12 +510,12 @@ class Parser: def expect(self, s: str) -> bool: if self.accept(s): return True - raise ParseException('Expecting %s got %s.' % (s, self.current.tid), self.getline(), self.current.lineno, self.current.colno) + raise ParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno) def block_expect(self, s: str, block_start: Token) -> bool: if self.accept(s): return True - raise BlockParseException('Expecting %s got %s.' % (s, self.current.tid), self.getline(), self.current.lineno, self.current.colno, self.lexer.getline(block_start.line_start), block_start.lineno, block_start.colno) + raise BlockParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno, self.lexer.getline(block_start.line_start), block_start.lineno, block_start.colno) def parse(self) -> CodeBlockNode: block = self.codeblock() diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py index 11fe3ce..f42d013 100644 --- a/mesonbuild/msetup.py +++ b/mesonbuild/msetup.py @@ -128,9 +128,9 @@ class MesonApp: if not os.path.exists(ndir2): os.makedirs(ndir2) if not stat.S_ISDIR(os.stat(ndir1).st_mode): - raise MesonException('{} is not a directory'.format(dir1)) + raise MesonException(f'{dir1} is not a directory') if not stat.S_ISDIR(os.stat(ndir2).st_mode): - raise MesonException('{} is not a directory'.format(dir2)) + raise MesonException(f'{dir2} is not a directory') if os.path.samefile(ndir1, ndir2): # Fallback to textual compare if undefined entries found has_undefined = any((s.st_ino == 0 and s.st_dev == 0) for s in (os.stat(ndir1), os.stat(ndir2))) @@ -138,11 +138,11 @@ class MesonApp: raise MesonException('Source and build directories must not be the same. 
Create a pristine build directory.') if self.has_build_file(ndir1): if self.has_build_file(ndir2): - raise MesonException('Both directories contain a build file {}.'.format(environment.build_filename)) + raise MesonException(f'Both directories contain a build file {environment.build_filename}.') return ndir1, ndir2 if self.has_build_file(ndir2): return ndir2, ndir1 - raise MesonException('Neither directory contains a build file {}.'.format(environment.build_filename)) + raise MesonException(f'Neither directory contains a build file {environment.build_filename}.') def add_vcs_ignore_files(self, build_dir: str) -> None: if os.listdir(build_dir): @@ -169,7 +169,7 @@ class MesonApp: else: has_cmd_line_file = os.path.exists(coredata.get_cmd_line_file(build_dir)) if (wipe and not has_cmd_line_file) or (not wipe and reconfigure): - raise SystemExit('Directory does not contain a valid build tree:\n{}'.format(build_dir)) + raise SystemExit(f'Directory does not contain a valid build tree:\n{build_dir}') return src_dir, build_dir def generate(self) -> None: @@ -238,7 +238,7 @@ class MesonApp: # possible, but before build files, and if any error occurs, delete it. cdf = env.dump_coredata() if self.options.profile: - fname = 'profile-{}-backend.log'.format(intr.backend.name) + fname = f'profile-{intr.backend.name}-backend.log' fname = os.path.join(self.build_dir, 'meson-private', fname) profile.runctx('intr.backend.generate()', globals(), locals(), filename=fname) else: diff --git a/mesonbuild/msubprojects.py b/mesonbuild/msubprojects.py index 7c227f7..fbd5dfc 100755 --- a/mesonbuild/msubprojects.py +++ b/mesonbuild/msubprojects.py @@ -160,7 +160,7 @@ def update_git(r, wrap, repo_dir, options): mlog.log(mlog.red(str(e))) return False elif url != origin_url: - mlog.log(' -> URL changed from {!r} to {!r}'.format(origin_url, url)) + mlog.log(f' -> URL changed from {origin_url!r} to {url!r}') return False try: # Same as `git branch --show-current` but compatible with older git version @@ -251,7 +251,7 @@ def update_svn(r, wrap, repo_dir, options): return True def update(r, wrap, repo_dir, options): - mlog.log('Updating {}...'.format(wrap.name)) + mlog.log(f'Updating {wrap.name}...') if wrap.type == 'file': return update_file(r, wrap, repo_dir, options) elif wrap.type == 'git': @@ -271,14 +271,14 @@ def checkout(r, wrap, repo_dir, options): if not branch_name: # It could be a detached git submodule for example. return True - mlog.log('Checkout {} in {}...'.format(branch_name, wrap.name)) + mlog.log(f'Checkout {branch_name} in {wrap.name}...') if git_checkout(repo_dir, branch_name, create=options.b): git_show(repo_dir) return True return False def download(r, wrap, repo_dir, options): - mlog.log('Download {}...'.format(wrap.name)) + mlog.log(f'Download {wrap.name}...') if os.path.isdir(repo_dir): mlog.log(' -> Already downloaded') return True @@ -291,7 +291,7 @@ def download(r, wrap, repo_dir, options): return True def foreach(r, wrap, repo_dir, options): - mlog.log('Executing command in {}'.format(repo_dir)) + mlog.log(f'Executing command in {repo_dir}') if not os.path.isdir(repo_dir): mlog.log(' -> Not downloaded yet') return True @@ -310,7 +310,7 @@ def add_common_arguments(p): p.add_argument('--sourcedir', default='.', help='Path to source directory') p.add_argument('--types', default='', - help='Comma-separated list of subproject types. Supported types are: {} (default: all)'.format(ALL_TYPES_STRING)) + help=f'Comma-separated list of subproject types. 
Supported types are: {ALL_TYPES_STRING} (default: all)') def add_subprojects_argument(p): p.add_argument('subprojects', nargs='*', @@ -372,7 +372,7 @@ def run(options): types = [t.strip() for t in options.types.split(',')] if options.types else [] for t in types: if t not in ALL_TYPES: - raise MesonException('Unknown subproject type {!r}, supported types are: {}'.format(t, ALL_TYPES_STRING)) + raise MesonException(f'Unknown subproject type {t!r}, supported types are: {ALL_TYPES_STRING}') failures = [] for wrap in wraps: if types and wrap.type not in types: diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index ac4f515..c48a324 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -80,7 +80,7 @@ def determine_worker_count() -> int: try: num_workers = int(os.environ[varname]) except ValueError: - print('Invalid value in {}, using 1 thread.'.format(varname)) + print(f'Invalid value in {varname}, using 1 thread.') num_workers = 1 else: try: @@ -178,17 +178,17 @@ def returncode_to_status(retcode: int) -> str: signame = signal.Signals(signum).name except ValueError: signame = 'SIGinvalid' - return 'killed by signal {} {}'.format(signum, signame) + return f'killed by signal {signum} {signame}' if retcode <= 128: - return 'exit status {}'.format(retcode) + return f'exit status {retcode}' signum = retcode - 128 try: signame = signal.Signals(signum).name except ValueError: signame = 'SIGinvalid' - return '(exit status {} or signal {} {})'.format(retcode, signum, signame) + return f'(exit status {retcode} or signal {signum} {signame})' # TODO for Windows sh_quote: T.Callable[[str], str] = lambda x: x @@ -285,7 +285,7 @@ class TAPParser: explanation: T.Optional[str] def __str__(self) -> str: - return '{} {}'.format(self.number, self.name).strip() + return f'{self.number} {self.name}'.strip() class Error(T.NamedTuple): message: str @@ -329,7 +329,7 @@ class TAPParser: yield self.Test(num, name, TestResult.UNEXPECTEDPASS if ok else TestResult.EXPECTEDFAIL, explanation) return else: - yield self.Error('invalid directive "{}"'.format(directive,)) + yield self.Error(f'invalid directive "{directive}"') yield self.Test(num, name, TestResult.OK if ok else TestResult.FAIL, explanation) @@ -367,7 +367,7 @@ class TAPParser: return if line.startswith(self.yaml_indent): return - yield self.Error('YAML block not terminated (started on line {})'.format(self.yaml_lineno)) + yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})') self.state = self._MAIN assert self.state == self._MAIN @@ -433,13 +433,13 @@ class TAPParser: else: # end of file if self.state == self._YAML: - yield self.Error('YAML block not terminated (started on line {})'.format(self.yaml_lineno)) + yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})') if not self.bailed_out and self.plan and self.num_tests != self.plan.num_tests: if self.num_tests < self.plan.num_tests: - yield self.Error('Too few tests run (expected {}, got {})'.format(self.plan.num_tests, self.num_tests)) + yield self.Error(f'Too few tests run (expected {self.plan.num_tests}, got {self.num_tests})') else: - yield self.Error('Too many tests run (expected {}, got {})'.format(self.plan.num_tests, self.num_tests)) + yield self.Error(f'Too many tests run (expected {self.plan.num_tests}, got {self.num_tests})') class TestLogger: def flush(self) -> None: @@ -530,7 +530,7 @@ class ConsoleLogger(TestLogger): return if len(self.running_tests) == 1: - count = '{}/{}'.format(self.started_tests, self.test_count) + count = 
f'{self.started_tests}/{self.test_count}' else: count = '{}-{}/{}'.format(self.started_tests - len(self.running_tests) + 1, self.started_tests, self.test_count) @@ -657,7 +657,7 @@ class ConsoleLogger(TestLogger): self.running_tests.remove(result) if result.res is TestResult.TIMEOUT and harness.options.verbose: self.flush() - print('{} time out (After {} seconds)'.format(result.name, result.timeout)) + print(f'{result.name} time out (After {result.timeout} seconds)') if not harness.options.quiet or not result.res.is_ok(): self.flush() @@ -692,9 +692,9 @@ class ConsoleLogger(TestLogger): class TextLogfileBuilder(TestFileLogger): def start(self, harness: 'TestHarness') -> None: - self.file.write('Log of Meson test suite run on {}\n\n'.format(datetime.datetime.now().isoformat())) + self.file.write(f'Log of Meson test suite run on {datetime.datetime.now().isoformat()}\n\n') inherit_env = env_tuple_to_str(os.environ.items()) - self.file.write('Inherited environment: {}\n\n'.format(inherit_env)) + self.file.write(f'Inherited environment: {inherit_env}\n\n') def log(self, harness: 'TestHarness', result: 'TestRun') -> None: self.file.write(harness.format(result, False) + '\n') @@ -713,7 +713,7 @@ class TextLogfileBuilder(TestFileLogger): self.file.write(harness.format(result, False) + '\n') self.file.write(harness.summary()) - print('Full log written to {}'.format(self.filename)) + print(f'Full log written to {self.filename}') class JsonLogfileBuilder(TestFileLogger): @@ -773,7 +773,7 @@ class JunitBuilder(TestLogger): # We want to record this so that each result is recorded # separately if test.results: - suitename = '{}.{}'.format(test.project, test.name) + suitename = f'{test.project}.{test.name}' assert suitename not in self.suites or harness.options.repeat > 1, 'duplicate suite' suite = self.suites[suitename] = et.Element( @@ -905,9 +905,9 @@ class TestRun: passed = sum(x.result.is_ok() for x in self.results) ran = sum(x.result is not TestResult.SKIP for x in self.results) if passed == ran: - return '{} subtests passed'.format(passed) + return f'{passed} subtests passed' else: - return '{}/{} subtests passed'.format(passed, ran) + return f'{passed}/{ran} subtests passed' return '' def _complete(self, returncode: int, res: TestResult, @@ -985,7 +985,7 @@ TestRun.PROTOCOL_TO_CLASS[TestProtocol.EXITCODE] = TestRunExitCode class TestRunGTest(TestRunExitCode): def complete(self, returncode: int, res: TestResult, stdo: T.Optional[str], stde: T.Optional[str]) -> None: - filename = '{}.xml'.format(self.test.name) + filename = f'{self.test.name}.xml' if self.test.workdir: filename = os.path.join(self.test.workdir, filename) @@ -1005,7 +1005,7 @@ class TestRunTAP(TestRun): if returncode != 0 and not res.was_killed(): res = TestResult.ERROR stde = stde or '' - stde += '\n(test program exited with status code {})'.format(returncode,) + stde += f'\n(test program exited with status code {returncode})' super().complete(returncode, res, stdo, stde) @@ -1048,7 +1048,7 @@ class TestRunRust(TestRun): elif result == 'FAILED': return TAPParser.Test(n, name, TestResult.FAIL, None) return TAPParser.Test(n, name, TestResult.ERROR, - 'Unsupported output from rust test: {}'.format(result)) + f'Unsupported output from rust test: {result}') n = 1 async for line in lines: @@ -1399,7 +1399,7 @@ class SingleTestRunner: gtestname = self.test.name if self.test.workdir: gtestname = os.path.join(self.test.workdir, self.test.name) - extra_cmd.append('--gtest_output=xml:{}.xml'.format(gtestname)) + 
extra_cmd.append(f'--gtest_output=xml:{gtestname}.xml') p = await self._run_subprocess(cmd + extra_cmd, stdout=stdout, @@ -1480,7 +1480,7 @@ class TestHarness: def load_tests(self, file_name: str) -> T.List[TestSerialisation]: datafile = Path('meson-private') / file_name if not datafile.is_file(): - raise TestException('Directory {!r} does not seem to be a Meson build directory.'.format(self.options.wd)) + raise TestException(f'Directory {self.options.wd!r} does not seem to be a Meson build directory.') with datafile.open('rb') as f: objs = check_testdata(pickle.load(f)) return objs @@ -1498,12 +1498,12 @@ class TestHarness: def get_test_setup(self, test: T.Optional[TestSerialisation]) -> build.TestSetup: if ':' in self.options.setup: if self.options.setup not in self.build_data.test_setups: - sys.exit("Unknown test setup '{}'.".format(self.options.setup)) + sys.exit(f"Unknown test setup '{self.options.setup}'.") return self.build_data.test_setups[self.options.setup] else: full_name = test.project_name + ":" + self.options.setup if full_name not in self.build_data.test_setups: - sys.exit("Test setup '{}' not found from project '{}'.".format(self.options.setup, test.project_name)) + sys.exit(f"Test setup '{self.options.setup}' not found from project '{test.project_name}'.") return self.build_data.test_setups[full_name] def merge_setup_options(self, options: argparse.Namespace, test: TestSerialisation) -> T.Dict[str, str]: @@ -1550,7 +1550,7 @@ class TestHarness: elif result.res is TestResult.UNEXPECTEDPASS: self.unexpectedpass_count += 1 else: - sys.exit('Unknown test result encountered: {}'.format(result.res)) + sys.exit(f'Unknown test result encountered: {result.res}') if result.res.is_bad(): self.collected_failures.append(result) @@ -1922,7 +1922,7 @@ def rebuild_deps(wd: str, tests: T.List[TestSerialisation]) -> bool: ret = subprocess.run(ninja + ['-C', wd] + sorted(targets)).returncode if ret != 0: - print('Could not rebuild {}'.format(wd)) + print(f'Could not rebuild {wd}') return False return True @@ -1953,7 +1953,7 @@ def run(options: argparse.Namespace) -> int: if check_bin is not None: exe = ExternalProgram(check_bin, silent=True) if not exe.found(): - print('Could not find requested program: {!r}'.format(check_bin)) + print(f'Could not find requested program: {check_bin!r}') return 1 with TestHarness(options) as th: diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 8585833..ddd712f 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -450,7 +450,7 @@ class Rewriter: 'id': "/", 'operation': 'remove_regex', 'kwargs': { - 'default_options': ['{}=.*'.format(x) for x in cmd['options'].keys()] + 'default_options': [f'{x}=.*' for x in cmd['options'].keys()] } } self.process_kwargs(kwargs_cmd) @@ -484,7 +484,7 @@ class Rewriter: self.handle_error() continue - kwargs_cmd['kwargs']['default_options'] += ['{}={}'.format(key, val)] + kwargs_cmd['kwargs']['default_options'] += [f'{key}={val}'] self.process_kwargs(kwargs_cmd) @@ -656,7 +656,7 @@ class Rewriter: mlog.log(' -- Source', mlog.green(i), 'is already defined for the target --> skipping') continue mlog.log(' -- Adding source', mlog.green(i), 'at', - mlog.yellow('{}:{}'.format(node.filename, node.lineno))) + mlog.yellow(f'{node.filename}:{node.lineno}')) token = Token('string', node.filename, 0, 0, 0, None, i) to_append += [StringNode(token)] @@ -700,7 +700,7 @@ class Rewriter: arg_node = root assert(arg_node is not None) mlog.log(' -- Removing source', mlog.green(i), 'from', - 
mlog.yellow('{}:{}'.format(string_node.filename, string_node.lineno))) + mlog.yellow(f'{string_node.filename}:{string_node.lineno}')) arg_node.arguments.remove(string_node) # Mark the node as modified @@ -747,7 +747,7 @@ class Rewriter: to_remove = target['node'] self.to_remove_nodes += [to_remove] mlog.log(' -- Removing target', mlog.green(cmd['target']), 'at', - mlog.yellow('{}:{}'.format(to_remove.filename, to_remove.lineno))) + mlog.yellow(f'{to_remove.filename}:{to_remove.lineno}')) elif cmd['operation'] == 'info': # T.List all sources in the target diff --git a/mesonbuild/scripts/cleantrees.py b/mesonbuild/scripts/cleantrees.py index 6feb9a7..1a38753 100644 --- a/mesonbuild/scripts/cleantrees.py +++ b/mesonbuild/scripts/cleantrees.py @@ -22,7 +22,7 @@ def rmtrees(build_dir: str, trees: T.List[str]) -> None: for t in trees: # Never delete trees outside of the builddir if os.path.isabs(t): - print('Cannot delete dir with absolute path {!r}'.format(t)) + print(f'Cannot delete dir with absolute path {t!r}') continue bt = os.path.join(build_dir, t) # Skip if it doesn't exist, or if it is not a directory diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py index 3f6f2af..dfb70d1 100755 --- a/mesonbuild/scripts/cmake_run_ctgt.py +++ b/mesonbuild/scripts/cmake_run_ctgt.py @@ -16,7 +16,7 @@ def run(argsv: T.List[str]) -> int: parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to') parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files') parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake') - parser.add_argument('commands', nargs=argparse.REMAINDER, help='A "{}" separated list of commands'.format(SEPARATOR)) + parser.add_argument('commands', nargs=argparse.REMAINDER, help=f'A "{SEPARATOR}" separated list of commands') # Parse args = parser.parse_args(argsv) diff --git a/mesonbuild/scripts/coverage.py b/mesonbuild/scripts/coverage.py index 80e9052..f8a4924 100644 --- a/mesonbuild/scripts/coverage.py +++ b/mesonbuild/scripts/coverage.py @@ -69,11 +69,11 @@ def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build if mesonlib.is_windows(): llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat') with open(llvm_cov_shim_path, 'w') as llvm_cov_bat: - llvm_cov_bat.write('@"{}" gcov %*'.format(llvm_cov_exe)) + llvm_cov_bat.write(f'@"{llvm_cov_exe}" gcov %*') else: llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh') with open(llvm_cov_shim_path, 'w') as llvm_cov_sh: - llvm_cov_sh.write('#!/usr/bin/env sh\nexec "{}" gcov $@'.format(llvm_cov_exe)) + llvm_cov_sh.write(f'#!/usr/bin/env sh\nexec "{llvm_cov_exe}" gcov $@') os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC) gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path] else: diff --git a/mesonbuild/scripts/depscan.py b/mesonbuild/scripts/depscan.py index 2879d8b..207bbc6 100644 --- a/mesonbuild/scripts/depscan.py +++ b/mesonbuild/scripts/depscan.py @@ -52,7 +52,7 @@ class DependencyScanner: elif suffix in lang_suffixes['cpp']: self.scan_cpp_file(fname) else: - sys.exit('Can not scan files with suffix .{}.'.format(suffix)) + sys.exit(f'Can not scan files with suffix .{suffix}.') def scan_fortran_file(self, fname: str) -> None: fpath = pathlib.Path(fname) @@ -75,7 +75,7 @@ class DependencyScanner: assert(exported_module not in modules_in_this_file) 
modules_in_this_file.add(exported_module) if exported_module in self.provided_by: - raise RuntimeError('Multiple files provide module {}.'.format(exported_module)) + raise RuntimeError(f'Multiple files provide module {exported_module}.') self.sources_with_exports.append(fname) self.provided_by[exported_module] = fname self.exports[fname] = exported_module @@ -89,7 +89,7 @@ class DependencyScanner: parent_module_name_full = submodule_export_match.group(1).lower() parent_module_name = parent_module_name_full.split(':')[0] submodule_name = submodule_export_match.group(2).lower() - concat_name = '{}:{}'.format(parent_module_name, submodule_name) + concat_name = f'{parent_module_name}:{submodule_name}' self.sources_with_exports.append(fname) self.provided_by[concat_name] = fname self.exports[fname] = concat_name @@ -120,7 +120,7 @@ class DependencyScanner: if export_match: exported_module = export_match.group(1) if exported_module in self.provided_by: - raise RuntimeError('Multiple files provide module {}.'.format(exported_module)) + raise RuntimeError(f'Multiple files provide module {exported_module}.') self.sources_with_exports.append(fname) self.provided_by[exported_module] = fname self.exports[fname] = exported_module @@ -141,7 +141,7 @@ class DependencyScanner: extension = 'smod' else: extension = 'mod' - return os.path.join(self.target_data.private_dir, '{}.{}'.format(namebase, extension)) + return os.path.join(self.target_data.private_dir, f'{namebase}.{extension}') elif suffix in lang_suffixes['cpp']: return '{}.ifc'.format(self.exports[src]) else: diff --git a/mesonbuild/scripts/externalproject.py b/mesonbuild/scripts/externalproject.py index a3ffe73..657ef2f 100644 --- a/mesonbuild/scripts/externalproject.py +++ b/mesonbuild/scripts/externalproject.py @@ -35,7 +35,7 @@ class ExternalProject: def write_depfile(self) -> None: with open(self.depfile, 'w') as f: - f.write('{}: \\\n'.format(self.stampfile)) + f.write(f'{self.stampfile}: \\\n') for dirpath, dirnames, filenames in os.walk(self.src_dir): dirnames[:] = [d for d in dirnames if not d.startswith('.')] for fname in filenames: @@ -75,7 +75,7 @@ class ExternalProject: def _run(self, step: str, command: T.List[str]) -> int: m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n' - log_filename = Path(self.log_dir, '{}-{}.log'.format(self.name, step)) + log_filename = Path(self.log_dir, f'{self.name}-{step}.log') output = None if not self.verbose: output = open(log_filename, 'w') @@ -86,7 +86,7 @@ class ExternalProject: p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output, cwd=self.build_dir) if p.returncode != 0: - m = '{} step returned error code {}.'.format(step, p.returncode) + m = f'{step} step returned error code {p.returncode}.' 
if not self.verbose: m += '\nSee logs: ' + str(log_filename) print(m) diff --git a/mesonbuild/scripts/gettext.py b/mesonbuild/scripts/gettext.py index 64c228b..92b55d3 100644 --- a/mesonbuild/scripts/gettext.py +++ b/mesonbuild/scripts/gettext.py @@ -41,7 +41,7 @@ def read_linguas(src_sub: str) -> T.List[str]: langs += line.split() return langs except (FileNotFoundError, PermissionError): - print('Could not find file LINGUAS in {}'.format(src_sub)) + print(f'Could not find file LINGUAS in {src_sub}') return [] def run_potgen(src_sub: str, pkgname: str, datadirs: str, args: T.List[str]) -> int: @@ -87,7 +87,7 @@ def do_install(src_sub: str, bld_sub: str, dest: str, pkgname: str, langs: T.Lis shutil.copy2(srcfile, tempfile) os.replace(tempfile, outfile) if not os.getenv('MESON_INSTALL_QUIET', False): - print('Installing %s to %s' % (srcfile, outfile)) + print(f'Installing {srcfile} to {outfile}') return 0 def run(args: T.List[str]) -> int: diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py index 86949e5..153c3d9 100644 --- a/mesonbuild/scripts/gtkdochelper.py +++ b/mesonbuild/scripts/gtkdochelper.py @@ -72,7 +72,7 @@ def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.Lis # This preserves the order of messages. p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2] if p.returncode != 0: - err_msg = ["{!r} failed with status {:d}".format(cmd, p.returncode)] + err_msg = [f"{cmd!r} failed with status {p.returncode:d}"] if out: err_msg.append(out) raise MesonException('\n'.join(err_msg)) @@ -215,8 +215,8 @@ def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs gtkdoc_run_check(fixref_cmd, abs_out) if module_version: - shutil.move(os.path.join(htmldir, '{}.devhelp2'.format(module)), - os.path.join(htmldir, '{}-{}.devhelp2'.format(module, module_version))) + shutil.move(os.path.join(htmldir, f'{module}.devhelp2'), + os.path.join(htmldir, f'{module}-{module_version}.devhelp2')) def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None: source = os.path.join(build_root, doc_subdir, 'html') diff --git a/mesonbuild/scripts/meson_exe.py b/mesonbuild/scripts/meson_exe.py index d44280f..ceb9e43 100644 --- a/mesonbuild/scripts/meson_exe.py +++ b/mesonbuild/scripts/meson_exe.py @@ -67,7 +67,7 @@ def run_exe(exe: ExecutableSerialisation, extra_env: T.Optional[dict] = None) -> if p.returncode != 0: if exe.pickled: - print('while executing {!r}'.format(cmd_args)) + print(f'while executing {cmd_args!r}') if exe.verbose: return p.returncode if not exe.capture: diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index e80d9c2fac..728d5e2 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -56,7 +56,7 @@ def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = N global TOOL_WARNING_FILE if os.path.exists(TOOL_WARNING_FILE): return - m = '{!r} {}. {}'.format(tools, msg, RELINKING_WARNING) + m = f'{tools!r} {msg}. {RELINKING_WARNING}' if stderr: m += '\n' + stderr mlog.warning(m) diff --git a/mesonbuild/scripts/uninstall.py b/mesonbuild/scripts/uninstall.py index b648de4..8db04dd 100644 --- a/mesonbuild/scripts/uninstall.py +++ b/mesonbuild/scripts/uninstall.py @@ -32,7 +32,7 @@ def do_uninstall(log: str) -> None: print('Deleted:', fname) successes += 1 except Exception as e: - print('Could not delete %s: %s.' 
% (fname, e)) + print(f'Could not delete {fname}: {e}.') failures += 1 print('\nUninstall finished.\n') print('Deleted:', successes) diff --git a/mesonbuild/scripts/yelphelper.py b/mesonbuild/scripts/yelphelper.py index 0355d9f..374104b 100644 --- a/mesonbuild/scripts/yelphelper.py +++ b/mesonbuild/scripts/yelphelper.py @@ -68,7 +68,7 @@ def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[s for source in sources: infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source) outfile = os.path.join(indir, source) - mlog.log('Installing %s to %s' % (infile, outfile)) + mlog.log(f'Installing {infile} to {outfile}') shutil.copy2(infile, outfile) for m in media: infile = os.path.join(srcdir, lang, m) @@ -80,7 +80,7 @@ def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[s continue elif symlinks: srcfile = os.path.join(c_install_dir, m) - mlog.log('Symlinking %s to %s.' % (outfile, srcfile)) + mlog.log(f'Symlinking {outfile} to {srcfile}.') if has_path_sep(m): os.makedirs(os.path.dirname(outfile), exist_ok=True) try: @@ -96,7 +96,7 @@ def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[s else: # Lang doesn't have media file so copy it over 'C' one infile = c_infile - mlog.log('Installing %s to %s' % (infile, outfile)) + mlog.log(f'Installing {infile} to {outfile}') if has_path_sep(m): os.makedirs(os.path.dirname(outfile), exist_ok=True) shutil.copyfile(infile, outfile) diff --git a/mesonbuild/templates/mesontemplates.py b/mesonbuild/templates/mesontemplates.py index 68f9769..4c4d66f 100644 --- a/mesonbuild/templates/mesontemplates.py +++ b/mesonbuild/templates/mesontemplates.py @@ -46,12 +46,12 @@ def create_meson_build(options: argparse.Namespace) -> None: default_options += ['cpp_std=c++14'] # If we get a meson.build autoformatter one day, this code could # be simplified quite a bit. 
- formatted_default_options = ', '.join("'{}'".format(x) for x in default_options) - sourcespec = ',\n '.join("'{}'".format(x) for x in options.srcfiles) + formatted_default_options = ', '.join(f"'{x}'" for x in default_options) + sourcespec = ',\n '.join(f"'{x}'" for x in options.srcfiles) depspec = '' if options.deps: depspec = '\n dependencies : [\n ' - depspec += ',\n '.join("dependency('{}')".format(x) + depspec += ',\n '.join(f"dependency('{x}')" for x in options.deps.split(',')) depspec += '],' if options.language != 'java': diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index ca201de..c93ecae 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -57,11 +57,11 @@ def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult: """ raises WrapException if not whitelisted subdomain """ url = urllib.parse.urlparse(urlstr) if not url.hostname: - raise WrapException('{} is not a valid URL'.format(urlstr)) + raise WrapException(f'{urlstr} is not a valid URL') if not url.hostname.endswith(WHITELIST_SUBDOMAIN): - raise WrapException('{} is not a whitelisted WrapDB URL'.format(urlstr)) + raise WrapException(f'{urlstr} is not a whitelisted WrapDB URL') if has_ssl and not url.scheme == 'https': - raise WrapException('WrapDB did not have expected SSL https url, instead got {}'.format(urlstr)) + raise WrapException(f'WrapDB did not have expected SSL https url, instead got {urlstr}') return url def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse': @@ -72,17 +72,17 @@ def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse': try: return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT)) except urllib.error.URLError as excp: - raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp)) + raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}') # following code is only for those without Python SSL nossl_url = url._replace(scheme='http') if not SSL_WARNING_PRINTED: - mlog.warning('SSL module not available in {}: WrapDB traffic not authenticated.'.format(sys.executable)) + mlog.warning(f'SSL module not available in {sys.executable}: WrapDB traffic not authenticated.') SSL_WARNING_PRINTED = True try: return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT)) except urllib.error.URLError as excp: - raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp)) + raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}') class WrapException(MesonException): @@ -109,7 +109,7 @@ class PackageDefinition: if os.path.dirname(self.directory): raise WrapException('Directory key must be a name and not a path') if self.type and self.type not in ALL_TYPES: - raise WrapException('Unknown wrap type {!r}'.format(self.type)) + raise WrapException(f'Unknown wrap type {self.type!r}') self.filesdir = os.path.join(os.path.dirname(self.filename), 'packagefiles') def parse_wrap(self) -> None: @@ -144,7 +144,7 @@ class PackageDefinition: def parse_wrap_section(self, config: configparser.ConfigParser) -> None: if len(config.sections()) < 1: - raise WrapException('Missing sections in {}'.format(self.basename)) + raise WrapException(f'Missing sections in {self.basename}') self.wrap_section = config.sections()[0] if not self.wrap_section.startswith('wrap-'): m = '{!r} is not a valid first section in {}' @@ -334,7 +334,7 @@ class Resolver: elif 
self.wrap.type == "svn": self.get_svn() else: - raise WrapException('Unknown wrap type {!r}'.format(self.wrap.type)) + raise WrapException(f'Unknown wrap type {self.wrap.type!r}') self.apply_patch() # A meson.build or CMakeLists.txt file is required in the directory @@ -472,14 +472,14 @@ class Resolver: if url.hostname and url.hostname.endswith(WHITELIST_SUBDOMAIN): resp = open_wrapdburl(urlstring) elif WHITELIST_SUBDOMAIN in urlstring: - raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring)) + raise WrapException(f'{urlstring} may be a WrapDB-impersonating URL') else: try: - req = urllib.request.Request(urlstring, headers={'User-Agent': 'mesonbuild/{}'.format(coredata.version)}) + req = urllib.request.Request(urlstring, headers={'User-Agent': f'mesonbuild/{coredata.version}'}) resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT) except urllib.error.URLError as e: mlog.log(str(e)) - raise WrapException('could not get {} is the internet available?'.format(urlstring)) + raise WrapException(f'could not get {urlstring} is the internet available?') with contextlib.closing(resp) as resp: try: dlsize = int(resp.info()['Content-Length']) @@ -518,7 +518,7 @@ class Resolver: h.update(f.read()) dhash = h.hexdigest() if dhash != expected: - raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash)) + raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.') def download(self, what: str, ofname: str, fallback: bool = False) -> None: self.check_can_download() @@ -529,7 +529,7 @@ class Resolver: expected = self.wrap.get(what + '_hash').lower() if dhash != expected: os.remove(tmpfile) - raise WrapException('Incorrect hash for {}:\n {} expected\n {} actual.'.format(what, expected, dhash)) + raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.') except WrapException: if not fallback: if what + '_fallback_url' in self.wrap.values: @@ -555,11 +555,11 @@ class Resolver: return cache_path else: from ..interpreterbase import FeatureNew - FeatureNew('Local wrap patch files without {}_url'.format(what), '0.55.0').use(self.current_subproject) + FeatureNew(f'Local wrap patch files without {what}_url', '0.55.0').use(self.current_subproject) path = Path(self.wrap.filesdir) / filename if not path.exists(): - raise WrapException('File "{}" does not exist'.format(path)) + raise WrapException(f'File "{path}" does not exist') self.check_hash(what, path.as_posix(), hash_required=False) return path.as_posix() @@ -582,7 +582,7 @@ class Resolver: patch_dir = self.wrap.values['patch_directory'] src_dir = os.path.join(self.wrap.filesdir, patch_dir) if not os.path.isdir(src_dir): - raise WrapException('patch directory does not exists: {}'.format(patch_dir)) + raise WrapException(f'patch directory does not exists: {patch_dir}') self.copy_tree(src_dir, self.dirname) def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None: diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py index d420889..fb91f3a 100644 --- a/mesonbuild/wrap/wraptool.py +++ b/mesonbuild/wrap/wraptool.py @@ -100,7 +100,7 @@ def install(options: 'argparse.Namespace') -> None: if os.path.exists(wrapfile): raise SystemExit('Wrap file already exists.') (branch, revision) = get_latest_version(name) - u = open_wrapdburl(API_ROOT + 'projects/{}/{}/{}/get_wrap'.format(name, branch, revision)) + u = open_wrapdburl(API_ROOT + f'projects/{name}/{branch}/{revision}/get_wrap') data = u.read() with open(wrapfile, 
'wb') as f: f.write(data) @@ -119,7 +119,7 @@ def get_current_version(wrapfile: str) -> T.Tuple[str, int, str, str, str]: return branch, revision, wrap_data['directory'], wrap_data['source_filename'], wrap_data['patch_filename'] def update_wrap_file(wrapfile: str, name: str, new_branch: str, new_revision: str) -> None: - u = open_wrapdburl(API_ROOT + 'projects/{}/{}/{}/get_wrap'.format(name, new_branch, new_revision)) + u = open_wrapdburl(API_ROOT + f'projects/{name}/{new_branch}/{new_revision}/get_wrap') data = u.read() with open(wrapfile, 'wb') as f: f.write(data) @@ -154,7 +154,7 @@ def info(options: 'argparse.Namespace') -> None: versions = jd['versions'] if not versions: raise SystemExit('No available versions of' + name) - print('Available versions of {}:'.format(name)) + print(f'Available versions of {name}:') for v in versions: print(' ', v['branch'], v['revision']) @@ -166,7 +166,7 @@ def do_promotion(from_path: str, spdir_name: str) -> None: sproj_name = os.path.basename(from_path) outputdir = os.path.join(spdir_name, sproj_name) if os.path.exists(outputdir): - raise SystemExit('Output dir {} already exists. Will not overwrite.'.format(outputdir)) + raise SystemExit(f'Output dir {outputdir} already exists. Will not overwrite.') shutil.copytree(from_path, outputdir, ignore=shutil.ignore_patterns('subprojects')) def promote(options: 'argparse.Namespace') -> None: @@ -183,10 +183,10 @@ def promote(options: 'argparse.Namespace') -> None: # otherwise the argument is just a subproject basename which must be unambiguous if argument not in sprojs: - raise SystemExit('Subproject {} not found in directory tree.'.format(argument)) + raise SystemExit(f'Subproject {argument} not found in directory tree.') matches = sprojs[argument] if len(matches) > 1: - print('There is more than one version of {} in tree. Please specify which one to promote:\n'.format(argument), file=sys.stderr) + print(f'There is more than one version of {argument} in tree. Please specify which one to promote:\n', file=sys.stderr) for s in matches: print(s, file=sys.stderr) raise SystemExit(1) @@ -207,9 +207,9 @@ def status(options: 'argparse.Namespace') -> None: print('Wrap file not from wrapdb.', file=sys.stderr) continue if current_branch == latest_branch and current_revision == latest_revision: - print('', name, 'up to date. Branch {}, revision {}.'.format(current_branch, current_revision)) + print('', name, f'up to date. Branch {current_branch}, revision {current_revision}.') else: - print('', name, 'not up to date. Have {} {}, but {} {} is available.'.format(current_branch, current_revision, latest_branch, latest_revision)) + print('', name, f'not up to date. 
Have {current_branch} {current_revision}, but {latest_branch} {latest_revision} is available.') def run(options: 'argparse.Namespace') -> int: options.wrap_func(options) diff --git a/packaging/createmsi.py b/packaging/createmsi.py index f279df5..1136681 100644 --- a/packaging/createmsi.py +++ b/packaging/createmsi.py @@ -79,7 +79,7 @@ class PackageGenerator: self.update_guid = '141527EE-E28A-4D14-97A4-92E6075D28B2' self.main_xml = 'meson.wxs' self.main_o = 'meson.wixobj' - self.final_output = 'meson-{}-64.msi'.format(self.version) + self.final_output = f'meson-{self.version}-64.msi' self.staging_dirs = ['dist', 'dist2'] self.progfile_dir = 'ProgramFiles64Folder' redist_glob = 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community\\VC\\Redist\\MSVC\\v*\\MergeModules\\Microsoft_VC142_CRT_x64.msm' @@ -199,7 +199,7 @@ class PackageGenerator: package = ET.SubElement(product, 'Package', { 'Id': '*', 'Keywords': 'Installer', - 'Description': 'Meson {} installer'.format(self.version), + 'Description': f'Meson {self.version} installer', 'Comments': 'Meson is a high performance build system', 'Manufacturer': 'The Meson Development Team', 'InstallerVersion': '500', @@ -299,7 +299,7 @@ class PackageGenerator: ''' cur_node = nodes[current_dir] if cur_node.files: - component_id = 'ApplicationFiles{}'.format(self.component_num) + component_id = f'ApplicationFiles{self.component_num}' comp_xml_node = ET.SubElement(parent_xml_node, 'Component', { 'Id': component_id, 'Guid': gen_guid(), diff --git a/run_meson_command_tests.py b/run_meson_command_tests.py index 795e545..750e09d 100755 --- a/run_meson_command_tests.py +++ b/run_meson_command_tests.py @@ -79,7 +79,7 @@ class CommandTests(unittest.TestCase): def assertMesonCommandIs(self, line, cmd): self.assertTrue(line.startswith('meson_command '), msg=line) - self.assertEqual(line, 'meson_command is {!r}'.format(cmd)) + self.assertEqual(line, f'meson_command is {cmd!r}') def test_meson_uninstalled(self): # This is what the meson command must be for all these cases diff --git a/run_project_tests.py b/run_project_tests.py index b14e817..977c0c4 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -143,7 +143,7 @@ class InstalledFile: p = p.with_name('{}-{}'.format(p.name, self.version[0])) return p.with_suffix('.dll') - p = p.with_name('lib{}'.format(p.name)) + p = p.with_name(f'lib{p.name}') if env.machines.host.is_darwin(): # MacOS only has libfoo.dylib and libfoo.X.dylib if len(self.version) > 1: @@ -179,7 +179,7 @@ class InstalledFile: elif self.typ == 'expr': return Path(platform_fix_name(p.as_posix(), canonical_compiler, env)) else: - raise RuntimeError('Invalid installed file type {}'.format(self.typ)) + raise RuntimeError(f'Invalid installed file type {self.typ}') return p @@ -190,9 +190,9 @@ class InstalledFile: if self.typ == 'dir': abs_p = installdir / p if not abs_p.exists(): - raise RuntimeError('{} does not exist'.format(p)) + raise RuntimeError(f'{p} does not exist') if not abs_p.is_dir(): - raise RuntimeError('{} is not a directory'.format(p)) + raise RuntimeError(f'{p} is not a directory') return [x.relative_to(installdir) for x in abs_p.rglob('*') if x.is_file() or x.is_symlink()] else: return [p] @@ -214,7 +214,7 @@ class TestDef: def display_name(self) -> str: if self.name: - return '{} ({})'.format(self.path.as_posix(), self.name) + return f'{self.path.as_posix()} ({self.name})' return self.path.as_posix() def __lt__(self, other: object) -> bool: @@ -300,24 +300,24 @@ def validate_install(test: TestDef, installdir: 
Path, compiler: str, env: enviro try: expected_raw += i.get_paths(compiler, env, installdir) except RuntimeError as err: - ret_msg += 'Expected path error: {}\n'.format(err) + ret_msg += f'Expected path error: {err}\n' expected = {x: False for x in expected_raw} found = [x.relative_to(installdir) for x in installdir.rglob('*') if x.is_file() or x.is_symlink()] # Mark all found files as found and detect unexpected files for fname in found: if fname not in expected: - ret_msg += 'Extra file {} found.\n'.format(fname) + ret_msg += f'Extra file {fname} found.\n' continue expected[fname] = True # Check if expected files were found for p, f in expected.items(): if not f: - ret_msg += 'Expected file {} missing.\n'.format(p) + ret_msg += f'Expected file {p} missing.\n' # List dir content on error if ret_msg != '': ret_msg += '\nInstall dir contents:\n' for i in found: - ret_msg += ' - {}\n'.format(i) + ret_msg += f' - {i}\n' return ret_msg def log_text_file(logfile, testdir, stdo, stde): @@ -409,7 +409,7 @@ def _compare_output(expected: T.List[T.Dict[str, str]], output: str, desc: str) how, expected = next_expected(i) # reached the end of output without finding expected - return 'expected "{}" not found in {}'.format(expected, desc) + return f'expected "{expected}" not found in {desc}' except StopIteration: # matched all expected lines pass @@ -543,7 +543,7 @@ def _run_test(test: TestDef, test_build_dir: str, install_dir: str, if returncode == 1: return testresult elif returncode != 0: - testresult.fail('Test exited with unexpected status {}.'.format(returncode)) + testresult.fail(f'Test exited with unexpected status {returncode}.') return testresult else: testresult.fail('Test that should have failed succeeded.') @@ -653,7 +653,7 @@ def load_test_json(t: TestDef, stdout_mandatory: bool) -> T.List[TestDef]: # Handle expected output stdout = test_def.get('stdout', []) if stdout_mandatory and not stdout: - raise RuntimeError("{} must contain a non-empty stdout key".format(test_def_file)) + raise RuntimeError(f"{test_def_file} must contain a non-empty stdout key") # Handle the do_not_set_opts list do_not_set_opts = test_def.get('do_not_set_opts', []) # type: T.List[str] @@ -726,7 +726,7 @@ def load_test_json(t: TestDef, stdout_mandatory: bool) -> T.List[TestDef]: exclude = False opt_names = [x[0] for x in i] for j in matrix['exclude']: - ex_list = ['{}={}'.format(k, v) for k, v in j.items()] + ex_list = [f'{k}={v}' for k, v in j.items()] if all([x in opt_names for x in ex_list]): exclude = True break @@ -1056,7 +1056,7 @@ def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], (testnum, testbase) = t.path.name.split(' ', 1) testname = '%.3d %s' % (int(testnum), testbase) if t.name: - testname += ' ({})'.format(t.name) + testname += f' ({t.name})' should_fail = False suite_args = [] if name.startswith('failing'): @@ -1084,7 +1084,7 @@ def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], else: without_install = "" if len(install_commands) > 0 else " (without install)" if result.msg != '': - print(red('Failed test{} during {}: {!r}'.format(without_install, result.step.name, t.display_name()))) + print(red(f'Failed test{without_install} during {result.step.name}: {t.display_name()!r}')) print('Reason:', result.msg) failing_tests += 1 if result.step == BuildStep.configure and result.mlog != no_meson_log_msg: @@ -1108,7 +1108,7 @@ def _run_tests(all_tests: T.List[T.Tuple[str, T.List[TestDef], bool]], for (_, _, res) in futures: res.cancel() else: - print('Succeeded test%s: 
%s' % (without_install, t.display_name())) + print(f'Succeeded test{without_install}: {t.display_name()}') passing_tests += 1 conf_time += result.conftime build_time += result.buildtime @@ -1190,26 +1190,26 @@ def check_meson_commands_work(options): gen_cmd = meson_commands + [testdir, build_dir] + backend_flags + options.extra_args pc, o, e = Popen_safe(gen_cmd) if pc.returncode != 0: - raise RuntimeError('Failed to configure {!r}:\n{}\n{}'.format(testdir, e, o)) + raise RuntimeError(f'Failed to configure {testdir!r}:\n{e}\n{o}') print('Checking that introspect works...') pc, o, e = Popen_safe(meson_commands + ['introspect', '--targets'], cwd=build_dir) json.loads(o) if pc.returncode != 0: - raise RuntimeError('Failed to introspect --targets {!r}:\n{}\n{}'.format(testdir, e, o)) + raise RuntimeError(f'Failed to introspect --targets {testdir!r}:\n{e}\n{o}') print('Checking that building works...') dir_args = get_backend_args_for_dir(backend, build_dir) pc, o, e = Popen_safe(compile_commands + dir_args, cwd=build_dir) if pc.returncode != 0: - raise RuntimeError('Failed to build {!r}:\n{}\n{}'.format(testdir, e, o)) + raise RuntimeError(f'Failed to build {testdir!r}:\n{e}\n{o}') print('Checking that testing works...') pc, o, e = Popen_safe(test_commands, cwd=build_dir) if pc.returncode != 0: - raise RuntimeError('Failed to test {!r}:\n{}\n{}'.format(testdir, e, o)) + raise RuntimeError(f'Failed to test {testdir!r}:\n{e}\n{o}') if install_commands: print('Checking that installing works...') pc, o, e = Popen_safe(install_commands, cwd=build_dir) if pc.returncode != 0: - raise RuntimeError('Failed to install {!r}:\n{}\n{}'.format(testdir, e, o)) + raise RuntimeError(f'Failed to install {testdir!r}:\n{e}\n{o}') def detect_system_compiler(options): @@ -1246,7 +1246,7 @@ def detect_system_compiler(options): def print_compilers(env, machine): print() - print('{} machine compilers'.format(machine.get_lower_case_name())) + print(f'{machine.get_lower_case_name()} machine compilers') print() for lang in sorted(compilers.all_languages): try: @@ -1254,7 +1254,7 @@ def print_compilers(env, machine): details = '{:<10} {} {}'.format('[' + comp.get_id() + ']', ' '.join(comp.get_exelist()), comp.get_version_string()) except mesonlib.MesonException: details = '[not found]' - print('%-7s: %s' % (lang, details)) + print(f'{lang:<7}: {details}') def print_tool_versions(): @@ -1295,7 +1295,7 @@ def print_tool_versions(): tool_vers_map[t['tool']] = m.group(t['match_group']) return '{} ({})'.format(exe, m.group(t['match_group'])) - return '{} (unknown)'.format(exe) + return f'{exe} (unknown)' print() print('tools') @@ -1356,5 +1356,5 @@ if __name__ == '__main__': for k, g in itertools.groupby(dir_names, key=lambda x: x.split()[0]): tests = list(g) if len(tests) != 1: - print('WARNING: The %s suite contains duplicate "%s" tests: "%s"' % (name, k, '", "'.join(tests))) + print('WARNING: The {} suite contains duplicate "{}" tests: "{}"'.format(name, k, '", "'.join(tests))) raise SystemExit(failing_tests) diff --git a/run_tests.py b/run_tests.py index 85434e4..4d737ee 100755 --- a/run_tests.py +++ b/run_tests.py @@ -79,7 +79,7 @@ def guess_backend(backend_str: str, msbuild_exe: str) -> T.Tuple['Backend', T.Li backend_flags = ['--backend=ninja'] backend = Backend.ninja else: - raise RuntimeError('Unknown backend: {!r}'.format(backend_str)) + raise RuntimeError(f'Unknown backend: {backend_str!r}') return (backend, backend_flags) @@ -169,7 +169,7 @@ def get_meson_script(): meson_cmd = shutil.which('meson') if meson_cmd: 
return meson_cmd - raise RuntimeError('Could not find {!r} or a meson in PATH'.format(meson_script)) + raise RuntimeError(f'Could not find {meson_script!r} or a meson in PATH') def get_backend_args_for_dir(backend, builddir): ''' @@ -184,16 +184,16 @@ def find_vcxproj_with_target(builddir, target): import re, fnmatch t, ext = os.path.splitext(target) if ext: - p = r'{}\s*\{}'.format(t, ext) + p = fr'{t}\s*\{ext}' else: - p = r'{}'.format(t) + p = fr'{t}' for _, _, files in os.walk(builddir): for f in fnmatch.filter(files, '*.vcxproj'): f = os.path.join(builddir, f) with open(f, encoding='utf-8') as o: if re.search(p, o.read(), flags=re.MULTILINE): return f - raise RuntimeError('No vcxproj matching {!r} in {!r}'.format(p, builddir)) + raise RuntimeError(f'No vcxproj matching {p!r} in {builddir!r}') def get_builddir_target_args(backend, builddir, target): dir_args = [] @@ -209,7 +209,7 @@ def get_builddir_target_args(backend, builddir, target): elif backend is Backend.ninja: target_args = [target] else: - raise AssertionError('Unknown backend: {!r}'.format(backend)) + raise AssertionError(f'Unknown backend: {backend!r}') return target_args + dir_args def get_backend_commands(backend: Backend, debug: bool = False) -> \ @@ -239,7 +239,7 @@ def get_backend_commands(backend: Backend, debug: bool = False) -> \ install_cmd = cmd + ['install'] uninstall_cmd = cmd + ['uninstall'] else: - raise AssertionError('Unknown backend: {!r}'.format(backend)) + raise AssertionError(f'Unknown backend: {backend!r}') return cmd, clean_cmd, test_cmd, install_cmd, uninstall_cmd def ensure_backend_detects_changes(backend): @@ -385,7 +385,7 @@ def main(): else: cross_test_args = mesonlib.python_command + ['run_cross_test.py'] for cf in options.cross: - print(mlog.bold('Running {} cross tests.'.format(cf))) + print(mlog.bold(f'Running {cf} cross tests.')) print(flush=True) cmd = cross_test_args + ['cross/' + cf] if options.failfast: diff --git a/run_unittests.py b/run_unittests.py index 62f28b2..11940b3 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -205,7 +205,7 @@ def skipIfNoPkgconfigDep(depname): if not is_ci() and shutil.which('pkg-config') is None: raise unittest.SkipTest('pkg-config not found') if not is_ci() and subprocess.call(['pkg-config', '--exists', depname]) != 0: - raise unittest.SkipTest('pkg-config dependency {} not found.'.format(depname)) + raise unittest.SkipTest(f'pkg-config dependency {depname} not found.') return func(*args, **kwargs) return wrapped return wrapper @@ -230,10 +230,10 @@ def skip_if_not_language(lang): def wrapped(*args, **kwargs): try: env = get_fake_env() - f = getattr(env, 'detect_{}_compiler'.format(lang)) + f = getattr(env, f'detect_{lang}_compiler') f(MachineChoice.HOST) except EnvironmentException: - raise unittest.SkipTest('No {} compiler found.'.format(lang)) + raise unittest.SkipTest(f'No {lang} compiler found.') return func(*args, **kwargs) return wrapped return wrapper @@ -248,7 +248,7 @@ def skip_if_env_set(key): old = None if key in os.environ: if not is_ci(): - raise unittest.SkipTest('Env var {!r} set, skipping'.format(key)) + raise unittest.SkipTest(f'Env var {key!r} set, skipping') old = os.environ.pop(key) try: return func(*args, **kwargs) @@ -271,7 +271,7 @@ def skip_if_not_base_option(feature): key = OptionKey(feature) if key not in cc.base_options: raise unittest.SkipTest( - '{} not available with {}'.format(feature, cc.id)) + f'{feature} not available with {cc.id}') return f(*args, **kwargs) return wrapped return actual @@ -898,11 +898,11 @@ class 
InternalTests(unittest.TestCase): if '--libs' not in args: return 0, '', '' if args[-1] == 'foo': - return 0, '-L{} -lfoo -L{} -lbar'.format(p2.as_posix(), p1.as_posix()), '' + return 0, f'-L{p2.as_posix()} -lfoo -L{p1.as_posix()} -lbar', '' if args[-1] == 'bar': - return 0, '-L{} -lbar'.format(p2.as_posix()), '' + return 0, f'-L{p2.as_posix()} -lbar', '' if args[-1] == 'internal': - return 0, '-L{} -lpthread -lm -lc -lrt -ldl'.format(p1.as_posix()), '' + return 0, f'-L{p1.as_posix()} -lpthread -lm -lc -lrt -ldl', '' old_call = PkgConfigDependency._call_pkgbin old_check = PkgConfigDependency.check_pkgconfig @@ -923,7 +923,7 @@ class InternalTests(unittest.TestCase): link_args = internal_dep.get_link_args() for link_arg in link_args: for lib in ('pthread', 'm', 'c', 'dl', 'rt'): - self.assertNotIn('lib{}.a'.format(lib), link_arg, msg=link_args) + self.assertNotIn(f'lib{lib}.a', link_arg, msg=link_args) finally: # Test ends PkgConfigDependency._call_pkgbin = old_call @@ -1040,17 +1040,17 @@ class InternalTests(unittest.TestCase): ver_b = Version(b) if op is operator.eq: for o, name in [(op, 'eq'), (operator.ge, 'ge'), (operator.le, 'le')]: - self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b)) + self.assertTrue(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') if op is operator.lt: for o, name in [(op, 'lt'), (operator.le, 'le'), (operator.ne, 'ne')]: - self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b)) + self.assertTrue(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') for o, name in [(operator.gt, 'gt'), (operator.ge, 'ge'), (operator.eq, 'eq')]: - self.assertFalse(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b)) + self.assertFalse(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') if op is operator.gt: for o, name in [(op, 'gt'), (operator.ge, 'ge'), (operator.ne, 'ne')]: - self.assertTrue(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b)) + self.assertTrue(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') for o, name in [(operator.lt, 'lt'), (operator.le, 'le'), (operator.eq, 'eq')]: - self.assertFalse(o(ver_a, ver_b), '{} {} {}'.format(ver_a, name, ver_b)) + self.assertFalse(o(ver_a, ver_b), f'{ver_a} {name} {ver_b}') def test_msvc_toolset_version(self): ''' @@ -1073,7 +1073,7 @@ class InternalTests(unittest.TestCase): # See https://devblogs.microsoft.com/cppblog/finding-the-visual-c-compiler-tools-in-visual-studio-2017/ vctools_ver = (Path(os.environ['VCINSTALLDIR']) / 'Auxiliary' / 'Build' / 'Microsoft.VCToolsVersion.default.txt').read_text() self.assertTrue(vctools_ver.startswith(toolset_ver), - msg='{!r} does not start with {!r}'.format(vctools_ver, toolset_ver)) + msg=f'{vctools_ver!r} does not start with {toolset_ver!r}') def test_split_args(self): split_args = mesonbuild.mesonlib.split_args @@ -1290,7 +1290,7 @@ class InternalTests(unittest.TestCase): errors.append((p.resolve(), e)) for f, e in errors: - print('Failed to validate: "{}"'.format(f)) + print(f'Failed to validate: "{f}"') print(str(e)) self.assertFalse(errors) @@ -1545,7 +1545,7 @@ class DataTests(unittest.TestCase): end = len(md) # Extract the content for this section return md[section.end():end] - raise RuntimeError('Could not find "{}" heading'.format(name)) + raise RuntimeError(f'Could not find "{name}" heading') def test_builtin_options_documented(self): ''' @@ -1605,7 +1605,7 @@ class DataTests(unittest.TestCase): elif debug == 'false': debug = False else: - raise RuntimeError('Invalid debug value {!r} in row:\n{}'.format(debug, m.group())) + raise RuntimeError(f'Invalid debug value 
{debug!r} in row:\n{m.group()}') env.coredata.set_option(OptionKey('buildtype'), buildtype) self.assertEqual(env.coredata.options[OptionKey('buildtype')].value, buildtype) self.assertEqual(env.coredata.options[OptionKey('optimization')].value, opt) @@ -1731,7 +1731,7 @@ class BasePlatformTests(unittest.TestCase): else: # VS doesn't have a stable output when no changes are done # XCode backend is untested with unit tests, help welcome! - self.no_rebuild_stdout = ['UNKNOWN BACKEND {!r}'.format(self.backend.name)] + self.no_rebuild_stdout = [f'UNKNOWN BACKEND {self.backend.name!r}'] self.builddirs = [] self.new_builddir() @@ -1762,7 +1762,7 @@ class BasePlatformTests(unittest.TestCase): def _print_meson_log(self): log = os.path.join(self.logdir, 'meson-log.txt') if not os.path.isfile(log): - print("{!r} doesn't exist".format(log)) + print(f"{log!r} doesn't exist") return with open(log, encoding='utf-8') as f: print(f.read()) @@ -1874,7 +1874,7 @@ class BasePlatformTests(unittest.TestCase): def install(self, *, use_destdir=True, override_envvars=None): if self.backend is not Backend.ninja: - raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name)) + raise unittest.SkipTest(f'{self.backend.name!r} backend can\'t install files') if use_destdir: destdir = {'DESTDIR': self.installdir} if override_envvars is None: @@ -1909,7 +1909,7 @@ class BasePlatformTests(unittest.TestCase): def get_compdb(self): if self.backend is not Backend.ninja: - raise unittest.SkipTest('Compiler db not available with {} backend'.format(self.backend.name)) + raise unittest.SkipTest(f'Compiler db not available with {self.backend.name} backend') try: with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile: contents = json.load(ifile) @@ -1988,7 +1988,7 @@ class BasePlatformTests(unittest.TestCase): self.assertPathEqual(i[0], i[1]) def assertPathBasenameEqual(self, path, basename): - msg = '{!r} does not end with {!r}'.format(path, basename) + msg = f'{path!r} does not end with {basename!r}' # We cannot use os.path.basename because it returns '' when the path # ends with '/' for some silly reason. This is not how the UNIX utility # `basename` works. 
@@ -2016,7 +2016,7 @@ class BasePlatformTests(unittest.TestCase): elif self.backend is Backend.xcode: raise unittest.SkipTest('Please help us fix this test on the xcode backend') else: - raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name)) + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') def assertBuildIsNoop(self): ret = self.build() @@ -2035,12 +2035,12 @@ class BasePlatformTests(unittest.TestCase): elif self.backend is Backend.xcode: raise unittest.SkipTest('Please help us fix this test on the xcode backend') else: - raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name)) + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') def assertRebuiltTarget(self, target): ret = self.build() if self.backend is Backend.ninja: - self.assertIn('Linking target {}'.format(target), ret) + self.assertIn(f'Linking target {target}', ret) elif self.backend is Backend.vs: # Ensure that this target was rebuilt linkre = re.compile('Link:\n [^\n]*link[^\n]*' + target, flags=re.IGNORECASE) @@ -2048,7 +2048,7 @@ class BasePlatformTests(unittest.TestCase): elif self.backend is Backend.xcode: raise unittest.SkipTest('Please help us fix this test on the xcode backend') else: - raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name)) + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') @staticmethod def get_target_from_filename(filename): @@ -2075,14 +2075,14 @@ class BasePlatformTests(unittest.TestCase): elif self.backend is Backend.xcode: raise unittest.SkipTest('Please help us fix this test on the xcode backend') else: - raise RuntimeError('Invalid backend: {!r}'.format(self.backend.name)) + raise RuntimeError(f'Invalid backend: {self.backend.name!r}') def assertPathExists(self, path): - m = 'Path {!r} should exist'.format(path) + m = f'Path {path!r} should exist' self.assertTrue(os.path.exists(path), msg=m) def assertPathDoesNotExist(self, path): - m = 'Path {!r} should not exist'.format(path) + m = f'Path {path!r} should not exist' self.assertFalse(os.path.exists(path), msg=m) @@ -2391,7 +2391,7 @@ class AllPlatformTests(BasePlatformTests): https://github.com/mesonbuild/meson/issues/829 ''' if self.backend is not Backend.ninja: - raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name)) + raise unittest.SkipTest(f'{self.backend.name!r} backend can\'t install files') testdir = os.path.join(self.common_test_dir, '8 install') self.init(testdir) intro = self.introspect('--targets') @@ -2438,7 +2438,7 @@ class AllPlatformTests(BasePlatformTests): TODO Change the format to a list officially in a followup PR ''' if self.backend is not Backend.ninja: - raise unittest.SkipTest('{!r} backend can\'t install files'.format(self.backend.name)) + raise unittest.SkipTest(f'{self.backend.name!r} backend can\'t install files') testdir = os.path.join(self.common_test_dir, '141 custom target multiple outputs') self.init(testdir) intro = self.introspect('--targets') @@ -2471,12 +2471,12 @@ class AllPlatformTests(BasePlatformTests): f.readlines()))) logged = read_logs() for name in logged: - self.assertTrue(name in expected, 'Log contains extra entry {}'.format(name)) + self.assertTrue(name in expected, f'Log contains extra entry {name}') expected[name] += 1 for name, count in expected.items(): - self.assertGreater(count, 0, 'Log is missing entry for {}'.format(name)) - self.assertLess(count, 2, 'Log has multiple entries for {}'.format(name)) + self.assertGreater(count, 0, f'Log is missing entry for {name}') + 
self.assertLess(count, 2, f'Log has multiple entries for {name}') # Verify that with --dry-run we obtain the same logs but with nothing # actually installed @@ -2788,7 +2788,7 @@ class AllPlatformTests(BasePlatformTests): for lang, evar in langs: # Detect with evar and do sanity checks on that if evar in os.environ: - ecc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST) + ecc = getattr(env, f'detect_{lang}_compiler')(MachineChoice.HOST) self.assertTrue(ecc.version) elinker = env.detect_static_linker(ecc) # Pop it so we don't use it for the next detection @@ -2812,11 +2812,11 @@ class AllPlatformTests(BasePlatformTests): self.assertIsInstance(ecc, msvc) self.assertIsInstance(elinker, lib) else: - raise AssertionError('Unknown compiler {!r}'.format(evalue)) + raise AssertionError(f'Unknown compiler {evalue!r}') # Check that we actually used the evalue correctly as the compiler self.assertEqual(ecc.get_exelist(), split_args(evalue)) # Do auto-detection of compiler based on platform, PATH, etc. - cc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST) + cc = getattr(env, f'detect_{lang}_compiler')(MachineChoice.HOST) self.assertTrue(cc.version) linker = env.detect_static_linker(cc) # Check compiler type @@ -2876,7 +2876,7 @@ class AllPlatformTests(BasePlatformTests): # Need a new env to re-run environment loading env = get_fake_env(testdir, self.builddir, self.prefix) - wcc = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST) + wcc = getattr(env, f'detect_{lang}_compiler')(MachineChoice.HOST) wlinker = env.detect_static_linker(wcc) # Pop it so we don't use it for the next detection evalue = os.environ.pop('AR') @@ -2972,9 +2972,9 @@ class AllPlatformTests(BasePlatformTests): value = 'spaces and fun@$&()-=_+{}[]:;>?,./~`' for env_var in ['CPPFLAGS', 'CFLAGS']: env = {} - env[env_var] = '-D{}="{}"'.format(define, value) + env[env_var] = f'-D{define}="{value}"' env['LDFLAGS'] = '-DMESON_FAIL_VALUE=cflags-read' - self.init(testdir, extra_args=['-D{}={}'.format(define, value)], override_envvars=env) + self.init(testdir, extra_args=[f'-D{define}={value}'], override_envvars=env) def test_custom_target_exe_data_deterministic(self): testdir = os.path.join(self.common_test_dir, '110 custom target capture') @@ -3190,7 +3190,7 @@ class AllPlatformTests(BasePlatformTests): path = os.path.join(project_dir, 'subprojects', name) os.makedirs(path) with open(os.path.join(path, 'meson.build'), 'w') as ofile: - ofile.write("project('{}', version: '1.0')".format(name)) + ofile.write(f"project('{name}', version: '1.0')") return path def dist_impl(self, vcs_init, vcs_add_all=None, include_subprojects=True): @@ -3292,7 +3292,7 @@ class AllPlatformTests(BasePlatformTests): self.build() for each in ('prog', 'subdir/liblib1.so', ): rpath = get_rpath(os.path.join(self.builddir, each)) - self.assertTrue(rpath, 'Rpath could not be determined for {}.'.format(each)) + self.assertTrue(rpath, f'Rpath could not be determined for {each}.') if is_dragonflybsd(): # DragonflyBSD will prepend /usr/lib/gccVERSION to the rpath, # so ignore that. 
@@ -3954,7 +3954,7 @@ class AllPlatformTests(BasePlatformTests): if item['name'] == arg: self.assertEqual(item['value'], 'bar') return - raise Exception('Missing {} value?'.format(arg)) + raise Exception(f'Missing {arg} value?') def test_same_dash_option_twice(self): self._test_same_option_twice('bindir', ['--bindir=foo', '--bindir=bar']) @@ -3974,7 +3974,7 @@ class AllPlatformTests(BasePlatformTests): if item['name'] == arg: self.assertEqual(item['value'], 'bar') return - raise Exception('Missing {} value?'.format(arg)) + raise Exception(f'Missing {arg} value?') def test_same_dash_option_twice_configure(self): self._test_same_option_twice_configure( @@ -4466,7 +4466,7 @@ class AllPlatformTests(BasePlatformTests): @skipIfNoExecutable('clang-format') def test_clang_format(self): if self.backend is not Backend.ninja: - raise unittest.SkipTest('Clang-format is for now only supported on Ninja, not {}'.format(self.backend.name)) + raise unittest.SkipTest(f'Clang-format is for now only supported on Ninja, not {self.backend.name}') testdir = os.path.join(self.unit_test_dir, '54 clang-format') testfile = os.path.join(testdir, 'prog.c') badfile = os.path.join(testdir, 'prog_orig_c') @@ -4494,7 +4494,7 @@ class AllPlatformTests(BasePlatformTests): @skipIfNoExecutable('clang-tidy') def test_clang_tidy(self): if self.backend is not Backend.ninja: - raise unittest.SkipTest('Clang-tidy is for now only supported on Ninja, not {}'.format(self.backend.name)) + raise unittest.SkipTest(f'Clang-tidy is for now only supported on Ninja, not {self.backend.name}') if shutil.which('c++') is None: raise unittest.SkipTest('Clang-tidy breaks when ccache is used and "c++" not in path.') if is_osx(): @@ -4572,7 +4572,7 @@ class AllPlatformTests(BasePlatformTests): if k == i[0]: found = True break - self.assertTrue(found, 'Key "{}" not in expected list'.format(k)) + self.assertTrue(found, f'Key "{k}" not in expected list') root_keylist = [ ('benchmarks', list), @@ -4759,7 +4759,7 @@ class AllPlatformTests(BasePlatformTests): infodir = os.path.join(self.builddir, 'meson-info') self.assertPathExists(infodir) for i in root_keylist: - curr = os.path.join(infodir, 'intro-{}.json'.format(i)) + curr = os.path.join(infodir, f'intro-{i}.json') self.assertPathExists(curr) with open(curr) as fp: res_file[i] = json.load(fp) @@ -4980,7 +4980,7 @@ class AllPlatformTests(BasePlatformTests): def test_alias_target(self): if self.backend is Backend.vs: # FIXME: This unit test is broken with vs backend, needs investigation - raise unittest.SkipTest('Skipping alias_target test with {} backend'.format(self.backend.name)) + raise unittest.SkipTest(f'Skipping alias_target test with {self.backend.name} backend') testdir = os.path.join(self.unit_test_dir, '66 alias target') self.init(testdir) self.build() @@ -5050,24 +5050,24 @@ class AllPlatformTests(BasePlatformTests): def get_exe_name(basename: str) -> str: if is_windows(): - return '{}.exe'.format(basename) + return f'{basename}.exe' else: return basename def get_shared_lib_name(basename: str) -> str: if mesonbuild.environment.detect_msys2_arch(): - return 'lib{}.dll'.format(basename) + return f'lib{basename}.dll' elif is_windows(): - return '{}.dll'.format(basename) + return f'{basename}.dll' elif is_cygwin(): - return 'cyg{}.dll'.format(basename) + return f'cyg{basename}.dll' elif is_osx(): - return 'lib{}.dylib'.format(basename) + return f'lib{basename}.dylib' else: - return 'lib{}.so'.format(basename) + return f'lib{basename}.so' def get_static_lib_name(basename: str) -> str: - return 
'lib{}.a'.format(basename) + return f'lib{basename}.a' # Base case (no targets or additional arguments) @@ -5227,7 +5227,7 @@ class AllPlatformTests(BasePlatformTests): help_output = self._run(self.meson_command + ['--help']) help_commands = {c.strip() for c in re.findall(r'usage:(?:.+)?{((?:[a-z]+,*)+?)}', help_output, re.MULTILINE|re.DOTALL)[0].split(',')} - self.assertEqual(md_commands | {'help'}, help_commands, 'Doc file: `{}`'.format(doc_path)) + self.assertEqual(md_commands | {'help'}, help_commands, f'Doc file: `{doc_path}`') ## Validate that each section has proper placeholders @@ -5240,7 +5240,7 @@ class AllPlatformTests(BasePlatformTests): for command in md_commands: m = get_data_pattern(command).search(md, pos=md_command_sections[command][0], endpos=md_command_sections[command][1]) - self.assertIsNotNone(m, 'Command `{}` is missing placeholders for dynamic data. Doc file: `{}`'.format(command, doc_path)) + self.assertIsNotNone(m, f'Command `{command}` is missing placeholders for dynamic data. Doc file: `{doc_path}`') def _check_coverage_files(self, types=('text', 'xml', 'html')): covdir = Path(self.builddir) / 'meson-logs' @@ -5252,7 +5252,7 @@ class AllPlatformTests(BasePlatformTests): if 'html' in types: files.append('coveragereport/index.html') for f in files: - self.assertTrue((covdir / f).is_file(), msg='{} is not a file'.format(f)) + self.assertTrue((covdir / f).is_file(), msg=f'{f} is not a file') def test_coverage(self): if mesonbuild.environment.detect_msys2_arch(): @@ -5607,10 +5607,10 @@ class FailureTests(BasePlatformTests): with open(self.mbuild, 'w') as f: f.write("project('failure test', 'c', 'cpp'") if meson_version: - f.write(", meson_version: '{}'".format(meson_version)) + f.write(f", meson_version: '{meson_version}'") f.write(")\n") for lang in langs: - f.write("add_languages('{}', required : false)\n".format(lang)) + f.write(f"add_languages('{lang}', required : false)\n") f.write(contents) if options is not None: with open(self.moptions, 'w') as f: @@ -5633,10 +5633,10 @@ class FailureTests(BasePlatformTests): with open(self.mbuild, 'w') as f: f.write("project('output test', 'c', 'cpp'") if meson_version: - f.write(", meson_version: '{}'".format(meson_version)) + f.write(f", meson_version: '{meson_version}'") f.write(")\n") for lang in langs: - f.write("add_languages('{}', required : false)\n".format(lang)) + f.write(f"add_languages('{lang}', required : false)\n") f.write(contents) # Run in-process for speed and consistency with assertMesonRaises return self.init(self.srcdir, extra_args=extra_args, inprocess=True) @@ -5705,7 +5705,7 @@ class FailureTests(BasePlatformTests): if shutil.which('gnustep-config'): raise unittest.SkipTest('gnustep-config found') self.assertMesonRaises("dependency('gnustep')", - "(requires a Objc compiler|{})".format(self.dnf), + f"(requires a Objc compiler|{self.dnf})", langs = ['objc']) def test_wx_notfound_dependency(self): @@ -5724,19 +5724,19 @@ class FailureTests(BasePlatformTests): def test_llvm_dependency(self): self.assertMesonRaises("dependency('llvm', modules : 'fail')", - "(required.*fail|{})".format(self.dnf)) + f"(required.*fail|{self.dnf})") def test_boost_notfound_dependency(self): # Can be run even if Boost is found or not self.assertMesonRaises("dependency('boost', modules : 1)", "module.*not a string") self.assertMesonRaises("dependency('boost', modules : 'fail')", - "(fail.*not found|{})".format(self.dnf)) + f"(fail.*not found|{self.dnf})") def test_boost_BOOST_ROOT_dependency(self): # Test BOOST_ROOT; can be run 
even if Boost is found or not self.assertMesonRaises("dependency('boost')", - "(boost_root.*absolute|{})".format(self.dnf), + f"(boost_root.*absolute|{self.dnf})", override_envvars = {'BOOST_ROOT': 'relative/path'}) def test_dependency_invalid_method(self): @@ -5928,7 +5928,7 @@ class WindowsTests(BasePlatformTests): # Find cmd with an absolute path that's missing the extension cmd_path = prog2.get_path()[:-4] prog = ExternalProgram(cmd_path) - self.assertTrue(prog.found(), msg='{!r} not found'.format(cmd_path)) + self.assertTrue(prog.found(), msg=f'{cmd_path!r} not found') # Finding a script with no extension inside a directory works prog = ExternalProgram(os.path.join(testdir, 'test-script')) self.assertTrue(prog.found(), msg='test-script not found') @@ -5965,7 +5965,7 @@ class WindowsTests(BasePlatformTests): path = os.environ['PATH'] if 'WindowsApps' not in path: username = os.environ['USERNAME'] - appstore_dir = r'C:\Users\{}\AppData\Local\Microsoft\WindowsApps'.format(username) + appstore_dir = fr'C:\Users\{username}\AppData\Local\Microsoft\WindowsApps' path = os.pathsep + appstore_dir path = ExternalProgram._windows_sanitize_path(path) self.assertNotIn('WindowsApps', path) @@ -6052,8 +6052,8 @@ class WindowsTests(BasePlatformTests): def _check_ld(self, name: str, lang: str, expected: str) -> None: if not shutil.which(name): - raise unittest.SkipTest('Could not find {}.'.format(name)) - envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP['{}_ld'.format(lang)]] + raise unittest.SkipTest(f'Could not find {name}.') + envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP[f'{lang}_ld']] # Also test a deprecated variable if there is one. if f'{lang}_ld' in mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP: @@ -6064,9 +6064,9 @@ class WindowsTests(BasePlatformTests): with mock.patch.dict(os.environ, {envvar: name}): env = get_fake_env() try: - comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST) + comp = getattr(env, f'detect_{lang}_compiler')(MachineChoice.HOST) except EnvironmentException: - raise unittest.SkipTest('Could not find a compiler for {}'.format(lang)) + raise unittest.SkipTest(f'Could not find a compiler for {lang}') self.assertEqual(comp.linker.id, expected) def test_link_environment_variable_lld_link(self): @@ -6121,7 +6121,7 @@ class WindowsTests(BasePlatformTests): exe = os.path.join(self.builddir, 'cppprog.exe') for f in (dll, exe): pe = pefile.PE(f) - msg = 'PE file: {!r}, compiler: {!r}, linker: {!r}'.format(f, cc_id, ld_id) + msg = f'PE file: {f!r}, compiler: {cc_id!r}, linker: {ld_id!r}' if cc_id == 'clang-cl': # Latest clang-cl tested (7.0) does not write checksums out self.assertFalse(pe.verify_checksum(), msg=msg) @@ -6202,7 +6202,7 @@ class WindowsTests(BasePlatformTests): def test_modules(self): if self.backend is not Backend.ninja: - raise unittest.SkipTest('C++ modules only work with the Ninja backend (not {}).'.format(self.backend.name)) + raise unittest.SkipTest(f'C++ modules only work with the Ninja backend (not {self.backend.name}).') if 'VSCMD_VER' not in os.environ: raise unittest.SkipTest('C++ modules is only supported with Visual Studio.') if version_compare(os.environ['VSCMD_VER'], '<16.9.0'): @@ -6722,7 +6722,7 @@ class LinuxlikeTests(BasePlatformTests): ('intel', 'c++03'), ('intel', 'gnu++03')]) if v != 'none' and not (compiler.get_id(), v) in skiplist: - cmd_std = " -std={} ".format(v) + cmd_std = f" -std={v} " self.assertIn(cmd_std, cmd) try: self.build() @@ -6738,7 +6738,7 @@ class LinuxlikeTests(BasePlatformTests): elif compiler.language 
== 'cpp': env_flag_name = 'CXXFLAGS' else: - raise NotImplementedError('Language {} not defined.'.format(compiler.language)) + raise NotImplementedError(f'Language {compiler.language} not defined.') env = {} env[env_flag_name] = cmd_std with self.assertRaises((subprocess.CalledProcessError, mesonbuild.mesonlib.EnvironmentException), @@ -7683,7 +7683,7 @@ class LinuxlikeTests(BasePlatformTests): # Test that installed libraries works self.new_builddir() self.prefix = oldprefix - meson_args = ['-Dc_link_args=-L{}'.format(libdir), + meson_args = [f'-Dc_link_args=-L{libdir}', '--fatal-meson-warnings'] testdir = os.path.join(self.unit_test_dir, '68 static link') env = {'PKG_CONFIG_LIBDIR': os.path.join(libdir, 'pkgconfig')} @@ -7695,8 +7695,8 @@ class LinuxlikeTests(BasePlatformTests): if is_sunos(): raise unittest.SkipTest('Solaris currently cannot override the linker.') if not shutil.which(check): - raise unittest.SkipTest('Could not find {}.'.format(check)) - envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP['{}_ld'.format(lang)]] + raise unittest.SkipTest(f'Could not find {check}.') + envvars = [mesonbuild.envconfig.ENV_VAR_PROG_MAP[f'{lang}_ld']] # Also test a deprecated variable if there is one. if f'{lang}_ld' in mesonbuild.envconfig.DEPRECATED_ENV_PROG_MAP: @@ -7706,7 +7706,7 @@ class LinuxlikeTests(BasePlatformTests): for envvar in envvars: with mock.patch.dict(os.environ, {envvar: name}): env = get_fake_env() - comp = getattr(env, 'detect_{}_compiler'.format(lang))(MachineChoice.HOST) + comp = getattr(env, f'detect_{lang}_compiler')(MachineChoice.HOST) if isinstance(comp, (mesonbuild.compilers.AppleClangCCompiler, mesonbuild.compilers.AppleClangCPPCompiler, mesonbuild.compilers.AppleClangObjCCompiler, @@ -7714,7 +7714,7 @@ class LinuxlikeTests(BasePlatformTests): raise unittest.SkipTest('AppleClang is currently only supported with ld64') if lang != 'rust' and comp.use_linker_args('bfd') == []: raise unittest.SkipTest( - 'Compiler {} does not support using alternative linkers'.format(comp.id)) + f'Compiler {comp.id} does not support using alternative linkers') self.assertEqual(comp.linker.id, expected) def test_ld_environment_variable_bfd(self): @@ -8023,7 +8023,7 @@ class PythonTests(BasePlatformTests): def test_versions(self): if self.backend is not Backend.ninja: - raise unittest.SkipTest('Skipping python tests with {} backend'.format(self.backend.name)) + raise unittest.SkipTest(f'Skipping python tests with {self.backend.name} backend') testdir = os.path.join(self.src_root, 'test cases', 'unit', '39 python extmodule') @@ -8417,23 +8417,23 @@ class NativeFileTests(BasePlatformTests): values should be a nested dictionary structure of {section: {key: value}} """ - filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config)) + filename = os.path.join(self.builddir, f'generated{self.current_config}.config') self.current_config += 1 with open(filename, 'wt') as f: for section, entries in values.items(): - f.write('[{}]\n'.format(section)) + f.write(f'[{section}]\n') for k, v in entries.items(): if isinstance(v, (bool, int, float)): - f.write("{}={}\n".format(k, v)) + f.write(f"{k}={v}\n") elif isinstance(v, list): - f.write("{}=[{}]\n".format(k, ', '.join(["'{}'".format(w) for w in v]))) + f.write("{}=[{}]\n".format(k, ', '.join([f"'{w}'" for w in v]))) else: - f.write("{}='{}'\n".format(k, v)) + f.write(f"{k}='{v}'\n") return filename def helper_create_binary_wrapper(self, binary, dir_=None, extra_args=None, **kwargs): """Creates a wrapper around a binary that 
overrides specific values.""" - filename = os.path.join(dir_ or self.builddir, 'binary_wrapper{}.py'.format(self.current_wrapper)) + filename = os.path.join(dir_ or self.builddir, f'binary_wrapper{self.current_wrapper}.py') extra_args = extra_args or {} self.current_wrapper += 1 if is_haiku(): @@ -8455,7 +8455,7 @@ class NativeFileTests(BasePlatformTests): f.write(' parser.add_argument("-{0}", "--{0}", action="store_true")\n'.format(name)) f.write(' args, extra_args = parser.parse_known_args()\n') for name, value in chain(extra_args.items(), kwargs.items()): - f.write(' if args.{}:\n'.format(name)) + f.write(f' if args.{name}:\n') f.write(' print("{}", file=sys.{})\n'.format(value, kwargs.get('outfile', 'stdout'))) f.write(' sys.exit(0)\n') f.write(textwrap.dedent(''' @@ -8478,9 +8478,9 @@ class NativeFileTests(BasePlatformTests): # On windows we need yet another level of indirection, as cmd cannot # invoke python files itself, so instead we generate a .bat file, which # invokes our python wrapper - batfile = os.path.join(self.builddir, 'binary_wrapper{}.bat'.format(self.current_wrapper)) + batfile = os.path.join(self.builddir, f'binary_wrapper{self.current_wrapper}.bat') with open(batfile, 'wt') as f: - f.write(r'@{} {} %*'.format(sys.executable, filename)) + f.write(fr'@{sys.executable} {filename} %*') return batfile def helper_for_compiler(self, lang, cb, for_machine = MachineChoice.HOST): @@ -8488,7 +8488,7 @@ class NativeFileTests(BasePlatformTests): with more than one implementation, such as C, C++, ObjC, ObjC++, and D. """ env = get_fake_env() - getter = getattr(env, 'detect_{}_compiler'.format(lang)) + getter = getattr(env, f'detect_{lang}_compiler') getter = functools.partial(getter, for_machine) cc = getter() binary, newid = cb(cc) @@ -8516,7 +8516,7 @@ class NativeFileTests(BasePlatformTests): def filler(): with open(fifo, 'w') as f: f.write('[binaries]\n') - f.write("bash = '{}'\n".format(wrapper)) + f.write(f"bash = '{wrapper}'\n") thread = threading.Thread(target=filler) thread.start() @@ -8540,7 +8540,7 @@ class NativeFileTests(BasePlatformTests): def _simple_test(self, case, binary, entry=None): wrapper = self.helper_create_binary_wrapper(binary, version='12345') config = self.helper_create_native_file({'binaries': {entry or binary: wrapper}}) - self.init(self.testcase, extra_args=['--native-file', config, '-Dcase={}'.format(case)]) + self.init(self.testcase, extra_args=['--native-file', config, f'-Dcase={case}']) def test_find_program(self): self._simple_test('find_program', 'bash') @@ -8569,7 +8569,7 @@ class NativeFileTests(BasePlatformTests): # We not have python2, check for it for v in ['2', '2.7', '-2.7']: - rc = subprocess.call(['pkg-config', '--cflags', 'python{}'.format(v)], + rc = subprocess.call(['pkg-config', '--cflags', f'python{v}'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) if rc == 0: @@ -8693,7 +8693,7 @@ class NativeFileTests(BasePlatformTests): """ wrapper = self.helper_create_binary_wrapper(binary, version=version_str) env = get_fake_env() - getter = getattr(env, 'detect_{}_compiler'.format(lang)) + getter = getattr(env, f'detect_{lang}_compiler') getter = functools.partial(getter, MachineChoice.HOST) env.binaries.host.binaries[lang] = [wrapper] compiler = getter() @@ -9094,13 +9094,13 @@ class CrossFileTests(BasePlatformTests): values should be a nested dictionary structure of {section: {key: value}} """ - filename = os.path.join(self.builddir, 'generated{}.config'.format(self.current_config)) + filename = os.path.join(self.builddir, 
f'generated{self.current_config}.config') self.current_config += 1 with open(filename, 'wt') as f: for section, entries in values.items(): - f.write('[{}]\n'.format(section)) + f.write(f'[{section}]\n') for k, v in entries.items(): - f.write("{}='{}'\n".format(k, v)) + f.write(f"{k}='{v}'\n") return filename def test_cross_file_dirs(self): @@ -9528,7 +9528,7 @@ class SubprojectsCommandTests(BasePlatformTests): def _create_project(self, path, project_name='dummy'): os.makedirs(str(path), exist_ok=True) with open(str(path / 'meson.build'), 'w') as f: - f.write("project('{}')".format(project_name)) + f.write(f"project('{project_name}')") def _git(self, cmd, workdir): return git(cmd, str(workdir), check=True)[1].strip() @@ -9577,14 +9577,14 @@ class SubprojectsCommandTests(BasePlatformTests): def _git_create_remote_commit(self, name, branch): self._git_remote(['checkout', branch], name) - self._git_remote(['commit', '--allow-empty', '-m', 'initial {} commit'.format(branch)], name) + self._git_remote(['commit', '--allow-empty', '-m', f'initial {branch} commit'], name) def _git_create_remote_branch(self, name, branch): self._git_remote(['checkout', '-b', branch], name) - self._git_remote(['commit', '--allow-empty', '-m', 'initial {} commit'.format(branch)], name) + self._git_remote(['commit', '--allow-empty', '-m', f'initial {branch} commit'], name) def _git_create_remote_tag(self, name, tag): - self._git_remote(['commit', '--allow-empty', '-m', 'tag {} commit'.format(tag)], name) + self._git_remote(['commit', '--allow-empty', '-m', f'tag {tag} commit'], name) self._git_remote(['tag', tag], name) def _wrap_create_git(self, name, revision='master'): diff --git a/skip_ci.py b/skip_ci.py index 4ee4466..7411d57 100755 --- a/skip_ci.py +++ b/skip_ci.py @@ -23,16 +23,16 @@ import traceback def check_pr(is_pr_env): if is_pr_env not in os.environ: - print('This is not pull request: {} is not set'.format(is_pr_env)) + print(f'This is not pull request: {is_pr_env} is not set') sys.exit() elif os.environ[is_pr_env] == 'false': - print('This is not pull request: {} is false'.format(is_pr_env)) + print(f'This is not pull request: {is_pr_env} is false') sys.exit() def get_base_branch(base_env): if base_env not in os.environ: - print('Unable to determine base branch: {} is not set'.format(base_env)) + print(f'Unable to determine base branch: {base_env} is not set') sys.exit() return os.environ[base_env] diff --git a/test cases/common/106 generatorcustom/gen-resx.py b/test cases/common/106 generatorcustom/gen-resx.py index 7d31ade..242a962 100755 --- a/test cases/common/106 generatorcustom/gen-resx.py +++ b/test cases/common/106 generatorcustom/gen-resx.py @@ -6,4 +6,4 @@ ofile = sys.argv[1] num = sys.argv[2] with open(ofile, 'w') as f: - f.write('res{}\n'.format(num)) + f.write(f'res{num}\n') diff --git a/test cases/common/14 configure file/check_file.py b/test cases/common/14 configure file/check_file.py index 707995e..1cdb624 100644 --- a/test cases/common/14 configure file/check_file.py +++ b/test cases/common/14 configure file/check_file.py @@ -12,9 +12,9 @@ elif len(sys.argv) == 3: m2 = os.stat(f2).st_mtime_ns # Compare only os.stat() if m1 != m2: - raise RuntimeError('mtime of {!r} () != mtime of {!r} ()'.format(f1, m1, f2, m2)) + raise RuntimeError(f'mtime of {f1!r} () != mtime of {m1!r} ()') import filecmp if not filecmp.cmp(f1, f2): - raise RuntimeError('{!r} != {!r}'.format(f1, f2)) + raise RuntimeError(f'{f1!r} != {f2!r}') else: raise AssertionError diff --git a/test cases/common/14 configure 
file/generator-deps.py b/test cases/common/14 configure file/generator-deps.py index 376ddb2..cca253c 100755 --- a/test cases/common/14 configure file/generator-deps.py +++ b/test cases/common/14 configure file/generator-deps.py @@ -16,4 +16,4 @@ with outputf.open('w') as ofile: depf = Path(sys.argv[2]) if not depf.exists(): with depf.open('w') as ofile: - ofile.write("{}: depfile\n".format(outputf.name)) + ofile.write(f"{outputf.name}: depfile\n") diff --git a/test cases/common/140 mesonintrospect from scripts/check_env.py b/test cases/common/140 mesonintrospect from scripts/check_env.py index 2d46d88..61de546 100644 --- a/test cases/common/140 mesonintrospect from scripts/check_env.py +++ b/test cases/common/140 mesonintrospect from scripts/check_env.py @@ -22,7 +22,7 @@ introspect_arr = shlex.split(mesonintrospect) some_executable = introspect_arr[0] if not os.path.isfile(some_executable): - raise RuntimeError('{!r} does not exist'.format(mesonintrospect)) + raise RuntimeError(f'{mesonintrospect!r} does not exist') if do_print: print(some_executable, end='') diff --git a/test cases/common/169 preserve gendir/genprog.py b/test cases/common/169 preserve gendir/genprog.py index a50642b..681c43a 100755 --- a/test cases/common/169 preserve gendir/genprog.py +++ b/test cases/common/169 preserve gendir/genprog.py @@ -29,7 +29,7 @@ rel_ofiles = [] for ifile in ifiles: if not ifile.startswith(options.searchdir): - sys.exit('Input file %s does not start with search dir %s.' % (ifile, searchdir)) + sys.exit(f'Input file {ifile} does not start with search dir {searchdir}.') rel_ofile = ifile[len(searchdir):] if rel_ofile[0] == '/' or rel_ofile[0] == '\\': rel_ofile = rel_ofile[1:] diff --git a/test cases/common/196 generator in subdir/com/mesonbuild/genprog.py b/test cases/common/196 generator in subdir/com/mesonbuild/genprog.py index a50642b..681c43a 100644 --- a/test cases/common/196 generator in subdir/com/mesonbuild/genprog.py +++ b/test cases/common/196 generator in subdir/com/mesonbuild/genprog.py @@ -29,7 +29,7 @@ rel_ofiles = [] for ifile in ifiles: if not ifile.startswith(options.searchdir): - sys.exit('Input file %s does not start with search dir %s.' 
% (ifile, searchdir)) + sys.exit(f'Input file {ifile} does not start with search dir {searchdir}.') rel_ofile = ifile[len(searchdir):] if rel_ofile[0] == '/' or rel_ofile[0] == '\\': rel_ofile = rel_ofile[1:] diff --git a/test cases/common/227 link depends indexed custom target/check_arch.py b/test cases/common/227 link depends indexed custom target/check_arch.py index 2e09f35..927bf87 100644 --- a/test cases/common/227 link depends indexed custom target/check_arch.py +++ b/test cases/common/227 link depends indexed custom target/check_arch.py @@ -29,4 +29,4 @@ elif arch == 'x64': arch = 'x86_64' if arch != want_arch: - raise RuntimeError('Wanted arch {} but exe uses {}'.format(want_arch, arch)) + raise RuntimeError(f'Wanted arch {want_arch} but exe uses {arch}') diff --git a/test cases/common/50 custom target/depfile/dep.py b/test cases/common/50 custom target/depfile/dep.py index dc6e6b6..c9e8f94 100755 --- a/test cases/common/50 custom target/depfile/dep.py +++ b/test cases/common/50 custom target/depfile/dep.py @@ -12,4 +12,4 @@ quoted_depfiles = [x.replace(' ', r'\ ') for x in depfiles] with open(output, 'w') as f: f.write('I am the result of globbing.') with open(depfile, 'w') as f: - f.write('%s: %s\n' % (output, ' '.join(quoted_depfiles))) + f.write('{}: {}\n'.format(output, ' '.join(quoted_depfiles))) diff --git a/test cases/common/96 manygen/subdir/manygen.py b/test cases/common/96 manygen/subdir/manygen.py index c40cc2e..931fb61 100755 --- a/test cases/common/96 manygen/subdir/manygen.py +++ b/test cases/common/96 manygen/subdir/manygen.py @@ -41,18 +41,18 @@ tmpc = 'diibadaaba.c' tmpo = 'diibadaaba' + objsuffix with open(outc, 'w') as f: - f.write('''#include"%s.h" -int %s_in_src(void) { + f.write('''#include"{}.h" +int {}_in_src(void) {{ return 0; -} -''' % (funcname, funcname)) +}} +'''.format(funcname, funcname)) with open(outh, 'w') as f: f.write('''#pragma once -int %s_in_lib(void); -int %s_in_obj(void); -int %s_in_src(void); -''' % (funcname, funcname, funcname)) +int {}_in_lib(void); +int {}_in_obj(void); +int {}_in_src(void); +'''.format(funcname, funcname, funcname)) with open(tmpc, 'w') as f: f.write('''int %s_in_obj(void) { diff --git a/test cases/python/2 extmodule/blaster.py b/test cases/python/2 extmodule/blaster.py index 1f01876..aaac984 100755 --- a/test cases/python/2 extmodule/blaster.py +++ b/test cases/python/2 extmodule/blaster.py @@ -8,4 +8,4 @@ if not isinstance(result, int): raise SystemExit('Returned result not an integer.') if result != 1: - raise SystemExit('Returned result {} is not 1.'.format(result)) + raise SystemExit(f'Returned result {result} is not 1.') diff --git a/test cases/python/4 custom target depends extmodule/blaster.py b/test cases/python/4 custom target depends extmodule/blaster.py index 09039cb..61b11f9 100644 --- a/test cases/python/4 custom target depends extmodule/blaster.py +++ b/test cases/python/4 custom target depends extmodule/blaster.py @@ -27,4 +27,4 @@ if not isinstance(result, int): raise SystemExit('Returned result not an integer.') if result != 1: - raise SystemExit('Returned result {} is not 1.'.format(result)) + raise SystemExit(f'Returned result {result} is not 1.') diff --git a/test cases/python3/2 extmodule/blaster.py b/test cases/python3/2 extmodule/blaster.py index 7e1eae6..529b028 100755 --- a/test cases/python3/2 extmodule/blaster.py +++ b/test cases/python3/2 extmodule/blaster.py @@ -10,5 +10,5 @@ if not isinstance(result, int): sys.exit(1) if result != 1: - print('Returned result {} is not 1.'.format(result)) 
+ print(f'Returned result {result} is not 1.') sys.exit(1) diff --git a/test cases/python3/4 custom target depends extmodule/blaster.py b/test cases/python3/4 custom target depends extmodule/blaster.py index 49be466..d2c93ad 100644 --- a/test cases/python3/4 custom target depends extmodule/blaster.py +++ b/test cases/python3/4 custom target depends extmodule/blaster.py @@ -28,5 +28,5 @@ if not isinstance(result, int): sys.exit(1) if result != 1: - print('Returned result {} is not 1.'.format(result)) + print(f'Returned result {result} is not 1.') sys.exit(1) diff --git a/test cases/unit/39 python extmodule/blaster.py b/test cases/unit/39 python extmodule/blaster.py index 8bfe9cf..daa392d 100755 --- a/test cases/unit/39 python extmodule/blaster.py +++ b/test cases/unit/39 python extmodule/blaster.py @@ -10,5 +10,5 @@ if not isinstance(result, int): sys.exit(1) if result != 1: - print('Returned result {} is not 1.'.format(result)) + print(f'Returned result {result} is not 1.') sys.exit(1) diff --git a/tools/ac_converter.py b/tools/ac_converter.py index 075eae6..6c72f8d 100755 --- a/tools/ac_converter.py +++ b/tools/ac_converter.py @@ -414,7 +414,7 @@ cdata = configuration_data()''') print('check_headers = [') for token, hname in headers: - print(" ['{}', '{}'],".format(token, hname)) + print(f" ['{token}', '{hname}'],") print(']\n') print('''foreach h : check_headers @@ -430,7 +430,7 @@ print('check_functions = [') for tok in functions: if len(tok) == 3: tokstr, fdata0, fdata1 = tok - print(" ['{}', '{}', '#include<{}>'],".format(tokstr, fdata0, fdata1)) + print(f" ['{tokstr}', '{fdata0}', '#include<{fdata1}>'],") else: print('# check token', tok) print(']\n') @@ -445,7 +445,7 @@ endforeach # Convert sizeof checks. for elem, typename in sizes: - print("cdata.set('{}', cc.sizeof('{}'))".format(elem, typename)) + print(f"cdata.set('{elem}', cc.sizeof('{typename}'))") print(''' configure_file(input : 'config.h.meson', diff --git a/tools/cmake2meson.py b/tools/cmake2meson.py index e19ef68..4185b5a 100755 --- a/tools/cmake2meson.py +++ b/tools/cmake2meson.py @@ -81,7 +81,7 @@ class Lexer: elif tid == 'varexp': yield(Token('varexp', match_text[2:-1])) else: - raise ValueError('lex: unknown element {}'.format(tid)) + raise ValueError(f'lex: unknown element {tid}') break if not matched: raise ValueError('Lexer got confused line %d column %d' % (lineno, col)) @@ -106,7 +106,7 @@ class Parser: def expect(self, s: str) -> bool: if self.accept(s): return True - raise ValueError('Expecting %s got %s.' 
% (s, self.current.tid), self.current.lineno, self.current.colno) + raise ValueError(f'Expecting {s} got {self.current.tid}.', self.current.lineno, self.current.colno) def statement(self) -> Statement: cur = self.current @@ -177,7 +177,7 @@ class Converter: elif i.tid == 'string': res.append("'%s'" % i.value) else: - raise ValueError('Unknown arg type {}'.format(i.tid)) + raise ValueError(f'Unknown arg type {i.tid}') if len(res) > 1: return start + ', '.join(res) + end if len(res) == 1: @@ -197,15 +197,15 @@ class Converter: varname = t.args[0].value.lower() mods = ["dependency('%s')" % i.value for i in t.args[1:]] if len(mods) == 1: - line = '%s = %s' % (varname, mods[0]) + line = '{} = {}'.format(varname, mods[0]) else: - line = '%s = [%s]' % (varname, ', '.join(["'%s'" % i for i in mods])) + line = '{} = [{}]'.format(varname, ', '.join(["'%s'" % i for i in mods])) elif t.name == 'find_package': - line = "%s_dep = dependency('%s')" % (t.args[0].value, t.args[0].value) + line = "{}_dep = dependency('{}')".format(t.args[0].value, t.args[0].value) elif t.name == 'find_library': - line = "%s = find_library('%s')" % (t.args[0].value.lower(), t.args[0].value) + line = "{} = find_library('{}')".format(t.args[0].value.lower(), t.args[0].value) elif t.name == 'add_executable': - line = '%s_exe = executable(%s)' % (t.args[0].value, self.convert_args(t.args, False)) + line = '{}_exe = executable({})'.format(t.args[0].value, self.convert_args(t.args, False)) elif t.name == 'add_library': if t.args[1].value == 'SHARED': libcmd = 'shared_library' @@ -216,7 +216,7 @@ class Converter: else: libcmd = 'library' args = t.args - line = '%s_lib = %s(%s)' % (t.args[0].value, libcmd, self.convert_args(args, False)) + line = '{}_lib = {}({})'.format(t.args[0].value, libcmd, self.convert_args(args, False)) elif t.name == 'add_test': line = 'test(%s)' % self.convert_args(t.args, False) elif t.name == 'option': @@ -240,7 +240,7 @@ class Converter: line = 'project(' + ', '.join(args) + ", default_options : ['default_library=static'])" elif t.name == 'set': varname = t.args[0].value.lower() - line = '%s = %s\n' % (varname, self.convert_args(t.args[1:])) + line = '{} = {}\n'.format(varname, self.convert_args(t.args[1:])) elif t.name == 'if': postincrement = 1 try: @@ -266,7 +266,7 @@ class Converter: preincrement = -1 line = 'endif' else: - line = '''# %s(%s)''' % (t.name, self.convert_args(t.args)) + line = '''# {}({})'''.format(t.name, self.convert_args(t.args)) self.indent_level += preincrement indent = self.indent_level * self.indent_unit outfile.write(indent) @@ -316,7 +316,7 @@ class Converter: else: typestr = ' type : \'string\',' defaultstr = ' value : %s,' % default - line = "option(%r,%s%s description : '%s')\n" % (optname, + line = "option({!r},{}{} description : '{}')\n".format(optname, typestr, defaultstr, description) diff --git a/tools/dircondenser.py b/tools/dircondenser.py index b49d80f..9d642d2 100755 --- a/tools/dircondenser.py +++ b/tools/dircondenser.py @@ -48,7 +48,7 @@ def get_entries() -> T.List[T.Tuple[int, str]]: try: numstr = int(number) except ValueError: - raise SystemExit('Dir name {} does not start with a number.'.format(e)) + raise SystemExit(f'Dir name {e} does not start with a number.') entries.append((numstr, rest)) entries.sort() return entries -- cgit v1.1
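
Reviewer note, not part of the patch itself: the rewrites above are mechanical, but a few details are worth double-checking when reading them. A conversion flag such as !r moves inside the braces unchanged; a raw format string only gains the f prefix (r'...' becomes fr'...', as in the binary-wrapper .bat hunk); and %-style templates that contain literal braces (the C snippets in the manygen.py hunk) must double those braces once they are rewritten to str.format. Below is a minimal, self-contained sketch verifying those equivalences; the variable names and values (path, exe, script, funcname) are illustrative only, not taken from the patch.

    #!/usr/bin/env python3
    # Minimal sketch, not part of the patch: checks that the rewritten
    # formatting calls are drop-in equivalents of the originals.
    import sys

    path = '/tmp/meson-log.txt'  # illustrative value
    # A conversion flag like !r simply moves inside the braces.
    assert '{!r} does not exist'.format(path) == f'{path!r} does not exist'

    exe, script = sys.executable, 'binary_wrapper0.py'  # illustrative values
    # A raw format string only gains the f prefix: r'...' becomes fr'...'.
    assert r'@{} {} %*'.format(exe, script) == fr'@{exe} {script} %*'

    funcname = 'foo'  # illustrative value
    # %-style templates holding literal braces (C code) need {{ and }} once
    # they are rewritten to str.format, as in the manygen.py hunk above.
    old = '#include"%s.h"\nint %s_in_src(void) {\n return 0;\n}\n' % (funcname, funcname)
    new = '#include"{}.h"\nint {}_in_src(void) {{\n return 0;\n}}\n'.format(funcname, funcname)
    assert old == new
    print('formatting rewrites verified')

Running the sketch with any Python 3.6+ interpreter should print the final line, which is consistent with the pure-formatting hunks above being behaviour-preserving.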