author     Josh Soref <2119212+jsoref@users.noreply.github.com>	2023-04-11 16:04:17 -0400
committer  Eli Schwartz <eschwartz93@gmail.com>	2023-04-11 19:21:05 -0400
commit     cf9fd56bc905a2022ad48c93d25b5a73b57c8802 (patch)
tree       a6858f0e790f801f49d8d4f161e9183deaf90e20 /mesonbuild
parent     e238b81ba0b89faa19b512d1e78de00dad1488ce (diff)
fix various spelling issues
Signed-off-by: Josh Soref <2119212+jsoref@users.noreply.github.com>
Diffstat (limited to 'mesonbuild')
41 files changed, 114 insertions, 114 deletions
diff --git a/mesonbuild/arglist.py b/mesonbuild/arglist.py index 475d954..cb74d31 100644 --- a/mesonbuild/arglist.py +++ b/mesonbuild/arglist.py @@ -198,13 +198,13 @@ class CompilerArgs(T.MutableSequence[str]): """Returns whether the argument can be safely de-duped. In addition to these, we handle library arguments specially. - With GNU ld, we surround library arguments with -Wl,--start/end-gr -> Dedupoup + With GNU ld, we surround library arguments with -Wl,--start/end-group to recursively search for symbols in the libraries. This is not needed with other linkers. """ # A standalone argument must never be deduplicated because it is - # defined by what comes _after_ it. Thus dedupping this: + # defined by what comes _after_ it. Thus deduping this: # -D FOO -D BAR # would yield either # -D FOO BAR diff --git a/mesonbuild/ast/interpreter.py b/mesonbuild/ast/interpreter.py index 7484e04..68e2b6e 100644 --- a/mesonbuild/ast/interpreter.py +++ b/mesonbuild/ast/interpreter.py @@ -352,7 +352,7 @@ class AstInterpreter(InterpreterBase): return None # Loop detected id_loop_detect += [node.ast_id] - # Try to evealuate the value of the node + # Try to evaluate the value of the node if isinstance(node, IdNode): result = quick_resolve(node) @@ -421,7 +421,7 @@ class AstInterpreter(InterpreterBase): else: args = [args_raw] - flattend_args = [] # type: T.List[TYPE_nvar] + flattened_args = [] # type: T.List[TYPE_nvar] # Resolve the contents of args for i in args: @@ -430,18 +430,18 @@ class AstInterpreter(InterpreterBase): if resolved is not None: if not isinstance(resolved, list): resolved = [resolved] - flattend_args += resolved + flattened_args += resolved elif isinstance(i, (str, bool, int, float)) or include_unknown_args: - flattend_args += [i] - return flattend_args + flattened_args += [i] + return flattened_args def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_nvar], include_unknown_args: bool = False) -> T.Dict[str, TYPE_nvar]: - flattend_kwargs = {} + flattened_kwargs = {} for key, val in kwargs.items(): if isinstance(val, BaseNode): resolved = self.resolve_node(val, include_unknown_args) if resolved is not None: - flattend_kwargs[key] = resolved + flattened_kwargs[key] = resolved elif isinstance(val, (str, bool, int, float)) or include_unknown_args: - flattend_kwargs[key] = val - return flattend_kwargs + flattened_kwargs[key] = val + return flattened_kwargs diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py index e811f11..3158aa2 100644 --- a/mesonbuild/ast/introspection.py +++ b/mesonbuild/ast/introspection.py @@ -263,9 +263,9 @@ class IntrospectionInterpreter(AstInterpreter): # Pop the first element if the function is a build target function if isinstance(curr, FunctionNode) and curr.func_name in BUILD_TARGET_FUNCTIONS: arg_nodes.pop(0) - elemetary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))] + elementary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))] inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))] - if elemetary_nodes: + if elementary_nodes: res += [curr] return res diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index d8b05b1..2522b62 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -545,7 +545,7 @@ class NinjaBackend(backends.Backend): # We want to match 'Note: including file: ' in the line # 'Note: including file: d:\MyDir\include\stdio.h', however # different locales have different 
messages with a different - # number of colons. Match up to the the drive name 'd:\'. + # number of colons. Match up to the drive name 'd:\'. # When used in cross compilation, the path separator is a # forward slash rather than a backslash so handle both; i.e. # the path is /MyDir/include/stdio.h. @@ -750,7 +750,7 @@ class NinjaBackend(backends.Backend): ''' Adds the source file introspection information for a language of a target - Internal introspection storage formart: + Internal introspection storage format: self.introspection_data = { '<target ID>': { <id tuple>: { @@ -830,7 +830,7 @@ class NinjaBackend(backends.Backend): self.generate_swift_target(target) return - # Pre-existing target C/C++ sources to be built; dict of full path to + # Preexisting target C/C++ sources to be built; dict of full path to # source relative to build root and the original File object. target_sources: T.MutableMapping[str, File] @@ -839,7 +839,7 @@ class NinjaBackend(backends.Backend): generated_sources: T.MutableMapping[str, File] # List of sources that have been transpiled from a DSL (like Vala) into - # a language that is haneled below, such as C or C++ + # a language that is handled below, such as C or C++ transpiled_sources: T.List[str] if 'vala' in target.compilers: @@ -879,7 +879,7 @@ class NinjaBackend(backends.Backend): mlog.log(mlog.red('FIXME'), msg) # Get a list of all generated headers that will be needed while building - # this target's sources (generated sources and pre-existing sources). + # this target's sources (generated sources and preexisting sources). # This will be set as dependencies of all the target's sources. At the # same time, also deal with generated sources that need to be compiled. generated_source_files = [] @@ -964,7 +964,7 @@ class NinjaBackend(backends.Backend): o, s = self.generate_single_compile(target, src, 'vala', [], header_deps) obj_list.append(o) - # Generate compile targets for all the pre-existing sources for this target + # Generate compile targets for all the preexisting sources for this target for src in target_sources.values(): if not self.environment.is_header(src): if self.environment.is_llvm_ir(src): @@ -1035,8 +1035,8 @@ class NinjaBackend(backends.Backend): rule_name = 'depscan' scan_sources = self.select_sources_to_scan(compiled_sources) - # Dump the sources as a json list. This avoids potential probllems where - # the number of sources passed to depscan exceedes the limit imposed by + # Dump the sources as a json list. This avoids potential problems where + # the number of sources passed to depscan exceeds the limit imposed by # the OS. with open(json_abs, 'w', encoding='utf-8') as f: json.dump(scan_sources, f) @@ -1294,7 +1294,7 @@ class NinjaBackend(backends.Backend): if build.rulename in self.ruledict: build.rule = self.ruledict[build.rulename] else: - mlog.warning(f"build statement for {build.outfilenames} references non-existent rule {build.rulename}") + mlog.warning(f"build statement for {build.outfilenames} references nonexistent rule {build.rulename}") def write_rules(self, outfile): for b in self.build_elements: @@ -1505,7 +1505,7 @@ class NinjaBackend(backends.Backend): T.Tuple[T.MutableMapping[str, File], T.MutableMapping]]: """ Splits the target's sources into .vala, .gs, .vapi, and other sources. - Handles both pre-existing and generated sources. + Handles both preexisting and generated sources. 
Returns a tuple (vala, vapi, others) each of which is a dictionary with the keys being the path to the file (relative to the build directory) @@ -1515,7 +1515,7 @@ class NinjaBackend(backends.Backend): vapi: T.MutableMapping[str, File] = OrderedDict() others: T.MutableMapping[str, File] = OrderedDict() othersgen: T.MutableMapping[str, File] = OrderedDict() - # Split pre-existing sources + # Split preexisting sources for s in t.get_sources(): # BuildTarget sources are always mesonlib.File files which are # either in the source root, or generated with configure_file and @@ -1928,7 +1928,7 @@ class NinjaBackend(backends.Backend): # before that it would treat linking two static libraries as # whole-archive linking. However, to make this work we have to disable # bundling, which can't be done until 1.63.0… So for 1.61–1.62 we just - # have to hope that the default cases of +whole-archive are sufficent. + # have to hope that the default cases of +whole-archive are sufficient. # See: https://github.com/rust-lang/rust/issues/99429 if mesonlib.version_compare(rustc.version, '>= 1.63.0'): whole_archive = ':+whole-archive,-bundle' @@ -2624,7 +2624,7 @@ https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) # has pdb file called foo.pdb. So will a static library # foo.lib, which clobbers both foo.pdb _and_ the dll file's # export library called foo.lib (by default, currently we name - # them libfoo.a to avoidt this issue). You can give the files + # them libfoo.a to avoid this issue). You can give the files # unique names such as foo_exe.pdb but VC also generates a # bunch of other files which take their names from the target # basename (i.e. "foo") and stomp on each other. diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 37d0365..86c50ec 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -1436,7 +1436,7 @@ class Vs2010Backend(backends.Backend): else: inc_dirs = file_inc_dirs self.add_include_dirs(lang, inc_cl, inc_dirs) - # XXX: Do we need to set the object file name name here too? + # XXX: Do we need to set the object file name here too? previous_objects = [] if self.has_objects(objects, additional_objects, gen_objs): diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 5408b21..2f325ae 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -740,7 +740,7 @@ class BuildTarget(Target): self.rpath_dirs_to_remove: T.Set[bytes] = set() self.process_sourcelist(sources) # Objects can be: - # 1. Pre-existing objects provided by the user with the `objects:` kwarg + # 1. Preexisting objects provided by the user with the `objects:` kwarg # 2. Compiled objects created by and extracted from another target self.process_objectlist(objects) self.process_kwargs(kwargs) @@ -811,8 +811,8 @@ class BuildTarget(Target): """Split sources into generated and static sources. Sources can be: - 1. Pre-existing source files in the source tree (static) - 2. Pre-existing sources generated by configure_file in the build tree. + 1. Preexisting source files in the source tree (static) + 2. Preexisting sources generated by configure_file in the build tree. (static as they are only regenerated if meson itself is regenerated) 3. 
Sources files generated by another target or a Generator (generated) """ @@ -884,7 +884,7 @@ class BuildTarget(Target): missing_languages: T.List[str] = [] if not any([self.sources, self.generated, self.objects, self.structured_sources]): return missing_languages - # Pre-existing sources + # Preexisting sources sources: T.List['FileOrString'] = list(self.sources) generated = self.generated.copy() @@ -1654,7 +1654,7 @@ You probably should put it in link_with instead.''') '\n ' f'If shared_module() was used for {link_target.name} because it has references to undefined symbols,' '\n ' - 'use shared_libary() with `override_options: [\'b_lundef=false\']` instead.') + 'use shared_library() with `override_options: [\'b_lundef=false\']` instead.') link_target.force_soname = True class Generator(HoldableObject): diff --git a/mesonbuild/cmake/common.py b/mesonbuild/cmake/common.py index accb7c9..32a8c68 100644 --- a/mesonbuild/cmake/common.py +++ b/mesonbuild/cmake/common.py @@ -146,7 +146,7 @@ def cmake_defines_to_args(raw: T.Any, permissive: bool = False) -> T.List[str]: return res -# TODO: this functuin will become obsolete once the `cmake_args` kwarg is dropped +# TODO: this function will become obsolete once the `cmake_args` kwarg is dropped def check_cmake_args(args: T.List[str]) -> T.List[str]: res = [] # type: T.List[str] dis = ['-D' + x for x in blacklist_cmake_defs] diff --git a/mesonbuild/cmake/traceparser.py b/mesonbuild/cmake/traceparser.py index 5fcba80..7f31f13 100644 --- a/mesonbuild/cmake/traceparser.py +++ b/mesonbuild/cmake/traceparser.py @@ -288,7 +288,7 @@ class CMakeTraceParser: raise CMakeException(f'CMake: {function}() {error}\n{tline}') def _cmake_set(self, tline: CMakeTraceLine) -> None: - """Handler for the CMake set() function in all variaties. + """Handler for the CMake set() function in all varieties. comes in three flavors: set(<var> <value> [PARENT_SCOPE]) @@ -509,7 +509,7 @@ class CMakeTraceParser: targets += curr.split(';') if not args: - return self._gen_exception('set_property', 'faild to parse argument list', tline) + return self._gen_exception('set_property', 'failed to parse argument list', tline) if len(args) == 1: # Tries to set property to nothing so nothing has to be done @@ -575,7 +575,7 @@ class CMakeTraceParser: targets.append(curr) - # Now we need to try to reconsitute the original quoted format of the + # Now we need to try to reconstitute the original quoted format of the # arguments, as a property value could have spaces in it. Unlike # set_property() this is not context free. There are two approaches I # can think of, both have drawbacks: @@ -586,7 +586,7 @@ class CMakeTraceParser: # # Neither of these is awesome for obvious reasons. I'm going to try # option 1 first and fall back to 2, as 1 requires less code and less - # synchroniztion for cmake changes. + # synchronization for cmake changes. # # With the JSON output format, introduced in CMake 3.17, spaces are # handled properly and we don't have to do either options diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py index 0feb371..5f7bfa4 100644 --- a/mesonbuild/compilers/compilers.py +++ b/mesonbuild/compilers/compilers.py @@ -1228,7 +1228,7 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta): mode: CompileCheckMode = CompileCheckMode.COMPILE) -> CompilerArgs: """Arguments to pass the build_wrapper helper. - This generally needs to be set on a per-language baises. It provides + This generally needs to be set on a per-language basis. 
It provides a hook for languages to handle dependencies and extra args. The base implementation handles the most common cases, namely adding the check_arguments, unwrapping dependencies, and appending extra args. @@ -1266,7 +1266,7 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta): mode: str = 'compile', want_output: bool = False, disable_cache: bool = False, temp_dir: str = None) -> T.Iterator[T.Optional[CompileResult]]: - """Helper for getting a cacched value when possible. + """Helper for getting a cached value when possible. This method isn't meant to be called externally, it's mean to be wrapped by other methods like compiles() and links(). @@ -1361,7 +1361,7 @@ def get_global_options(lang: str, # If the compiler acts as a linker driver, and we're using the # environment variable flags for both the compiler and linker # arguments, then put the compiler flags in the linker flags as well. - # This is how autotools works, and the env vars freature is for + # This is how autotools works, and the env vars feature is for # autotools compatibility. largs.extend_value(comp_options) diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py index 6eca155..78d7fdd 100644 --- a/mesonbuild/compilers/detect.py +++ b/mesonbuild/compilers/detect.py @@ -382,7 +382,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin if 'Arm C/C++/Fortran Compiler' in out: arm_ver_match = re.search(r'version (\d+)\.(\d+)\.?(\d+)? \(build number (\d+)\)', out) - assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaning that this could be None + assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None version = '.'.join([x for x in arm_ver_match.groups() if x is not None]) if lang == 'c': cls = c.ArmLtdClangCCompiler @@ -667,7 +667,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C if 'Arm C/C++/Fortran Compiler' in out: cls = fortran.ArmLtdFlangFortranCompiler arm_ver_match = re.search(r'version (\d+)\.(\d+)\.?(\d+)? \(build number (\d+)\)', out) - assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaning that this could be None + assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None version = '.'.join([x for x in arm_ver_match.groups() if x is not None]) linker = guess_nix_linker(env, compiler, cls, version, for_machine) return cls( @@ -1073,7 +1073,7 @@ def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compile if 'LLVM D compiler' in out: cls = d.LLVMDCompiler # LDC seems to require a file - # We cannot use NamedTemproraryFile on windows, its documented + # We cannot use NamedTemporaryFile on windows, its documented # to not work for our uses. So, just use mkstemp and only have # one path for simplicity. o, f = tempfile.mkstemp('.d') @@ -1111,7 +1111,7 @@ def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compile elif 'The D Language Foundation' in out or 'Digital Mars' in out: cls = d.DmdDCompiler # DMD seems to require a file - # We cannot use NamedTemproraryFile on windows, its documented + # We cannot use NamedTemporaryFile on windows, its documented # to not work for our uses. So, just use mkstemp and only have # one path for simplicity. 
o, f = tempfile.mkstemp('.d') diff --git a/mesonbuild/compilers/mixins/clike.py b/mesonbuild/compilers/mixins/clike.py index 8c17b5b..f8c7ebc 100644 --- a/mesonbuild/compilers/mixins/clike.py +++ b/mesonbuild/compilers/mixins/clike.py @@ -16,7 +16,7 @@ from __future__ import annotations """Mixin classes to be shared between C and C++ compilers. -Without this we'll end up with awful diamond inherintance problems. The goal +Without this we'll end up with awful diamond inheritance problems. The goal of this is to have mixin's, which are classes that are designed *not* to be standalone, they only work through inheritance. """ @@ -432,7 +432,7 @@ class CLikeCompiler(Compiler): extra_args: T.Union[None, arglist.CompilerArgs, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], dependencies: T.Optional[T.List['Dependency']], mode: CompileCheckMode = CompileCheckMode.COMPILE) -> arglist.CompilerArgs: - # TODO: the caller should handle the listfing of these arguments + # TODO: the caller should handle the listing of these arguments if extra_args is None: extra_args = [] else: diff --git a/mesonbuild/compilers/mixins/visualstudio.py b/mesonbuild/compilers/mixins/visualstudio.py index 12522e1..76d9829 100644 --- a/mesonbuild/compilers/mixins/visualstudio.py +++ b/mesonbuild/compilers/mixins/visualstudio.py @@ -423,7 +423,7 @@ class MSVCCompiler(VisualStudioLikeCompiler): def __init__(self, target: str): super().__init__(target) - # Visual Studio 2013 and erlier don't support the /utf-8 argument. + # Visual Studio 2013 and earlier don't support the /utf-8 argument. # We want to remove it. We also want to make an explicit copy so we # don't mutate class constant state if mesonlib.version_compare(self.version, '<19.00') and '/utf-8' in self.always_args: diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 8ddc910..c779422 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -472,7 +472,7 @@ class CoreData: # want to overwrite options for such subprojects. self.initialized_subprojects: T.Set[str] = set() - # For host == build configuraitons these caches should be the same. + # For host == build configurations these caches should be the same. self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default( self.is_cross_build(), DependencyCache(self.options, MachineChoice.BUILD), @@ -586,7 +586,7 @@ class CoreData: except TypeError: return value if option.name.endswith('dir') and value.is_absolute() and \ - option not in BULITIN_DIR_NOPREFIX_OPTIONS: + option not in BUILTIN_DIR_NOPREFIX_OPTIONS: try: # Try to relativize the path. value = value.relative_to(prefix) @@ -707,7 +707,7 @@ class CoreData: elif key.name in {'wrap_mode', 'force_fallback_for'}: # We could have the system dependency cached for a dependency that # is now forced to use subproject fallback. We probably could have - # more fine grained cache invalidation, but better be safe. + # more fine-grained cache invalidation, but better be safe. 
self.clear_deps_cache() dirty = True @@ -838,7 +838,7 @@ class CoreData: if pfk in options: prefix = self.sanitize_prefix(options[pfk]) dirty |= self.options[OptionKey('prefix')].set_value(prefix) - for key in BULITIN_DIR_NOPREFIX_OPTIONS: + for key in BUILTIN_DIR_NOPREFIX_OPTIONS: if key not in options: dirty |= self.options[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix)) @@ -862,7 +862,7 @@ class CoreData: def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None: # Main project can set default options on subprojects, but subprojects - # can only set default options on themself. + # can only set default options on themselves. # Preserve order: if env.options has 'buildtype' it must come after # 'optimization' if it is in default_options. options: T.MutableMapping[OptionKey, T.Any] = OrderedDict() @@ -1194,7 +1194,7 @@ class BuiltinOption(T.Generic[_T, _U]): if self.opt_type in [UserComboOption, UserIntegerOption]: return self.default try: - return BULITIN_DIR_NOPREFIX_OPTIONS[name][prefix] + return BUILTIN_DIR_NOPREFIX_OPTIONS[name][prefix] except KeyError: pass return self.default @@ -1283,7 +1283,7 @@ BUILTIN_OPTIONS_PER_MACHINE: 'MutableKeyedOptionDictType' = OrderedDict([ # Special prefix-dependent defaults for installation directories that reside in # a path outside of the prefix in FHS and common usage. -BULITIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = { +BUILTIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = { OptionKey('sysconfdir'): {'/usr': '/etc'}, OptionKey('localstatedir'): {'/usr': '/var', '/usr/local': '/var/local'}, OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'}, diff --git a/mesonbuild/dependencies/boost.py b/mesonbuild/dependencies/boost.py index 4ebd88d..44f1baf 100644 --- a/mesonbuild/dependencies/boost.py +++ b/mesonbuild/dependencies/boost.py @@ -80,7 +80,7 @@ if T.TYPE_CHECKING: # 2. Find all boost libraries # 2.1 Add all libraries in lib* # 2.2 Filter out non boost libraries -# 2.3 Filter the renaining libraries based on the meson requirements (static/shared, etc.) +# 2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.) # 2.4 Ensure that all libraries have the same boost tag (and are thus compatible) # 3. Select the libraries matching the requested modules @@ -243,7 +243,7 @@ class BoostLibraryFile(): return any(self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs) def fix_python_name(self, tags: T.List[str]) -> T.List[str]: - # Handle the boost_python naming madeness. + # Handle the boost_python naming madness. # See https://github.com/mesonbuild/meson/issues/4788 for some distro # specific naming variations. other_tags = [] # type: T.List[str] diff --git a/mesonbuild/dependencies/cmake.py b/mesonbuild/dependencies/cmake.py index abd31a1..b5ddd28 100644 --- a/mesonbuild/dependencies/cmake.py +++ b/mesonbuild/dependencies/cmake.py @@ -489,7 +489,7 @@ class CMakeDependency(ExternalDependency): libs_raw = [x for x in self.traceparser.get_cmake_var('PACKAGE_LIBRARIES') if x] # CMake has a "fun" API, where certain keywords describing - # configurations can be in the *_LIBRARIES vraiables. See: + # configurations can be in the *_LIBRARIES variables. 
See: # - https://github.com/mesonbuild/meson/issues/9197 # - https://gitlab.freedesktop.org/libnice/libnice/-/issues/140 # - https://cmake.org/cmake/help/latest/command/target_link_libraries.html#overview (the last point in the section) @@ -505,7 +505,7 @@ class CMakeDependency(ExternalDependency): libs += [i] # According to the CMake docs, a keyword only works for the # directly the following item and all items without a keyword - # are implizitly `general` + # are implicitly `general` cfg_matches = True # Try to use old style variables if no module is specified diff --git a/mesonbuild/dependencies/data/CMakeListsLLVM.txt b/mesonbuild/dependencies/data/CMakeListsLLVM.txt index f12dddc..4a93822 100644 --- a/mesonbuild/dependencies/data/CMakeListsLLVM.txt +++ b/mesonbuild/dependencies/data/CMakeListsLLVM.txt @@ -42,7 +42,7 @@ function(meson_llvm_cmake_dynamic_available mod out) return() endif() - # Complex heurisic to filter all pseudo-components and skip invalid names + # Complex heuristic to filter all pseudo-components and skip invalid names # LLVM_DYLIB_COMPONENTS will be 'all', because in other case we returned # in previous check. 'all' is also handled there. set(llvm_pseudo_components "native" "backend" "engine" "all-targets") diff --git a/mesonbuild/dependencies/misc.py b/mesonbuild/dependencies/misc.py index 7b075c4..0c747c4 100644 --- a/mesonbuild/dependencies/misc.py +++ b/mesonbuild/dependencies/misc.py @@ -332,7 +332,7 @@ class CursesSystemDependency(SystemDependency): ('curses', ['curses.h']), ] - # Not sure how else to elegently break out of both loops + # Not sure how else to elegantly break out of both loops for lib, headers in candidates: l = self.clib_compiler.find_library(lib, env, []) if l: diff --git a/mesonbuild/dependencies/pkgconfig.py b/mesonbuild/dependencies/pkgconfig.py index 2ef22e9..8dfb128 100644 --- a/mesonbuild/dependencies/pkgconfig.py +++ b/mesonbuild/dependencies/pkgconfig.py @@ -415,7 +415,7 @@ class PkgConfigDependency(ExternalDependency): else: variable = out.strip() - # pkg-config doesn't distinguish between empty and non-existent variables + # pkg-config doesn't distinguish between empty and nonexistent variables # use the variable list to check for variable existence if not variable: ret, out, _ = self._call_pkgbin(['--print-variables', self.name]) diff --git a/mesonbuild/dependencies/qt.py b/mesonbuild/dependencies/qt.py index 6dd712d..6b8c694 100644 --- a/mesonbuild/dependencies/qt.py +++ b/mesonbuild/dependencies/qt.py @@ -260,7 +260,7 @@ class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta): self.tools = [f'qmake{self.qtver}', f'qmake-{self.name}', 'qmake'] # Add additional constraints that the Qt version is met, but preserve - # any version requrements the user has set as well. For example, if Qt5 + # any version requirements the user has set as well. For example, if Qt5 # is requested, add "">= 5, < 6", but if the user has ">= 5.6", don't # lose that. 
kwargs = kwargs.copy() @@ -325,7 +325,7 @@ class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta): self.compile_args.append('-I' + directory) libfiles = self.clib_compiler.find_library( self.qtpkgname + module + modules_lib_suffix, self.env, - mesonlib.listify(libdir)) # TODO: shouldn't be necissary + mesonlib.listify(libdir)) # TODO: shouldn't be necessary if libfiles: libfile = libfiles[0] else: diff --git a/mesonbuild/envconfig.py b/mesonbuild/envconfig.py index 90117c1..50c974b 100644 --- a/mesonbuild/envconfig.py +++ b/mesonbuild/envconfig.py @@ -28,7 +28,7 @@ from pathlib import Path # and cross file currently), and also assists with the reading environment # variables. # -# At this time there isn't an ironclad difference between this an other sources +# At this time there isn't an ironclad difference between this and other sources # of state like `coredata`. But one rough guide is much what is in `coredata` is # the *output* of the configuration process: the final decisions after tests. # This, on the other hand has *inputs*. The config files are parsed, but @@ -167,7 +167,7 @@ class Properties: return language + '_stdlib' in self.properties # Some of get_stdlib, get_root, get_sys_root are wider than is actually - # true, but without heterogenious dict annotations it's not practical to + # true, but without heterogeneous dict annotations it's not practical to # narrow them def get_stdlib(self, language: str) -> T.Union[str, T.List[str]]: stdlib = self.properties[language + '_stdlib'] diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 02a3337..ccd31eb 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -674,7 +674,7 @@ class Environment: # time) until we're instantiating that `Compiler` # object. This is required so that passing # `-Dc_args=` on the command line and `$CFLAGS` - # have subtely different behavior. `$CFLAGS` will be + # have subtly different behavior. `$CFLAGS` will be # added to the linker command line if the compiler # acts as a linker driver, `-Dc_args` will not. 
# diff --git a/mesonbuild/interpreter/compiler.py b/mesonbuild/interpreter/compiler.py index d56591b..c9ea880 100644 --- a/mesonbuild/interpreter/compiler.py +++ b/mesonbuild/interpreter/compiler.py @@ -1,4 +1,4 @@ -# SPDX-Licnese-Identifier: Apache-2.0 +# SPDX-License-Identifier: Apache-2.0 # Copyright 2012-2021 The Meson development team # Copyright © 2021 Intel Corporation from __future__ import annotations @@ -60,7 +60,7 @@ if T.TYPE_CHECKING: args: T.List[str] dependencies: T.List[dependencies.Dependency] - class CompupteIntKW(CommonKW): + class ComputeIntKW(CommonKW): guess: T.Optional[int] high: T.Optional[int] @@ -405,7 +405,7 @@ class CompilerHolder(ObjectHolder['Compiler']): KwargInfo('guess', (int, NoneType)), *_COMMON_KWS, ) - def compute_int_method(self, args: T.Tuple[str], kwargs: 'CompupteIntKW') -> int: + def compute_int_method(self, args: T.Tuple[str], kwargs: 'ComputeIntKW') -> int: expression = args[0] extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args']) deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross) diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py index bbc34a4..96d4af0 100644 --- a/mesonbuild/interpreter/interpreter.py +++ b/mesonbuild/interpreter/interpreter.py @@ -235,7 +235,7 @@ class InterpreterRuleRelaxation(Enum): generate a Meson AST via introspection, etc. ''' - ALLOW_BUILD_DIR_FILE_REFFERENCES = 1 + ALLOW_BUILD_DIR_FILE_REFERENCES = 1 permitted_dependency_kwargs = { 'allow_fallback', @@ -1001,7 +1001,7 @@ class Interpreter(InterpreterBase, HoldableObject): # Duplicates are possible when subproject uses files from project root if build_def_files: self.build_def_files.update(build_def_files) - # We always need the subi.build_def_files, to propgate sub-sub-projects + # We always need the subi.build_def_files, to propagate sub-sub-projects self.build_def_files.update(subi.build_def_files) self.build.merge(subi.build) self.build.subprojects[subp_name] = subi.project_version @@ -1048,7 +1048,7 @@ class Interpreter(InterpreterBase, HoldableObject): [str(f) for f in cm_int.bs_files], is_translated=True, relaxations={ - InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFFERENCES, + InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES, } ) result.cm_interpreter = cm_int @@ -1365,7 +1365,7 @@ class Interpreter(InterpreterBase, HoldableObject): section, values, kwargs['bool_yn'], kwargs['list_sep'], self.subproject) def _print_summary(self) -> None: - # Add automatic 'Supbrojects' section in main project. + # Add automatic 'Subprojects' section in main project. all_subprojects = collections.OrderedDict() for name, subp in sorted(self.subprojects.items()): value = subp.found() @@ -1997,7 +1997,7 @@ class Interpreter(InterpreterBase, HoldableObject): build_by_default = kwargs['build_always'] build_always_stale = kwargs['build_by_default'] - # These are are nullaable so that we can know whether they're explicitly + # These are nullable so that we can know whether they're explicitly # set or not. 
If they haven't been overwritten, set them to their true # default if build_by_default is None: @@ -2019,9 +2019,9 @@ class Interpreter(InterpreterBase, HoldableObject): command[0] = self.find_program_impl([command[0]]) if len(inputs) > 1 and kwargs['feed']: - raise InvalidArguments('custom_target: "feed" keyword argument can only be used used with a single input') + raise InvalidArguments('custom_target: "feed" keyword argument can only be used with a single input') if len(kwargs['output']) > 1 and kwargs['capture']: - raise InvalidArguments('custom_target: "capture" keyword argument can only be used used with a single output') + raise InvalidArguments('custom_target: "capture" keyword argument can only be used with a single output') if kwargs['capture'] and kwargs['console']: raise InvalidArguments('custom_target: "capture" and "console" keyword arguments are mutually exclusive') for c in command: @@ -2370,7 +2370,7 @@ class Interpreter(InterpreterBase, HoldableObject): absname = os.path.join(self.environment.get_source_dir(), buildfilename) if not os.path.isfile(absname): self.subdir = prev_subdir - raise InterpreterException(f"Non-existent build file '{buildfilename!s}'") + raise InterpreterException(f"Nonexistent build file '{buildfilename!s}'") with open(absname, encoding='utf-8') as f: code = f.read() assert isinstance(code, str) @@ -3034,7 +3034,7 @@ class Interpreter(InterpreterBase, HoldableObject): inputtype = 'directory' else: inputtype = 'file' - if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFFERENCES in self.relaxations and builddir in norm.parents: + if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES in self.relaxations and builddir in norm.parents: return if srcdir not in norm.parents: # Grabbing files outside the source tree is ok. diff --git a/mesonbuild/interpreter/kwargs.py b/mesonbuild/interpreter/kwargs.py index fb02374..3c1cb00 100644 --- a/mesonbuild/interpreter/kwargs.py +++ b/mesonbuild/interpreter/kwargs.py @@ -57,7 +57,7 @@ class FuncTest(FuncBenchmark): """Keyword Arguments for `test` - `test` only adds the `is_prallel` argument over benchmark, so inherintance + `test` only adds the `is_parallel` argument over benchmark, so inheritance is helpful here. 
""" diff --git a/mesonbuild/interpreter/mesonmain.py b/mesonbuild/interpreter/mesonmain.py index 3e75629..2ea3163 100644 --- a/mesonbuild/interpreter/mesonmain.py +++ b/mesonbuild/interpreter/mesonmain.py @@ -377,7 +377,7 @@ class MesonMain(MesonInterpreterObject): def _override_dependency_impl(self, name: str, dep: dependencies.Dependency, kwargs: 'FuncOverrideDependency', static: T.Optional[bool], permissive: bool = False) -> None: # We need the cast here as get_dep_identifier works on such a dict, - # which FuncOverrideDependency is, but mypy can't fgure that out + # which FuncOverrideDependency is, but mypy can't figure that out nkwargs = T.cast('T.Dict[str, T.Any]', kwargs.copy()) if static is None: del nkwargs['static'] diff --git a/mesonbuild/interpreter/type_checking.py b/mesonbuild/interpreter/type_checking.py index e1ee82a..9b2e46c 100644 --- a/mesonbuild/interpreter/type_checking.py +++ b/mesonbuild/interpreter/type_checking.py @@ -87,9 +87,9 @@ def _install_mode_validator(mode: T.List[T.Union[str, bool, int]]) -> T.Optional return f'permission character 9 must be "-", "t", "T", or "x", not {perms[8]}' if len(mode) >= 2 and not isinstance(mode[1], (int, str, bool)): - return 'second componenent can only be a string, number, or False' + return 'second component can only be a string, number, or False' if len(mode) >= 3 and not isinstance(mode[2], (int, str, bool)): - return 'third componenent can only be a string, number, or False' + return 'third component can only be a string, number, or False' return None @@ -211,7 +211,7 @@ def _env_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Di return None def _options_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Dict[str, 'TYPE_var'], str, None]) -> T.Optional[str]: - # Reusing the env validator is a littl overkill, but nicer than duplicating the code + # Reusing the env validator is a little overkill, but nicer than duplicating the code return _env_validator(value, allow_dict_list=False) def split_equal_string(input: str) -> T.Tuple[str, str]: diff --git a/mesonbuild/interpreterbase/decorators.py b/mesonbuild/interpreterbase/decorators.py index 9defb99..3ffa67a 100644 --- a/mesonbuild/interpreterbase/decorators.py +++ b/mesonbuild/interpreterbase/decorators.py @@ -470,7 +470,7 @@ def typed_kwargs(name: str, *types: KwargInfo, allow_unknown: bool = False) -> T information. For non-required values it sets the value to a default, which means the value will always be provided. - If type tyhpe is a :class:ContainerTypeInfo, then the default value will be + If type is a :class:ContainerTypeInfo, then the default value will be passed as an argument to the container initializer, making a shallow copy :param name: the name of the function, including the object it's attached to @@ -583,7 +583,7 @@ def typed_kwargs(name: str, *types: KwargInfo, allow_unknown: bool = False) -> T else: # set the value to the default, this ensuring all kwargs are present # This both simplifies the typing checking and the usage - assert check_value_type(types_tuple, info.default), f'In funcion {name} default value of {info.name} is not a valid type, got {type(info.default)} expected {types_description(types_tuple)}' + assert check_value_type(types_tuple, info.default), f'In function {name} default value of {info.name} is not a valid type, got {type(info.default)} expected {types_description(types_tuple)}' # Create a shallow copy of the container. 
This allows mutable # types to be used safely as default values kwargs[info.name] = copy.copy(info.default) diff --git a/mesonbuild/interpreterbase/interpreterbase.py b/mesonbuild/interpreterbase/interpreterbase.py index b1d0779..5f854d0 100644 --- a/mesonbuild/interpreterbase/interpreterbase.py +++ b/mesonbuild/interpreterbase/interpreterbase.py @@ -525,14 +525,14 @@ class InterpreterBase: return None def method_call(self, node: mparser.MethodNode) -> T.Optional[InterpreterObject]: - invokable = node.source_object + invocable = node.source_object obj: T.Optional[InterpreterObject] - if isinstance(invokable, mparser.IdNode): - object_display_name = f'variable "{invokable.value}"' - obj = self.get_variable(invokable.value) + if isinstance(invocable, mparser.IdNode): + object_display_name = f'variable "{invocable.value}"' + obj = self.get_variable(invocable.value) else: - object_display_name = invokable.__class__.__name__ - obj = self.evaluate_statement(invokable) + object_display_name = invocable.__class__.__name__ + obj = self.evaluate_statement(invocable) method_name = node.name (h_args, h_kwargs) = self.reduce_arguments(node.args) (args, kwargs) = self._unholder_args(h_args, h_kwargs) diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py index 97e770c..59f1d78 100644 --- a/mesonbuild/linkers/detect.py +++ b/mesonbuild/linkers/detect.py @@ -96,7 +96,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty if value is not None and invoked_directly: compiler = value - # We've already hanedled the non-direct case above + # We've already handled the non-direct case above p, o, e = Popen_safe(compiler + check_args) if 'LLD' in o.split('\n', maxsplit=1)[0]: diff --git a/mesonbuild/linkers/linkers.py b/mesonbuild/linkers/linkers.py index 5799caf..4c00032 100644 --- a/mesonbuild/linkers/linkers.py +++ b/mesonbuild/linkers/linkers.py @@ -928,7 +928,7 @@ class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, Dyna class CcrxDynamicLinker(DynamicLinker): - """Linker for Renesis CCrx compiler.""" + """Linker for Renesas CCrx compiler.""" id = 'rlink' @@ -1216,7 +1216,7 @@ NvidiaHPC_StaticLinker = PGIStaticLinker class VisualStudioLikeLinkerMixin: - """Mixin class for for dynamic linkers that act like Microsoft's link.exe.""" + """Mixin class for dynamic linkers that act like Microsoft's link.exe.""" if T.TYPE_CHECKING: for_machine = MachineChoice.HOST diff --git a/mesonbuild/minstall.py b/mesonbuild/minstall.py index 27e9929..91e87fa 100644 --- a/mesonbuild/minstall.py +++ b/mesonbuild/minstall.py @@ -207,10 +207,10 @@ def set_mode(path: str, mode: T.Optional['FileMode'], default_umask: T.Union[str except PermissionError as e: print(f'{path!r}: Unable to set owner {mode.owner!r} and group {mode.group!r}: {e.strerror}, ignoring...') except LookupError: - print(f'{path!r}: Non-existent owner {mode.owner!r} or group {mode.group!r}: ignoring...') + print(f'{path!r}: Nonexistent owner {mode.owner!r} or group {mode.group!r}: ignoring...') except OSError as e: if e.errno == errno.EINVAL: - print(f'{path!r}: Non-existent numeric owner {mode.owner!r} or group {mode.group!r}: ignoring...') + print(f'{path!r}: Nonexistent numeric owner {mode.owner!r} or group {mode.group!r}: ignoring...') else: raise # Must set permissions *after* setting owner/group otherwise the diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 019b1ea..b6577dc 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -2149,7 
+2149,7 @@ class GnomeModule(ExtensionModule): ) # So to try our best to get this to just work we need: - # - link with with the correct library + # - link with the correct library # - include the vapi and dependent vapi files in sources # - add relevant directories to include dirs incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)] diff --git a/mesonbuild/modules/keyval.py b/mesonbuild/modules/keyval.py index 1ba2f1c..48afe81 100644 --- a/mesonbuild/modules/keyval.py +++ b/mesonbuild/modules/keyval.py @@ -55,7 +55,7 @@ class KeyvalModule(ExtensionModule): return result @noKwargs - @typed_pos_args('keyval.laod', (str, mesonlib.File)) + @typed_pos_args('keyval.load', (str, mesonlib.File)) def load(self, state: 'ModuleState', args: T.Tuple['mesonlib.FileOrString'], kwargs: T.Dict[str, T.Any]) -> T.Dict[str, str]: s = args[0] is_built = False diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index 494cfbf..ed1bef1 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -164,7 +164,7 @@ class WindowsModule(ExtensionModule): elif isinstance(src, build.CustomTargetIndex): FeatureNew.single_use('windows.compile_resource CustomTargetIndex in positional arguments', '0.61.0', state.subproject, location=state.current_node) - # This dance avoids a case where two indexs of the same + # This dance avoids a case where two indexes of the same # target are given as separate arguments. yield (f'{src.get_id()}_{src.target.get_outputs().index(src.output)}', f'windows_compile_resources_{src.get_filename()}', src) diff --git a/mesonbuild/mtest.py b/mesonbuild/mtest.py index 53550b5..f7cf770 100644 --- a/mesonbuild/mtest.py +++ b/mesonbuild/mtest.py @@ -185,7 +185,7 @@ def returncode_to_status(retcode: int) -> str: # functions here because the status returned by subprocess is munged. It # returns a negative value if the process was killed by a signal rather than # the raw status returned by `wait()`. Also, If a shell sits between Meson - # the the actual unit test that shell is likely to convert a termination due + # the actual unit test that shell is likely to convert a termination due # to a signal into an exit status of 128 plus the signal number. if retcode < 0: signum = -retcode diff --git a/mesonbuild/programs.py b/mesonbuild/programs.py index 64f7c29..4b66698 100644 --- a/mesonbuild/programs.py +++ b/mesonbuild/programs.py @@ -354,7 +354,7 @@ class OverrideProgram(ExternalProgram): def find_external_program(env: 'Environment', for_machine: MachineChoice, name: str, display_name: str, default_names: T.List[str], allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]: - """Find an external program, chcking the cross file plus any default options.""" + """Find an external program, checking the cross file plus any default options.""" # Lookup in cross or machine file. 
potential_cmd = env.lookup_binary_entry(for_machine, name) if potential_cmd is not None: diff --git a/mesonbuild/rewriter.py b/mesonbuild/rewriter.py index 4c32ff1..a9b2e88 100644 --- a/mesonbuild/rewriter.py +++ b/mesonbuild/rewriter.py @@ -569,27 +569,27 @@ class Rewriter: if key not in arg_node.kwargs: arg_node.kwargs[key] = None - modifyer = kwargs_def[key](arg_node.kwargs[key]) - if not modifyer.can_modify(): + modifier = kwargs_def[key](arg_node.kwargs[key]) + if not modifier.can_modify(): mlog.log(' -- Skipping', mlog.bold(key), 'because it is to complex to modify') # Apply the operation val_str = str(val) if cmd['operation'] == 'set': mlog.log(' -- Setting', mlog.bold(key), 'to', mlog.yellow(val_str)) - modifyer.set_value(val) + modifier.set_value(val) elif cmd['operation'] == 'add': mlog.log(' -- Adding', mlog.yellow(val_str), 'to', mlog.bold(key)) - modifyer.add_value(val) + modifier.add_value(val) elif cmd['operation'] == 'remove': mlog.log(' -- Removing', mlog.yellow(val_str), 'from', mlog.bold(key)) - modifyer.remove_value(val) + modifier.remove_value(val) elif cmd['operation'] == 'remove_regex': mlog.log(' -- Removing all values matching', mlog.yellow(val_str), 'from', mlog.bold(key)) - modifyer.remove_regex(val) + modifier.remove_regex(val) # Write back the result - arg_node.kwargs[key] = modifyer.get_node() + arg_node.kwargs[key] = modifier.get_node() num_changed += 1 # Convert the keys back to IdNode's diff --git a/mesonbuild/scripts/cmake_run_ctgt.py b/mesonbuild/scripts/cmake_run_ctgt.py index a788ba5..755530a 100755 --- a/mesonbuild/scripts/cmake_run_ctgt.py +++ b/mesonbuild/scripts/cmake_run_ctgt.py @@ -35,7 +35,7 @@ def run(argsv: T.List[str]) -> int: commands += [[]] continue - i = i.replace('"', '') # Remove lefover quotes + i = i.replace('"', '') # Remove leftover quotes commands[-1] += [i] # Execute diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index ae18594..b9c58fe 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -350,7 +350,7 @@ class Elf(DataSizes): sys.exit(msg) # The linker does read-only string deduplication. If there is a # string that shares a suffix with the rpath, they might get - # dedupped. This means changing the rpath string might break something + # deduped. This means changing the rpath string might break something # completely unrelated. This has already happened once with X.org. # Thus we want to keep this change as small as possible to minimize # the chance of obliterating other strings. It might still happen diff --git a/mesonbuild/utils/universal.py b/mesonbuild/utils/universal.py index e06eaaa..8c35803 100644 --- a/mesonbuild/utils/universal.py +++ b/mesonbuild/utils/universal.py @@ -2295,7 +2295,7 @@ class OptionKey: def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None, machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '', module: T.Optional[str] = '') -> 'OptionKey': - """Create a new copy of this key, but with alterted members. + """Create a new copy of this key, but with altered members. 
For example: >>> a = OptionKey('foo', '', MachineChoice.Host) @@ -2318,11 +2318,11 @@ class OptionKey: return self.evolve(subproject='') def as_build(self) -> 'OptionKey': - """Convenience method for key.evolve(machine=MachinceChoice.BUILD).""" + """Convenience method for key.evolve(machine=MachineChoice.BUILD).""" return self.evolve(machine=MachineChoice.BUILD) def as_host(self) -> 'OptionKey': - """Convenience method for key.evolve(machine=MachinceChoice.HOST).""" + """Convenience method for key.evolve(machine=MachineChoice.HOST).""" return self.evolve(machine=MachineChoice.HOST) def is_backend(self) -> bool: diff --git a/mesonbuild/utils/vsenv.py b/mesonbuild/utils/vsenv.py index d862e5a..3c26878 100644 --- a/mesonbuild/utils/vsenv.py +++ b/mesonbuild/utils/vsenv.py @@ -70,7 +70,7 @@ def _setup_vsenv(force: bool) -> bool: ) bat_info = json.loads(bat_json) if not bat_info: - # VS installer instelled but not VS itself maybe? + # VS installer installed but not VS itself maybe? raise MesonException('Could not parse vswhere.exe output') bat_root = pathlib.Path(bat_info[0]['installationPath']) if windows_detect_native_arch() == 'arm64': |