aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--docs/markdown/Builtin-options.md59
-rw-r--r--docs/yaml/functions/project.yaml10
-rw-r--r--mesonbuild/ast/introspection.py2
-rw-r--r--mesonbuild/backend/xcodebackend.py53
-rw-r--r--mesonbuild/cmake/tracetargets.py1
-rw-r--r--mesonbuild/compilers/c.py2
-rw-r--r--mesonbuild/compilers/compilers.py47
-rw-r--r--mesonbuild/compilers/detect.py60
-rw-r--r--mesonbuild/coredata.py11
-rw-r--r--mesonbuild/environment.py42
-rw-r--r--mesonbuild/interpreter/interpreter.py4
-rw-r--r--mesonbuild/linkers/detect.py6
-rw-r--r--mesonbuild/msetup.py21
-rw-r--r--mesonbuild/options.py189
-rw-r--r--unittests/linuxliketests.py18
-rw-r--r--unittests/optiontests.py98
16 files changed, 406 insertions, 217 deletions
diff --git a/docs/markdown/Builtin-options.md b/docs/markdown/Builtin-options.md
index ee07df4..7a4d7f0 100644
--- a/docs/markdown/Builtin-options.md
+++ b/docs/markdown/Builtin-options.md
@@ -72,28 +72,28 @@ Options that are labeled "per machine" in the table are set per
machine. See the [specifying options per
machine](#specifying-options-per-machine) section for details.
-| Option | Default value | Description | Is per machine | Is per subproject |
-| -------------------------------------- | ------------- | ----------- | -------------- | ----------------- |
+| Option | Default value | Description | Is per machine | Per subproject (since) |
+| -------------------------------------- | ------------- | ----------- | -------------- | ---------------------- |
| auto_features {enabled, disabled, auto} | auto | Override value of all 'auto' features | no | no |
| backend {ninja, vs,<br>vs2010, vs2012, vs2013, vs2015, vs2017, vs2019, vs2022, xcode, none} | ninja | Backend to use | no | no |
| genvslite {vs2022} | vs2022 | Setup multi-buildtype ninja build directories and Visual Studio solution | no | no |
-| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no | no |
-| debug | true | Enable debug symbols and other information | no | no |
-| default_both_libraries {shared, static, auto} | shared | Default library type for both_libraries | no | no |
-| default_library {shared, static, both} | shared | Default library type | no | yes |
+| buildtype {plain, debug,<br>debugoptimized, release, minsize, custom} | debug | Build type to use | no | 1.8.0 |
+| debug | true | Enable debug symbols and other information | no | 1.8.0 |
+| default_both_libraries {shared, static, auto} | shared | Default library type for both_libraries | no | 1.8.0 |
+| default_library {shared, static, both} | shared | Default library type | no | 0.54.0 |
| errorlogs | true | Whether to print the logs from failing tests. | no | no |
| install_umask {preserve, 0000-0777} | 022 | Default umask to apply on permissions of installed files | no | no |
| layout {mirror,flat} | mirror | Build directory layout | no | no |
-| optimization {plain, 0, g, 1, 2, 3, s} | 0 | Optimization level | no | no |
+| optimization {plain, 0, g, 1, 2, 3, s} | 0 | Optimization level | no | 1.8.0 |
| pkg_config_path {OS separated path} | '' | Additional paths for pkg-config to search before builtin paths | yes | no |
| prefer_static | false | Whether to try static linking before shared linking | no | no |
| cmake_prefix_path | [] | Additional prefixes for cmake to search before builtin paths | yes | no |
| stdsplit | true | Split stdout and stderr in test logs | no | no |
-| strip | false | Strip targets on install | no | no |
-| unity {on, off, subprojects} | off | Unity build | no | no |
-| unity_size {>=2} | 4 | Unity file block size | no | no |
-| warning_level {0, 1, 2, 3, everything} | 1 | Set the warning level. From 0 = compiler default to everything = highest | no | yes |
-| werror | false | Treat warnings as errors | no | yes |
+| strip | false | Strip targets on install | no | 1.8.0 |
+| unity {on, off, subprojects} | off | Unity build | no | 1.8.0 |
+| unity_size {>=2} | 4 | Unity file block size | no | 1.8.0 |
+| warning_level {0, 1, 2, 3, everything} | 1 | Set the warning level. From 0 = compiler default to everything = highest | no | 0.56.0 |
+| werror | false | Treat warnings as errors | no | 0.54.0 |
| wrap_mode {default, nofallback,<br>nodownload, forcefallback, nopromote} | default | Wrap mode to use | no | no |
| force_fallback_for | [] | Force fallback for those dependencies | no | no |
| vsenv | false | Activate Visual Studio environment | no | no |
@@ -370,11 +370,10 @@ allowing differences in behavior to crop out.
## Specifying options per subproject
-Since *0.54.0* `default_library` and `werror` built-in options can be
-defined per subproject. This is useful, for example, when building
-shared libraries in the main project and statically linking a subproject,
-or when the main project must build with no warnings but some subprojects
-cannot.
+Several built-in options and all compiler options can be defined per subproject.
+This is useful, for example, when building shared libraries in the main project
+and statically linking a subproject, or when the main project must build
+with no warnings but some subprojects cannot.
Most of the time, this would be used either in the parent project by
setting subproject's default_options (e.g. `subproject('foo',
@@ -382,12 +381,30 @@ default_options: 'default_library=static')`), or by the user through the
command line: `-Dfoo:default_library=static`.
The value is overridden in this order:
+- `opt=value` from parent project's `default_options`
+- `opt=value` from subproject's `default_options`
+- `subp:opt=value` from parent project's `default_options`
+- `opt=value` from `subproject()` `default_options`
+- `opt=value` from machine file
+- `opt=value` from command line
+- `subp:opt=value` from machine file
+- `subp:opt=value` from command line
+
+### Old behavior
+
+Between *0.54.0* and *1.7.x* only a few options could be defined per subproject:
+- `default_library` and `werror` since *0.54.0*;
+- `warning_level` since *0.56.0*;
+- compiler options since *0.63.0*.
+
+The value was overridden in this order:
+
- Value from parent project
-- Value from subproject's default_options if set
-- Value from subproject() default_options if set
-- Value from command line if set
+- Value from subproject's `default_options`
+- Value from `subproject()` `default_options`
+- Value from machine file
+- Value from command line
-Since *0.56.0* `warning_level` can also be defined per subproject.
## Module options
diff --git a/docs/yaml/functions/project.yaml b/docs/yaml/functions/project.yaml
index 5be8cac..25ea9b9 100644
--- a/docs/yaml/functions/project.yaml
+++ b/docs/yaml/functions/project.yaml
@@ -45,15 +45,19 @@ kwargs:
For example to set the default project type you would
set this: `default_options : ['buildtype=debugoptimized']`. Note
that these settings are only used when running Meson for the first
- time. Global options such as `buildtype` can only be specified in
- the master project, settings in subprojects are ignored. Project
- specific options are used normally even in subprojects.
+ time.
Note that some options can override the default behavior;
for example, using `c_args` here means that the `CFLAGS`
environment variable is not used. Consider using
[[add_project_arguments()]] instead.
+ Also note that not all options are taken into account when
+ building as a subproject, and the exact set of options
+ that are per-subproject has increased over time; for more
+ information, see [core options](Builtin-options.md#core-options)
+ and [compiler options](Builtin-options.md#compiler-options).
+
*(since 1.2.0)*: A dictionary may now be passed.
version:
diff --git a/mesonbuild/ast/introspection.py b/mesonbuild/ast/introspection.py
index 147436d..decce4b 100644
--- a/mesonbuild/ast/introspection.py
+++ b/mesonbuild/ast/introspection.py
@@ -284,7 +284,7 @@ class IntrospectionInterpreter(AstInterpreter):
return new_target
def build_library(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, TYPE_var]) -> T.Union[IntrospectionBuildTarget, UnknownValue]:
- default_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
+ default_library = self.coredata.optstore.get_value_for(OptionKey('default_library', subproject=self.subproject))
if default_library == 'shared':
return self.build_target(node, args, kwargs, SharedLibrary)
elif default_library == 'static':
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index 6ad982d..c0522e3 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -176,6 +176,15 @@ class PbxDict:
self.keys.add(key)
self.items.append(item)
+ def get_item(self, key: str) -> PbxDictItem:
+ assert key in self.keys
+ for item in self.items:
+ if not isinstance(item, PbxDictItem):
+ continue
+ if item.key == key:
+ return item
+ return None
+
def has_item(self, key: str) -> bool:
return key in self.keys
@@ -396,10 +405,23 @@ class XCodeBackend(backends.Backend):
def generate_filemap(self) -> None:
self.filemap = {} # Key is source file relative to src root.
+ self.foldermap = {}
self.target_filemap = {}
for name, t in self.build_targets.items():
for s in t.sources:
if isinstance(s, mesonlib.File):
+ if '/' in s.fname:
+ # From the top level down, add the folders containing the source file.
+ folder = os.path.split(os.path.dirname(s.fname))
+ while folder:
+ fpath = os.path.join(*folder)
+ # Multiple targets might use the same folders, so store their targets with them.
+ # Otherwise, folders and their source files will appear in the wrong places in Xcode.
+ if (fpath, t) not in self.foldermap:
+ self.foldermap[(fpath, t)] = self.gen_id()
+ else:
+ break
+ folder = folder[:-1]
s = os.path.join(s.subdir, s.fname)
self.filemap[s] = self.gen_id()
for o in t.objects:
@@ -1052,6 +1074,24 @@ class XCodeBackend(backends.Backend):
main_children.add_item(frameworks_id, 'Frameworks')
main_dict.add_item('sourceTree', '<group>')
+ # Define each folder as a group in Xcode. That way, it can build the file tree correctly.
+ # This must be done before the project tree group is generated, as source files are added during that phase.
+ for (path, target), id in self.foldermap.items():
+ folder_dict = PbxDict()
+ objects_dict.add_item(id, folder_dict, path)
+ folder_dict.add_item('isa', 'PBXGroup')
+ folder_children = PbxArray()
+ folder_dict.add_item('children', folder_children)
+ folder_dict.add_item('name', '"{}"'.format(path.rsplit('/', 1)[-1]))
+ folder_dict.add_item('path', f'"{path}"')
+ folder_dict.add_item('sourceTree', 'SOURCE_ROOT')
+
+ # Add any detected subdirectories (not declared as subdir()) here, but only one level higher.
+ # Example: In "root", add "root/sub", but not "root/sub/subtwo".
+ for path_dep, target_dep in self.foldermap:
+ if path_dep.startswith(path) and path_dep.split('/', 1)[0] == path.split('/', 1)[0] and path_dep != path and path_dep.count('/') == path.count('/') + 1 and target == target_dep:
+ folder_children.add_item(self.foldermap[(path_dep, target)], path_dep)
+
self.add_projecttree(objects_dict, projecttree_id)
resource_dict = PbxDict()
@@ -1121,6 +1161,7 @@ class XCodeBackend(backends.Backend):
tid = t.get_id()
group_id = self.gen_id()
target_dict = PbxDict()
+ folder_ids = set()
objects_dict.add_item(group_id, target_dict, tid)
target_dict.add_item('isa', 'PBXGroup')
target_children = PbxArray()
@@ -1130,6 +1171,18 @@ class XCodeBackend(backends.Backend):
source_files_dict = PbxDict()
for s in t.sources:
if isinstance(s, mesonlib.File):
+ # If the file is in a folder, add it to the group representing that folder.
+ if '/' in s.fname:
+ folder = '/'.join(s.fname.split('/')[:-1])
+ folder_dict = objects_dict.get_item(self.foldermap[(folder, t)]).value.get_item('children').value
+ temp = os.path.join(s.subdir, s.fname)
+ folder_dict.add_item(self.fileref_ids[(tid, temp)], temp)
+ if self.foldermap[(folder, t)] in folder_ids:
+ continue
+ if len(folder.split('/')) == 1:
+ target_children.add_item(self.foldermap[(folder, t)], folder)
+ folder_ids.add(self.foldermap[(folder, t)])
+ continue
s = os.path.join(s.subdir, s.fname)
elif isinstance(s, str):
s = os.path.join(t.subdir, s)
diff --git a/mesonbuild/cmake/tracetargets.py b/mesonbuild/cmake/tracetargets.py
index 9873845..2b2b93d 100644
--- a/mesonbuild/cmake/tracetargets.py
+++ b/mesonbuild/cmake/tracetargets.py
@@ -87,6 +87,7 @@ def resolve_cmake_trace_targets(target_name: str,
curr_path = Path(*path_to_framework)
framework_path = curr_path.parent
framework_name = curr_path.stem
+ res.public_compile_opts += [f"-F{framework_path}"]
res.libraries += [f'-F{framework_path}', '-framework', framework_name]
else:
res.libraries += [curr]
diff --git a/mesonbuild/compilers/c.py b/mesonbuild/compilers/c.py
index 7a2fec5..424b612 100644
--- a/mesonbuild/compilers/c.py
+++ b/mesonbuild/compilers/c.py
@@ -504,7 +504,7 @@ class IntelClCCompiler(IntelVisualStudioLikeCompiler, VisualStudioLikeCCompilerM
def get_option_std_args(self, target: BuildTarget, env: Environment, subproject: T.Optional[str] = None) -> T.List[str]:
args: T.List[str] = []
- std = self.get_compileropt_value('winlibs', env, target, subproject)
+ std = self.get_compileropt_value('std', env, target, subproject)
assert isinstance(std, str)
if std == 'c89':
mlog.log("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True)
diff --git a/mesonbuild/compilers/compilers.py b/mesonbuild/compilers/compilers.py
index 0376922..af6b050 100644
--- a/mesonbuild/compilers/compilers.py
+++ b/mesonbuild/compilers/compilers.py
@@ -1417,50 +1417,3 @@ class Compiler(HoldableObject, metaclass=abc.ABCMeta):
if 'none' not in value:
value = ['none'] + value
std.choices = value
-
-
-def get_global_options(lang: str,
- comp: T.Type[Compiler],
- for_machine: MachineChoice,
- env: 'Environment') -> dict[OptionKey, options.AnyOptionType]:
- """Retrieve options that apply to all compilers for a given language."""
- description = f'Extra arguments passed to the {lang}'
- argkey = OptionKey(f'{lang}_args', machine=for_machine)
- largkey = OptionKey(f'{lang}_link_args', machine=for_machine)
-
- comp_args_from_envvar = False
- comp_options = env.coredata.optstore.get_pending_value(argkey)
- if comp_options is None:
- comp_args_from_envvar = True
- comp_options = env.env_opts.get(argkey, [])
-
- link_args_from_envvar = False
- link_options = env.coredata.optstore.get_pending_value(largkey)
- if link_options is None:
- link_args_from_envvar = True
- link_options = env.env_opts.get(largkey, [])
-
- assert isinstance(comp_options, (str, list)), 'for mypy'
- assert isinstance(link_options, (str, list)), 'for mypy'
-
- cargs = options.UserStringArrayOption(
- argkey.name,
- description + ' compiler',
- comp_options, split_args=True, allow_dups=True)
-
- largs = options.UserStringArrayOption(
- largkey.name,
- description + ' linker',
- link_options, split_args=True, allow_dups=True)
-
- if comp.INVOKES_LINKER and comp_args_from_envvar and link_args_from_envvar:
- # If the compiler acts as a linker driver, and we're using the
- # environment variable flags for both the compiler and linker
- # arguments, then put the compiler flags in the linker flags as well.
- # This is how autotools works, and the env vars feature is for
- # autotools compatibility.
- largs.extend_value(comp_options)
-
- opts: dict[OptionKey, options.AnyOptionType] = {argkey: cargs, largkey: largs}
-
- return opts
diff --git a/mesonbuild/compilers/detect.py b/mesonbuild/compilers/detect.py
index 040c42f..f57957f 100644
--- a/mesonbuild/compilers/detect.py
+++ b/mesonbuild/compilers/detect.py
@@ -366,7 +366,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'Emscripten' in out:
cls = c.EmscriptenCCompiler if lang == 'c' else cpp.EmscriptenCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
# emcc requires a file input in order to pass arguments to the
# linker. It'll exit with an error code, but still print the
@@ -410,7 +410,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version = arm_ver_str
cls = c.ArmclangCCompiler if lang == 'c' else cpp.ArmclangCPPCompiler
linker = linkers.ArmClangDynamicLinker(for_machine, version=version)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
full_version=full_version, linker=linker)
@@ -445,7 +445,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if identifier in out:
cls = compiler_classes[0] if lang == 'c' else compiler_classes[1]
lnk = compiler_classes[2]
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = lnk(compiler, for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -482,7 +482,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = c.IntelClCCompiler if lang == 'c' else cpp.IntelClCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info, target,
@@ -491,7 +491,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = c.IntelLLVMClCCompiler if lang == 'c' else cpp.IntelLLVMClCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info, target,
@@ -524,14 +524,14 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version=cl_signature, linker=linker)
if 'PGI Compilers' in out:
cls = c.PGICCompiler if lang == 'c' else cpp.PGICPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
info, linker=linker)
if 'NVIDIA Compilers and Tools' in out:
cls = c.NvidiaHPC_CCompiler if lang == 'c' else cpp.NvidiaHPC_CPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.NvidiaHPC_DynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
@@ -550,14 +550,14 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
full_version=full_version, linker=l)
if 'ARM' in out and not ('Metrowerks' in out or 'Freescale' in out):
cls = c.ArmCCompiler if lang == 'c' else cpp.ArmCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.ArmDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross,
info, full_version=full_version, linker=linker)
if 'RX Family' in out:
cls = c.CcrxCCompiler if lang == 'c' else cpp.CcrxCPPCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.CcrxDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -565,7 +565,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'Microchip Technology' in out:
cls = c.Xc16CCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.Xc16DynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -573,7 +573,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
if 'CompCert' in out:
cls = c.CompCertCCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.CompCertDynamicLinker(for_machine, version=version)
return cls(
ccache, compiler, version, for_machine, is_cross, info,
@@ -591,7 +591,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
assert mwcc_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None
compiler_version = '.'.join(x for x in mwcc_ver_match.groups() if x is not None)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
if ld is not None:
@@ -616,7 +616,7 @@ def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: Machin
assert tasking_ver_match is not None, 'for mypy'
tasking_version = '.'.join(x for x in tasking_ver_match.groups() if x is not None)
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
if ld is None:
raise MesonException(f'{cls.language}_ld was not properly defined in your cross file')
@@ -668,7 +668,7 @@ def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = out.strip().rsplit('V', maxsplit=1)[-1]
cpp_compiler = detect_cpp_compiler(env, for_machine)
cls = CudaCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
key = OptionKey('cuda_link_args', machine=for_machine)
if key in env.options:
# To fix LDFLAGS issue
@@ -759,7 +759,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = fortran.IntelLLVMClFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info,
@@ -769,7 +769,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
version = search_version(err)
target = 'x86' if 'IA-32' in err else 'x86_64'
cls = fortran.IntelClFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.XilinkDynamicLinker(for_machine, [], version=version)
return cls(
compiler, version, for_machine, is_cross, info,
@@ -796,7 +796,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if 'PGI Compilers' in out:
cls = fortran.PGIFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine,
cls.LINKER_PREFIX, [], version=version)
return cls(
@@ -805,7 +805,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
if 'NVIDIA Compilers and Tools' in out:
cls = fortran.NvidiaHPC_FortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.PGIDynamicLinker(compiler, for_machine,
cls.LINKER_PREFIX, [], version=version)
return cls(
@@ -856,7 +856,7 @@ def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> C
full_version = err.split('\n', 1)[0]
version = full_version.split()[-1]
cls = fortran.NAGFortranCompiler
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
linker = linkers.NAGDynamicLinker(
compiler, for_machine, cls.LINKER_PREFIX, [],
version=version)
@@ -948,7 +948,7 @@ def detect_java_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
if len(parts) > 1:
version = parts[1]
comp_class = JavaCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class(exelist, version, for_machine, info)
raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
@@ -972,7 +972,7 @@ def detect_cs_compiler(env: 'Environment', for_machine: MachineChoice) -> Compil
cls = cs.VisualStudioCsCompiler
else:
continue
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(comp, version, for_machine, info)
_handle_exceptions(popen_exceptions, compilers)
@@ -1002,7 +1002,7 @@ def detect_cython_compiler(env: 'Environment', for_machine: MachineChoice) -> Co
version = search_version(err)
if version is not None:
comp_class = CythonCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, is_cross=is_cross)
_handle_exceptions(popen_exceptions, compilers)
raise EnvironmentException('Unreachable code (exception to make mypy happy)')
@@ -1023,7 +1023,7 @@ def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = search_version(out)
if 'Vala' in out:
comp_class = ValaCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class(exelist, version, for_machine, is_cross, info)
raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
@@ -1145,7 +1145,7 @@ def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> Rust
c = linker.exelist[1] if linker.exelist[0].endswith('ccache') else linker.exelist[0]
compiler.extend(cls.use_linker_args(c, ''))
- env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+ env.add_lang_args(cls.language, cls, for_machine)
return cls(
compiler, version, for_machine, is_cross, info,
linker=linker, full_version=full_version)
@@ -1329,20 +1329,20 @@ def detect_nasm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
version = search_version(output)
if 'NASM' in output:
comp_class = NasmCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
elif 'yasm' in output:
comp_class = YasmCompiler
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
elif 'Metrowerks' in output or 'Freescale' in output:
if 'ARM' in output:
comp_class_mwasmarm = MetrowerksAsmCompilerARM
- env.coredata.add_lang_args(comp_class_mwasmarm.language, comp_class_mwasmarm, for_machine, env)
+ env.add_lang_args(comp_class_mwasmarm.language, comp_class_mwasmarm, for_machine)
return comp_class_mwasmarm([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
else:
comp_class_mwasmeppc = MetrowerksAsmCompilerEmbeddedPowerPC
- env.coredata.add_lang_args(comp_class_mwasmeppc.language, comp_class_mwasmeppc, for_machine, env)
+ env.add_lang_args(comp_class_mwasmeppc.language, comp_class_mwasmeppc, for_machine)
return comp_class_mwasmeppc([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
_handle_exceptions(popen_exceptions, compilers)
@@ -1383,7 +1383,7 @@ def detect_masm_compiler(env: 'Environment', for_machine: MachineChoice) -> Comp
try:
output = Popen_safe(comp + [arg])[2]
version = search_version(output)
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
except OSError as e:
popen_exceptions[' '.join(comp + [arg])] = e
@@ -1403,7 +1403,7 @@ def detect_linearasm_compiler(env: Environment, for_machine: MachineChoice) -> C
try:
output = Popen_safe(comp + [arg])[2]
version = search_version(output)
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
except OSError as e:
popen_exceptions[' '.join(comp + [arg])] = e
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 26ef1b8..9a4139b 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -38,7 +38,6 @@ if T.TYPE_CHECKING:
from . import dependencies
from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode
from .dependencies.detect import TV_DepID
- from .environment import Environment
from .mesonlib import FileOrString
from .cmake.traceparser import CMakeCacheEntry
from .interpreterbase import SubProject
@@ -584,16 +583,6 @@ class CoreData:
else:
self.optstore.add_compiler_option(lang, k, o)
- def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
- for_machine: MachineChoice, env: 'Environment') -> None:
- """Add global language arguments that are needed before compiler/linker detection."""
- from .compilers import compilers
- # These options are all new at this point, because the compiler is
- # responsible for adding its own options, thus calling
- # `self.optstore.update()`` is perfectly safe.
- for gopt_key, gopt_valobj in compilers.get_global_options(lang, comp, for_machine, env).items():
- self.optstore.add_compiler_option(lang, gopt_key, gopt_valobj)
-
def process_compiler_options(self, lang: str, comp: Compiler, subproject: str) -> None:
self.add_compiler_options(comp.get_options(), lang, comp.for_machine)
diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py
index 2c3bdec..2b79f02 100644
--- a/mesonbuild/environment.py
+++ b/mesonbuild/environment.py
@@ -12,6 +12,7 @@ import collections
from . import coredata
from . import mesonlib
from . import machinefile
+from . import options
CmdLineFileParser = machinefile.CmdLineFileParser
@@ -1071,3 +1072,44 @@ class Environment:
if extra_paths:
env.prepend('PATH', list(extra_paths))
return env
+
+ def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
+ for_machine: MachineChoice) -> None:
+ """Add global language arguments that are needed before compiler/linker detection."""
+ description = f'Extra arguments passed to the {lang}'
+ argkey = OptionKey(f'{lang}_args', machine=for_machine)
+ largkey = OptionKey(f'{lang}_link_args', machine=for_machine)
+
+ comp_args_from_envvar = False
+ comp_options = self.coredata.optstore.get_pending_value(argkey)
+ if comp_options is None:
+ comp_args_from_envvar = True
+ comp_options = self.env_opts.get(argkey, [])
+
+ link_options = self.coredata.optstore.get_pending_value(largkey)
+ if link_options is None:
+ link_options = self.env_opts.get(largkey, [])
+
+ assert isinstance(comp_options, (str, list)), 'for mypy'
+ assert isinstance(link_options, (str, list)), 'for mypy'
+
+ cargs = options.UserStringArrayOption(
+ argkey.name,
+ description + ' compiler',
+ comp_options, split_args=True, allow_dups=True)
+
+ largs = options.UserStringArrayOption(
+ largkey.name,
+ description + ' linker',
+ link_options, split_args=True, allow_dups=True)
+
+ self.coredata.optstore.add_compiler_option(lang, argkey, cargs)
+ self.coredata.optstore.add_compiler_option(lang, largkey, largs)
+
+ if comp.INVOKES_LINKER and comp_args_from_envvar:
+ # If the compiler acts as a linker driver, and we're using the
+ # environment variable flags for both the compiler and linker
+ # arguments, then put the compiler flags in the linker flags as well.
+ # This is how autotools works, and the env vars feature is for
+ # autotools compatibility.
+ largs.extend_value(comp_options)
diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py
index 29bb705..730f3d3 100644
--- a/mesonbuild/interpreter/interpreter.py
+++ b/mesonbuild/interpreter/interpreter.py
@@ -3255,9 +3255,9 @@ class Interpreter(InterpreterBase, HoldableObject):
def build_both_libraries(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargsType], kwargs: kwtypes.Library) -> build.BothLibraries:
shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
- preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_both_libraries'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_both_libraries', subproject=self.subproject))
if preferred_library == 'auto':
- preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_library'))
+ preferred_library = self.coredata.optstore.get_value_for(OptionKey('default_library', subproject=self.subproject))
if preferred_library == 'both':
preferred_library = 'shared'
diff --git a/mesonbuild/linkers/detect.py b/mesonbuild/linkers/detect.py
index f6c0fbc..6fbe6e4 100644
--- a/mesonbuild/linkers/detect.py
+++ b/mesonbuild/linkers/detect.py
@@ -39,7 +39,7 @@ def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
use_linker_prefix: bool = True, invoked_directly: bool = True,
extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
from . import linkers
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
if invoked_directly or comp_class.get_argument_syntax() == 'msvc':
rsp_syntax = RSPFileSyntax.MSVC
@@ -128,7 +128,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
:extra_args: Any additional arguments required (such as a source file)
"""
from . import linkers
- env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+ env.add_lang_args(comp_class.language, comp_class, for_machine)
extra_args = extra_args or []
system = env.machines[for_machine].system
@@ -166,7 +166,7 @@ def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Ty
linker = lld_cls(
compiler, for_machine, comp_class.LINKER_PREFIX, override, system=system, version=v)
- elif 'Hexagon' in o and 'LLVM' in o:
+ elif o.startswith("eld"):
linker = linkers.ELDDynamicLinker(
compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
elif 'Snapdragon' in e and 'LLVM' in e:
diff --git a/mesonbuild/msetup.py b/mesonbuild/msetup.py
index b08d5e8..e22e0a7 100644
--- a/mesonbuild/msetup.py
+++ b/mesonbuild/msetup.py
@@ -194,22 +194,25 @@ class MesonApp:
return self._generate(env, capture, vslite_ctx)
def check_unused_options(self, coredata: 'coredata.CoreData', cmd_line_options: T.Dict[OptionKey, str], all_subprojects: T.Mapping[str, object]) -> None:
- pending = coredata.optstore.pending_options
errlist: T.List[str] = []
known_subprojects = all_subprojects.keys()
- for opt in pending:
- # It is not an error to set wrong option for unknown subprojects
- # because they might be used in future reconfigurations
- if coredata.optstore.accept_as_pending_option(opt, known_subprojects):
+ for opt in cmd_line_options:
+ # Accept options that exist or could appear in subsequent reconfigurations,
+ # including options for subprojects that were not used
+ if opt in coredata.optstore or \
+ opt.evolve(subproject=None) in coredata.optstore or \
+ coredata.optstore.accept_as_pending_option(opt):
continue
- if opt in cmd_line_options:
- errlist.append(f'"{opt}"')
+ if opt.subproject and opt.subproject not in known_subprojects:
+ continue
+ # "foo=true" may also refer to toplevel project option ":foo"
+ if opt.subproject is None and coredata.optstore.is_project_option(opt.as_root()):
+ continue
+ errlist.append(f'"{opt}"')
if errlist:
errstr = ', '.join(errlist)
raise MesonException(f'Unknown options: {errstr}')
- coredata.optstore.clear_pending()
-
def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.Optional[dict]) -> T.Optional[dict]:
# Get all user defined options, including options that have been defined
# during a previous invocation or using meson configure.
diff --git a/mesonbuild/options.py b/mesonbuild/options.py
index bc4d79f..317acbd 100644
--- a/mesonbuild/options.py
+++ b/mesonbuild/options.py
@@ -805,6 +805,7 @@ class OptionStore:
def __init__(self, is_cross: bool) -> None:
self.options: T.Dict['OptionKey', 'AnyOptionType'] = {}
+ self.subprojects: T.Set[str] = set()
self.project_options: T.Set[OptionKey] = set()
self.module_options: T.Set[OptionKey] = set()
from .compilers import all_languages
@@ -812,13 +813,11 @@ class OptionStore:
self.augments: OptionDict = {}
self.is_cross = is_cross
- # Pending options are options that need to be initialized later, either
- # configuration dependent options like compiler options, or options for
- # a different subproject
+ # Pending options are configuration dependent options that could be
+ # initialized later, such as compiler options
self.pending_options: OptionDict = {}
-
- def clear_pending(self) -> None:
- self.pending_options = {}
+ # Subproject options from toplevel project()
+ self.pending_subproject_options: OptionDict = {}
def ensure_and_validate_key(self, key: T.Union[OptionKey, str]) -> OptionKey:
if isinstance(key, str):
@@ -849,7 +848,7 @@ class OptionStore:
def __len__(self) -> int:
return len(self.options)
- def get_value_object_for(self, key: 'T.Union[OptionKey, str]') -> AnyOptionType:
+ def get_key_and_value_object_for(self, key: 'T.Union[OptionKey, str]') -> T.Tuple[OptionKey, AnyOptionType]:
key = self.ensure_and_validate_key(key)
potential = self.options.get(key, None)
if self.is_project_option(key):
@@ -862,32 +861,41 @@ class OptionStore:
# Subproject is set to yield, but top level
# project does not have an option of the same
# name. Return the subproject option.
- return potential
+ return key, potential
# If parent object has different type, do not yield.
# This should probably be an error.
if type(parent_option) is type(potential):
- return parent_option
- return potential
+ return parent_key, parent_option
+ return key, potential
if potential is None:
raise KeyError(f'Tried to access nonexistant project option {key}.')
- return potential
+ return key, potential
else:
if potential is None:
parent_key = OptionKey(key.name, subproject=None, machine=key.machine)
if parent_key not in self.options:
raise KeyError(f'Tried to access nonexistant project parent option {parent_key}.')
- return self.options[parent_key]
- return potential
+ # This is a global option but it can still have per-project
+ # augment, so return the subproject key.
+ return key, self.options[parent_key]
+ return key, potential
+
+ def get_value_object_for(self, key: 'T.Union[OptionKey, str]') -> AnyOptionType:
+ return self.get_key_and_value_object_for(key)[1]
def get_value_object_and_value_for(self, key: OptionKey) -> T.Tuple[AnyOptionType, ElementaryOptionValues]:
assert isinstance(key, OptionKey)
- vobject = self.get_value_object_for(key)
+ _, vobject = self.get_key_and_value_object_for(key)
computed_value = vobject.value
- if key.subproject is not None:
- if key in self.augments:
- computed_value = vobject.validate_value(self.augments[key])
+ if key in self.augments:
+ assert key.subproject is not None
+ computed_value = self.augments[key]
return (vobject, computed_value)
+ def option_has_value(self, key: OptionKey, value: ElementaryOptionValues) -> bool:
+ vobject, current_value = self.get_value_object_and_value_for(key)
+ return vobject.validate_value(value) == current_value
+
def get_value_for(self, name: 'T.Union[OptionKey, str]', subproject: T.Optional[str] = None) -> ElementaryOptionValues:
if isinstance(name, str):
key = OptionKey(name, subproject)
@@ -998,6 +1006,7 @@ class OptionStore:
return value.as_posix()
def set_option(self, key: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
+ changed = False
error_key = key
if error_key.subproject == '':
error_key = error_key.evolve(subproject=None)
@@ -1011,7 +1020,7 @@ class OptionStore:
new_value = self.sanitize_dir_option_value(prefix, key, new_value)
try:
- opt = self.get_value_object_for(key)
+ actual_key, opt = self.get_key_and_value_object_for(key)
except KeyError:
raise MesonException(f'Unknown option: "{error_key}".')
@@ -1034,13 +1043,23 @@ class OptionStore:
elif isinstance(opt.deprecated, str):
mlog.deprecation(f'Option "{error_key}" is replaced by {opt.deprecated!r}')
# Change both this aption and the new one pointed to.
- dirty = self.set_option(key.evolve(name=opt.deprecated), new_value)
- dirty |= opt.set_value(new_value)
- return dirty
+ changed |= self.set_option(key.evolve(name=opt.deprecated), new_value, first_invocation)
- old_value = opt.value
- changed = opt.set_value(new_value)
+ new_value = opt.validate_value(new_value)
+ if key in self.options:
+ if actual_key.subproject == key.subproject:
+ old_value = opt.value
+ opt.set_value(new_value)
+ else:
+ # the key must have pointed to a yielding option;
+ # do not overwrite the global value in that case
+ return changed
+ else:
+ assert key.subproject is not None
+ old_value = self.augments.get(key, opt.value)
+ self.augments[key] = new_value
+ changed |= old_value != new_value
if opt.readonly and changed and not first_invocation:
raise MesonException(f'Tried to modify read only option "{error_key}"')
@@ -1054,12 +1073,12 @@ class OptionStore:
optimization, debug = self.DEFAULT_DEPENDENTS[new_value]
dkey = key.evolve(name='debug')
optkey = key.evolve(name='optimization')
- self.options[dkey].set_value(debug)
- self.options[optkey].set_value(optimization)
+ self.set_option(dkey, debug, first_invocation)
+ self.set_option(optkey, optimization, first_invocation)
return changed
- def set_option_maybe_root(self, o: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
+ def set_user_option(self, o: OptionKey, new_value: ElementaryOptionValues, first_invocation: bool = False) -> bool:
if not self.is_cross and o.is_for_build():
return False
@@ -1070,37 +1089,34 @@ class OptionStore:
# can be either
#
# A) a system option in which case the subproject is None
- # B) a project option, in which case the subproject is '' (this method is only called from top level)
+ # B) a project option, in which case the subproject is ''
#
# The key parsing function can not handle the difference between the two
# and defaults to A.
if o in self.options:
return self.set_option(o, new_value, first_invocation)
+
+ # could also be an augment...
+ global_option = o.evolve(subproject=None)
+ if o.subproject is not None and global_option in self.options:
+ return self.set_option(o, new_value, first_invocation)
+
if self.accept_as_pending_option(o, first_invocation=first_invocation):
old_value = self.pending_options.get(o, None)
self.pending_options[o] = new_value
- return old_value is None or str(old_value) == new_value
- else:
+ return old_value is None or str(old_value) != new_value
+ elif o.subproject is None:
o = o.as_root()
return self.set_option(o, new_value, first_invocation)
+ else:
+ raise MesonException(f'Unknown option: "{o}".')
def set_from_configure_command(self, D_args: T.List[str], U_args: T.List[str]) -> bool:
dirty = False
D_args = [] if D_args is None else D_args
- (global_options, perproject_global_options, project_options) = self.classify_D_arguments(D_args)
U_args = [] if U_args is None else U_args
- for key, valstr in global_options:
- dirty |= self.set_option_maybe_root(key, valstr)
- for key, valstr in project_options:
- dirty |= self.set_option_maybe_root(key, valstr)
- for key, valstr in perproject_global_options:
- if key in self.augments:
- if self.augments[key] != valstr:
- self.augments[key] = valstr
- dirty = True
- else:
- self.augments[key] = valstr
- dirty = True
+ for key, valstr in self.parse_D_arguments(D_args):
+ dirty |= self.set_user_option(key, valstr)
for keystr in U_args:
key = OptionKey.from_string(keystr)
if key in self.augments:
@@ -1226,23 +1242,13 @@ class OptionStore:
def is_module_option(self, key: OptionKey) -> bool:
return key in self.module_options
- def classify_D_arguments(self, D: T.List[str]) -> T.Tuple[T.List[T.Tuple[OptionKey, str]],
- T.List[T.Tuple[OptionKey, str]],
- T.List[T.Tuple[OptionKey, str]]]:
- global_options = []
- project_options = []
- perproject_global_options = []
+ def parse_D_arguments(self, D: T.List[str]) -> T.List[T.Tuple[OptionKey, str]]:
+ options = []
for setval in D:
keystr, valstr = setval.split('=', 1)
key = OptionKey.from_string(keystr)
- valuetuple = (key, valstr)
- if self.is_project_option(key):
- project_options.append(valuetuple)
- elif key.subproject is None:
- global_options.append(valuetuple)
- else:
- perproject_global_options.append(valuetuple)
- return (global_options, perproject_global_options, project_options)
+ options.append((key, valstr))
+ return options
def prefix_split_options(self, coll: OptionDict) -> T.Tuple[T.Optional[str], OptionDict]:
prefix = None
@@ -1305,15 +1311,15 @@ class OptionStore:
if not self.is_cross and key.is_for_build():
continue
if key.subproject:
- # do apply project() default_options for subprojects here, because
- # they have low priority
- self.pending_options[key] = valstr
+ # Subproject options from toplevel project() have low priority
+ # and will be processed when the subproject is found
+ self.pending_subproject_options[key] = valstr
else:
# Setting a project option with default_options
# should arguably be a hard error; the default
# value of project option should be set in the option
# file, not in the project call.
- self.set_option_maybe_root(key, valstr, True)
+ self.set_user_option(key, valstr, True)
# ignore subprojects for now for machine file and command line
# options; they are applied later
@@ -1323,25 +1329,18 @@ class OptionStore:
if not self.is_cross and key.is_for_build():
continue
if not key.subproject:
- self.set_option_maybe_root(key, valstr, True)
+ self.set_user_option(key, valstr, True)
for key, valstr in cmd_line_options.items():
# Due to backwards compatibility we ignore all build-machine options
# when building natively.
if not self.is_cross and key.is_for_build():
continue
if not key.subproject:
- self.set_option_maybe_root(key, valstr, True)
+ self.set_user_option(key, valstr, True)
- def accept_as_pending_option(self, key: OptionKey, known_subprojects: T.Optional[T.Container[str]] = None,
- first_invocation: bool = False) -> bool:
- # Fail on unknown options that we can know must exist at this point in time.
- # Subproject and compiler options are resolved later.
- #
+ def accept_as_pending_option(self, key: OptionKey, first_invocation: bool = False) -> bool:
# Some base options (sanitizers etc) might get added later.
# Permitting them all is not strictly correct.
- if key.subproject:
- if known_subprojects is None or key.subproject not in known_subprojects:
- return True
if self.is_compiler_option(key):
return True
if first_invocation and self.is_backend_option(key):
@@ -1365,23 +1364,40 @@ class OptionStore:
project_default_options: OptionDict,
cmd_line_options: OptionDict,
machine_file_options: OptionDict) -> None:
- # pick up pending per-project settings from the toplevel project() invocation
- options = {k: v for k, v in self.pending_options.items() if k.subproject == subproject}
- # apply project() and subproject() default_options
- for key, valstr in itertools.chain(project_default_options.items(), spcall_default_options.items()):
+ options: OptionDict = {}
+
+ # project() default_options
+ for key, valstr in project_default_options.items():
+ if key.subproject == subproject:
+ without_subp = key.evolve(subproject=None)
+ raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"')
+
if key.subproject is None:
key = key.evolve(subproject=subproject)
- elif key.subproject == subproject:
+ options[key] = valstr
+
+ # augments from the toplevel project() default_options
+ for key, valstr in self.pending_subproject_options.items():
+ if key.subproject == subproject:
+ options[key] = valstr
+
+ # subproject() default_options
+ for key, valstr in spcall_default_options.items():
+ if key.subproject == subproject:
without_subp = key.evolve(subproject=None)
raise MesonException(f'subproject name not needed in default_options; use "{without_subp}" instead of "{key}"')
+
+ if key.subproject is None:
+ key = key.evolve(subproject=subproject)
options[key] = valstr
# then global settings from machine file and command line
+ # **but not if they are toplevel project options**
for key, valstr in itertools.chain(machine_file_options.items(), cmd_line_options.items()):
- if key.subproject is None:
+ if key.subproject is None and not self.is_project_option(key.as_root()):
subp_key = key.evolve(subproject=subproject)
- self.pending_options.pop(subp_key, None)
+ self.pending_subproject_options.pop(subp_key, None)
options.pop(subp_key, None)
# then finally per project augments from machine file and command line
@@ -1391,12 +1407,21 @@ class OptionStore:
# merge everything that has been computed above, while giving self.augments priority
for key, valstr in options.items():
+ if key.subproject != subproject:
+ if key.subproject in self.subprojects and not self.option_has_value(key, valstr):
+ mlog.warning(f'option {key} is set in subproject {subproject} but has already been processed')
+ continue
+
+ # Subproject options from project() will be processed when the subproject is found
+ self.pending_subproject_options[key] = valstr
+ continue
+
+ self.pending_subproject_options.pop(key, None)
self.pending_options.pop(key, None)
- valstr = self.augments.pop(key, valstr)
- if key in self.project_options:
- self.set_option(key, valstr, True)
- else:
- self.augments[key] = valstr
+ if key not in self.augments:
+ self.set_user_option(key, valstr, True)
+
+ self.subprojects.add(subproject)
def update_project_options(self, project_options: MutableKeyedOptionDictType, subproject: SubProject) -> None:
for key, value in project_options.items():
diff --git a/unittests/linuxliketests.py b/unittests/linuxliketests.py
index 376c395..c25449c 100644
--- a/unittests/linuxliketests.py
+++ b/unittests/linuxliketests.py
@@ -446,6 +446,24 @@ class LinuxlikeTests(BasePlatformTests):
libdir = self.installdir + os.path.join(self.prefix, self.libdir)
self._test_soname_impl(libdir, True)
+ @skip_if_not_base_option('b_sanitize')
+ def test_c_link_args_and_env(self):
+ '''
+ Test that the CFLAGS / CXXFLAGS environment variables are
+ included on the linker command line when c_link_args is
+ set but c_args is not.
+ '''
+ if is_cygwin():
+ raise SkipTest('asan not available on Cygwin')
+ if is_openbsd():
+ raise SkipTest('-fsanitize=address is not supported on OpenBSD')
+
+ testdir = os.path.join(self.common_test_dir, '1 trivial')
+ env = {'CFLAGS': '-fsanitize=address'}
+ self.init(testdir, extra_args=['-Dc_link_args="-L/usr/lib"'],
+ override_envvars=env)
+ self.build()
+
def test_compiler_check_flags_order(self):
'''
Test that compiler check flags override all other flags. This can't be
diff --git a/unittests/optiontests.py b/unittests/optiontests.py
index 3e87b5c..3fb44b7 100644
--- a/unittests/optiontests.py
+++ b/unittests/optiontests.py
@@ -152,6 +152,30 @@ class OptionTests(unittest.TestCase):
self.assertEqual(optstore.get_value_for(sub_name, 'sub'), sub_value)
self.assertEqual(num_options(optstore), 2)
+ def test_project_yielding_initialize(self):
+ optstore = OptionStore(False)
+ name = 'someoption'
+ top_value = 'top'
+ sub_value = 'sub'
+ subp = 'subp'
+ cmd_line = { OptionKey(name): top_value, OptionKey(name, subp): sub_value }
+
+ vo = UserStringOption(name, 'A top level option', 'default1')
+ optstore.add_project_option(OptionKey(name, ''), vo)
+ optstore.initialize_from_top_level_project_call({}, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(num_options(optstore), 1)
+
+ vo2 = UserStringOption(name, 'A subproject option', 'default2', True)
+ optstore.add_project_option(OptionKey(name, 'subp'), vo2)
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(optstore.get_value_for(name, subp), top_value)
+ self.assertEqual(num_options(optstore), 2)
+
+ optstore.initialize_from_subproject_call(subp, {}, {}, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, ''), top_value)
+ self.assertEqual(optstore.get_value_for(name, subp), top_value)
+
def test_augments(self):
optstore = OptionStore(False)
name = 'cpp_std'
@@ -227,18 +251,32 @@ class OptionTests(unittest.TestCase):
def test_backend_option_pending(self):
optstore = OptionStore(False)
# backend options are known after the first invocation
- self.assertTrue(optstore.accept_as_pending_option(OptionKey('backend_whatever'), set(), True))
- self.assertFalse(optstore.accept_as_pending_option(OptionKey('backend_whatever'), set(), False))
+ self.assertTrue(optstore.accept_as_pending_option(OptionKey('backend_whatever'), True))
+ self.assertFalse(optstore.accept_as_pending_option(OptionKey('backend_whatever'), False))
def test_reconfigure_b_nonexistent(self):
optstore = OptionStore(False)
optstore.set_from_configure_command(['b_ndebug=true'], [])
- def test_subproject_nonexistent(self):
+ def test_subproject_proj_opt_with_same_name(self):
+ name = 'tests'
+ subp = 'subp'
+
optstore = OptionStore(False)
- subprojects = {'found'}
- self.assertFalse(optstore.accept_as_pending_option(OptionKey('foo', subproject='found'), subprojects))
- self.assertTrue(optstore.accept_as_pending_option(OptionKey('foo', subproject='whatisthis'), subprojects))
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserBooleanOption(name, 'Tests', False)
+ optstore.add_project_option(OptionKey(name, subproject=''), o)
+ o = UserBooleanOption(name, 'Tests', True)
+ optstore.add_project_option(OptionKey(name, subproject=subp), o)
+
+ cmd_line = {OptionKey(name): True}
+ spcall = {OptionKey(name): False}
+
+ optstore.initialize_from_top_level_project_call({}, cmd_line, {})
+ optstore.initialize_from_subproject_call(subp, spcall, {}, cmd_line, {})
+ self.assertEqual(optstore.get_value_for(name, ''), True)
+ self.assertEqual(optstore.get_value_for(name, subp), False)
def test_subproject_cmdline_override_global(self):
name = 'optimization'
@@ -260,6 +298,26 @@ class OptionTests(unittest.TestCase):
self.assertEqual(optstore.get_value_for(name, subp), new_value)
self.assertEqual(optstore.get_value_for(name), new_value)
+ def test_subproject_parent_override_subp(self):
+ name = 'optimization'
+ subp = 'subp'
+ default_value = 's'
+ subp_value = '0'
+
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserComboOption(name, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])
+ optstore.add_system_option(name, o)
+
+ toplevel_proj_default = {OptionKey(name, subproject=subp): subp_value, OptionKey(name): default_value}
+ subp_proj_default = {OptionKey(name): '3'}
+
+ optstore.initialize_from_top_level_project_call(toplevel_proj_default, {}, {})
+ optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, {}, {})
+ self.assertEqual(optstore.get_value_for(name, subp), subp_value)
+ self.assertEqual(optstore.get_value_for(name), default_value)
+
def test_subproject_cmdline_override_global_and_augment(self):
name = 'optimization'
subp = 'subp'
@@ -300,7 +358,33 @@ class OptionTests(unittest.TestCase):
optstore.initialize_from_top_level_project_call(toplevel_proj_default, cmd_line, {})
optstore.initialize_from_subproject_call(subp, {}, subp_proj_default, cmd_line, {})
self.assertEqual(optstore.get_value_for(name, subp), subp_value)
- self.assertEqual(optstore.get_value_for(name), toplevel_value)
+ self.assertEqual(optstore.get_value_for(name, ''), toplevel_value)
+
+ def test_subproject_buildtype(self):
+ subp = 'subp'
+ main1 = {OptionKey('buildtype'): 'release'}
+ main2 = {OptionKey('optimization'): '3', OptionKey('debug'): 'false'}
+ sub1 = {OptionKey('buildtype'): 'debug'}
+ sub2 = {OptionKey('optimization'): '0', OptionKey('debug'): 'true'}
+
+ for mainopt, subopt in ((main1, sub1),
+ (main2, sub2),
+ ({**main1, **main2}, {**sub1, **sub2})):
+ optstore = OptionStore(False)
+ prefix = UserStringOption('prefix', 'This is needed by OptionStore', '/usr')
+ optstore.add_system_option('prefix', prefix)
+ o = UserComboOption('buildtype', 'Build type to use', 'debug', choices=['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom'])
+ optstore.add_system_option(o.name, o)
+ o = UserComboOption('optimization', 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])
+ optstore.add_system_option(o.name, o)
+ o = UserBooleanOption('debug', 'Enable debug symbols and other information', True)
+ optstore.add_system_option(o.name, o)
+
+ optstore.initialize_from_top_level_project_call(mainopt, {}, {})
+ optstore.initialize_from_subproject_call(subp, {}, subopt, {}, {})
+ self.assertEqual(optstore.get_value_for('buildtype', subp), 'debug')
+ self.assertEqual(optstore.get_value_for('optimization', subp), '0')
+ self.assertEqual(optstore.get_value_for('debug', subp), True)
def test_deprecated_nonstring_value(self):
# TODO: add a lot more deprecated option tests