-rw-r--r-- | .travis.yml | 4
-rw-r--r-- | docs/markdown/Reference-manual.md | 12
-rw-r--r-- | mesonbuild/backend/ninjabackend.py | 10
-rw-r--r-- | mesonbuild/backend/vs2010backend.py | 8
-rw-r--r-- | mesonbuild/build.py | 41
-rw-r--r-- | mesonbuild/coredata.py | 2
-rw-r--r-- | mesonbuild/dependencies.py | 43
-rw-r--r-- | mesonbuild/interpreter.py | 78
-rw-r--r-- | mesonbuild/mesonlib.py | 8
-rw-r--r-- | mesonbuild/mintro.py | 6
-rwxr-xr-x | run_project_tests.py | 10
-rwxr-xr-x | run_tests.py | 8
-rwxr-xr-x | run_unittests.py | 12
-rw-r--r-- | test cases/frameworks/14 doxygen/installed_files.txt | 4
-rw-r--r-- | test cases/linuxlike/1 pkg-config/meson.build | 1
-rw-r--r-- | test cases/linuxlike/3 linker script/bob.map.in | 6
-rw-r--r-- | test cases/linuxlike/3 linker script/copy.py | 5
-rw-r--r-- | test cases/linuxlike/3 linker script/meson.build | 48
-rw-r--r-- | test cases/linuxlike/3 linker script/sub/foo.map | 6
-rw-r--r-- | test cases/linuxlike/3 linker script/sub/meson.build | 6
-rw-r--r-- | test cases/linuxlike/5 dependency versions/meson.build | 46
21 files changed, 271 insertions, 93 deletions
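
The headline change in the diffs below is that `link_depends` now accepts strings, `files()` objects, and the outputs of `configure_file()` or `custom_target()`; the new `BuildTarget.process_link_depends()` converts all of them to `File` objects so the backends can track them uniformly. A minimal sketch of the new forms, condensed from the updated linker-script test case further down (the `bob.c`/`bob.map` names are taken from that test, not new API):

```
# Plain string and files() object: both end up as File entries in
# link_depends, so editing bob.map triggers a re-link but not a re-compile.
mapfile = files('bob.map')
vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), 'bob.map')
l = shared_library('bob-files', 'bob.c', link_args : vflag, link_depends : mapfile)

# The output of a custom_target is accepted as well; the generated map file
# is tracked via its get_outputs() instead of a source-tree path.
m = custom_target('bob-ct.map',
  command : [find_program('python3'), '@INPUT0@', '@INPUT1@', 'bob-ct.map'],
  input : ['copy.py', 'bob.map'],
  output : 'bob-ct.map')
l = shared_library('bob-ct', ['bob.c', m],
  link_args : '-Wl,--version-script,@0@'.format(m.full_path()),
  link_depends : m)
```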
diff --git a/.travis.yml b/.travis.yml index fbb11ac..5f4318d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,13 +31,13 @@ matrix: before_install: - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install ninja python3; fi - - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull jpakkane/mesonci:yakkety; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker pull jpakkane/mesonci:zesty; fi # We need to copy the current checkout inside the Docker container, # because it has the MR id to be tested checked out. script: - - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM jpakkane/mesonci:yakkety > Dockerfile; fi + - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo FROM jpakkane/mesonci:zesty > Dockerfile; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then echo ADD . /root >> Dockerfile; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker build -t withgit .; fi - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then docker run withgit /bin/sh -c "cd /root && TRAVIS=true CC=$CC CXX=$CXX OBJC=$CC OBJCXX=$CXX ./run_tests.py -- $MESON_ARGS"; fi diff --git a/docs/markdown/Reference-manual.md b/docs/markdown/Reference-manual.md index 276c9c9..8d690de 100644 --- a/docs/markdown/Reference-manual.md +++ b/docs/markdown/Reference-manual.md @@ -233,7 +233,7 @@ Executable supports the following keyword arguments. Note that just like the pos - `<languagename>_pch` precompiled header file to use for the given language - `<languagename>_args` compiler flags to use for the given language; eg: `cpp_args` for C++ - `link_args` flags to use during linking. You can use UNIX-style flags here for all platforms. -- `link_depends` an extra file in the source tree that the link step depends on such as a symbol visibility map. The purpose is to automatically trigger a re-link (but not a re-compile) of the target when this file changes. +- `link_depends` strings, files, or custom targets the link step depends on such as a symbol visibility map. The purpose is to automatically trigger a re-link (but not a re-compile) of the target when this file changes. - `include_directories` one or more objects created with the `include_directories` function - `dependencies` one or more objects created with [`dependency`](#dependency) or [`find_library`](#compiler-object) (for external deps) or [`declare_dependency`](#declare_dependency) (for deps built by the project) - `gui_app` when set to true flags this target as a GUI application on platforms where this makes a difference (e.g. Windows) @@ -266,6 +266,16 @@ Meson will also autodetect scripts with a shebang line and run them with the exe `program_name2` and later positional arguments are used as fallback strings to search for. This is meant to be used for cases where the program may have many alternative names, such as `foo` and `foo.py`. The function will check for the arguments one by one and the first one that is found is returned. Meson versions earlier than 0.37.0 only accept one argument. +If you need to check for a program in a non-standard location, you can just pass an absolute path to `find_program`, e.g. +``` +setcap = find_program('setcap', '/usr/sbin/setcap', '/sbin/setcap', required : false) +``` + +It is also possible to pass an array to `find_program` in case you need to construct the set of paths to search on the fly: +``` +setcap = find_program(['setcap', '/usr/sbin/setcap', '/sbin/setcap'], required : false) +``` + If none of the programs are found, Meson will abort. 
You can tell it not to by setting the keyword argument `required` to false, and then use the `.found()` method on the returned object to check whether it was found or not. The returned object also has methods that are documented in the [object methods section](#external-program-object) below. diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index ac660cf..797fb64 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -2295,8 +2295,8 @@ rule FORTRAN_DEP_HACK # current compiler. commands = commands.to_native() dep_targets = [self.get_dependency_filename(t) for t in dependencies] - dep_targets += [os.path.join(self.environment.source_dir, - target.subdir, t) for t in target.link_depends] + dep_targets.extend([self.get_dependency_filename(t) + for t in target.link_depends]) elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list) elem.add_dep(dep_targets + custom_target_libraries) elem.add_item('LINK_ARGS', commands) @@ -2314,6 +2314,12 @@ rule FORTRAN_DEP_HACK def get_dependency_filename(self, t): if isinstance(t, build.SharedLibrary): return os.path.join(self.get_target_private_dir(t), self.get_target_filename(t) + '.symbols') + elif isinstance(t, mesonlib.File): + if t.is_built: + return t.relative_name() + else: + return t.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir()) return self.get_target_filename(t) def generate_shlib_aliases(self, target, outdir): diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 533edf0..f97ab87 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -379,7 +379,7 @@ class Vs2010Backend(backends.Backend): pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = self.environment.coredata.test_guid + guidelem.text = '{%s}' % self.environment.coredata.test_guid kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') @@ -636,7 +636,7 @@ class Vs2010Backend(backends.Backend): # Globals globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = guid + guidelem.text = '{%s}' % guid kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' ns = ET.SubElement(globalgroup, 'RootNamespace') @@ -1050,7 +1050,7 @@ class Vs2010Backend(backends.Backend): pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = self.environment.coredata.test_guid + guidelem.text = '{%s}' % self.environment.coredata.test_guid kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') @@ -1130,7 +1130,7 @@ if %%errorlevel%% neq 0 goto :VCEnd''' pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') guidelem = ET.SubElement(globalgroup, 'ProjectGuid') - guidelem.text = self.environment.coredata.test_guid + guidelem.text = '{%s}' % self.environment.coredata.test_guid kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 1f1018f..0d58394 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -503,6 +503,34 
@@ class BuildTarget(Target): assert(len(self.compilers) == 1) return + def process_link_depends(self, sources, environment): + """Process the link_depends keyword argument. + + This is designed to handle strings, Files, and the output of Custom + Targets. Notably it doesn't handle generator() returned objects, since + adding them as a link depends would inherently cause them to be + generated twice, since the output needs to be passed to the ld_args and + link_depends. + """ + if not isinstance(sources, list): + sources = [sources] + for s in sources: + if hasattr(s, 'held_object'): + s = s.held_object + + if isinstance(s, File): + self.link_depends.append(s) + elif isinstance(s, str): + self.link_depends.append( + File.from_source_file(environment.source_dir, self.subdir, s)) + elif hasattr(s, 'get_outputs'): + self.link_depends.extend( + [File.from_built_file(s.subdir, p) for p in s.get_outputs()]) + else: + raise InvalidArguments( + 'Link_depends arguments must be strings, Files, ' + 'or a Custom Target, or lists thereof.') + def get_original_kwargs(self): return self.kwargs @@ -616,12 +644,7 @@ class BuildTarget(Target): for i in self.link_args: if not isinstance(i, str): raise InvalidArguments('Link_args arguments must be strings.') - self.link_depends = kwargs.get('link_depends', []) - if not isinstance(self.link_depends, list): - self.link_depends = [self.link_depends] - for i in self.link_depends: - if not isinstance(i, str): - raise InvalidArguments('Link_depends arguments must be strings.') + self.process_link_depends(kwargs.get('link_depends', []), environment) # Target-specific include dirs must be added BEFORE include dirs from # internal deps (added inside self.add_deps()) to override them. inclist = kwargs.get('include_directories', []) @@ -1264,13 +1287,11 @@ class SharedLibrary(BuildTarget): self.vs_module_defs = File.from_absolute_file(path) else: self.vs_module_defs = File.from_source_file(environment.source_dir, self.subdir, path) - # link_depends can be an absolute path or relative to self.subdir - self.link_depends.append(path) + self.link_depends.append(self.vs_module_defs) elif isinstance(path, File): # When passing a generated file. self.vs_module_defs = path - # link_depends can be an absolute path or relative to self.subdir - self.link_depends.append(path.absolute_path(environment.source_dir, environment.build_dir)) + self.link_depends.append(path) else: raise InvalidArguments( 'Shared library vs_module_defs must be either a string, ' diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 2dd57a9..6c091c8 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -160,7 +160,7 @@ class CoreData: self.wrap_mode = options.wrap_mode self.compilers = OrderedDict() self.cross_compilers = OrderedDict() - self.deps = {} + self.deps = OrderedDict() self.modules = {} # Only to print a warning if it changes between Meson invocations. self.pkgconf_envvar = os.environ.get('PKG_CONFIG_PATH', '') diff --git a/mesonbuild/dependencies.py b/mesonbuild/dependencies.py index 37e2cbd..d9a7c9c 100644 --- a/mesonbuild/dependencies.py +++ b/mesonbuild/dependencies.py @@ -27,9 +27,10 @@ import subprocess import sysconfig from enum import Enum from collections import OrderedDict -from . mesonlib import MesonException, version_compare, version_compare_many, Popen_safe from . import mlog from . 
import mesonlib +from .mesonlib import Popen_safe, flatten +from .mesonlib import MesonException, version_compare, version_compare_many from .environment import detect_cpu_family, for_windows class DependencyException(MesonException): @@ -103,6 +104,7 @@ class InternalDependency(Dependency): def __init__(self, version, incdirs, compile_args, link_args, libraries, sources, ext_deps): super().__init__('internal', {}) self.version = version + self.is_found = True self.include_directories = incdirs self.compile_args = compile_args self.link_args = link_args @@ -127,6 +129,7 @@ class PkgConfigDependency(Dependency): def __init__(self, name, environment, kwargs): Dependency.__init__(self, 'pkgconfig', kwargs) self.is_libtool = False + self.version_reqs = kwargs.get('version', None) self.required = kwargs.get('required', True) self.static = kwargs.get('static', False) self.silent = kwargs.get('silent', False) @@ -187,7 +190,6 @@ class PkgConfigDependency(Dependency): ''.format(self.type_string, name)) return found_msg = [self.type_string + ' dependency', mlog.bold(name), 'found:'] - self.version_reqs = kwargs.get('version', None) if self.version_reqs is None: self.is_found = True else: @@ -1635,6 +1637,7 @@ class LLVMDependency(Dependency): llvmconfig = None _llvmconfig_found = False __best_found = None + __cpp_blacklist = {'-DNDEBUG'} def __init__(self, environment, kwargs): super().__init__('llvm-config', kwargs) @@ -1651,7 +1654,7 @@ class LLVMDependency(Dependency): self.check_llvmconfig(req_version) if not self._llvmconfig_found: if self.__best_found is not None: - mlog.log('found {!r} but need:'.format(self.version), + mlog.log('found {!r} but need:'.format(self.__best_found), req_version) else: mlog.log("No llvm-config found; can't detect dependency") @@ -1680,7 +1683,7 @@ class LLVMDependency(Dependency): p, out = Popen_safe([self.llvmconfig, '--cppflags'])[:2] if p.returncode != 0: raise DependencyException('Could not generate includedir for LLVM.') - self.cargs = shlex.split(out) + self.cargs = list(mesonlib.OrderedSet(shlex.split(out)).difference(self.__cpp_blacklist)) p, out = Popen_safe([self.llvmconfig, '--components'])[:2] if p.returncode != 0: @@ -1742,21 +1745,23 @@ class LLVMDependency(Dependency): return True -def get_dep_identifier(name, kwargs): - elements = [name] - modlist = kwargs.get('modules', []) - if isinstance(modlist, str): - modlist = [modlist] - for module in modlist: - elements.append(module) - # We use a tuple because we need a non-mutable structure to use as the key - # of a dictionary and a string has potential for name collisions - identifier = tuple(elements) - identifier += ('main', kwargs.get('main', False)) - identifier += ('static', kwargs.get('static', False)) - if 'fallback' in kwargs: - f = kwargs.get('fallback') - identifier += ('fallback', f[0], f[1]) +def get_dep_identifier(name, kwargs, want_cross): + # Need immutable objects since the identifier will be used as a dict key + version_reqs = flatten(kwargs.get('version', [])) + if isinstance(version_reqs, list): + version_reqs = frozenset(version_reqs) + identifier = (name, version_reqs, want_cross) + for key, value in kwargs.items(): + # 'version' is embedded above as the second element for easy access + # 'native' is handled above with `want_cross` + # 'required' is irrelevant for caching; the caller handles it separately + # 'fallback' subprojects cannot be cached -- they must be initialized + if key in ('version', 'native', 'required', 'fallback',): + continue + # All keyword arguments are 
strings, ints, or lists (or lists of lists) + if isinstance(value, list): + value = frozenset(flatten(value)) + identifier += (key, value) return identifier def find_external_dependency(name, environment, kwargs): diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 80d482e..5df26cc 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -23,7 +23,8 @@ from . import compilers from .wrap import wrap, WrapMode from . import mesonlib from .mesonlib import FileMode, Popen_safe, get_meson_script -from .dependencies import InternalDependency, Dependency, ExternalProgram +from .dependencies import ExternalProgram +from .dependencies import InternalDependency, Dependency, DependencyException from .interpreterbase import InterpreterBase from .interpreterbase import check_stringlist, noPosargs, noKwargs, stringArgs from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode @@ -1852,13 +1853,7 @@ class Interpreter(InterpreterBase): def func_find_library(self, node, args, kwargs): mlog.log(mlog.red('DEPRECATION:'), 'find_library() is removed, use the corresponding method in compiler object instead.') - def func_dependency(self, node, args, kwargs): - self.validate_arguments(args, 1, [str]) - name = args[0] - if '<' in name or '>' in name or '=' in name: - raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify' - 'version\n requirements use the \'version\' keyword argument instead.') - identifier = dependencies.get_dep_identifier(name, kwargs) + def _find_cached_dep(self, name, kwargs): # Check if we want this as a cross-dep or a native-dep # FIXME: Not all dependencies support such a distinction right now, # and we repeat this check inside dependencies that do. We need to @@ -1868,60 +1863,79 @@ class Interpreter(InterpreterBase): want_cross = not kwargs['native'] else: want_cross = is_cross - # Check if we've already searched for and found this dep + identifier = dependencies.get_dep_identifier(name, kwargs, want_cross) cached_dep = None + # Check if we've already searched for and found this dep if identifier in self.coredata.deps: cached_dep = self.coredata.deps[identifier] - if 'version' in kwargs: - wanted = kwargs['version'] - found = cached_dep.get_version() - if not cached_dep.found() or \ - not mesonlib.version_compare_many(found, wanted)[0]: - # Cached dep has the wrong version. Check if an external - # dependency or a fallback dependency provides it. - cached_dep = None - # Don't re-use cached dep if it wasn't required but this one is, - # so we properly go into fallback/error code paths - if kwargs.get('required', True) and not getattr(cached_dep, 'required', False): - cached_dep = None - # Don't reuse cached dep if one is a cross-dep and the other is a native dep - if not getattr(cached_dep, 'want_cross', is_cross) == want_cross: - cached_dep = None + else: + # Check if exactly the same dep with different version requirements + # was found already. + wanted = identifier[1] + for trial, trial_dep in self.coredata.deps.items(): + # trial[1], identifier[1] are the version requirements + if trial[0] != identifier[0] or trial[2:] != identifier[2:]: + continue + found = trial_dep.get_version() + if not wanted or mesonlib.version_compare_many(found, wanted)[0]: + # We either don't care about the version, or our + # version requirements matched the trial dep's version. 
+ cached_dep = trial_dep + break + return identifier, cached_dep + + def func_dependency(self, node, args, kwargs): + self.validate_arguments(args, 1, [str]) + name = args[0] + if '<' in name or '>' in name or '=' in name: + raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify' + 'version\n requirements use the \'version\' keyword argument instead.') + identifier, cached_dep = self._find_cached_dep(name, kwargs) if cached_dep: + if kwargs.get('required', True) and not cached_dep.found(): + m = 'Dependency {!r} was already checked and was not found' + raise DependencyException(m.format(name)) dep = cached_dep else: # We need to actually search for this dep exception = None dep = None - # If the fallback has already been configured (possibly by a higher level project) - # try to use it before using the native version + # If the dependency has already been configured, possibly by + # a higher level project, try to use it first. if 'fallback' in kwargs: dirname, varname = self.get_subproject_infos(kwargs) if dirname in self.subprojects: + subproject = self.subprojects[dirname] try: - dep = self.subprojects[dirname].get_variable_method([varname], {}) - dep = dep.held_object + # Never add fallback deps to self.coredata.deps + return subproject.get_variable_method([varname], {}) except KeyError: pass + # Search for it outside the project if not dep: try: dep = dependencies.find_external_dependency(name, self.environment, kwargs) - except dependencies.DependencyException as e: + except DependencyException as e: exception = e pass + # Search inside the projects list if not dep or not dep.found(): if 'fallback' in kwargs: fallback_dep = self.dependency_fallback(name, kwargs) if fallback_dep: + # Never add fallback deps to self.coredata.deps since we + # cannot cache them. They must always be evaluated else + # we won't actually read all the build files. return fallback_dep - if not dep: raise exception - self.coredata.deps[identifier] = dep + # Only store found-deps in the cache + if dep.found(): + self.coredata.deps[identifier] = dep return DependencyHolder(dep) def get_subproject_infos(self, kwargs): @@ -2230,7 +2244,7 @@ class Interpreter(InterpreterBase): absname = os.path.join(self.environment.get_source_dir(), buildfilename) if not os.path.isfile(absname): self.subdir = prev_subdir - raise InterpreterException('Nonexistent build def file %s.' % buildfilename) + raise InterpreterException('Non-existent build file {!r}'.format(buildfilename)) with open(absname, encoding='utf8') as f: code = f.read() assert(isinstance(code, str)) diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index fbd732a..54e8016 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -305,7 +305,7 @@ def version_compare(vstr1, vstr2, strict=False): return cmpop(varr1, varr2) def version_compare_many(vstr1, conditions): - if not isinstance(conditions, (list, tuple)): + if not isinstance(conditions, (list, tuple, frozenset)): conditions = [conditions] found = [] not_found = [] @@ -708,7 +708,8 @@ class OrderedSet(collections.MutableSet): def __repr__(self): # Don't print 'OrderedSet("")' for an empty set. 
if self.__container: - return 'OrderedSet("{}")'.format('", "'.join(self.__container.keys())) + return 'OrderedSet("{}")'.format( + '", "'.join(repr(e) for e in self.__container.keys())) return 'OrderedSet()' def add(self, value): @@ -721,3 +722,6 @@ class OrderedSet(collections.MutableSet): def update(self, iterable): for item in iterable: self.__container[item] = None + + def difference(self, set_): + return type(self)(e for e in self if e not in set_) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index c12c4dd..88ea16e 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -161,12 +161,12 @@ def list_buildsystem_files(coredata, builddata): print(json.dumps(filelist)) def list_deps(coredata): - result = {} - for d in coredata.deps.values(): + result = [] + for d in coredata.deps: if d.found(): args = {'compile_args': d.get_compile_args(), 'link_args': d.get_link_args()} - result[d.name] = args + result += [d.name, args] print(json.dumps(result)) def list_tests(testdata): diff --git a/run_project_tests.py b/run_project_tests.py index 1abc199..b8ef0e9 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -36,6 +36,7 @@ import concurrent.futures as conc import re from run_tests import get_backend_commands, get_backend_args_for_dir, Backend +from run_tests import ensure_backend_detects_changes class BuildStep(Enum): @@ -342,6 +343,10 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen return TestResult('Test that should have failed succeeded', BuildStep.configure, stdo, stde, mesonlog, gen_time) if returncode != 0: return TestResult('Generating the build system failed.', BuildStep.configure, stdo, stde, mesonlog, gen_time) + # Touch the meson.build file to force a regenerate so we can test that + # regeneration works before a build is run. + ensure_backend_detects_changes(backend) + os.utime(os.path.join(testdir, 'meson.build')) # Build with subprocess dir_args = get_backend_args_for_dir(backend, test_build_dir) build_start = time.time() @@ -356,9 +361,8 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, compiler, backen if pc.returncode != 0: return TestResult('Compiling source code failed.', BuildStep.build, stdo, stde, mesonlog, gen_time, build_time) # Touch the meson.build file to force a regenerate so we can test that - # regeneration works. We need to sleep for 0.2s because Ninja tracks mtimes - # at a low resolution: https://github.com/ninja-build/ninja/issues/371 - time.sleep(0.2) + # regeneration works after a build is complete. 
+ ensure_backend_detects_changes(backend) os.utime(os.path.join(testdir, 'meson.build')) test_start = time.time() # Test in-process diff --git a/run_tests.py b/run_tests.py index d0a67e8..a374839 100755 --- a/run_tests.py +++ b/run_tests.py @@ -16,6 +16,7 @@ import os import sys +import time import shutil import subprocess import platform @@ -98,6 +99,13 @@ def get_backend_commands(backend, debug=False): raise AssertionError('Unknown backend: {!r}'.format(backend)) return cmd, clean_cmd, test_cmd, install_cmd, uninstall_cmd +def ensure_backend_detects_changes(backend): + # This is needed to increase the difference between build.ninja's + # timestamp and the timestamp of whatever you changed due to a Ninja + # bug: https://github.com/ninja-build/ninja/issues/371 + if backend is Backend.ninja: + time.sleep(1) + def get_fake_options(prefix): import argparse opts = argparse.Namespace() diff --git a/run_unittests.py b/run_unittests.py index d285e6a..ed98cad 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -30,6 +30,7 @@ from mesonbuild.dependencies import PkgConfigDependency, ExternalProgram from run_tests import exe_suffix, get_fake_options, FakeEnvironment from run_tests import get_builddir_target_args, get_backend_commands, Backend +from run_tests import ensure_backend_detects_changes def get_soname(fname): @@ -355,13 +356,6 @@ class BasePlatformTests(unittest.TestCase): # XCode backend is untested with unit tests, help welcome! self.no_rebuild_stdout = 'UNKNOWN BACKEND {!r}'.format(self.backend.name) - def ensure_backend_detects_changes(self): - # This is needed to increase the difference between build.ninja's - # timestamp and the timestamp of whatever you changed due to a Ninja - # bug: https://github.com/ninja-build/ninja/issues/371 - if self.backend is Backend.ninja: - time.sleep(1) - def _print_meson_log(self): log = os.path.join(self.logdir, 'meson-log.txt') if not os.path.isfile(log): @@ -439,14 +433,14 @@ class BasePlatformTests(unittest.TestCase): def setconf(self, arg, will_build=True): if will_build: - self.ensure_backend_detects_changes() + ensure_backend_detects_changes(self.backend) self._run(self.mconf_command + [arg, self.builddir]) def wipe(self): shutil.rmtree(self.builddir) def utime(self, f): - self.ensure_backend_detects_changes() + ensure_backend_detects_changes(self.backend) os.utime(f) def get_compdb(self): diff --git a/test cases/frameworks/14 doxygen/installed_files.txt b/test cases/frameworks/14 doxygen/installed_files.txt index 72afb2e..e4f70e3 100644 --- a/test cases/frameworks/14 doxygen/installed_files.txt +++ b/test cases/frameworks/14 doxygen/installed_files.txt @@ -1,6 +1,4 @@ usr/share/doc/spede/html/annotated.html -usr/share/doc/spede/html/arrowdown.png -usr/share/doc/spede/html/arrowright.png usr/share/doc/spede/html/bc_s.png usr/share/doc/spede/html/bdwn.png usr/share/doc/spede/html/classComedy_1_1Comedian.html @@ -29,6 +27,8 @@ usr/share/doc/spede/html/functions_func.html usr/share/doc/spede/html/hierarchy.html usr/share/doc/spede/html/index.html usr/share/doc/spede/html/jquery.js +usr/share/doc/spede/html/menu.js +usr/share/doc/spede/html/menudata.js usr/share/doc/spede/html/namespaceComedy.html usr/share/doc/spede/html/namespacemembers.html usr/share/doc/spede/html/namespacemembers_func.html diff --git a/test cases/linuxlike/1 pkg-config/meson.build b/test cases/linuxlike/1 pkg-config/meson.build index 36a4545..7e43821 100644 --- a/test cases/linuxlike/1 pkg-config/meson.build +++ b/test cases/linuxlike/1 pkg-config/meson.build @@ -45,4 +45,3 
@@ inc = include_directories('incdir') r = cc.run(code, include_directories : inc, dependencies : zlibdep) assert(r.returncode() == 0, 'Running manual zlib test failed.') - diff --git a/test cases/linuxlike/3 linker script/bob.map.in b/test cases/linuxlike/3 linker script/bob.map.in new file mode 100644 index 0000000..f695e4a --- /dev/null +++ b/test cases/linuxlike/3 linker script/bob.map.in @@ -0,0 +1,6 @@ +V1_0_0 { + global: + "@in@"; + local: + *; +}; diff --git a/test cases/linuxlike/3 linker script/copy.py b/test cases/linuxlike/3 linker script/copy.py new file mode 100644 index 0000000..49e7a85 --- /dev/null +++ b/test cases/linuxlike/3 linker script/copy.py @@ -0,0 +1,5 @@ +import shutil +import sys + +if __name__ == '__main__': + shutil.copy(sys.argv[1], sys.argv[2]) diff --git a/test cases/linuxlike/3 linker script/meson.build b/test cases/linuxlike/3 linker script/meson.build index 30761c6..63765e7 100644 --- a/test cases/linuxlike/3 linker script/meson.build +++ b/test cases/linuxlike/3 linker script/meson.build @@ -1,8 +1,56 @@ project('linker script', 'c') +# Static map file mapfile = 'bob.map' vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile) l = shared_library('bob', 'bob.c', link_args : vflag, link_depends : mapfile) e = executable('prog', 'prog.c', link_with : l) test('core', e) + +# configure_file +conf = configuration_data() +conf.set('in', 'bobMcBob') +m = configure_file( + input : 'bob.map.in', + output : 'bob-conf.map', + configuration : conf, +) +vflag = '-Wl,--version-script,@0@'.format(m) + +l = shared_library('bob-conf', 'bob.c', link_args : vflag, link_depends : m) +e = executable('prog-conf', 'prog.c', link_with : l) +test('core', e) + +# custom_target +python = find_program('python3') +m = custom_target( + 'bob-ct.map', + command : [python, '@INPUT0@', '@INPUT1@', 'bob-ct.map'], + input : ['copy.py', 'bob.map'], + output : 'bob-ct.map', + depend_files : 'bob.map', +) +vflag = '-Wl,--version-script,@0@'.format(m.full_path()) + +l = shared_library('bob-ct', ['bob.c', m], link_args : vflag, link_depends : m) +e = executable('prog-ct', 'prog.c', link_with : l) +test('core', e) + +# File +mapfile = files('bob.map') +vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile[0]) + +l = shared_library('bob-files', 'bob.c', link_args : vflag, link_depends : mapfile) +e = executable('prog-files', 'prog.c', link_with : l) +test('core', e) + +subdir('sub') + +# With map file in subdir +mapfile = 'sub/foo.map' +vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile) + +l = shared_library('bar', 'bob.c', link_args : vflag, link_depends : mapfile) +e = executable('prog-bar', 'prog.c', link_with : l) +test('core', e) diff --git a/test cases/linuxlike/3 linker script/sub/foo.map b/test cases/linuxlike/3 linker script/sub/foo.map new file mode 100644 index 0000000..e07a780 --- /dev/null +++ b/test cases/linuxlike/3 linker script/sub/foo.map @@ -0,0 +1,6 @@ +V1_0_0 { + global: + "bobMcBob"; + local: + *; +}; diff --git a/test cases/linuxlike/3 linker script/sub/meson.build b/test cases/linuxlike/3 linker script/sub/meson.build new file mode 100644 index 0000000..93199f3 --- /dev/null +++ b/test cases/linuxlike/3 linker script/sub/meson.build @@ -0,0 +1,6 @@ +mapfile = 'foo.map' +vflag = '-Wl,--version-script,@0@/@1@'.format(meson.current_source_dir(), mapfile) + +l = shared_library('foo', '../bob.c', link_args : vflag, link_depends : mapfile) +e = executable('prog-foo', '../prog.c', link_with : 
l) +test('core', e) diff --git a/test cases/linuxlike/5 dependency versions/meson.build b/test cases/linuxlike/5 dependency versions/meson.build index 1b01cd6..5c2c262 100644 --- a/test cases/linuxlike/5 dependency versions/meson.build +++ b/test cases/linuxlike/5 dependency versions/meson.build @@ -21,10 +21,18 @@ if dependency('zlib', version : ['<=1.0', '>=9999', '=' + zlib.version()], requi error('zlib <=1.0 >=9999 should not have been found') endif +# Test that a versionless zlib is found after not finding an optional zlib dep with version reqs +zlibopt = dependency('zlib', required : false) +assert(zlibopt.found() == true, 'zlib not found') + # Test https://github.com/mesonbuild/meson/pull/610 dependency('somebrokenlib', version : '>=2.0', required : false) dependency('somebrokenlib', version : '>=1.0', required : false) +# Search for an external dependency that won't be found, but must later be +# found via fallbacks +somelibnotfound = dependency('somelib', required : false) +assert(somelibnotfound.found() == false, 'somelibnotfound was found?') # Find internal dependency without version somelibver = dependency('somelib', fallback : ['somelibnover', 'some_dep']) @@ -37,17 +45,51 @@ somelib = dependency('somelib', somelibver = dependency('somelib', version : '>= 0.3', fallback : ['somelibver', 'some_dep']) -# Find somelib again, but with a fallback that will fail +# Find somelib again, but with a fallback that will fail because subproject does not exist somelibfail = dependency('somelib', version : '>= 0.2', required : false, fallback : ['somelibfail', 'some_dep']) assert(somelibfail.found() == false, 'somelibfail found via wrong fallback') +# Find somelib again, but with a fallback that will fail because dependency does not exist +somefail_dep = dependency('somelib', + version : '>= 0.2', + required : false, + fallback : ['somelib', 'somefail_dep']) +assert(somefail_dep.found() == false, 'somefail_dep found via wrong fallback') -fakezlib_dep = dependency('zlib', +# Fallback should only be used if the primary was not found +fallbackzlib_dep = dependency('zlib', + fallback : ['somelib', 'fakezlib_dep']) +assert(fallbackzlib_dep.type_name() == 'pkgconfig', 'fallbackzlib_dep should be of type "pkgconfig", not ' + fallbackzlib_dep.type_name()) +# Check that the above dependency was pkgconfig because the fallback wasn't +# checked, not because the fallback didn't work +fakezlib_dep = dependency('fakezlib', fallback : ['somelib', 'fakezlib_dep']) assert(fakezlib_dep.type_name() == 'internal', 'fakezlib_dep should be of type "internal", not ' + fakezlib_dep.type_name()) +# Check that you can find a dependency by not specifying a version after not +# finding it by specifying a version. We add `static: true` here so that the +# previously cached zlib dependencies don't get checked. +dependency('zlib', static : true, version : '>=8000', required : false) +dependency('zlib', static : true) + +# Check that you can find a dependency by specifying a correct version after +# not finding it by specifying a wrong one. We add `method: pkg-config` here so that +# the previously cached zlib dependencies don't get checked. 
+bzip2 = dependency('zlib', method : 'pkg-config', version : '>=9000', required : false) +bzip2 = dependency('zlib', method : 'pkg-config', version : '>=1.0') + +if meson.is_cross_build() + # Test caching of native and cross dependencies + # https://github.com/mesonbuild/meson/issues/1736 + cross_prefix = dependency('zlib').get_pkgconfig_variable('prefix') + native_prefix = dependency('zlib', native : true).get_pkgconfig_variable('prefix') + assert(cross_prefix != '', 'cross zlib prefix is not defined') + assert(native_prefix != '', 'native zlib prefix is not defined') + assert(native_prefix != cross_prefix, 'native prefix == cross_prefix == ' + native_prefix) +endif + foreach d : ['sdl2', 'gnustep', 'wx', 'gl', 'python3', 'boost', 'gtest', 'gmock'] dep = dependency(d, required : false) if dep.found() |
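
For the dependency-caching changes in `interpreter.py` and `dependencies.py`: the new `_find_cached_dep()` reuses an already-found dependency when only the version requirement differs, fallback dependencies are never stored in `coredata.deps`, and only found dependencies are cached at all. A rough sketch of what that means at the meson.build level, assuming zlib is available via pkg-config as in the dependency-versions test above:

```
# First lookup runs pkg-config and stores the result under an identifier
# built from (name, version requirements, want_cross, remaining kwargs).
z1 = dependency('zlib')

# Same name, new version requirement: the cached entry is reused as long as
# its detected version satisfies '>=1.0', so pkg-config is not run again.
z2 = dependency('zlib', version : '>=1.0')

# A failed optional lookup is not cached, so it cannot shadow a later
# successful lookup of the same dependency.
dependency('zlib', version : '>=9999', required : false)
z3 = dependency('zlib')
assert(z3.found(), 'zlib should still be found after the failed versioned lookup')
```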