42 files changed, 1021 insertions, 205 deletions
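One user-visible change in this commit is that the version keyword of dependency() may now be a list of requirements, backed by a new version_compare_many() helper in mesonbuild/mesonlib.py (see the mesonlib.py hunk further down). A minimal usage sketch of that helper follows, assuming the mesonbuild package from this checkout is importable; the version strings are illustrative only:

    # Check one detected module version against several requirements at once.
    # Returns (all_satisfied, unmatched_requirements, matched_requirements),
    # mirroring the version_compare_many() added in the mesonlib.py hunk below.
    from mesonbuild.mesonlib import version_compare_many

    ok, not_found, found = version_compare_many('2.46.2', ['>=2.44', '>=2.48'])
    print(ok)         # False: one requirement is unsatisfied
    print(not_found)  # ['>=2.48']
    print(found)      # ['>=2.44']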
@@ -4,9 +4,11 @@ MesonĀ® is a project to create the best possible next-generation build system. -####Build status +#### Status -[](https://travis-ci.org/mesonbuild/meson) [](https://ci.appveyor.com/project/jpakkane/meson) +[](https://pypi.python.org/pypi/meson) +[](https://travis-ci.org/mesonbuild/meson) +[](https://ci.appveyor.com/project/jpakkane/meson) ####Dependencies @@ -38,6 +40,10 @@ executable run the following command: Note that the source checkout may not be `meson` because it would clash with the generated binary name. +This will zip all files inside the source checkout into the script +which includes hundreds of tests, so you might want to temporarily +remove those before running it. + ####Running Meson requires that you have a source directory and a build directory diff --git a/authors.txt b/authors.txt index 9bf3e33..03e8478 100644 --- a/authors.txt +++ b/authors.txt @@ -56,3 +56,4 @@ Aurelien Jarno Mark Schulte Paulo Antonio Alvarez Olexa Bilaniuk +Daniel Stone @@ -14,10 +14,21 @@ # See the License for the specific language governing permissions and # limitations under the License. -from mesonbuild import mesonmain -import sys, os +from mesonbuild import mlog, mesonmain +import sys, os, locale def main(): + # Warn if the locale is not UTF-8. This can cause various unfixable issues + # such as os.stat not being able to decode filenames with unicode in them. + # There is no way to reset both the preferred encoding and the filesystem + # encoding, so we can just warn about it. + e = locale.getpreferredencoding() + if e.upper() != 'UTF-8': + mlog.warning('You are using {!r} which is not a a Unicode-compatible ' + 'locale.'.format(e)) + mlog.warning('You might see errors if you use UTF-8 strings as ' + 'filenames, as strings, or as file contents.') + mlog.warning('Please switch to a UTF-8 locale for your platform.') # Always resolve the command path so Ninja can find it for regen, tests, etc. launcher = os.path.realpath(sys.argv[0]) return mesonmain.run(launcher, sys.argv[1:]) diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index c37ae2a..49b6008 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -279,13 +279,9 @@ class Backend(): result = [] targetdir = self.get_target_private_dir(extobj.target) # With unity builds, there's just one object that contains all the - # sources, so if we want all the objects, just return that. + # sources, and we only support extracting all the objects in this mode, + # so just return that. 
if self.environment.coredata.get_builtin_option('unity'): - if not extobj.unity_compatible: - # This should never happen - msg = 'BUG: Meson must not allow extracting single objects ' \ - 'in Unity builds' - raise AssertionError(msg) comp = get_compiler_for_source(extobj.target.compilers.values(), extobj.srclist[0]) # The unity object name uses the full absolute path of the source file diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 659a53d..71797ed 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -715,7 +715,7 @@ int dummy; meson_exe = self.environment.get_build_command() (base, ext) = os.path.splitext(meson_exe) test_exe = base + 'test' + ext - cmd = [sys.executable, test_exe] + cmd = [sys.executable, test_exe, '--no-rebuild'] if not self.environment.coredata.get_builtin_option('stdsplit'): cmd += ['--no-stdsplit'] if self.environment.coredata.get_builtin_option('errorlogs'): @@ -728,7 +728,7 @@ int dummy; # And then benchmarks. cmd = [sys.executable, test_exe, '--benchmark','--logbase', - 'benchmarklog', '--num-processes=1'] + 'benchmarklog', '--num-processes=1', '--no-rebuild'] elem = NinjaBuildElement(self.all_outputs, 'benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY']) elem.add_item('COMMAND', cmd) elem.add_item('DESC', 'Running benchmark suite.') @@ -1017,9 +1017,11 @@ int dummy; args += valac.get_werror_args() for d in target.get_external_deps(): if isinstance(d, dependencies.PkgConfigDependency): - if d.name == 'glib-2.0' and d.version_requirement is not None \ - and d.version_requirement.startswith(('>=', '==')): - args += ['--target-glib', d.version_requirement[2:]] + if d.name == 'glib-2.0' and d.version_reqs is not None: + for req in d.version_reqs: + if req.startswith(('>=', '==')): + args += ['--target-glib', req[2:]] + break args += ['--pkg', d.name] elif isinstance(d, dependencies.ExternalLibrary): args += d.get_lang_args('vala') @@ -2004,14 +2006,21 @@ rule FORTRAN_DEP_HACK def generate_shlib_aliases(self, target, outdir): basename = target.get_filename() aliases = target.get_aliaslist() - for alias in aliases: + for i, alias in enumerate(aliases): aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias) try: os.remove(aliasfile) except Exception: pass + # If both soversion and version are set and to different values, + # the .so symlink must point to the soversion symlink rather than the + # original file. + if i == 0 and len(aliases) > 1: + pointed_to_filename = aliases[1] + else: + pointed_to_filename = basename try: - os.symlink(basename, aliasfile) + os.symlink(pointed_to_filename, aliasfile) except NotImplementedError: mlog.debug("Library versioning disabled because symlinks are not supported.") except OSError: diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 98f05c2..462a55b 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -198,10 +198,11 @@ class ExtractedObjects(): ''' Holds a list of sources for which the objects must be extracted ''' - def __init__(self, target, srclist): + def __init__(self, target, srclist, is_unity): self.target = target self.srclist = srclist - self.check_unity_compatible() + if is_unity: + self.check_unity_compatible() def check_unity_compatible(self): # Figure out if the extracted object list is compatible with a Unity @@ -211,11 +212,9 @@ class ExtractedObjects(): # from each unified source file. 
# If the list of sources for which we want objects is the same as the # list of sources that go into each unified build, we're good. - self.unity_compatible = False srclist_set = set(self.srclist) # Objects for all the sources are required, so we're compatible if srclist_set == set(self.target.sources): - self.unity_compatible = True return # Check if the srclist is a subset (of the target's sources) that is # going to form a unified source file and a single object @@ -223,7 +222,6 @@ class ExtractedObjects(): self.target.sources) for srcs in compsrcs.values(): if srclist_set == set(srcs): - self.unity_compatible = True return msg = 'Single object files can not be extracted in Unity builds. ' \ 'You can only extract all the object files at once.' @@ -273,6 +271,7 @@ class BuildTarget(): self.subdir = subdir self.subproject = subproject # Can not be calculated from subdir as subproject dirname can be changed per project. self.is_cross = is_cross + self.is_unity = environment.coredata.get_builtin_option('unity') self.environment = environment self.sources = [] self.compilers = {} @@ -458,10 +457,10 @@ class BuildTarget(): if src not in self.sources: raise MesonException('Tried to extract unknown source %s.' % src) obj_src.append(src) - return ExtractedObjects(self, obj_src) + return ExtractedObjects(self, obj_src, self.is_unity) def extract_all_objects(self): - return ExtractedObjects(self, self.sources) + return ExtractedObjects(self, self.sources, self.is_unity) def get_all_link_deps(self): return self.get_transitive_link_deps() @@ -1070,12 +1069,10 @@ class SharedLibrary(BuildTarget): self.soversion = str(self.soversion) if not isinstance(self.soversion, str): raise InvalidArguments('Shared library soversion is not a string or integer.') - try: - int(self.soversion) - except ValueError: - raise InvalidArguments('Shared library soversion must be a valid integer') elif self.ltversion: # library version is defined, get the soversion from that + # We replicate what Autotools does here and take the first + # number of the version by default. self.soversion = self.ltversion.split('.')[0] # Visual Studio module-definitions file if 'vs_module_defs' in kwargs: diff --git a/mesonbuild/compilers.py b/mesonbuild/compilers.py index ced2b6f..2534a47 100644 --- a/mesonbuild/compilers.py +++ b/mesonbuild/compilers.py @@ -701,8 +701,28 @@ int main () {{ #endif return 0; }}''' - args = extra_args + self.get_compiler_check_args() - return self.compiles(templ.format(hname, symbol, prefix), env, args, dependencies) + return self.compiles(templ.format(hname, symbol, prefix), env, + extra_args, dependencies) + + @staticmethod + def _override_args(args, override): + ''' + Add @override to @args in such a way that arguments are overriden + correctly. + + We want the include directories to be added first (since they are + chosen left-to-right) and all other arguments later (since they + override previous arguments or add to a list that's chosen + right-to-left). 
+ ''' + before_args = [] + after_args = [] + for arg in override: + if arg.startswith(('-I', '/I')): + before_args.append(arg) + else: + after_args.append(arg) + return before_args + args + after_args def compiles(self, code, env, extra_args=None, dependencies=None): if extra_args is None: @@ -713,9 +733,10 @@ int main () {{ dependencies = [] elif not isinstance(dependencies, list): dependencies = [dependencies] + # Add compile flags needed by dependencies after converting to the + # native type of the selected compiler cargs = [a for d in dependencies for a in d.get_compile_args()] - # Convert flags to the native type of the selected compiler - args = self.unix_link_flags_to_native(cargs + extra_args) + args = self.unix_link_flags_to_native(cargs) # Read c_args/cpp_args/etc from the cross-info file (if needed) args += self.get_cross_extra_flags(env, compile=True, link=False) # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env @@ -723,6 +744,11 @@ int main () {{ args += env.coredata.external_args[self.language] # We only want to compile; not link args += self.get_compile_only_args() + # Append extra_args to the compiler check args such that it overrides + extra_args = self._override_args(self.get_compiler_check_args(), extra_args) + extra_args = self.unix_link_flags_to_native(extra_args) + # Append both to the compiler args such that they override them + args = self._override_args(args, extra_args) with self.compile(code, args) as p: return p.returncode == 0 @@ -736,17 +762,24 @@ int main () {{ dependencies = [] elif not isinstance(dependencies, list): dependencies = [dependencies] + # Add compile and link flags needed by dependencies after converting to + # the native type of the selected compiler cargs = [a for d in dependencies for a in d.get_compile_args()] link_args = [a for d in dependencies for a in d.get_link_args()] - # Convert flags to the native type of the selected compiler - args = self.unix_link_flags_to_native(cargs + link_args + extra_args) + args = self.unix_link_flags_to_native(cargs + link_args) # Select a CRT if needed since we're linking args += self.get_linker_debug_crt_args() - # Read c_args/c_link_args/cpp_args/cpp_link_args/etc from the cross-info file (if needed) + # Read c_args/c_link_args/cpp_args/cpp_link_args/etc from the + # cross-info file (if needed) args += self.get_cross_extra_flags(env, compile=True, link=True) # Add LDFLAGS from the env. 
We assume that the user has ensured these # are compiler-specific args += env.coredata.external_link_args[self.language] + # Append extra_args to the compiler check args such that it overrides + extra_args = self._override_args(self.get_compiler_check_args(), extra_args) + extra_args = self.unix_link_flags_to_native(extra_args) + # Append both to the compiler args such that they override them + args = self._override_args(args, extra_args) return self.compile(code, args) def links(self, code, env, extra_args=None, dependencies=None): @@ -795,7 +828,6 @@ int main(int argc, char **argv) {{ %s int temparray[%d-sizeof(%s)]; ''' - args = extra_args + self.get_compiler_check_args() if not self.compiles(element_exists_templ.format(prefix, element), env, args, dependencies): return -1 for i in range(1, 1024): @@ -844,7 +876,6 @@ struct tmp { int testarray[%d-offsetof(struct tmp, target)]; ''' - args = extra_args + self.get_compiler_check_args() if not self.compiles(type_exists_templ.format(typename), env, args, dependencies): return -1 for i in range(1, 1024): @@ -980,14 +1011,14 @@ int main(int argc, char **argv) { head, main = self._no_prototype_templ() templ = head + stubs_fail + main - args = extra_args + self.get_compiler_check_args() - if self.links(templ.format(prefix, funcname), env, args, dependencies): + if self.links(templ.format(prefix, funcname), env, extra_args, dependencies): return True # Some functions like alloca() are defined as compiler built-ins which # are inlined by the compiler, so test for that instead. Built-ins are # special functions that ignore all includes and defines, so we just # directly try to link via main(). - return self.links('int main() {{ {0}; }}'.format('__builtin_' + funcname), env, args, dependencies) + return self.links('int main() {{ {0}; }}'.format('__builtin_' + funcname), + env, extra_args, dependencies) def has_members(self, typename, membernames, prefix, env, extra_args=None, dependencies=None): if extra_args is None: @@ -1071,8 +1102,8 @@ class CPPCompiler(CCompiler): #include <{0}> using {1}; int main () {{ return 0; }}''' - args = extra_args + self.get_compiler_check_args() - return self.compiles(templ.format(hname, symbol, prefix), env, args, dependencies) + return self.compiles(templ.format(hname, symbol, prefix), env, + extra_args, dependencies) class ObjCCompiler(CCompiler): def __init__(self, exelist, version, is_cross, exe_wrap): @@ -1846,11 +1877,7 @@ class VisualStudioCCompiler(CCompiler): } def get_option_link_args(self, options): - # FIXME: See GnuCCompiler.get_option_link_args - if 'c_winlibs' in options: - return options['c_winlibs'].value[:] - else: - return msvc_winlibs[:] + return options['c_winlibs'].value[:] def unix_link_flags_to_native(self, args): result = [] @@ -1955,11 +1982,7 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): return args def get_option_link_args(self, options): - # FIXME: See GnuCCompiler.get_option_link_args - if 'cpp_winlibs' in options: - return options['cpp_winlibs'].value[:] - else: - return msvc_winlibs[:] + return options['cpp_winlibs'].value[:] GCC_STANDARD = 0 GCC_OSX = 1 @@ -2071,17 +2094,7 @@ class GnuCCompiler(GnuCompiler, CCompiler): def get_option_link_args(self, options): if self.gcc_type == GCC_MINGW: - # FIXME: This check is needed because we currently pass - # cross-compiler options to the native compiler too and when - # cross-compiling from Windows to Linux, `options` will contain - # Linux-specific options which doesn't include `c_winlibs`. 
The - # proper fix is to allow cross-info files to specify compiler - # options and to maintain both cross and native compiler options in - # coredata: https://github.com/mesonbuild/meson/issues/1029 - if 'c_winlibs' in options: - return options['c_winlibs'].value[:] - else: - return gnu_winlibs[:] + return options['c_winlibs'].value[:] return [] class GnuCPPCompiler(GnuCompiler, CPPCompiler): @@ -2119,11 +2132,7 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): def get_option_link_args(self, options): if self.gcc_type == GCC_MINGW: - # FIXME: See GnuCCompiler.get_option_link_args - if 'cpp_winlibs' in options: - return options['cpp_winlibs'].value[:] - else: - return gnu_winlibs[:] + return options['cpp_winlibs'].value[:] return [] def get_compiler_check_args(self): diff --git a/mesonbuild/dependencies.py b/mesonbuild/dependencies.py index 38945b4..a092732 100644 --- a/mesonbuild/dependencies.py +++ b/mesonbuild/dependencies.py @@ -23,7 +23,7 @@ import re import os, stat, glob, subprocess, shutil import sysconfig from collections import OrderedDict -from . mesonlib import MesonException +from . mesonlib import MesonException, version_compare, version_compare_many from . import mlog from . import mesonlib from .environment import detect_cpu_family, for_windows @@ -135,22 +135,27 @@ class PkgConfigDependency(Dependency): self.modversion = 'none' return found_msg = ['%s dependency' % self.type_string, mlog.bold(name), 'found:'] - self.version_requirement = kwargs.get('version', None) - if self.version_requirement is None: + self.version_reqs = kwargs.get('version', None) + if self.version_reqs is None: self.is_found = True else: - if not isinstance(self.version_requirement, str): - raise DependencyException('Version argument must be string.') - self.is_found = mesonlib.version_compare(self.modversion, self.version_requirement) + if not isinstance(self.version_reqs, (str, list)): + raise DependencyException('Version argument must be string or list.') + (self.is_found, not_found, found) = \ + version_compare_many(self.modversion, self.version_reqs) if not self.is_found: - found_msg += [mlog.red('NO'), 'found {!r}'.format(self.modversion), - 'but need {!r}'.format(self.version_requirement)] + found_msg += [mlog.red('NO'), + 'found {!r} but need:'.format(self.modversion), + ', '.join(["'{}'".format(e) for e in not_found])] + if found: + found_msg += ['; matched:', + ', '.join(["'{}'".format(e) for e in found])] if not self.silent: mlog.log(*found_msg) if self.required: raise DependencyException( 'Invalid version of a dependency, needed %s %s found %s.' 
% - (name, self.version_requirement, self.modversion)) + (name, not_found, self.modversion)) return found_msg += [mlog.green('YES'), self.modversion] if not self.silent: @@ -233,7 +238,7 @@ class PkgConfigDependency(Dependency): '(%s)' % out.decode().strip()) PkgConfigDependency.pkgconfig_found = True return - except Exception: + except (FileNotFoundError, PermissionError): pass PkgConfigDependency.pkgconfig_found = False if not self.silent: @@ -301,7 +306,7 @@ class WxDependency(Dependency): self.modversion = out.decode().strip() version_req = kwargs.get('version', None) if version_req is not None: - if not mesonlib.version_compare(self.modversion, version_req): + if not version_compare(self.modversion, version_req, strict=True): mlog.log('Wxwidgets version %s does not fullfill requirement %s' %\ (self.modversion, version_req)) return @@ -358,7 +363,7 @@ class WxDependency(Dependency): self.wxc = wxc WxDependency.wx_found = True return - except Exception: + except (FileNotFoundError, PermissionError): pass WxDependency.wxconfig_found = False mlog.log('Found wx-config:', mlog.red('NO')) @@ -1040,7 +1045,7 @@ class GnuStepDependency(Dependency): gp = subprocess.Popen([confprog, '--help'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) gp.communicate() - except FileNotFoundError: + except (FileNotFoundError, PermissionError): self.args = None mlog.log('Dependency GnuStep found:', mlog.red('NO'), '(no gnustep-config)') return diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index cc62010..098f8ca 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -43,7 +43,8 @@ def detect_ninja(): for n in ['ninja', 'ninja-build']: try: p = subprocess.Popen([n, '--version'], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) - except FileNotFoundError: + except (FileNotFoundError, PermissionError): + # Doesn't exist in PATH or isn't executable continue version = p.communicate()[0].decode(errors='ignore') # Perhaps we should add a way for the caller to know the failure mode diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 3b9f975..e9273e4 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -1762,12 +1762,12 @@ class Interpreter(): raise InvalidCode('Tried to use unknown language "%s".' % lang) comp.sanity_check(self.environment.get_scratch_dir(), self.environment) self.coredata.compilers[lang] = comp + # Native compiler always exist so always add its options. + new_options = comp.get_options() if cross_comp is not None: cross_comp.sanity_check(self.environment.get_scratch_dir(), self.environment) self.coredata.cross_compilers[lang] = cross_comp - new_options = cross_comp.get_options() - else: - new_options = comp.get_options() + new_options.update(cross_comp.get_options()) optprefix = lang + '_' for i in new_options: if not i.startswith(optprefix): @@ -1862,7 +1862,8 @@ requirements use the version keyword argument instead.''') if 'version' in kwargs: wanted = kwargs['version'] found = cached_dep.get_version() - if not cached_dep.found() or not mesonlib.version_compare(found, wanted): + if not cached_dep.found() or \ + not mesonlib.version_compare_many(found, wanted)[0]: # Cached dep has the wrong version. Check if an external # dependency or a fallback dependency provides it. 
cached_dep = None diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py index b92be5f..4670685 100644 --- a/mesonbuild/mesonlib.py +++ b/mesonbuild/mesonlib.py @@ -146,21 +146,26 @@ def detect_vcs(source_dir): return vcs return None -def grab_leading_numbers(vstr): +def grab_leading_numbers(vstr, strict=False): result = [] for x in vstr.split('.'): try: result.append(int(x)) - except ValueError: + except ValueError as e: + if strict: + msg = 'Invalid version to compare against: {!r}; only ' \ + 'numeric digits separated by "." are allowed: ' + str(e) + raise MesonException(msg.format(vstr)) break return result numpart = re.compile('[0-9.]+') -def version_compare(vstr1, vstr2): +def version_compare(vstr1, vstr2, strict=False): match = numpart.match(vstr1.strip()) if match is None: - raise MesonException('Uncomparable version string %s.' % vstr1) + msg = 'Uncomparable version string {!r}.' + raise MesonException(msg.format(vstr1)) vstr1 = match.group(0) if vstr2.startswith('>='): cmpop = operator.ge @@ -185,10 +190,22 @@ def version_compare(vstr1, vstr2): vstr2 = vstr2[1:] else: cmpop = operator.eq - varr1 = grab_leading_numbers(vstr1) - varr2 = grab_leading_numbers(vstr2) + varr1 = grab_leading_numbers(vstr1, strict) + varr2 = grab_leading_numbers(vstr2, strict) return cmpop(varr1, varr2) +def version_compare_many(vstr1, conditions): + if not isinstance(conditions, (list, tuple)): + conditions = [conditions] + found = [] + not_found = [] + for req in conditions: + if not version_compare(vstr1, req, strict=True): + not_found.append(req) + else: + found.append(req) + return (not_found == [], not_found, found) + def default_libdir(): if is_debianlike(): try: @@ -303,7 +320,7 @@ def do_conf_file(src, dst, confdata): replace_if_different(dst, dst_tmp) def dump_conf_header(ofilename, cdata): - with open(ofilename, 'w') as ofile: + with open(ofilename, 'w', encoding='utf-8') as ofile: ofile.write('''/* * Autogenerated by the Meson build system. * Do not edit, your changes will be lost. diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index 57c814c..71d42e3 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -115,7 +115,9 @@ class MesonApp(): msg = '''Trying to run Meson on a build directory that has already been configured. If you want to build it, just run your build command (e.g. ninja) inside the build directory. Meson will autodetect any changes in your setup and regenerate -itself as required.''' +itself as required. 
+ +If you want to change option values, use the mesonconf tool instead.''' raise RuntimeError(msg) else: if handshake: @@ -240,7 +242,13 @@ def run(mainfile, args): return 1 if len(args) >= 2 and args[0] == '--internal': if args[1] != 'regenerate': - sys.exit(run_script_command(args[1:])) + script = args[1] + try: + sys.exit(run_script_command(args[1:])) + except MesonException as e: + mlog.log(mlog.red('\nError in {} helper script:'.format(script))) + mlog.log(e) + sys.exit(1) args = args[2:] handshake = True else: diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 492bf3f..c1af818 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -168,7 +168,10 @@ def list_tests(testdata): else: fname = t.fname to['cmd'] = fname + t.cmd_args - to['env'] = t.env + if isinstance(t.env, build.EnvironmentVariables): + to['env'] = t.env.get_env(os.environ) + else: + to['env'] = t.env to['name'] = t.name to['workdir'] = t.workdir to['timeout'] = t.timeout diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index fedf95c..d7b05eb 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -37,6 +37,13 @@ native_glib_version = None girwarning_printed = False gresource_warning_printed = False +def find_program(program_name, target_name): + program = dependencies.ExternalProgram(program_name) + if not program.found(): + raise MesonException('%s can\'t be generated as %s could not be found' % ( + target_name, program_name)) + return program + class GnomeModule: @staticmethod @@ -336,6 +343,8 @@ can not be used with the current version of glib-compiled-resources, due to raise MesonException('Gir takes one argument') if kwargs.get('install_dir'): raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"') + giscanner = find_program('g-ir-scanner', 'Gir') + gicompiler = find_program('g-ir-compiler', 'Gir') girtarget = args[0] while hasattr(girtarget, 'held_object'): girtarget = girtarget.held_object @@ -357,7 +366,7 @@ can not be used with the current version of glib-compiled-resources, due to depends = [girtarget] gir_inc_dirs = [] - scan_command = ['g-ir-scanner', '@INPUT@'] + scan_command = giscanner.get_command() + ['@INPUT@'] scan_command += pkgargs scan_command += ['--no-libtool', '--namespace='+ns, '--nsversion=' + nsversion, '--warn-all', '--output', '@OUTPUT@'] @@ -512,7 +521,7 @@ can not be used with the current version of glib-compiled-resources, due to scan_target = GirTarget(girfile, state.subdir, scankwargs) typelib_output = '%s-%s.typelib' % (ns, nsversion) - typelib_cmd = ['g-ir-compiler', scan_target, '--output', '@OUTPUT@'] + typelib_cmd = gicompiler.get_command() + [scan_target, '--output', '@OUTPUT@'] typelib_cmd += self._get_include_args(state, gir_inc_dirs, prefix='--includedir=') for incdir in typelib_includes: @@ -534,7 +543,9 @@ can not be used with the current version of glib-compiled-resources, due to raise MesonException('Compile_schemas does not take positional arguments.') srcdir = os.path.join(state.build_to_src, state.subdir) outdir = state.subdir - cmd = ['glib-compile-schemas', '--targetdir', outdir, srcdir] + + cmd = find_program('glib-compile-schemas', 'gsettings-compile').get_command() + cmd += ['--targetdir', outdir, srcdir] kwargs['command'] = cmd kwargs['input'] = [] kwargs['output'] = 'gschemas.compiled' @@ -712,7 +723,8 @@ can not be used with the current version of glib-compiled-resources, due to raise MesonException('Gdbus_codegen takes two arguments, 
name and xml file.') namebase = args[0] xml_file = args[1] - cmd = ['gdbus-codegen'] + target_name = namebase + '-gdbus' + cmd = find_program('gdbus-codegen', target_name).get_command() if 'interface_prefix' in kwargs: cmd += ['--interface-prefix', kwargs.pop('interface_prefix')] if 'namespace' in kwargs: @@ -723,7 +735,7 @@ can not be used with the current version of glib-compiled-resources, due to 'output' : outputs, 'command' : cmd } - return build.CustomTarget(namebase + '-gdbus', state.subdir, custom_kwargs) + return build.CustomTarget(target_name, state.subdir, custom_kwargs) def mkenums(self, state, args, kwargs): if len(args) != 1: @@ -769,7 +781,7 @@ can not be used with the current version of glib-compiled-resources, due to elif arg not in known_custom_target_kwargs: raise MesonException( 'Mkenums does not take a %s keyword argument.' % (arg, )) - cmd = ['glib-mkenums'] + cmd + cmd = find_program('glib-mkenums', 'mkenums').get_command() + cmd custom_kwargs = {} for arg in known_custom_target_kwargs: if arg in kwargs: @@ -850,7 +862,7 @@ can not be used with the current version of glib-compiled-resources, due to raise MesonException( 'Sources keyword argument must be a string or array.') - cmd = ['glib-genmarshal'] + cmd = find_program('glib-genmarshal', output + '_genmarshal').get_command() known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc', 'valist_marshallers'] known_custom_target_kwargs = ['build_always', 'depends', @@ -977,7 +989,8 @@ can not be used with the current version of glib-compiled-resources, due to build_dir = os.path.join(state.environment.get_build_dir(), state.subdir) source_dir = os.path.join(state.environment.get_source_dir(), state.subdir) pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs) - cmd = ['vapigen', '--quiet', '--library=' + library, '--directory=' + build_dir] + cmd = find_program('vapigen', 'Vaapi').get_command() + cmd += ['--quiet', '--library=' + library, '--directory=' + build_dir] cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs) cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs) cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs) diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py index 1ddb2fc..13394c1 100644 --- a/mesonbuild/modules/i18n.py +++ b/mesonbuild/modules/i18n.py @@ -12,21 +12,81 @@ # See the License for the specific language governing permissions and # limitations under the License. +from os import path from .. 
import coredata, mesonlib, build +from ..mesonlib import MesonException import sys +PRESET_ARGS = { + 'glib': [ + '--from-code=UTF-8', + '--add-comments', + + # https://developer.gnome.org/glib/stable/glib-I18N.html + '--keyword=_', + '--keyword=N_', + '--keyword=C_:1c,2', + '--keyword=NC_:1c,2', + '--keyword=g_dcgettext:2', + '--keyword=g_dngettext:2,3', + '--keyword=g_dpgettext2:2c,3', + + '--flag=N_:1:pass-c-format', + '--flag=C_:2:pass-c-format', + '--flag=NC_:2:pass-c-format', + '--flag=g_dngettext:2:pass-c-format', + '--flag=g_strdup_printf:1:c-format', + '--flag=g_string_printf:2:c-format', + '--flag=g_string_append_printf:2:c-format', + '--flag=g_error_new:3:c-format', + '--flag=g_set_error:4:c-format', + ] +} + class I18nModule: + def merge_file(self, state, args, kwargs): + podir = kwargs.pop('po_dir', None) + if not podir: + raise MesonException('i18n: po_dir is a required kwarg') + podir = path.join(state.build_to_src, state.subdir, podir) + + file_type = kwargs.pop('type', 'xml') + VALID_TYPES = ('xml', 'desktop') + if not file_type in VALID_TYPES: + raise MesonException('i18n: "{}" is not a valid type {}'.format(file_type, VALID_TYPES)) + + kwargs['command'] = ['msgfmt', '--' + file_type, + '--template', '@INPUT@', '-d', podir, '-o', '@OUTPUT@'] + return build.CustomTarget(kwargs['output'] + '_merge', state.subdir, kwargs) + + @staticmethod + def _read_linguas(state): + linguas = path.join(state.environment.get_source_dir(), state.subdir, 'LINGUAS') + try: + with open(linguas) as f: + return [line.strip() for line in f if not line.strip().startswith('#')] + except (FileNotFoundError, PermissionError): + return [] + def gettext(self, state, args, kwargs): if len(args) != 1: raise coredata.MesonException('Gettext requires one positional argument (package name).') packagename = args[0] - languages = mesonlib.stringlistify(kwargs.get('languages', [])) + languages = mesonlib.stringlistify(kwargs.get('languages', self._read_linguas(state))) if len(languages) == 0: raise coredata.MesonException('List of languages empty.') datadirs = mesonlib.stringlistify(kwargs.get('data_dirs', [])) extra_args = mesonlib.stringlistify(kwargs.get('args', [])) + preset = kwargs.pop('preset', None) + if preset: + preset_args = PRESET_ARGS.get(preset) + if not preset_args: + raise coredata.MesonException('i18n: Preset "{}" is not one of the valid options: {}'.format( + preset, list(PRESET_ARGS.keys()))) + extra_args = set(preset_args + extra_args) + pkg_arg = '--pkgname=' + packagename lang_arg = '--langs=' + '@@'.join(languages) datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 9f50b0e..73b29ae 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -49,6 +49,7 @@ class PkgConfigModule: # 'os.path.join' for details) ofile.write('libdir=%s\n' % os.path.join('${prefix}', coredata.get_builtin_option('libdir'))) ofile.write('includedir=%s\n' % os.path.join('${prefix}', coredata.get_builtin_option('includedir'))) + ofile.write('\n') ofile.write('Name: %s\n' % name) if len(description) > 0: ofile.write('Description: %s\n' % description) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index e05c641..37d6df7 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -93,9 +93,19 @@ class Resolver: def resolve(self, packagename): fname = os.path.join(self.subdir_root, packagename + '.wrap') dirname = os.path.join(self.subdir_root, packagename) - if 
os.path.isdir(dirname): - # The directory is there? Great, use it. - return packagename + try: + if os.listdir(dirname): + # The directory is there and not empty? Great, use it. + return packagename + else: + mlog.warning('Subproject directory %s is empty, possibly because of an unfinished' + 'checkout, removing to reclone' % dirname) + os.rmdir(checkoutdir) + except NotADirectoryError: + raise RuntimeError('%s is not a directory, can not use as subproject.' % dirname) + except FileNotFoundError: + pass + if not os.path.isfile(fname): # No wrap file with this name? Give up. return None @@ -118,6 +128,15 @@ class Resolver: revno = p.get('revision') is_there = os.path.isdir(checkoutdir) if is_there: + try: + subprocess.check_call(['git', 'rev-parse']) + is_there = True + except subprocess.CalledProcessError: + raise RuntimeError('%s is not empty but is not a valid ' + 'git repository, we can not work with it' + ' as a subproject directory.' % ( + checkoutdir)) + if revno.lower() == 'head': # Failure to do pull is not a fatal error, # because otherwise you can't develop without @@ -134,6 +153,11 @@ class Resolver: if revno.lower() != 'head': subprocess.check_call(['git', 'checkout', revno], cwd=checkoutdir) + push_url = p.values.get('push-url') + if push_url: + subprocess.check_call(['git', 'remote', 'set-url', + '--push', 'origin', push_url], + cwd=checkoutdir) def get_hg(self, p): checkoutdir = os.path.join(self.subdir_root, p.get('directory')) revno = p.get('revision') diff --git a/mesontest.py b/mesontest.py index 04f72df..2d834b1 100755 --- a/mesontest.py +++ b/mesontest.py @@ -18,15 +18,18 @@ import subprocess, sys, os, argparse import pickle -import mesonbuild from mesonbuild import build from mesonbuild import environment -import time, datetime, pickle, multiprocessing, json +import time, datetime, multiprocessing, json import concurrent.futures as conc import platform import signal +# GNU autotools interprets a return code of 77 from tests it executes to +# mean that the test should be skipped. +GNU_SKIP_RETURNCODE = 77 + def is_windows(): platname = platform.system().lower() return platname == 'windows' or 'mingw' in platname @@ -51,13 +54,15 @@ def determine_worker_count(): parser = argparse.ArgumentParser() parser.add_argument('--repeat', default=1, dest='repeat', type=int, help='Number of times to run the tests.') +parser.add_argument('--no-rebuild', default=False, action='store_true', + help='Do not rebuild before running tests.') parser.add_argument('--gdb', default=False, dest='gdb', action='store_true', help='Run test under gdb.') parser.add_argument('--list', default=False, dest='list', action='store_true', help='List available tests.') parser.add_argument('--wrapper', default=None, dest='wrapper', help='wrapper to run tests with (e.g. 
Valgrind)') -parser.add_argument('--wd', default=None, dest='wd', +parser.add_argument('-C', default='.', dest='wd', help='directory to cd into before running') parser.add_argument('--suite', default=None, dest='suite', help='Only run tests belonging to the given suite.') @@ -71,6 +76,12 @@ parser.add_argument('--logbase', default='testlog', help="Base name for log file.") parser.add_argument('--num-processes', default=determine_worker_count(), type=int, help='How many parallel processes to use.') +parser.add_argument('-v', '--verbose', default=False, action='store_true', + help='Do not redirect stdout and stderr') +parser.add_argument('-t', '--timeout-multiplier', type=float, default=1.0, + help='Define a multiplier for test timeout, for example ' + ' when running tests in particular conditions they might take' + ' more time to execute.') parser.add_argument('args', nargs='*') class TestRun(): @@ -107,6 +118,8 @@ class TestRun(): return res def decode(stream): + if stream is None: + return '' try: return stream.decode('utf-8') except UnicodeDecodeError: @@ -135,12 +148,35 @@ class TestHarness: self.collected_logs = [] self.error_count = 0 self.is_run = False + self.cant_rebuild = False if self.options.benchmark: - self.datafile = 'meson-private/meson_benchmark_setup.dat' + self.datafile = os.path.join(options.wd, 'meson-private/meson_benchmark_setup.dat') else: - self.datafile = 'meson-private/meson_test_setup.dat' + self.datafile = os.path.join(options.wd, 'meson-private/meson_test_setup.dat') + + def rebuild_all(self): + if not os.path.isfile(os.path.join(self.options.wd, 'build.ninja')): + print("Only ninja backend is supported to rebuilt tests before running them.") + self.cant_rebuild = True + return True + + ninja = environment.detect_ninja() + if not ninja: + print("Can't find ninja, can't rebuild test.") + self.cant_rebuild = True + return False + + p = subprocess.Popen([ninja, '-C', self.options.wd]) + (stdo, stde) = p.communicate() + + if p.returncode != 0: + print("Could not rebuild") + return False + + return True def run_single_test(self, wrap, test): + failling = False if test.fname[0].endswith('.jar'): cmd = ['java', '-jar'] + test.fname elif not test.is_cross and run_with_mono(test.fname[0]): @@ -155,6 +191,7 @@ class TestHarness: cmd = [test.exe_runner] + test.fname else: cmd = test.fname + if cmd is None: res = 'SKIP' duration = 0.0 @@ -171,20 +208,30 @@ class TestHarness: child_env.update(test.env) if len(test.extra_paths) > 0: child_env['PATH'] = child_env['PATH'] + ';'.join([''] + test.extra_paths) - if is_windows(): - setsid = None - else: - setsid = os.setsid + + setsid = None + stdout = None + stderr = None + if not self.options.verbose: + stdout = subprocess.PIPE + stderr = subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT + + if not is_windows(): + setsid = os.setsid + p = subprocess.Popen(cmd, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE if self.options and self.options.split else subprocess.STDOUT, + stdout=stdout, + stderr=stderr, env=child_env, cwd=test.workdir, preexec_fn=setsid) timed_out = False + timeout = test.timeout * self.options.timeout_multiplier try: - (stdo, stde) = p.communicate(timeout=test.timeout) + (stdo, stde) = p.communicate(timeout=timeout) except subprocess.TimeoutExpired: + if self.options.verbose: + print("%s time out (After %d seconds)" % (test.name, timeout)) timed_out = True # Python does not provide multiplatform support for # killing a process and all its children so we need @@ -201,13 +248,22 @@ 
class TestHarness: stde = decode(stde) if timed_out: res = 'TIMEOUT' + failling = True + if p.returncode == GNU_SKIP_RETURNCODE: + res = 'SKIP' elif (not test.should_fail and p.returncode == 0) or \ (test.should_fail and p.returncode != 0): res = 'OK' else: res = 'FAIL' + failling = True returncode = p.returncode - return TestRun(res, returncode, test.should_fail, duration, stdo, stde, cmd, test.env) + result = TestRun(res, returncode, test.should_fail, duration, stdo, stde, cmd, test.env) + + if failling: + self.failed_tests.append(result) + + return result def print_stats(self, numlen, tests, name, result, i, logfile, jsonlogfile): startpad = ' '*(numlen - len('%d' % (i+1))) @@ -218,7 +274,8 @@ class TestHarness: (num, name, padding1, result.res, padding2, result.duration) print(result_str) result_str += "\n\n" + result.get_log() - if (result.returncode != 0) != result.should_fail: + if (result.returncode != GNU_SKIP_RETURNCODE) and \ + (result.returncode != 0) != result.should_fail: self.error_count += 1 if self.options.print_errorlogs: self.collected_logs.append(result_str) @@ -232,7 +289,7 @@ class TestHarness: print('Test data file. Probably this means that you did not run this in the build directory.') return 1 self.is_run = True - logfilename = self.run_tests(self.datafile, self.options.logbase) + logfilename = self.run_tests(self.options.logbase) if len(self.collected_logs) > 0: if len(self.collected_logs) > 10: print('\nThe output from 10 first failed tests:\n') @@ -249,8 +306,16 @@ class TestHarness: print('Full log written to %s.' % logfilename) return self.error_count - def run_tests(self, datafilename, log_base): - logfile_base = os.path.join('meson-logs', log_base) + def get_tests(self): + with open(self.datafile, 'rb') as f: + tests = pickle.load(f) + for test in tests: + test.rebuilt = False + + return tests + + def run_tests(self, log_base): + logfile_base = os.path.join(self.options.wd, 'meson-logs', log_base) if self.options.wrapper is None: wrap = [] logfilename = logfile_base + '.txt' @@ -260,8 +325,7 @@ class TestHarness: namebase = wrap[0] logfilename = logfile_base + '-' + namebase.replace(' ', '_') + '.txt' jsonlogfilename = logfile_base + '-' + namebase.replace(' ', '_') + '.json' - with open(datafilename, 'rb') as f: - tests = pickle.load(f) + tests = self.get_tests() if len(tests) == 0: print('No tests defined.') return @@ -270,37 +334,67 @@ class TestHarness: futures = [] filtered_tests = filter_tests(self.options.suite, tests) - with open(jsonlogfilename, 'w') as jsonlogfile, \ - open(logfilename, 'w') as logfile: - logfile.write('Log of Meson test suite run on %s.\n\n' % - datetime.datetime.now().isoformat()) - for i, test in enumerate(filtered_tests): - if test.suite[0] == '': - visible_name = test.name - else: - if self.options.suite is not None: - visible_name = self.options.suite + ' / ' + test.name + jsonlogfile = None + logfile = None + try: + if not self.options.verbose: + jsonlogfile = open(jsonlogfilename, 'w') + logfile = open(logfilename, 'w') + logfile.write('Log of Meson test suite run on %s.\n\n' % + datetime.datetime.now().isoformat()) + + for i in range(self.options.repeat): + for i, test in enumerate(filtered_tests): + if test.suite[0] == '': + visible_name = test.name else: - visible_name = test.suite[0] + ' / ' + test.name - - if not test.is_parallel: - self.drain_futures(futures) - futures = [] - res = self.run_single_test(wrap, test) - self.print_stats(numlen, filtered_tests, visible_name, res, i, - logfile, jsonlogfile) - else: - f = 
executor.submit(self.run_single_test, wrap, test) - futures.append((f, numlen, filtered_tests, visible_name, i, - logfile, jsonlogfile)) - self.drain_futures(futures) + if self.options.suite is not None: + visible_name = self.options.suite + ' / ' + test.name + else: + visible_name = test.suite[0] + ' / ' + test.name + + if not test.is_parallel: + self.drain_futures(futures) + futures = [] + res = self.run_single_test(wrap, test) + if not self.options.verbose: + self.print_stats(numlen, filtered_tests, visible_name, res, i, + logfile, jsonlogfile) + else: + f = executor.submit(self.run_single_test, wrap, test) + if not self.options.verbose: + futures.append((f, numlen, filtered_tests, visible_name, i, + logfile, jsonlogfile)) + self.drain_futures(futures, logfile, jsonlogfile) + finally: + if jsonlogfile: + jsonlogfile.close() + if logfile: + logfile.close() + return logfilename - def drain_futures(self, futures): + def drain_futures(self, futures, logfile, jsonlogfile): for i in futures: (result, numlen, tests, name, i, logfile, jsonlogfile) = i - self.print_stats(numlen, tests, name, result.result(), i, logfile, jsonlogfile) + if self.options.repeat > 1 and self.failed_tests: + result.cancel() + elif not self.options.verbose: + self.print_stats(numlen, tests, name, result.result(), i, logfile, jsonlogfile) + else: + result.result() + + if self.options.repeat > 1 and self.failed_tests: + if not self.options.verbose: + for res in self.failed_tests: + print('Test failed:\n\n-- stdout --\n') + print(res.stdo) + print('\n-- stderr --\n') + print(res.stde) + return 1 + + return def run_special(self): 'Tests run by the user, usually something like "under gdb 1000 times".' @@ -315,7 +409,7 @@ class TestHarness: return 1 if os.path.isfile('build.ninja'): subprocess.check_call([environment.detect_ninja(), 'all']) - tests = pickle.load(open(self.datafile, 'rb')) + tests = self.get_tests() if self.options.list: for i in tests: print(i.name) @@ -325,15 +419,22 @@ class TestHarness: for i in range(self.options.repeat): print('Running: %s %d/%d' % (t.name, i+1, self.options.repeat)) if self.options.gdb: - gdbrun(t) + wrap = ['gdb', '--quiet'] + if len(t.cmd_args) > 0: + wrap.append('--args') + if self.options.repeat > 1: + wrap.append('-ex', 'run', '-ex', 'quit') + + res = self.run_single_test(wrap, t) else: res = self.run_single_test(wrap, t) if (res.returncode == 0 and res.should_fail) or \ - (res.returncode != 0 and not res.should_fail): - print('Test failed:\n\n-- stdout --\n') - print(res.stdo) - print('\n-- stderr --\n') - print(res.stde) + (res.returncode != 0 and not res.should_fail): + if not self.options.verbose: + print('Test failed:\n\n-- stdout --\n') + print(res.stdo) + print('\n-- stderr --\n') + print(res.stde) return 1 return 0 @@ -342,32 +443,24 @@ def filter_tests(suite, tests): return tests return [x for x in tests if suite in x.suite] -def gdbrun(test): - child_env = os.environ.copy() - child_env.update(test.env) - # On success will exit cleanly. On failure gdb will ask user - # if they really want to exit. - exe = test.fname - args = test.cmd_args - if len(args) > 0: - argset = ['-ex', 'set args ' + ' '.join(args)] - else: - argset = [] - cmd = ['gdb', '--quiet'] + argset + ['-ex', 'run', '-ex', 'quit'] + exe - # FIXME a ton of stuff. run_single_test grabs stdout & co, - # which we do not want to do when running under gdb. 
- p = subprocess.Popen(cmd, - env=child_env, - cwd=test.workdir, - ) - p.communicate() def run(args): options = parser.parse_args(args) if options.benchmark: options.num_processes = 1 + + if options.gdb: + options.verbose = True + + options.wd = os.path.abspath(options.wd) + th = TestHarness(options) - if len(options.args) == 0: + if options.list: + return th.run_special() + if not options.no_rebuild: + if not th.rebuild_all(): + return -1 + elif len(options.args) == 0: return th.doit() return th.run_special() diff --git a/run_project_tests.py b/run_project_tests.py index dcc6006..6f4d0a3 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -380,7 +380,14 @@ def run_tests(extra_args): build_time = 0 test_time = 0 - executor = conc.ProcessPoolExecutor(max_workers=multiprocessing.cpu_count()) + try: + # This fails in some CI environments for unknown reasons. + num_workers = multiprocessing.cpu_count() + except Exception as e: + print('Could not determine number of CPUs due to the following reason:' + str(e)) + print('Defaulting to using only one process') + num_workers = 1 + executor = conc.ProcessPoolExecutor(max_workers=num_workers) for name, test_cases, skipped in all_tests: current_suite = ET.SubElement(junit_root, 'testsuite', {'name' : name, 'tests' : str(len(test_cases))}) diff --git a/run_unittests.py b/run_unittests.py index fff0c35..03ce0df 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -17,15 +17,17 @@ import unittest, os, sys, shutil, time import subprocess import re, json import tempfile +from glob import glob import mesonbuild.environment from mesonbuild.environment import detect_ninja from mesonbuild.dependencies import PkgConfigDependency, Qt5Dependency def get_soname(fname): # HACK, fix to not use shell. - raw_out = subprocess.check_output(['readelf', '-a', fname]) - pattern = re.compile(b'soname: \[(.*?)\]') - for line in raw_out.split(b'\n'): + raw_out = subprocess.check_output(['readelf', '-a', fname], + universal_newlines=True) + pattern = re.compile('soname: \[(.*?)\]') + for line in raw_out.split('\n'): m = pattern.search(line) if m is not None: return m.group(1) @@ -52,6 +54,7 @@ class LinuxlikeTests(unittest.TestCase): def setUp(self): super().setUp() src_root = os.path.dirname(__file__) + src_root = os.path.join(os.getcwd(), src_root) self.builddir = tempfile.mkdtemp() self.meson_command = [sys.executable, os.path.join(src_root, 'meson.py')] self.mconf_command = [sys.executable, os.path.join(src_root, 'mesonconf.py')] @@ -60,6 +63,7 @@ class LinuxlikeTests(unittest.TestCase): self.common_test_dir = os.path.join(src_root, 'test cases/common') self.vala_test_dir = os.path.join(src_root, 'test cases/vala') self.framework_test_dir = os.path.join(src_root, 'test cases/frameworks') + self.unit_test_dir = os.path.join(src_root, 'test cases/unit') self.output = b'' self.orig_env = os.environ.copy() @@ -91,27 +95,55 @@ class LinuxlikeTests(unittest.TestCase): with open(os.path.join(self.builddir, 'meson-logs', 'meson-log.txt')) as f: return f.readlines() + def get_meson_log_compiler_checks(self): + ''' + Fetch a list command-lines run by meson for compiler checks. + Each command-line is returned as a list of arguments. 
+ ''' + log = self.get_meson_log() + prefix = 'Command line:' + cmds = [l[len(prefix):].split() for l in log if l.startswith(prefix)] + return cmds + def introspect(self, arg): - out = subprocess.check_output(self.mintro_command + [arg, self.builddir]) - return json.loads(out.decode('utf-8')) + out = subprocess.check_output(self.mintro_command + [arg, self.builddir], + universal_newlines=True) + return json.loads(out) def test_basic_soname(self): + ''' + Test that the soname is set correctly for shared libraries. This can't + be an ordinary test case because we need to run `readelf` and actually + check the soname. + https://github.com/mesonbuild/meson/issues/785 + ''' testdir = os.path.join(self.common_test_dir, '4 shared') self.init(testdir) self.build() lib1 = os.path.join(self.builddir, 'libmylib.so') soname = get_soname(lib1) - self.assertEqual(soname, b'libmylib.so') + self.assertEqual(soname, 'libmylib.so') def test_custom_soname(self): + ''' + Test that the soname is set correctly for shared libraries when + a custom prefix and/or suffix is used. This can't be an ordinary test + case because we need to run `readelf` and actually check the soname. + https://github.com/mesonbuild/meson/issues/785 + ''' testdir = os.path.join(self.common_test_dir, '27 library versions') self.init(testdir) self.build() lib1 = os.path.join(self.builddir, 'prefixsomelib.suffix') soname = get_soname(lib1) - self.assertEqual(soname, b'prefixsomelib.suffix') + self.assertEqual(soname, 'prefixsomelib.suffix') def test_pic(self): + ''' + Test that -fPIC is correctly added to static libraries when b_staticpic + is true and not when it is false. This can't be an ordinary test case + because we need to inspect the compiler database. + ''' testdir = os.path.join(self.common_test_dir, '3 static') self.init(testdir) compdb = self.get_compdb() @@ -127,6 +159,12 @@ class LinuxlikeTests(unittest.TestCase): self.assertTrue('-fPIC' not in compdb[0]['command']) def test_pkgconfig_gen(self): + ''' + Test that generated pkg-config files can be found and have the correct + version and link args. This can't be an ordinary test case because we + need to run pkg-config outside of a Meson build file. + https://github.com/mesonbuild/meson/issues/889 + ''' testdir = os.path.join(self.common_test_dir, '51 pkgconfig-gen') self.init(testdir) env = FakeEnvironment() @@ -138,6 +176,12 @@ class LinuxlikeTests(unittest.TestCase): self.assertTrue('-lfoo' in simple_dep.get_link_args()) def test_vala_c_warnings(self): + ''' + Test that no warnings are emitted for C code generated by Vala. This + can't be an ordinary test case because we need to inspect the compiler + database. + https://github.com/mesonbuild/meson/issues/864 + ''' testdir = os.path.join(self.vala_test_dir, '5 target glib') self.init(testdir) compdb = self.get_compdb() @@ -165,6 +209,12 @@ class LinuxlikeTests(unittest.TestCase): self.assertTrue('-Werror' in c_command) def test_static_compile_order(self): + ''' + Test that the order of files in a compiler command-line while compiling + and linking statically is deterministic. This can't be an ordinary test + case because we need to inspect the compiler database. 
+ https://github.com/mesonbuild/meson/pull/951 + ''' testdir = os.path.join(self.common_test_dir, '5 linkstatic') self.init(testdir) compdb = self.get_compdb() @@ -176,6 +226,10 @@ class LinuxlikeTests(unittest.TestCase): # FIXME: We don't have access to the linker command def test_install_introspection(self): + ''' + Tests that the Meson introspection API exposes install filenames correctly + https://github.com/mesonbuild/meson/issues/829 + ''' testdir = os.path.join(self.common_test_dir, '8 install') self.init(testdir) intro = self.introspect('--targets') @@ -185,14 +239,29 @@ class LinuxlikeTests(unittest.TestCase): self.assertEqual(intro[1]['install_filename'], '/usr/local/bin/prog') def test_run_target_files_path(self): + ''' + Test that run_targets are run from the correct directory + https://github.com/mesonbuild/meson/issues/957 + ''' testdir = os.path.join(self.common_test_dir, '58 run target') self.init(testdir) self.run_target('check_exists') def test_qt5dependency_qmake_detection(self): - # Can't be sure that `qmake` is Qt5, so just try qmake-qt5. + ''' + Test that qt5 detection with qmake works. This can't be an ordinary + test case because it involves setting the environment. + ''' + # Verify that qmake is for Qt5 if not shutil.which('qmake-qt5'): - raise unittest.SkipTest('qt5 not found') + if not shutil.which('qmake'): + raise unittest.SkipTest('QMake not found') + # For some inexplicable reason qmake --version gives different + # results when run from the command line vs invoked by Python. + # Check for both cases in case this behaviour changes in the future. + output = subprocess.getoutput(['qmake', '--version']) + if 'Qt version 5' not in output and 'qt5' not in output: + raise unittest.SkipTest('Qmake found, but it is not for Qt 5.') # Disable pkg-config codepath and force searching with qmake/qmake-qt5 os.environ['PKG_CONFIG_LIBDIR'] = self.builddir os.environ['PKG_CONFIG_PATH'] = self.builddir @@ -200,8 +269,76 @@ class LinuxlikeTests(unittest.TestCase): self.init(testdir) # Confirm that the dependency was found with qmake msg = 'Qt5 native `qmake-qt5` dependency (modules: Core) found: YES\n' + msg2 = 'Qt5 native `qmake` dependency (modules: Core) found: YES\n' mesonlog = self.get_meson_log() - self.assertTrue(msg in mesonlog) + self.assertTrue(msg in mesonlog or msg2 in mesonlog) + + def get_soname(self, fname): + output = subprocess.check_output(['readelf', '-a', fname], + universal_newlines=True) + for line in output.split('\n'): + if 'SONAME' in line: + return line.split('[')[1].split(']')[0] + raise RuntimeError('Readelf gave no SONAME.') + + def test_soname(self): + testdir = os.path.join(self.unit_test_dir, '1 soname') + self.init(testdir) + self.build() + + # File without aliases set. 
+ nover = os.path.join(self.builddir, 'libnover.so') + self.assertTrue(os.path.exists(nover)) + self.assertFalse(os.path.islink(nover)) + self.assertEqual(self.get_soname(nover), 'libnover.so') + self.assertEqual(len(glob(nover[:-3] + '*')), 1) + + # File with version set + verset = os.path.join(self.builddir, 'libverset.so') + self.assertTrue(os.path.exists(verset + '.4.5.6')) + self.assertEqual(os.readlink(verset), 'libverset.so.4') + self.assertEqual(self.get_soname(verset), 'libverset.so.4') + self.assertEqual(len(glob(verset[:-3] + '*')), 3) + + # File with soversion set + soverset = os.path.join(self.builddir, 'libsoverset.so') + self.assertTrue(os.path.exists(soverset + '.1.2.3')) + self.assertEqual(os.readlink(soverset), 'libsoverset.so.1.2.3') + self.assertEqual(self.get_soname(soverset), 'libsoverset.so.1.2.3') + self.assertEqual(len(glob(soverset[:-3] + '*')), 2) + + # File with version and soversion set to same values + settosame = os.path.join(self.builddir, 'libsettosame.so') + self.assertTrue(os.path.exists(settosame + '.7.8.9')) + self.assertEqual(os.readlink(settosame), 'libsettosame.so.7.8.9') + self.assertEqual(self.get_soname(settosame), 'libsettosame.so.7.8.9') + self.assertEqual(len(glob(settosame[:-3] + '*')), 2) + + # File with version and soversion set to different values + bothset = os.path.join(self.builddir, 'libbothset.so') + self.assertTrue(os.path.exists(bothset + '.1.2.3')) + self.assertEqual(os.readlink(bothset), 'libbothset.so.1.2.3') + self.assertEqual(os.readlink(bothset + '.1.2.3'), 'libbothset.so.4.5.6') + self.assertEqual(self.get_soname(bothset), 'libbothset.so.1.2.3') + self.assertEqual(len(glob(bothset[:-3] + '*')), 3) + + def test_compiler_check_flags_order(self): + ''' + Test that compiler check flags override all other flags. This can't be + an ordinary test case because it needs the environment to be set. 
+ ''' + Oflag = '-O3' + os.environ['CFLAGS'] = os.environ['CXXFLAGS'] = Oflag + testdir = os.path.join(self.common_test_dir, '43 has function') + self.init(testdir) + cmds = self.get_meson_log_compiler_checks() + for cmd in cmds: + # Verify that -I flags from the `args` kwarg are first + # This is set in the '43 has function' test case + self.assertEqual(cmd[2], '-I/tmp') + # Verify that -O3 set via the environment is overridden by -O0 + Oargs = [arg for arg in cmd if arg.startswith('-O')] + self.assertEqual(Oargs, [Oflag, '-O0']) if __name__ == '__main__': unittest.main() @@ -70,6 +70,7 @@ setup(name='meson', 'mesonbuild.wrap'], scripts=['meson.py', 'mesonconf.py', + 'mesontest.py', 'mesonintrospect.py', 'wraptool.py'], cmdclass={'install_scripts': install_scripts}, diff --git a/syntax-highlighting/vim/README b/syntax-highlighting/vim/README new file mode 100644 index 0000000..1afa243 --- /dev/null +++ b/syntax-highlighting/vim/README @@ -0,0 +1,3 @@ +ftdetect sets the filetype +syntax does Meson syntax highlighting +plugin does Meson indentation diff --git a/syntax-highlighting/vim/ftdetect/meson.vim b/syntax-highlighting/vim/ftdetect/meson.vim new file mode 100644 index 0000000..84db70c --- /dev/null +++ b/syntax-highlighting/vim/ftdetect/meson.vim @@ -0,0 +1,2 @@ +au BufNewFile,BufRead meson.build set filetype=meson +au BufNewFile,BufRead meson_options.txt set filetype=meson diff --git a/syntax-highlighting/vim/plugin/meson.vim b/syntax-highlighting/vim/plugin/meson.vim new file mode 100644 index 0000000..b219bdc --- /dev/null +++ b/syntax-highlighting/vim/plugin/meson.vim @@ -0,0 +1,183 @@ +" Vim indent file +" Language: Meson +" Maintainer: Nirbheek Chauhan <nirbheek.chauhan@gmail.com> +" Original Authors: David Bustos <bustos@caltech.edu> +" Bram Moolenaar <Bram@vim.org> +" Last Change: 2015 Feb 23 + +" Only load this indent file when no other was loaded. +if exists("b:did_indent") + finish +endif +let b:did_indent = 1 + +" Some preliminary settings +setlocal nolisp " Make sure lisp indenting doesn't supersede us +setlocal autoindent " indentexpr isn't much help otherwise + +setlocal indentexpr=GetMesonIndent(v:lnum) +setlocal indentkeys+==elif,=else,=endforeach,=endif,0) + +" Only define the function once. +if exists("*GetMesonIndent") + finish +endif +let s:keepcpo= &cpo +set cpo&vim + +" Come here when loading the script the first time. + +let s:maxoff = 50 " maximum number of lines to look backwards for () + +" Force sw=2 sts=2 because that's required by convention +set shiftwidth=2 +set softtabstop=2 + +function GetMesonIndent(lnum) + + " If this line is explicitly joined: If the previous line was also joined, + " line it up with that one, otherwise add two 'shiftwidth' + if getline(a:lnum - 1) =~ '\\$' + if a:lnum > 1 && getline(a:lnum - 2) =~ '\\$' + return indent(a:lnum - 1) + endif + return indent(a:lnum - 1) + (exists("g:mesonindent_continue") ? eval(g:mesonindent_continue) : (shiftwidth() * 2)) + endif + + " If the start of the line is in a string don't change the indent. + if has('syntax_items') + \ && synIDattr(synID(a:lnum, 1, 1), "name") =~ "String$" + return -1 + endif + + " Search backwards for the previous non-empty line. + let plnum = prevnonblank(v:lnum - 1) + + if plnum == 0 + " This is the first non-empty line, use zero indent. + return 0 + endif + + " If the previous line is inside parenthesis, use the indent of the starting + " line.
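+ " (that is, use the indent of the line that opened the bracket, found + " below as 'parlnum', as the reference indent instead of the previous + " line's own indent)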
+ " Trick: use the non-existing "dummy" variable to break out of the loop when + " going too far back. + call cursor(plnum, 1) + let parlnum = searchpair('(\|{\|\[', '', ')\|}\|\]', 'nbW', + \ "line('.') < " . (plnum - s:maxoff) . " ? dummy :" + \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')" + \ . " =~ '\\(Comment\\|Todo\\|String\\)$'") + if parlnum > 0 + let plindent = indent(parlnum) + let plnumstart = parlnum + else + let plindent = indent(plnum) + let plnumstart = plnum + endif + + + " When inside parenthesis: If at the first line below the parenthesis add + " two 'shiftwidth', otherwise same as previous line. + " i = (a + " + b + " + c) + call cursor(a:lnum, 1) + let p = searchpair('(\|{\|\[', '', ')\|}\|\]', 'bW', + \ "line('.') < " . (a:lnum - s:maxoff) . " ? dummy :" + \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')" + \ . " =~ '\\(Comment\\|Todo\\|String\\)$'") + if p > 0 + if p == plnum + " When the start is inside parenthesis, only indent one 'shiftwidth'. + let pp = searchpair('(\|{\|\[', '', ')\|}\|\]', 'bW', + \ "line('.') < " . (a:lnum - s:maxoff) . " ? dummy :" + \ . " synIDattr(synID(line('.'), col('.'), 1), 'name')" + \ . " =~ '\\(Comment\\|Todo\\|String\\)$'") + if pp > 0 + return indent(plnum) + (exists("g:pyindent_nested_paren") ? eval(g:pyindent_nested_paren) : shiftwidth()) + endif + return indent(plnum) + (exists("g:pyindent_open_paren") ? eval(g:pyindent_open_paren) : (shiftwidth() * 2)) + endif + if plnumstart == p + return indent(plnum) + endif + return plindent + endif + + + " Get the line and remove a trailing comment. + " Use syntax highlighting attributes when possible. + let pline = getline(plnum) + let pline_len = strlen(pline) + if has('syntax_items') + " If the last character in the line is a comment, do a binary search for + " the start of the comment. synID() is slow, a linear search would take + " too long on a long line. + if synIDattr(synID(plnum, pline_len, 1), "name") =~ "\\(Comment\\|Todo\\)$" + let min = 1 + let max = pline_len + while min < max + let col = (min + max) / 2 + if synIDattr(synID(plnum, col, 1), "name") =~ "\\(Comment\\|Todo\\)$" + let max = col + else + let min = col + 1 + endif + endwhile + let pline = strpart(pline, 0, min - 1) + endif + else + let col = 0 + while col < pline_len + if pline[col] == '#' + let pline = strpart(pline, 0, col) + break + endif + let col = col + 1 + endwhile + endif + + " If the previous line ended the conditional/loop + if getline(plnum) =~ '^\s*\(endif\|endforeach\)\>\s*' + " Maintain indent + return -1 + endif + + " If the previous line ended with a builtin, indent this line + if pline =~ '^\s*\(foreach\|if\|else\|elif\)\>\s*' + return plindent + shiftwidth() + endif + + " If the current line begins with a header keyword, deindent + if getline(a:lnum) =~ '^\s*\(else\|elif\|endif\|endforeach\)' + + " Unless the previous line was a one-liner + if getline(plnumstart) =~ '^\s*\(foreach\|if\)\>\s*' + return plindent + endif + + " Or the user has already dedented + if indent(a:lnum) <= plindent - shiftwidth() + return -1 + endif + + return plindent - shiftwidth() + endif + + " When after a () construct we probably want to go back to the start line. 
+ " a = (b + " + c) + " here + if parlnum > 0 + return plindent + endif + + return -1 + +endfunction + +let &cpo = s:keepcpo +unlet s:keepcpo + +" vim:sw=2 diff --git a/syntax-highlighting/vim/syntax/meson.vim b/syntax-highlighting/vim/syntax/meson.vim new file mode 100644 index 0000000..c2653ab --- /dev/null +++ b/syntax-highlighting/vim/syntax/meson.vim @@ -0,0 +1,117 @@ +" Vim syntax file +" Language: Meson +" Maintainer: Nirbheek Chauhan <nirbheek.chauhan@gmail.com> +" Last Change: 2015 Feb 23 +" Credits: Zvezdan Petkovic <zpetkovic@acm.org> +" Neil Schemenauer <nas@meson.ca> +" Dmitry Vasiliev +" +" This version is copied and edited from python.vim +" It's very basic, and doesn't do many things I'd like it to +" For instance, it should show errors for syntax that is valid in +" Python but not in Meson. +" +" Optional highlighting can be controlled using these variables. +" +" let meson_space_error_highlight = 1 +" + +" For version 5.x: Clear all syntax items. +" For version 6.x: Quit when a syntax file was already loaded. +if version < 600 + syntax clear +elseif exists("b:current_syntax") + finish +endif + +" We need nocompatible mode in order to continue lines with backslashes. +" Original setting will be restored. +let s:cpo_save = &cpo +set cpo&vim + +" https://github.com/mesonbuild/meson/wiki/Syntax +syn keyword mesonConditional elif else if endif +syn keyword mesonRepeat foreach endforeach +syn keyword mesonOperator and not or + +syn match mesonComment "#.*$" contains=mesonTodo,@Spell +syn keyword mesonTodo FIXME NOTE NOTES TODO XXX contained + +" Strings can either be single quoted or triple counted across multiple lines, +" but always with a ' +syn region mesonString + \ start="\z('\)" end="\z1" skip="\\\\\|\\\z1" + \ contains=mesonEscape,@Spell +syn region mesonString + \ start="\z('''\)" end="\z1" keepend + \ contains=mesonEscape,mesonSpaceError,@Spell + +syn match mesonEscape "\\[abfnrtv'\\]" contained +syn match mesonEscape "\\\o\{1,3}" contained +syn match mesonEscape "\\x\x\{2}" contained +syn match mesonEscape "\%(\\u\x\{4}\|\\U\x\{8}\)" contained +" Meson allows case-insensitive Unicode IDs: http://www.unicode.org/charts/ +syn match mesonEscape "\\N{\a\+\%(\s\a\+\)*}" contained +syn match mesonEscape "\\$" + +" Meson only supports integer numbers +" https://github.com/mesonbuild/meson/wiki/Syntax#numbers +syn match mesonNumber "\<\d\+\>" + +" booleans +syn keyword mesonConstant false true + +" Built-in functions +syn keyword mesonBuiltin add_global_arguments add_languages benchmark +syn keyword mesonBuiltin build_target configuration_data configure_file +syn keyword mesonBuiltin custom_target declare_dependency dependency +syn keyword mesonBuiltin error executable find_program find_library +syn keyword mesonBuiltin files generator get_option get_variable +syn keyword mesonBuiltin gettext import include_directories install_data +syn keyword mesonBuiltin install_headers install_man install_subdir +syn keyword mesonBuiltin is_subproject is_variable jar library message +syn keyword mesonBuiltin project run_command run_target set_variable +syn keyword mesonBuiltin shared_library static_library subdir subproject +syn keyword mesonBuiltin test vcs_tag + +if exists("meson_space_error_highlight") + " trailing whitespace + syn match mesonSpaceError display excludenl "\s\+$" + " mixed tabs and spaces + syn match mesonSpaceError display " \+\t" + syn match mesonSpaceError display "\t\+ " +endif + +if version >= 508 || !exists("did_meson_syn_inits") + if version <= 508 + let 
did_meson_syn_inits = 1 + command -nargs=+ HiLink hi link <args> + else + command -nargs=+ HiLink hi def link <args> + endif + + " The default highlight links. Can be overridden later. + HiLink mesonStatement Statement + HiLink mesonConditional Conditional + HiLink mesonRepeat Repeat + HiLink mesonOperator Operator + HiLink mesonComment Comment + HiLink mesonTodo Todo + HiLink mesonString String + HiLink mesonEscape Special + HiLink mesonNumber Number + HiLink mesonBuiltin Function + HiLink mesonConstant Number + if exists("meson_space_error_highlight") + HiLink mesonSpaceError Error + endif + + delcommand HiLink +endif + +let b:current_syntax = "meson" + +let &cpo = s:cpo_save +unlet s:cpo_save + +" vim:set sw=2 sts=2 ts=8 noet: diff --git a/test cases/common/124 test skip/meson.build b/test cases/common/124 test skip/meson.build new file mode 100644 index 0000000..568527f --- /dev/null +++ b/test cases/common/124 test skip/meson.build @@ -0,0 +1,4 @@ +project('test skip', 'c') + +exe_test_skip = executable('test_skip', 'test_skip.c') +test('test_skip', exe_test_skip) diff --git a/test cases/common/124 test skip/test_skip.c b/test cases/common/124 test skip/test_skip.c new file mode 100644 index 0000000..d050a61 --- /dev/null +++ b/test cases/common/124 test skip/test_skip.c @@ -0,0 +1,4 @@ +int main(int argc, char *argv[]) +{ + return 77; +} diff --git a/test cases/common/25 object extraction/lib2.c b/test cases/common/25 object extraction/lib2.c new file mode 100644 index 0000000..c30dde2 --- /dev/null +++ b/test cases/common/25 object extraction/lib2.c @@ -0,0 +1,3 @@ +int retval() { + return 43; +} diff --git a/test cases/common/25 object extraction/meson.build b/test cases/common/25 object extraction/meson.build index 7c5ab90..c76b0db 100644 --- a/test cases/common/25 object extraction/meson.build +++ b/test cases/common/25 object extraction/meson.build @@ -4,7 +4,7 @@ if meson.is_unity() message('Skipping extraction test because this is a Unity build.') else lib1 = shared_library('somelib', 'src/lib.c') - lib2 = shared_library('somelib2', 'lib.c') + lib2 = shared_library('somelib2', 'lib.c', 'lib2.c') obj1 = lib1.extract_objects('src/lib.c') obj2 = lib2.extract_objects(['lib.c']) diff --git a/test cases/common/43 has function/meson.build b/test cases/common/43 has function/meson.build index 61f96e1..e0d3344 100644 --- a/test cases/common/43 has function/meson.build +++ b/test cases/common/43 has function/meson.build @@ -1,9 +1,12 @@ project('has function', 'c', 'cpp') +# This is used in the `test_compiler_check_flags_order` unit test +unit_test_args = '-I/tmp' compilers = [meson.get_compiler('c'), meson.get_compiler('cpp')] foreach cc : compilers - if not cc.has_function('printf', prefix : '#include<stdio.h>') + if not cc.has_function('printf', prefix : '#include<stdio.h>', + args : unit_test_args) error('"printf" function not found (should always exist).') endif @@ -13,12 +16,16 @@ foreach cc : compilers # On MSVC fprintf is defined as an inline function in the header, so it cannot # be found without the include. 
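+# The '#include <stdio.h>' prefix is therefore only passed in the msvc +# branch below.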
if cc.get_id() != 'msvc' - assert(cc.has_function('fprintf'), '"fprintf" function not found without include (on !msvc).') + assert(cc.has_function('fprintf', args : unit_test_args), + '"fprintf" function not found without include (on !msvc).') else - assert(cc.has_function('fprintf', prefix : '#include <stdio.h>'), '"fprintf" function not found with include (on msvc).') + assert(cc.has_function('fprintf', prefix : '#include <stdio.h>', + args : unit_test_args), + '"fprintf" function not found with include (on msvc).') endif - if cc.has_function('hfkerhisadf', prefix : '#include<stdio.h>') + if cc.has_function('hfkerhisadf', prefix : '#include<stdio.h>', + args : unit_test_args) error('Found non-existent function "hfkerhisadf".') endif @@ -28,16 +35,23 @@ foreach cc : compilers # implemented in glibc it's probably not implemented in any other 'slimmer' # C library variants either, so the check should be safe either way hopefully. if host_machine.system() == 'linux' and cc.get_id() == 'gcc' - assert (cc.has_function('poll', prefix : '#include <poll.h>'), 'couldn\'t detect "poll" when defined by a header') - assert (not cc.has_function('lchmod', prefix : '''#include <sys/stat.h> - #include <unistd.h>'''), '"lchmod" check should have failed') + assert (cc.has_function('poll', prefix : '#include <poll.h>', + args : unit_test_args), + 'couldn\'t detect "poll" when defined by a header') + lchmod_prefix = '#include <sys/stat.h>\n#include <unistd.h>' + assert (not cc.has_function('lchmod', prefix : lchmod_prefix, + args : unit_test_args), + '"lchmod" check should have failed') endif # For some functions one needs to define _GNU_SOURCE before including the # right headers to get them picked up. Make sure we can detect these functions # as well without any prefix - if cc.has_header_symbol('sys/socket.h', 'recvmmsg', prefix : '#define _GNU_SOURCE') + if cc.has_header_symbol('sys/socket.h', 'recvmmsg', + prefix : '#define _GNU_SOURCE', + args : unit_test_args) # We assume that if recvmmsg exists sendmmsg does too - assert (cc.has_function('sendmmsg'), 'Failed to detect function "sendmmsg" (should always exist).') + assert (cc.has_function('sendmmsg', args : unit_test_args), + 'Failed to detect function "sendmmsg" (should always exist).') endif endforeach diff --git a/test cases/common/51 pkgconfig-gen/meson.build b/test cases/common/51 pkgconfig-gen/meson.build index 0933238..e1e41d9 100644 --- a/test cases/common/51 pkgconfig-gen/meson.build +++ b/test cases/common/51 pkgconfig-gen/meson.build @@ -19,11 +19,17 @@ pkgg.generate( ) pkgconfig = find_program('pkg-config', required: false) -if pkgconfig.found() and build_machine.system() != 'windows' - test('pkgconfig-validation', pkgconfig, - args: ['--validate', 'simple'], - env: ['PKG_CONFIG_PATH=' + meson.current_build_dir() + '/meson-private' ], - ) +if pkgconfig.found() + v = run_command(pkgconfig, '--version').stdout().strip() + if v.version_compare('>=0.29') + test('pkgconfig-validation', pkgconfig, + args: ['--validate', 'simple'], + env: ['PKG_CONFIG_PATH=' + meson.current_build_dir() + '/meson-private' ]) + else + message('pkg-config version \'' + v + '\' too old, skipping validate test') + endif +else + message('pkg-config not found, skipping validate test') endif # Test that name_prefix='' and name='libfoo' results in '-lfoo' diff --git a/test cases/failing/37 pkgconfig dependency impossible conditions/meson.build b/test cases/failing/37 pkgconfig dependency impossible conditions/meson.build new file mode 100644 index 0000000..54d434c --- 
/dev/null +++ b/test cases/failing/37 pkgconfig dependency impossible conditions/meson.build @@ -0,0 +1,3 @@ +project('impossible-dep-test', 'c', version : '1.0') + +dependency('zlib', version : ['>=1.0', '<1.0']) diff --git a/test cases/frameworks/6 gettext/data/meson.build b/test cases/frameworks/6 gettext/data/meson.build new file mode 100644 index 0000000..d927ba3 --- /dev/null +++ b/test cases/frameworks/6 gettext/data/meson.build @@ -0,0 +1,8 @@ +i18n.merge_file( + input: 'test.desktop.in', + output: 'test.desktop', + type: 'desktop', + po_dir: '../po', + install: true, + install_dir: join_paths(get_option('datadir'), 'applications') +) diff --git a/test cases/frameworks/6 gettext/data/test.desktop.in b/test cases/frameworks/6 gettext/data/test.desktop.in new file mode 100644 index 0000000..33b9a9f --- /dev/null +++ b/test cases/frameworks/6 gettext/data/test.desktop.in @@ -0,0 +1,6 @@ +[Desktop Entry] +Name=Test +GenericName=Application +Comment=Test Application +Type=Application + diff --git a/test cases/frameworks/6 gettext/installed_files.txt b/test cases/frameworks/6 gettext/installed_files.txt index c95b9fd..ffe543f 100644 --- a/test cases/frameworks/6 gettext/installed_files.txt +++ b/test cases/frameworks/6 gettext/installed_files.txt @@ -1,3 +1,4 @@ usr/bin/intlprog usr/share/locale/de/LC_MESSAGES/intltest.mo usr/share/locale/fi/LC_MESSAGES/intltest.mo +usr/share/applications/test.desktop diff --git a/test cases/frameworks/6 gettext/meson.build b/test cases/frameworks/6 gettext/meson.build index 6bba7e0..6b517a4 100644 --- a/test cases/frameworks/6 gettext/meson.build +++ b/test cases/frameworks/6 gettext/meson.build @@ -4,3 +4,4 @@ i18n = import('i18n') subdir('po') subdir('src') +subdir('data') diff --git a/test cases/frameworks/6 gettext/po/LINGUAS b/test cases/frameworks/6 gettext/po/LINGUAS new file mode 100644 index 0000000..d319e48 --- /dev/null +++ b/test cases/frameworks/6 gettext/po/LINGUAS @@ -0,0 +1,2 @@ +de +fi diff --git a/test cases/frameworks/6 gettext/po/POTFILES b/test cases/frameworks/6 gettext/po/POTFILES index 5fd4b84..f49cecd 100644 --- a/test cases/frameworks/6 gettext/po/POTFILES +++ b/test cases/frameworks/6 gettext/po/POTFILES @@ -1 +1,2 @@ src/intlmain.c +data/test.desktop diff --git a/test cases/linuxlike/5 dependency versions/meson.build b/test cases/linuxlike/5 dependency versions/meson.build index 1de87c8..7f29564 100644 --- a/test cases/linuxlike/5 dependency versions/meson.build +++ b/test cases/linuxlike/5 dependency versions/meson.build @@ -10,6 +10,17 @@ assert(zlib.type_name() == 'pkgconfig', 'zlib should be of type "pkgconfig" not zlibver = dependency('zlib', version : '<1.0', required : false) assert(zlibver.found() == false, 'zlib <1.0 should not be found!') +# Find external dependencies with various version restrictions +dependency('zlib', version : '>=1.0') +dependency('zlib', version : '<=9999') +dependency('zlib', version : '=' + zlib.version()) + +# Find external dependencies with multiple version restrictions +dependency('zlib', version : ['>=1.0', '<=9999']) +if dependency('zlib', version : ['<=1.0', '>=9999', '=' + zlib.version()], required : false).found() + error('zlib <=1.0 >=9999 should not have been found') +endif + # Test https://github.com/mesonbuild/meson/pull/610 dependency('somebrokenlib', version : '>=2.0', required : false) dependency('somebrokenlib', version : '>=1.0', required : false) diff --git a/test cases/unit/1 soname/CMakeLists.txt b/test cases/unit/1 soname/CMakeLists.txt new file mode 100644 index 
0000000..c4f2e3e --- /dev/null +++ b/test cases/unit/1 soname/CMakeLists.txt @@ -0,0 +1,26 @@ +# This is a CMake version of this test. It behaves slightly differently +# so in case you ever need to debug this, here it is. +# +# The biggest difference is that if SOVERSION is not set, it +# is set to VERSION. Autotools sets it to the first number +# of VERSION. That is, for version number 1.2.3 CMake sets +# soname to 1.2.3 but Autotools sets it to 1. + +project(vertest C) +cmake_minimum_required(VERSION 3.5) + +add_library(nover SHARED versioned.c) + +add_library(verset SHARED versioned.c) +set_target_properties(verset PROPERTIES VERSION 4.5.6) + +add_library(soverset SHARED versioned.c) +set_target_properties(soverset PROPERTIES SOVERSION 1.2.3) + +add_library(bothset SHARED versioned.c) +set_target_properties(bothset PROPERTIES SOVERSION 1.2.3) +set_target_properties(bothset PROPERTIES VERSION 4.5.6) + +add_library(settosame SHARED versioned.c) +set_target_properties(settosame PROPERTIES SOVERSION 7.8.9) +set_target_properties(settosame PROPERTIES VERSION 7.8.9) diff --git a/test cases/unit/1 soname/meson.build b/test cases/unit/1 soname/meson.build new file mode 100644 index 0000000..d956afe --- /dev/null +++ b/test cases/unit/1 soname/meson.build @@ -0,0 +1,18 @@ +project('vertest', 'c') + +shared_library('nover', 'versioned.c') + +shared_library('verset', 'versioned.c', + version : '4.5.6') + +shared_library('soverset', 'versioned.c', + soversion : '1.2.3') + +shared_library('bothset', 'versioned.c', + soversion : '1.2.3', + version : '4.5.6') + +shared_library('settosame', 'versioned.c', + soversion : '7.8.9', + version : '7.8.9') + diff --git a/test cases/unit/1 soname/versioned.c b/test cases/unit/1 soname/versioned.c new file mode 100644 index 0000000..f48d2b0 --- /dev/null +++ b/test cases/unit/1 soname/versioned.c @@ -0,0 +1,3 @@ +int versioned_func() { + return 0; +}
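As a worked illustration of the layout the '1 soname' test above expects for the version-plus-soversion case, the following standalone sketch (not part of the change set; it assumes the test case has been built into a local 'builddir' and that readelf from binutils is on PATH) follows the libbothset.so alias chain and prints the SONAME, along the same lines as the get_soname() helper in the unit tests.

#!/usr/bin/env python3
# Standalone sketch: follow the libbothset.so alias chain and print its SONAME.
# Assumes the '1 soname' test case was built into ./builddir and that readelf
# (from binutils) is available; adjust the paths for your setup.
import os
import subprocess

def get_soname(path):
    # Same approach as the get_soname() helper in the unit tests: pull the
    # SONAME entry out of the dynamic section.
    output = subprocess.check_output(['readelf', '-d', path],
                                     universal_newlines=True)
    for line in output.splitlines():
        if 'SONAME' in line:
            return line.split('[')[1].split(']')[0]
    return None

def alias_chain(path):
    # Expected chain for version 4.5.6 plus soversion 1.2.3:
    # libbothset.so -> libbothset.so.1.2.3 (the SONAME) -> libbothset.so.4.5.6
    chain = [path]
    while os.path.islink(chain[-1]):
        target = os.readlink(chain[-1])
        chain.append(os.path.join(os.path.dirname(chain[-1]), target))
    return chain

if __name__ == '__main__':
    chain = alias_chain(os.path.join('builddir', 'libbothset.so'))
    print(' -> '.join(os.path.basename(p) for p in chain))
    print('SONAME:', get_soname(chain[-1]))

Run against a build directory configured from the meson.build above, this should print the three filenames in that order, with the SONAME matching the soversion alias, mirroring what test_soname asserts.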