78 files changed, 1561 insertions, 1018 deletions
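The most consequential functional change in this commit is in Backend.serialise_executable (see the mesonbuild/backend/backends.py hunk below): the serialized wrapper data file is now named from a sha1 digest of the command args, environment, working directory, and capture target instead of a random uuid, so regenerating the build produces identical command lines and Ninja does not rebuild. A minimal standalone sketch of that naming scheme, using a hypothetical helper name and plain arguments in place of Meson's internal objects:

import hashlib

def scratch_exe_filename(basename, cmd_args, env, workdir, capture):
    # Digest the whole invocation so the name is deterministic across
    # regenerations (same inputs -> same file name -> unchanged Ninja
    # command line, hence no spurious rebuild) while still avoiding
    # collisions between different invocations of the same executable.
    data = bytes(str(sorted(env.items())) + str(cmd_args) + str(workdir) + str(capture),
                 encoding='utf-8')
    return 'meson_exe_{0}_{1}.dat'.format(basename, hashlib.sha1(data).hexdigest())

# Identical inputs always map to the same wrapper file name:
a = scratch_exe_filename('gen', ['--out', 'foo.c'], {'PATH': '/usr/bin'}, '/build', None)
b = scratch_exe_filename('gen', ['--out', 'foo.c'], {'PATH': '/usr/bin'}, '/build', None)
assert a == b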
diff --git a/authors.txt b/authors.txt index 2c2a4df..18863eb 100644 --- a/authors.txt +++ b/authors.txt @@ -60,3 +60,4 @@ Daniel Stone Marc-Antoine Perennou Matthieu Gautier Kseniia Vasilchuk +Philipp Geier diff --git a/mesonbuild/astinterpreter.py b/mesonbuild/astinterpreter.py index 7a83736..1cdf523 100644 --- a/mesonbuild/astinterpreter.py +++ b/mesonbuild/astinterpreter.py @@ -47,47 +47,47 @@ class AstInterpreter(interpreterbase.InterpreterBase): def __init__(self, source_root, subdir): super().__init__(source_root, subdir) self.asts = {} - self.funcs.update({'project' : self.func_do_nothing, - 'test' : self.func_do_nothing, - 'benchmark' : self.func_do_nothing, - 'install_headers' : self.func_do_nothing, - 'install_man' : self.func_do_nothing, - 'install_data' : self.func_do_nothing, - 'install_subdir' : self.func_do_nothing, - 'configuration_data' : self.func_do_nothing, - 'configure_file' : self.func_do_nothing, - 'find_program' : self.func_do_nothing, - 'include_directories' : self.func_do_nothing, - 'add_global_arguments' : self.func_do_nothing, - 'add_global_link_arguments' : self.func_do_nothing, - 'add_project_arguments' : self.func_do_nothing, - 'add_project_link_arguments' : self.func_do_nothing, - 'message' : self.func_do_nothing, - 'generator' : self.func_do_nothing, - 'error' : self.func_do_nothing, - 'run_command' : self.func_do_nothing, - 'assert' : self.func_do_nothing, - 'subproject' : self.func_do_nothing, - 'dependency' : self.func_do_nothing, - 'get_option' : self.func_do_nothing, - 'join_paths' : self.func_do_nothing, - 'environment' : self.func_do_nothing, - 'import' : self.func_do_nothing, - 'vcs_tag' : self.func_do_nothing, - 'add_languages' : self.func_do_nothing, - 'declare_dependency' : self.func_do_nothing, - 'files' : self.func_files, + self.funcs.update({'project': self.func_do_nothing, + 'test': self.func_do_nothing, + 'benchmark': self.func_do_nothing, + 'install_headers': self.func_do_nothing, + 'install_man': self.func_do_nothing, + 'install_data': self.func_do_nothing, + 'install_subdir': self.func_do_nothing, + 'configuration_data': self.func_do_nothing, + 'configure_file': self.func_do_nothing, + 'find_program': self.func_do_nothing, + 'include_directories': self.func_do_nothing, + 'add_global_arguments': self.func_do_nothing, + 'add_global_link_arguments': self.func_do_nothing, + 'add_project_arguments': self.func_do_nothing, + 'add_project_link_arguments': self.func_do_nothing, + 'message': self.func_do_nothing, + 'generator': self.func_do_nothing, + 'error': self.func_do_nothing, + 'run_command': self.func_do_nothing, + 'assert': self.func_do_nothing, + 'subproject': self.func_do_nothing, + 'dependency': self.func_do_nothing, + 'get_option': self.func_do_nothing, + 'join_paths': self.func_do_nothing, + 'environment': self.func_do_nothing, + 'import': self.func_do_nothing, + 'vcs_tag': self.func_do_nothing, + 'add_languages': self.func_do_nothing, + 'declare_dependency': self.func_do_nothing, + 'files': self.func_files, 'executable': self.func_executable, - 'static_library' : self.func_static_lib, - 'shared_library' : self.func_shared_lib, - 'library' : self.func_library, - 'build_target' : self.func_build_target, - 'custom_target' : self.func_custom_target, - 'run_target' : self.func_run_target, - 'subdir' : self.func_subdir, - 'set_variable' : self.func_set_variable, - 'get_variable' : self.func_get_variable, - 'is_variable' : self.func_is_variable, + 'static_library': self.func_static_lib, + 'shared_library': self.func_shared_lib, + 'library': 
self.func_library, + 'build_target': self.func_build_target, + 'custom_target': self.func_custom_target, + 'run_target': self.func_run_target, + 'subdir': self.func_subdir, + 'set_variable': self.func_set_variable, + 'get_variable': self.func_get_variable, + 'is_variable': self.func_is_variable, }) def func_do_nothing(self, node, args, kwargs): diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py index 934f274..7e3f936 100644 --- a/mesonbuild/backend/backends.py +++ b/mesonbuild/backend/backends.py @@ -34,7 +34,7 @@ class CleanTrees(): class InstallData(): def __init__(self, source_dir, build_dir, prefix): self.source_dir = source_dir - self.build_dir= build_dir + self.build_dir = build_dir self.prefix = prefix self.targets = [] self.headers = [] @@ -197,15 +197,20 @@ class Backend(): def serialise_executable(self, exe, cmd_args, workdir, env={}, capture=None): - import uuid + import hashlib # Can't just use exe.name here; it will likely be run more than once if isinstance(exe, (dependencies.ExternalProgram, build.BuildTarget, build.CustomTarget)): basename = exe.name else: basename = os.path.basename(exe) - scratch_file = 'meson_exe_{0}_{1}.dat'.format(basename, - str(uuid.uuid4())[:8]) + # Take a digest of the cmd args, env, workdir, and capture. This avoids + # collisions and also makes the name deterministic over regenerations + # which avoids a rebuild by Ninja because the cmdline stays the same. + data = bytes(str(sorted(env.items())) + str(cmd_args) + str(workdir) + str(capture), + encoding='utf-8') + digest = hashlib.sha1(data).hexdigest() + scratch_file = 'meson_exe_{0}_{1}.dat'.format(basename, digest) exe_data = os.path.join(self.environment.get_scratch_dir(), scratch_file) with open(exe_data, 'wb') as f: if isinstance(exe, dependencies.ExternalProgram): @@ -642,9 +647,9 @@ class Backend(): return (srcs, ofilenames, cmd) def run_postconf_scripts(self): - env = {'MESON_SOURCE_ROOT' : self.environment.get_source_dir(), - 'MESON_BUILD_ROOT' : self.environment.get_build_dir() - } + env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(), + 'MESON_BUILD_ROOT': self.environment.get_build_dir(), + } child_env = os.environ.copy() child_env.update(env) diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index b43977e..60383af 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -484,11 +484,11 @@ int dummy; # a serialized executable wrapper for that and check if the # CustomTarget command needs extra paths first. if target.capture or (mesonlib.is_windows() and - self.determine_windows_extra_paths(target.command[0])): + self.determine_windows_extra_paths(target.command[0])): exe_data = self.serialise_executable(target.command[0], cmd[1:], - # All targets are built from the build dir - self.environment.get_build_dir(), - capture=ofilenames[0] if target.capture else None) + # All targets are built from the build dir + self.environment.get_build_dir(), + capture=ofilenames[0] if target.capture else None) cmd = [sys.executable, self.environment.get_build_command(), '--internal', 'exe', exe_data] cmd_type = 'meson_exe.py custom' @@ -732,7 +732,7 @@ int dummy; elem.write(outfile) # And then benchmarks. 
- cmd = [sys.executable, test_exe, '--benchmark','--logbase', + cmd = [sys.executable, test_exe, '--benchmark', '--logbase', 'benchmarklog', '--num-processes=1', '--no-rebuild'] elem = NinjaBuildElement(self.all_outputs, 'benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY']) elem.add_item('COMMAND', cmd) @@ -763,11 +763,11 @@ int dummy; outfile.write(' restat = 1\n\n') outfile.write('rule REGENERATE_BUILD\n') c = (quote_char + ninja_quote(sys.executable) + quote_char, - quote_char + ninja_quote(self.environment.get_build_command()) + quote_char, + quote_char + ninja_quote(self.environment.get_build_command()) + quote_char, '--internal', 'regenerate', - quote_char + ninja_quote(self.environment.get_source_dir()) + quote_char, - quote_char + ninja_quote(self.environment.get_build_dir()) + quote_char) + quote_char + ninja_quote(self.environment.get_source_dir()) + quote_char, + quote_char + ninja_quote(self.environment.get_build_dir()) + quote_char) outfile.write(" command = %s %s %s %s %s %s --backend ninja\n" % c) outfile.write(' description = Regenerating build files\n') outfile.write(' generator = 1\n\n') @@ -796,7 +796,7 @@ int dummy; class_list.append(plain_class_path) class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list] jar_rule = 'java_LINKER' - commands = [c+m+e+f] + commands = [c + m + e + f] if e != '': commands.append(main_class) commands.append(self.get_target_filename(target)) @@ -1200,7 +1200,7 @@ int dummy; elem.write(outfile) if isinstance(target, build.StaticLibrary): elem = self.generate_link(target, outfile, self.get_target_filename(target), - rel_objects, self.build.static_linker) + rel_objects, self.build.static_linker) elem.write(outfile) elif isinstance(target, build.Executable): elem = NinjaBuildElement(self.all_outputs, self.get_target_filename(target), 'swift_COMPILER', []) @@ -1623,7 +1623,7 @@ rule FORTRAN_DEP_HACK mod_files = [] usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE) dirname = self.get_target_private_dir(target) - tdeps= self.fortran_deps[target.get_basename()] + tdeps = self.fortran_deps[target.get_basename()] with open(src) as f: for line in f: usematch = usere.match(line) @@ -1769,7 +1769,7 @@ rule FORTRAN_DEP_HACK curdir = target.get_subdir() tmppath = os.path.normpath(os.path.join(self.build_to_src, curdir)) src_inc = compiler.get_include_args(tmppath, False) - if curdir == '': + if curdir == '': curdir = '.' build_inc = compiler.get_include_args(curdir, False) commands += build_inc + src_inc @@ -1781,7 +1781,7 @@ rule FORTRAN_DEP_HACK for i in target.get_include_dirs(): basedir = i.get_curdir() for d in i.get_incdirs(): - expdir = os.path.join(basedir, d) + expdir = os.path.join(basedir, d) srctreedir = os.path.join(self.build_to_src, expdir) # There may be include dirs where a build directory has not been # created for some source dir. 
For example if someone does this: @@ -1837,6 +1837,8 @@ rule FORTRAN_DEP_HACK pchlist = [] if len(pchlist) == 0: pch_dep = [] + elif compiler.id == 'intel': + pch_dep = [] else: arr = [] i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0])) @@ -1850,7 +1852,7 @@ rule FORTRAN_DEP_HACK if idir not in custom_target_include_dirs: custom_target_include_dirs.append(idir) for i in custom_target_include_dirs: - commands+= compiler.get_include_args(i, False) + commands += compiler.get_include_args(i, False) if self.environment.coredata.base_options.get('b_pch', False): commands += self.get_pch_include_args(compiler, target) @@ -1956,6 +1958,9 @@ rule FORTRAN_DEP_HACK src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[-1]) (commands, dep, dst, objs) = self.generate_msvc_pch_command(target, compiler, pch) extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) + elif compiler.id == 'intel': + # Intel generates on target generation + continue else: src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) (commands, dep, dst, objs) = self.generate_gcc_pch_command(target, compiler, pch[0]) @@ -2086,7 +2091,6 @@ rule FORTRAN_DEP_HACK return self.get_target_filename(t) def generate_shlib_aliases(self, target, outdir): - basename = target.get_filename() aliases = target.get_aliases() for alias, to in aliases.items(): aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias) @@ -2105,8 +2109,6 @@ rule FORTRAN_DEP_HACK e = NinjaBuildElement(self.all_outputs, 'clean-ctlist', 'CUSTOM_COMMAND', 'PHONY') d = CleanTrees(self.environment.get_build_dir(), trees) d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat') - script_root = self.environment.get_script_dir() - clean_script = os.path.join(script_root, 'cleantrees.py') e.add_item('COMMAND', [sys.executable, self.environment.get_build_command(), '--internal', 'cleantrees', d_file]) @@ -2132,15 +2134,27 @@ rule FORTRAN_DEP_HACK gcda_elem.add_item('description', 'Deleting gcda files') gcda_elem.write(outfile) + def get_user_option_args(self): + cmds = [] + for (k, v) in self.environment.coredata.user_options.items(): + cmds.append('-D' + k + '=' + (v.value if isinstance(v.value, str) else str(v.value).lower())) + return cmds + # For things like scan-build and other helper tools we might have. 
def generate_utils(self, outfile): cmd = [sys.executable, self.environment.get_build_command(), '--internal', 'scanbuild', self.environment.source_dir, self.environment.build_dir, - sys.executable, self.environment.get_build_command()] + sys.executable, self.environment.get_build_command()] + self.get_user_option_args() elem = NinjaBuildElement(self.all_outputs, 'scan-build', 'CUSTOM_COMMAND', 'PHONY') elem.add_item('COMMAND', cmd) elem.add_item('pool', 'console') elem.write(outfile) + cmd = [sys.executable, self.environment.get_build_command(), + '--internal', 'uninstall'] + elem = NinjaBuildElement(self.all_outputs, 'uninstall', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + elem.write(outfile) def generate_ending(self, outfile): targetlist = [] diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py index 3ba63f4..e9d7389 100644 --- a/mesonbuild/backend/vs2010backend.py +++ b/mesonbuild/backend/vs2010backend.py @@ -320,21 +320,21 @@ class Vs2010Backend(backends.Backend): return '' directories = os.path.normpath(target.subdir).split(os.sep) - return os.sep.join(['..']*len(directories)) + return os.sep.join(['..'] * len(directories)) def special_quote(self, arr): return ['"%s"' % i for i in arr] def create_basic_crap(self, target): project_name = target.name - root = ET.Element('Project', {'DefaultTargets' : "Build", - 'ToolsVersion' : '4.0', - 'xmlns' : 'http://schemas.microsoft.com/developer/msbuild/2003'}) - confitems = ET.SubElement(root, 'ItemGroup', {'Label' : 'ProjectConfigurations'}) + root = ET.Element('Project', {'DefaultTargets': "Build", + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) prjconf = ET.SubElement(confitems, 'ProjectConfiguration', - {'Include' : self.buildtype + '|' + self.platform}) + {'Include': self.buildtype + '|' + self.platform}) p = ET.SubElement(prjconf, 'Configuration') - p.text= self.buildtype + p.text = self.buildtype pl = ET.SubElement(prjconf, 'Platform') pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') @@ -343,8 +343,8 @@ class Vs2010Backend(backends.Backend): kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') - p.text= self.platform - pname= ET.SubElement(globalgroup, 'ProjectName') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') pname.text = project_name ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props') type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') @@ -380,7 +380,7 @@ class Vs2010Backend(backends.Backend): cmd += i.fullpath else: cmd.append(i) - cmd_templ = '''"%s" '''*len(cmd) + cmd_templ = '''"%s" ''' * len(cmd) ET.SubElement(customstep, 'Command').text = cmd_templ % tuple(cmd) ET.SubElement(customstep, 'Message').text = 'Running custom command.' ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.targets') @@ -395,7 +395,7 @@ class Vs2010Backend(backends.Backend): # from the target dir, not the build root. 
target.absolute_paths = True (srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True) - cmd_templ = '''"%s" '''*len(cmd) + cmd_templ = '''"%s" ''' * len(cmd) ET.SubElement(customstep, 'Command').text = cmd_templ % tuple(cmd) ET.SubElement(customstep, 'Outputs').text = ';'.join(ofilenames) ET.SubElement(customstep, 'Inputs').text = ';'.join(srcs) @@ -462,18 +462,18 @@ class Vs2010Backend(backends.Backend): def escape_preprocessor_define(define): # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx table = str.maketrans({'%': '%25', '$': '%24', '@': '%40', - "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', - # We need to escape backslash because it'll be un-escaped by - # Windows during process creation when it parses the arguments - # Basically, this converts `\` to `\\`. - '\\': '\\\\'}) + "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', + # We need to escape backslash because it'll be un-escaped by + # Windows during process creation when it parses the arguments + # Basically, this converts `\` to `\\`. + '\\': '\\\\'}) return define.translate(table) @staticmethod def escape_additional_option(option): # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx table = str.maketrans({'%': '%25', '$': '%24', '@': '%40', - "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', ' ': '%20',}) + "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', ' ': '%20'}) option = option.translate(table) # Since we're surrounding the option with ", if it ends in \ that will # escape the " when the process arguments are parsed and the starting @@ -558,14 +558,14 @@ class Vs2010Backend(backends.Backend): buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype) project_name = target.name target_name = target.name - root = ET.Element('Project', {'DefaultTargets' : "Build", - 'ToolsVersion' : '4.0', - 'xmlns' : 'http://schemas.microsoft.com/developer/msbuild/2003'}) - confitems = ET.SubElement(root, 'ItemGroup', {'Label' : 'ProjectConfigurations'}) + root = ET.Element('Project', {'DefaultTargets': "Build", + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) prjconf = ET.SubElement(confitems, 'ProjectConfiguration', - {'Include' : self.buildtype + '|' + self.platform}) + {'Include': self.buildtype + '|' + self.platform}) p = ET.SubElement(prjconf, 'Configuration') - p.text= self.buildtype + p.text = self.buildtype pl = ET.SubElement(prjconf, 'Platform') pl.text = self.platform # Globals @@ -577,8 +577,8 @@ class Vs2010Backend(backends.Backend): ns = ET.SubElement(globalgroup, 'RootNamespace') ns.text = target_name p = ET.SubElement(globalgroup, 'Platform') - p.text= self.platform - pname= ET.SubElement(globalgroup, 'ProjectName') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') pname.text = project_name ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props') # Start configuration @@ -925,13 +925,13 @@ class Vs2010Backend(backends.Backend): def gen_regenproj(self, project_name, ofname): root = ET.Element('Project', {'DefaultTargets': 'Build', - 'ToolsVersion' : '4.0', - 'xmlns' : 'http://schemas.microsoft.com/developer/msbuild/2003'}) - confitems = ET.SubElement(root, 'ItemGroup', {'Label' : 'ProjectConfigurations'}) + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) prjconf = 
ET.SubElement(confitems, 'ProjectConfiguration', - {'Include' : self.buildtype + '|' + self.platform}) + {'Include': self.buildtype + '|' + self.platform}) p = ET.SubElement(prjconf, 'Configuration') - p.text= self.buildtype + p.text = self.buildtype pl = ET.SubElement(prjconf, 'Platform') pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') @@ -941,7 +941,7 @@ class Vs2010Backend(backends.Backend): kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') p.text = self.platform - pname= ET.SubElement(globalgroup, 'ProjectName') + pname = ET.SubElement(globalgroup, 'ProjectName') pname.text = project_name ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props') type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') @@ -1003,14 +1003,14 @@ if %%errorlevel%% neq 0 goto :VCEnd''' def gen_testproj(self, target_name, ofname): project_name = target_name - root = ET.Element('Project', {'DefaultTargets' : "Build", - 'ToolsVersion' : '4.0', - 'xmlns' : 'http://schemas.microsoft.com/developer/msbuild/2003'}) - confitems = ET.SubElement(root, 'ItemGroup', {'Label' : 'ProjectConfigurations'}) + root = ET.Element('Project', {'DefaultTargets': "Build", + 'ToolsVersion': '4.0', + 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}) + confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'}) prjconf = ET.SubElement(confitems, 'ProjectConfiguration', - {'Include' : self.buildtype + '|' + self.platform}) + {'Include': self.buildtype + '|' + self.platform}) p = ET.SubElement(prjconf, 'Configuration') - p.text= self.buildtype + p.text = self.buildtype pl = ET.SubElement(prjconf, 'Platform') pl.text = self.platform globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals') @@ -1019,8 +1019,8 @@ if %%errorlevel%% neq 0 goto :VCEnd''' kw = ET.SubElement(globalgroup, 'Keyword') kw.text = self.platform + 'Proj' p = ET.SubElement(globalgroup, 'Platform') - p.text= self.platform - pname= ET.SubElement(globalgroup, 'ProjectName') + p.text = self.platform + pname = ET.SubElement(globalgroup, 'ProjectName') pname.text = project_name ET.SubElement(root, 'Import', Project='$(VCTargetsPath)\Microsoft.Cpp.Default.props') type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration') @@ -1070,5 +1070,5 @@ if %%errorlevel%% neq 0 goto :VCEnd''' tree = ET.ElementTree(root) tree.write(ofname, encoding='utf-8', xml_declaration=True) # ElementTree can not do prettyprinting so do it manually - #doc = xml.dom.minidom.parse(ofname) - #open(ofname, 'w').write(doc.toprettyxml()) + # doc = xml.dom.minidom.parse(ofname) + # open(ofname, 'w').write(doc.toprettyxml()) diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py index 8133e0f..7ab3813 100644 --- a/mesonbuild/backend/xcodebackend.py +++ b/mesonbuild/backend/xcodebackend.py @@ -27,21 +27,22 @@ class XCodeBackend(backends.Backend): self.project_conflist = self.gen_id() self.indent = ' ' self.indent_level = 0 - self.xcodetypemap = {'c' : 'sourcecode.c.c', - 'a' : 'archive.ar', + self.xcodetypemap = {'c': 'sourcecode.c.c', + 'a': 'archive.ar', 'cc': 'sourcecode.cpp.cpp', - 'cxx' : 'sourcecode.cpp.cpp', - 'cpp' : 'sourcecode.cpp.cpp', - 'c++' : 'sourcecode.cpp.cpp', - 'm' : 'sourcecode.c.objc', - 'mm' : 'sourcecode.cpp.objcpp', - 'h' : 'sourcecode.c.h', - 'hpp' : 'sourcecode.cpp.h', - 'hxx' : 'sourcecode.cpp.h', - 'hh' : 'sourcecode.cpp.hh', - 'inc' : 'sourcecode.c.h', - 'dylib' : 
'compiled.mach-o.dylib', - 'o' : 'compiled.mach-o.objfile',} + 'cxx': 'sourcecode.cpp.cpp', + 'cpp': 'sourcecode.cpp.cpp', + 'c++': 'sourcecode.cpp.cpp', + 'm': 'sourcecode.c.objc', + 'mm': 'sourcecode.cpp.objcpp', + 'h': 'sourcecode.c.h', + 'hpp': 'sourcecode.cpp.h', + 'hxx': 'sourcecode.cpp.h', + 'hh': 'sourcecode.cpp.hh', + 'inc': 'sourcecode.c.h', + 'dylib': 'compiled.mach-o.dylib', + 'o': 'compiled.mach-o.objfile', + } self.maingroup_id = self.gen_id() self.all_id = self.gen_id() self.all_buildconf_id = self.gen_id() @@ -59,7 +60,7 @@ class XCodeBackend(backends.Backend): return dirname def write_line(self, text): - self.ofile.write(self.indent*self.indent_level + text) + self.ofile.write(self.indent * self.indent_level + text) if not text.endswith('\n'): self.ofile.write('\n') @@ -129,7 +130,7 @@ class XCodeBackend(backends.Backend): self.buildmap[o] = self.gen_id() def generate_buildstylemap(self): - self.buildstylemap = {'debug' : self.gen_id()} + self.buildstylemap = {'debug': self.gen_id()} def generate_build_phase_map(self): self.buildphasemap = {} @@ -139,17 +140,17 @@ class XCodeBackend(backends.Backend): def generate_build_configuration_map(self): self.buildconfmap = {} for t in self.build.targets: - bconfs = {'debug' : self.gen_id()} + bconfs = {'debug': self.gen_id()} self.buildconfmap[t] = bconfs def generate_project_configurations_map(self): - self.project_configurations = {'debug' : self.gen_id()} + self.project_configurations = {'debug': self.gen_id()} def generate_buildall_configurations_map(self): - self.buildall_configurations = {'debug' : self.gen_id()} + self.buildall_configurations = {'debug': self.gen_id()} def generate_test_configurations_map(self): - self.test_configurations = {'debug' : self.gen_id()} + self.test_configurations = {'debug': self.gen_id()} def generate_build_configurationlist_map(self): self.buildconflistmap = {} @@ -185,35 +186,35 @@ class XCodeBackend(backends.Backend): def generate_pbx_aggregate_target(self): self.ofile.write('\n/* Begin PBXAggregateTarget section */\n') self.write_line('%s /* ALL_BUILD */ = {' % self.all_id) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXAggregateTarget;') self.write_line('buildConfigurationList = %s;' % self.all_buildconf_id) self.write_line('buildPhases = (') self.write_line(');') self.write_line('dependencies = (') - self.indent_level+=1 + self.indent_level += 1 for t in self.build.targets: self.write_line('%s /* PBXTargetDependency */,' % self.pbx_dep_map[t]) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('name = ALL_BUILD;') self.write_line('productName = ALL_BUILD;') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.write_line('%s /* RUN_TESTS */ = {' % self.test_id) - self.indent_level +=1 + self.indent_level += 1 self.write_line('isa = PBXAggregateTarget;') self.write_line('buildConfigurationList = %s;' % self.test_buildconf_id) self.write_line('buildPhases = (') - self.indent_level+=1 + self.indent_level += 1 self.write_line('%s /* test run command */,' % self.test_command_id) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('dependencies = (') self.write_line(');') self.write_line('name = RUN_TESTS;') self.write_line('productName = RUN_TESTS;') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXAggregateTarget section */\n') @@ -269,7 +270,7 @@ class XCodeBackend(backends.Backend): self.write_line('proxyType = 1;') 
self.write_line('remoteGlobalIDString = %s;' % self.native_targets[t]) self.write_line('remoteInfo = "%s";' % t) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXContainerItemProxy section */\n') @@ -310,14 +311,14 @@ class XCodeBackend(backends.Backend): resources_id = self.gen_id() products_id = self.gen_id() self.write_line('%s = {' % self.maingroup_id) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXGroup;') self.write_line('children = (') - self.indent_level+=1 + self.indent_level += 1 self.write_line('%s /* Sources */,' % sources_id) self.write_line('%s /* Resources */,' % resources_id) self.write_line('%s /* Products */,' % products_id) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('sourceTree = "<group>";') self.indent_level -= 1 @@ -325,48 +326,48 @@ class XCodeBackend(backends.Backend): # Sources self.write_line('%s /* Sources */ = {' % sources_id) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXGroup;') self.write_line('children = (') - self.indent_level+=1 + self.indent_level += 1 for t in self.build.targets: self.write_line('%s /* %s */,' % (groupmap[t], t)) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('name = Sources;') self.write_line('sourcetree = "<group>";') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.write_line('%s /* Resources */ = {' % resources_id) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXGroup;') self.write_line('children = (') self.write_line(');') self.write_line('name = Resources;') self.write_line('sourceTree = "<group>";') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') # Targets for t in self.build.targets: self.write_line('%s /* %s */ = {' % (groupmap[t], t)) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXGroup;') self.write_line('children = (') - self.indent_level+=1 + self.indent_level += 1 self.write_line('%s /* Source files */,' % target_src_map[t]) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('name = "%s";' % t) self.write_line('sourceTree = "<group>";') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.write_line('%s /* Source files */ = {' % target_src_map[t]) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXGroup;') self.write_line('children = (') - self.indent_level+=1 + self.indent_level += 1 for s in self.build.targets[t].sources: s = os.path.join(s.subdir, s.fname) if isinstance(s, str): @@ -374,26 +375,26 @@ class XCodeBackend(backends.Backend): for o in self.build.targets[t].objects: o = os.path.join(self.build.targets[t].subdir, o) self.write_line('%s /* %s */,' % (self.filemap[o], o)) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('name = "Source files";') self.write_line('sourceTree = "<group>";') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') # And finally products self.write_line('%s /* Products */ = {' % products_id) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXGroup;') self.write_line('children = (') - self.indent_level+=1 + self.indent_level += 1 for t in self.build.targets: self.write_line('%s /* %s */,' % (self.target_filemap[t], t)) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('name = Products;') self.write_line('sourceTree = 
"<group>";') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXGroup section */\n') @@ -402,25 +403,25 @@ class XCodeBackend(backends.Backend): for tname, idval in self.native_targets.items(): t = self.build.targets[tname] self.write_line('%s /* %s */ = {' % (idval, tname)) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXNativeTarget;') self.write_line('buildConfigurationList = %s /* Build configuration list for PBXNativeTarget "%s" */;'\ % (self.buildconflistmap[tname], tname)) self.write_line('buildPhases = (') - self.indent_level+=1 + self.indent_level += 1 self.write_line('%s /* Sources */,' % self.buildphasemap[tname]) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('buildRules = (') self.write_line(');') self.write_line('dependencies = (') - self.indent_level+=1 + self.indent_level += 1 for lt in self.build.targets[tname].link_targets: # NOT DOCUMENTED, may need to make different links # to same target have different targetdependency item. idval = self.pbx_dep_map[lt.get_id()] self.write_line('%s /* PBXTargetDependency */,' % idval) - self.indent_level -=1 + self.indent_level -= 1 self.write_line(");") self.write_line('name = "%s";' % tname) self.write_line('productName = "%s";' % tname) @@ -434,7 +435,7 @@ class XCodeBackend(backends.Backend): else: raise MesonException('Unknown target type for %s' % tname) self.write_line('productType = "%s";' % typestr) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXNativeTarget section */\n') @@ -495,7 +496,7 @@ class XCodeBackend(backends.Backend): cmdstr = ' '.join(["'%s'" % i for i in cmd]) self.write_line('shellScript = "%s";' % cmdstr) self.write_line('showEnvVarsInLog = 0;') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXShellScriptBuildPhase section */\n') @@ -503,19 +504,19 @@ class XCodeBackend(backends.Backend): self.ofile.write('\n/* Begin PBXSourcesBuildPhase section */\n') for name, phase_id in self.source_phase.items(): self.write_line('%s /* Sources */ = {' % self.buildphasemap[name]) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = PBXSourcesBuildPhase;') self.write_line('buildActionMask = 2147483647;') self.write_line('files = (') - self.indent_level+=1 + self.indent_level += 1 for s in self.build.targets[name].sources: s = os.path.join(s.subdir, s.fname) if not self.environment.is_header(s): self.write_line('%s /* %s */,' % (self.buildmap[s], os.path.join(self.environment.get_source_dir(), s))) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('runOnlyForDeploymentPostprocessing = 0;') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXSourcesBuildPhase section */\n') @@ -528,7 +529,7 @@ class XCodeBackend(backends.Backend): self.write_line('isa = PBXTargetDependency;') self.write_line('target = %s /* %s */;' % (self.native_targets[t], t)) self.write_line('targetProxy = %s /* PBXContainerItemProxy */;' % self.containerproxy_map[t]) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End PBXTargetDependency section */\n') @@ -537,24 +538,24 @@ class XCodeBackend(backends.Backend): # First the setup for the toplevel project. 
for buildtype in self.buildtypes: self.write_line('%s /* %s */ = {' % (self.project_configurations[buildtype], buildtype)) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') - self.indent_level+=1 + self.indent_level += 1 self.write_line('ARCHS = "$(ARCHS_STANDARD_32_64_BIT)";') self.write_line('ONLY_ACTIVE_ARCH = YES;') self.write_line('SDKROOT = "macosx";') self.write_line('SYMROOT = "%s/build";' % self.environment.get_build_dir()) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.write_line('name = "%s";' % buildtype) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') # Then the all target. for buildtype in self.buildtypes: self.write_line('%s /* %s */ = {' % (self.buildall_configurations[buildtype], buildtype)) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') self.indent_level += 1 @@ -573,16 +574,16 @@ class XCodeBackend(backends.Backend): self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir()) self.write_line('USE_HEADERMAP = NO;') self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.write_line('name = "%s";' % buildtype) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') # Then the test target. for buildtype in self.buildtypes: self.write_line('%s /* %s */ = {' % (self.test_configurations[buildtype], buildtype)) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') self.indent_level += 1 @@ -601,14 +602,14 @@ class XCodeBackend(backends.Backend): self.write_line('SYMROOT = "%s";' % self.environment.get_build_dir()) self.write_line('USE_HEADERMAP = NO;') self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.write_line('name = "%s";' % buildtype) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') # Now finally targets. 
- langnamemap = {'c' : 'C', 'cpp' : 'CPLUSPLUS', 'objc' : 'OBJC', 'objcpp' : 'OBJCPLUSPLUS'} + langnamemap = {'c': 'C', 'cpp': 'CPLUSPLUS', 'objc': 'OBJC', 'objcpp': 'OBJCPLUSPLUS'} for target_name, target in self.build.targets.items(): for buildtype in self.buildtypes: dep_libs = [] @@ -653,7 +654,7 @@ class XCodeBackend(backends.Backend): langargs[langnamemap[lang]] = args symroot = os.path.join(self.environment.get_build_dir(), target.subdir) self.write_line('%s /* %s */ = {' % (valid, buildtype)) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = XCBuildConfiguration;') self.write_line('buildSettings = {') self.indent_level += 1 @@ -688,57 +689,57 @@ class XCodeBackend(backends.Backend): self.write_line('SYMROOT = "%s";' % symroot) self.write_line('USE_HEADERMAP = NO;') self.write_line('WARNING_CFLAGS = ("-Wmost", "-Wno-four-char-constants", "-Wno-unknown-pragmas", );') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.write_line('name = "%s";' % buildtype) - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') self.ofile.write('/* End XCBuildConfiguration section */\n') def generate_xc_configurationList(self): self.ofile.write('\n/* Begin XCConfigurationList section */\n') self.write_line('%s /* Build configuration list for PBXProject "%s" */ = {' % (self.project_conflist, self.build.project_name)) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') - self.indent_level+=1 + self.indent_level += 1 for buildtype in self.buildtypes: self.write_line('%s /* %s */,' % (self.project_configurations[buildtype], buildtype)) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') self.write_line('defaultConfigurationName = debug;') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') # Now the all target self.write_line('%s /* Build configuration list for PBXAggregateTarget "ALL_BUILD" */ = {' % self.all_buildconf_id) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') - self.indent_level+=1 + self.indent_level += 1 for buildtype in self.buildtypes: self.write_line('%s /* %s */,' % (self.buildall_configurations[buildtype], buildtype)) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') self.write_line('defaultConfigurationName = debug;') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') # Test target self.write_line('%s /* Build configuration list for PBXAggregateTarget "ALL_BUILD" */ = {' % self.test_buildconf_id) - self.indent_level+=1 + self.indent_level += 1 self.write_line('isa = XCConfigurationList;') self.write_line('buildConfigurations = (') - self.indent_level+=1 + self.indent_level += 1 for buildtype in self.buildtypes: self.write_line('%s /* %s */,' % (self.test_configurations[buildtype], buildtype)) - self.indent_level-=1 + self.indent_level -= 1 self.write_line(');') self.write_line('defaultConfigurationIsVisible = 0;') self.write_line('defaultConfigurationName = debug;') - self.indent_level-=1 + self.indent_level -= 1 self.write_line('};') for target_name in self.build.targets: diff --git a/mesonbuild/build.py b/mesonbuild/build.py index c7d4125..f895531 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -20,31 +20,31 @@ from .mesonlib import File, flatten, 
MesonException, stringlistify, classify_uni from .environment import for_windows, for_darwin from .compilers import is_object, clike_langs, lang_suffixes -known_basic_kwargs = {'install' : True, - 'c_pch' : True, - 'cpp_pch' : True, - 'c_args' : True, - 'cpp_args' : True, - 'cs_args' : True, - 'vala_args' : True, - 'fortran_args' : True, - 'd_args' : True, - 'java_args' : True, - 'link_args' : True, +known_basic_kwargs = {'install': True, + 'c_pch': True, + 'cpp_pch': True, + 'c_args': True, + 'cpp_args': True, + 'cs_args': True, + 'vala_args': True, + 'fortran_args': True, + 'd_args': True, + 'java_args': True, + 'link_args': True, 'link_depends': True, - 'link_with' : True, + 'link_with': True, 'include_directories': True, - 'dependencies' : True, - 'install_dir' : True, - 'main_class' : True, - 'gui_app' : True, - 'extra_files' : True, - 'install_rpath' : True, - 'resources' : True, - 'sources' : True, - 'objects' : True, - 'native' : True, - } + 'dependencies': True, + 'install_dir': True, + 'main_class': True, + 'gui_app': True, + 'extra_files': True, + 'install_rpath': True, + 'resources': True, + 'sources': True, + 'objects': True, + 'native': True, + } # These contain kwargs supported by both static and shared libraries. These are # combined here because a library() call might be shared_library() or @@ -52,16 +52,16 @@ known_basic_kwargs = {'install' : True, # FIXME: Find a way to pass that info down here so we can have proper target # kwargs checking when specifically using shared_library() or static_library(). known_lib_kwargs = known_basic_kwargs.copy() -known_lib_kwargs.update({'version' : True, # Only for shared libs - 'soversion' : True, # Only for shared libs - 'name_prefix' : True, - 'name_suffix' : True, - 'vs_module_defs' : True, # Only for shared libs +known_lib_kwargs.update({'version': True, # Only for shared libs + 'soversion': True, # Only for shared libs + 'name_prefix': True, + 'name_suffix': True, + 'vs_module_defs': True, # Only for shared libs 'vala_header': True, 'vala_vapi': True, - 'vala_gir' : True, - 'pic' : True, # Only for static libs - }) + 'vala_gir': True, + 'pic': True, # Only for static libs + }) class InvalidArguments(MesonException): @@ -98,6 +98,7 @@ class Build: self.dep_manifest_name = None self.dep_manifest = {} self.cross_stdlibs = {} + self.test_setups = {} def add_compiler(self, compiler): if self.static_linker is None and compiler.needs_static_linker(): @@ -241,7 +242,7 @@ class EnvironmentVariables(): def prepend(self, env, name, values, kwargs): sep, value = self.get_value(name, values, kwargs) if name in env: - return value + sep + env[name] + return value + sep + env[name] return value @@ -330,7 +331,7 @@ class BuildTarget(): if isinstance(s, (str, File, ExtractedObjects)): self.objects.append(s) elif isinstance(s, (GeneratedList, CustomTarget)): - msg = 'Generated files are not allowed in the \'objects\' kwarg ' + \ + msg = 'Generated files are not allowed in the \'objects\' kwarg ' + \ 'for target {!r}.\nIt is meant only for '.format(self.name) + \ 'pre-built object files that are shipped with the\nsource ' + \ 'tree. Try adding it in the list of sources.' @@ -731,12 +732,12 @@ class BuildTarget(): # This should be reliable enough. 
if hasattr(dep, 'subproject'): raise InvalidArguments('Tried to use subproject object as a dependency.\n' - 'You probably wanted to use a dependency declared in it instead.\n' - 'Access it by calling get_variable() on the subproject object.') + 'You probably wanted to use a dependency declared in it instead.\n' + 'Access it by calling get_variable() on the subproject object.') raise InvalidArguments('Argument is of an unacceptable type {!r}.\nMust be ' - 'either an external dependency (returned by find_library() or ' - 'dependency()) or an internal dependency (returned by ' - 'declare_dependency()).'.format(type(dep).__name__)) + 'either an external dependency (returned by find_library() or ' + 'dependency()) or an internal dependency (returned by ' + 'declare_dependency()).'.format(type(dep).__name__)) def get_external_deps(self): return self.external_deps @@ -1224,16 +1225,16 @@ class SharedModule(SharedLibrary): super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs) class CustomTarget: - known_kwargs = {'input' : True, - 'output' : True, - 'command' : True, - 'capture' : False, - 'install' : True, - 'install_dir' : True, - 'build_always' : True, - 'depends' : True, - 'depend_files' : True, - 'depfile' : True, + known_kwargs = {'input': True, + 'output': True, + 'command': True, + 'capture': False, + 'install': True, + 'install_dir': True, + 'build_always': True, + 'depends': True, + 'depend_files': True, + 'depfile': True, } def __init__(self, name, subdir, kwargs, absolute_paths=False): @@ -1433,7 +1434,7 @@ class RunTarget: class Jar(BuildTarget): def __init__(self, name, subdir, subproject, is_cross, sources, objects, environment, kwargs): - super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs); + super().__init__(name, subdir, subproject, is_cross, sources, objects, environment, kwargs) for s in self.sources: if not s.endswith('.java'): raise InvalidArguments('Jar source %s is not a java file.' 
% s) @@ -1507,3 +1508,10 @@ class RunScript(dict): assert(isinstance(args, list)) self['exe'] = script self['args'] = args + +class TestSetup: + def __init__(self, *, exe_wrapper=None, gdb=None, timeout_multiplier=None, env=None): + self.exe_wrapper = exe_wrapper + self.gdb = gdb + self.timeout_multiplier = timeout_multiplier + self.env = env diff --git a/mesonbuild/compilers.py b/mesonbuild/compilers.py index e25fd5a..8212d01 100644 --- a/mesonbuild/compilers.py +++ b/mesonbuild/compilers.py @@ -33,7 +33,9 @@ lib_suffixes = ('a', 'lib', 'dll', 'dylib', 'so') lang_suffixes = { 'c': ('c',), 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx'), - 'fortran': ('f', 'f90', 'f95'), + # f90, f95, f03, f08 are for free-form fortran ('f90' recommended) + # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended) + 'fortran': ('f90', 'f95', 'f03', 'f08', 'f', 'for', 'ftn', 'fpp'), 'd': ('d', 'di'), 'objc': ('m',), 'objcpp': ('mm',), @@ -88,93 +90,89 @@ def is_library(fname): suffix = fname.split('.')[-1] return suffix in lib_suffixes -gnulike_buildtype_args = {'plain' : [], +gnulike_buildtype_args = {'plain': [], # -O0 is passed for improved debugging information with gcc # See https://github.com/mesonbuild/meson/pull/509 - 'debug' : ['-O0', '-g'], - 'debugoptimized' : ['-O2', '-g'], - 'release' : ['-O3'], - 'minsize' : ['-Os', '-g']} - -msvc_buildtype_args = {'plain' : [], - 'debug' : ["/MDd", "/ZI", "/Ob0", "/Od", "/RTC1"], - 'debugoptimized' : ["/MD", "/Zi", "/O2", "/Ob1"], - 'release' : ["/MD", "/O2", "/Ob2"], - 'minsize' : ["/MD", "/Zi", "/Os", "/Ob1"], + 'debug': ['-O0', '-g'], + 'debugoptimized': ['-O2', '-g'], + 'release': ['-O3'], + 'minsize': ['-Os', '-g']} + +msvc_buildtype_args = {'plain': [], + 'debug': ["/MDd", "/ZI", "/Ob0", "/Od", "/RTC1"], + 'debugoptimized': ["/MD", "/Zi", "/O2", "/Ob1"], + 'release': ["/MD", "/O2", "/Ob2"], + 'minsize': ["/MD", "/Zi", "/Os", "/Ob1"], } -gnulike_buildtype_linker_args = {} - - -if mesonlib.is_osx(): - gnulike_buildtype_linker_args.update({'plain' : [], - 'debug' : [], - 'debugoptimized' : [], - 'release' : [], - 'minsize' : [], - }) -else: - gnulike_buildtype_linker_args.update({'plain' : [], - 'debug' : [], - 'debugoptimized' : [], - 'release' : ['-Wl,-O1'], - 'minsize' : [], - }) - -msvc_buildtype_linker_args = {'plain' : [], - 'debug' : [], - 'debugoptimized' : [], - 'release' : [], - 'minsize' : ['/INCREMENTAL:NO'], +apple_buildtype_linker_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + } + +gnulike_buildtype_linker_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': ['-Wl,-O1'], + 'minsize': [], + } + +msvc_buildtype_linker_args = {'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': ['/INCREMENTAL:NO'], } -java_buildtype_args = {'plain' : [], - 'debug' : ['-g'], - 'debugoptimized' : ['-g'], - 'release' : [], - 'minsize' : [], +java_buildtype_args = {'plain': [], + 'debug': ['-g'], + 'debugoptimized': ['-g'], + 'release': [], + 'minsize': [], } -rust_buildtype_args = {'plain' : [], - 'debug' : ['-g'], - 'debugoptimized' : ['-g', '--opt-level', '2'], - 'release' : ['--opt-level', '3'], - 'minsize' : [], +rust_buildtype_args = {'plain': [], + 'debug': ['-g'], + 'debugoptimized': ['-g', '--opt-level', '2'], + 'release': ['--opt-level', '3'], + 'minsize': [], } -d_gdc_buildtype_args = {'plain' : [], - 'debug' : ['-g', '-O0'], - 'debugoptimized' : ['-g', '-O'], - 'release' : ['-O3', '-frelease'], - 'minsize' : [], - } 
+d_gdc_buildtype_args = {'plain': [], + 'debug': ['-g', '-O0'], + 'debugoptimized': ['-g', '-O'], + 'release': ['-O3', '-frelease'], + 'minsize': [], + } -d_ldc_buildtype_args = {'plain' : [], - 'debug' : ['-g', '-O0'], - 'debugoptimized' : ['-g', '-O'], - 'release' : ['-O3', '-release'], - 'minsize' : [], - } +d_ldc_buildtype_args = {'plain': [], + 'debug': ['-g', '-O0'], + 'debugoptimized': ['-g', '-O'], + 'release': ['-O3', '-release'], + 'minsize': [], + } -d_dmd_buildtype_args = {'plain' : [], - 'debug' : ['-g'], - 'debugoptimized' : ['-g', '-O'], - 'release' : ['-O', '-release'], - 'minsize' : [], - } +d_dmd_buildtype_args = {'plain': [], + 'debug': ['-g'], + 'debugoptimized': ['-g', '-O'], + 'release': ['-O', '-release'], + 'minsize': [], + } -mono_buildtype_args = {'plain' : [], - 'debug' : ['-debug'], +mono_buildtype_args = {'plain': [], + 'debug': ['-debug'], 'debugoptimized': ['-debug', '-optimize+'], - 'release' : ['-optimize+'], - 'minsize' : [], + 'release': ['-optimize+'], + 'minsize': [], } -swift_buildtype_args = {'plain' : [], - 'debug' : ['-g'], +swift_buildtype_args = {'plain': [], + 'debug': ['-g'], 'debugoptimized': ['-g', '-O'], - 'release' : ['-O'], - 'minsize' : [], + 'release': ['-O'], + 'minsize': [], } gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32', @@ -184,9 +182,9 @@ msvc_winlibs = ['kernel32.lib', 'user32.lib', 'gdi32.lib', 'winspool.lib', 'shell32.lib', 'ole32.lib', 'oleaut32.lib', 'uuid.lib', 'comdlg32.lib', 'advapi32.lib'] -gnu_color_args = {'auto' : ['-fdiagnostics-color=auto'], +gnu_color_args = {'auto': ['-fdiagnostics-color=auto'], 'always': ['-fdiagnostics-color=always'], - 'never' : ['-fdiagnostics-color=never'], + 'never': ['-fdiagnostics-color=never'], } base_options = { @@ -204,15 +202,15 @@ base_options = { 'b_coverage': coredata.UserBooleanOption('b_coverage', 'Enable coverage tracking.', False), - 'b_colorout' : coredata.UserComboOption('b_colorout', 'Use colored output', + 'b_colorout': coredata.UserComboOption('b_colorout', 'Use colored output', ['auto', 'always', 'never'], 'always'), - 'b_ndebug' : coredata.UserBooleanOption('b_ndebug', - 'Disable asserts', - False), - 'b_staticpic' : coredata.UserBooleanOption('b_staticpic', - 'Build static libraries as position independent', - True), + 'b_ndebug': coredata.UserBooleanOption('b_ndebug', + 'Disable asserts', + False), + 'b_staticpic': coredata.UserBooleanOption('b_staticpic', + 'Build static libraries as position independent', + True), } def sanitizer_compile_args(value): @@ -286,12 +284,12 @@ def get_base_link_args(options, linker, is_shared_module): except KeyError: pass try: - if not is_shared_module and options['b_lundef'].value: + if not is_shared_module and 'b_lundef' in linker.base_options and options['b_lundef'].value: args.append('-Wl,--no-undefined') except KeyError: pass try: - if options['b_asneeded'].value: + if 'b_asneeded' in linker.base_options and options['b_asneeded'].value: args.append('-Wl,--as-needed') except KeyError: pass @@ -307,7 +305,7 @@ def build_unix_rpath_args(build_dir, rpath_paths, install_rpath): return [] paths = ':'.join([os.path.join(build_dir, p) for p in rpath_paths]) if len(paths) < len(install_rpath): - padding = 'X'*(len(install_rpath) - len(paths)) + padding = 'X' * (len(install_rpath) - len(paths)) if len(paths) == 0: paths = padding else: @@ -651,7 +649,7 @@ class CCompiler(Compiler): return os.path.split(header_name)[-1] + '.' 
+ self.get_pch_suffix() def get_linker_search_args(self, dirname): - return ['-L'+dirname] + return ['-L' + dirname] def gen_import_library_args(self, implibname): """ @@ -1654,7 +1652,7 @@ class DCompiler(Compiler): return [] paths = ':'.join([os.path.join(build_dir, p) for p in rpath_paths]) if len(paths) < len(install_rpath): - padding = 'X'*(len(install_rpath) - len(paths)) + padding = 'X' * (len(install_rpath) - len(paths)) if len(paths) == 0: paths = padding else: @@ -1671,7 +1669,7 @@ class DCompiler(Compiler): if arg == '-pthread': continue if arg.startswith('-Wl,'): - linkargs = arg[arg.index(',')+1:].split(',') + linkargs = arg[arg.index(',') + 1:].split(',') for la in linkargs: dcargs.append('-L' + la.strip()) continue @@ -1687,9 +1685,10 @@ class GnuDCompiler(DCompiler): def __init__(self, exelist, version, is_cross): DCompiler.__init__(self, exelist, version, is_cross) self.id = 'gcc' - self.warn_args = {'1': ['-Wall', '-Wdeprecated'], - '2': ['-Wall', '-Wextra', '-Wdeprecated'], - '3': ['-Wall', '-Wextra', '-Wdeprecated', '-Wpedantic']} + default_warn_args = ['-Wall', '-Wdeprecated'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} self.base_options = ['b_colorout', 'b_sanitize', 'b_staticpic'] def get_colorout_args(self, colortype): @@ -1716,7 +1715,7 @@ class GnuDCompiler(DCompiler): return ['-Werror'] def get_linker_search_args(self, dirname): - return ['-L'+dirname] + return ['-L' + dirname] def get_buildtype_args(self, buildtype): return d_gdc_buildtype_args[buildtype] @@ -1774,7 +1773,7 @@ class LLVMDCompiler(DCompiler): # -L is recognized as "add this to the search path" by the linker, # while the compiler recognizes it as "pass to linker". So, the first # -L is for the compiler, telling it to pass the second -L to the linker. - return ['-L-L'+dirname] + return ['-L-L' + dirname] def unix_link_flags_to_native(self, args): return self.translate_args_to_nongnu(args) @@ -1820,7 +1819,7 @@ class DmdDCompiler(DCompiler): # -L is recognized as "add this to the search path" by the linker, # while the compiler recognizes it as "pass to linker". So, the first # -L is for the compiler, telling it to pass the second -L to the linker. 
- return ['-L-L'+dirname] + return ['-L-L' + dirname] def get_buildtype_args(self, buildtype): return d_dmd_buildtype_args[buildtype] @@ -1836,7 +1835,7 @@ class DmdDCompiler(DCompiler): class VisualStudioCCompiler(CCompiler): std_warn_args = ['/W3'] - std_opt_args= ['/O2'] + std_opt_args = ['/O2'] def __init__(self, exelist, version, is_cross, exe_wrap): CCompiler.__init__(self, exelist, version, is_cross, exe_wrap) @@ -1927,7 +1926,7 @@ class VisualStudioCCompiler(CCompiler): def gen_pch_args(self, header, source, pchname): objname = os.path.splitext(pchname)[0] + '.obj' - return (objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname ]) + return (objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]) def gen_import_library_args(self, implibname): "The name of the outputted import library" @@ -1944,9 +1943,9 @@ class VisualStudioCCompiler(CCompiler): return [] def get_options(self): - return {'c_winlibs' : coredata.UserStringArrayOption('c_winlibs', - 'Windows libs to link against.', - msvc_winlibs) + return {'c_winlibs': coredata.UserStringArrayOption('c_winlibs', + 'Windows libs to link against.', + msvc_winlibs) } def get_option_link_args(self, options): @@ -1997,7 +1996,7 @@ class VisualStudioCCompiler(CCompiler): def has_multi_arguments(self, args, env): warning_text = '9002' code = 'int i;\n' - (fd, srcname) = tempfile.mkstemp(suffix='.'+self.default_suffix) + (fd, srcname) = tempfile.mkstemp(suffix='.' + self.default_suffix) os.close(fd) with open(srcname, 'w') as ofile: ofile.write(code) @@ -2040,13 +2039,13 @@ class VisualStudioCPPCompiler(VisualStudioCCompiler, CPPCompiler): self.base_options = ['b_pch'] # FIXME add lto, pgo and the like def get_options(self): - return {'cpp_eh' : coredata.UserComboOption('cpp_eh', - 'C++ exception handling type.', - ['none', 'a', 's', 'sc'], - 'sc'), - 'cpp_winlibs' : coredata.UserStringArrayOption('cpp_winlibs', - 'Windows libs to link against.', - msvc_winlibs) + return {'cpp_eh': coredata.UserComboOption('cpp_eh', + 'C++ exception handling type.', + ['none', 'a', 's', 'sc'], + 'sc'), + 'cpp_winlibs': coredata.UserStringArrayOption('cpp_winlibs', + 'Windows libs to link against.', + msvc_winlibs) } def get_option_compile_args(self, options): @@ -2068,6 +2067,10 @@ CLANG_OSX = 1 CLANG_WIN = 2 # Possibly clang-cl? +ICC_STANDARD = 0 +ICC_OSX = 1 +ICC_WIN = 2 + def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module): if soversion is None: sostr = '' @@ -2076,7 +2079,6 @@ def get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, i if gcc_type == GCC_STANDARD or gcc_type == GCC_MINGW: # Might not be correct for mingw but seems to work. 
return ['-Wl,-soname,%s%s.%s%s' % (prefix, shlib_name, suffix, sostr)] - return ['-Wl,-soname,%s%s' % (shlib_name, sostr)] elif gcc_type == GCC_OSX: if is_shared_module: return [] @@ -2128,6 +2130,8 @@ class GnuCompiler: return gnulike_buildtype_args[buildtype] def get_buildtype_linker_args(self, buildtype): + if self.gcc_type == GCC_OSX: + return apple_buildtype_linker_args[buildtype] return gnulike_buildtype_linker_args[buildtype] def get_always_args(self): @@ -2151,15 +2155,16 @@ class GnuCCompiler(GnuCompiler, CCompiler): def __init__(self, exelist, version, gcc_type, is_cross, exe_wrapper=None, defines=None): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) GnuCompiler.__init__(self, gcc_type, defines) - self.warn_args = {'1': ['-Wall', '-Winvalid-pch'], - '2': ['-Wall', '-Wextra', '-Winvalid-pch'], - '3' : ['-Wall', '-Wpedantic', '-Wextra', '-Winvalid-pch']} + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} def get_options(self): - opts = {'c_std' : coredata.UserComboOption('c_std', 'C language standard to use', - ['none', 'c89', 'c99', 'c11', - 'gnu89', 'gnu99', 'gnu11'], - 'none')} + opts = {'c_std': coredata.UserComboOption('c_std', 'C language standard to use', + ['none', 'c89', 'c99', 'c11', + 'gnu89', 'gnu99', 'gnu11'], + 'none')} if self.gcc_type == GCC_MINGW: opts.update({ 'c_winlibs': coredata.UserStringArrayOption('c_winlibs', 'Standard Win libraries to link against', @@ -2187,15 +2192,16 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): def __init__(self, exelist, version, gcc_type, is_cross, exe_wrap, defines): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap) GnuCompiler.__init__(self, gcc_type, defines) - self.warn_args = {'1': ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'], - '2': ['-Wall', '-Wextra', '-Winvalid-pch', '-Wnon-virtual-dtor'], - '3': ['-Wall', '-Wpedantic', '-Wextra', '-Winvalid-pch', '-Wnon-virtual-dtor']} + default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} def get_options(self): - opts = {'cpp_std' : coredata.UserComboOption('cpp_std', 'C++ language standard to use', - ['none', 'c++03', 'c++11', 'c++14', 'c++1z', - 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++1z'], - 'none'), + opts = {'cpp_std': coredata.UserComboOption('cpp_std', 'C++ language standard to use', + ['none', 'c++03', 'c++11', 'c++14', 'c++1z', + 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++1z'], + 'none'), 'cpp_debugstl': coredata.UserBooleanOption('cpp_debugstl', 'STL debug mode', False)} @@ -2226,16 +2232,18 @@ class GnuCPPCompiler(GnuCompiler, CPPCompiler): # too strict without this and always fails. return self.get_no_optimization_args() + ['-fpermissive'] -class GnuObjCCompiler(GnuCompiler,ObjCCompiler): + +class GnuObjCCompiler(GnuCompiler, ObjCCompiler): def __init__(self, exelist, version, is_cross, exe_wrapper=None, defines=None): ObjCCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) # Not really correct, but GNU objc is only used on non-OSX non-win. File a bug # if this breaks your use case. 
GnuCompiler.__init__(self, GCC_STANDARD, defines) - self.warn_args = {'1': ['-Wall', '-Winvalid-pch'], - '2': ['-Wall', '-Wextra', '-Winvalid-pch'], - '3' : ['-Wall', '-Wpedantic', '-Wextra', '-Winvalid-pch']} + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler): @@ -2244,9 +2252,10 @@ class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler): # Not really correct, but GNU objc is only used on non-OSX non-win. File a bug # if this breaks your use case. GnuCompiler.__init__(self, GCC_STANDARD, defines) - self.warn_args = {'1': ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'], - '2': ['-Wall', '-Wextra', '-Winvalid-pch', '-Wnon-virtual-dtor'], - '3' : ['-Wall', '-Wpedantic', '-Wextra', '-Winvalid-pch', '-Wnon-virtual-dtor']} + default_warn_args = ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} def get_compiler_check_args(self): # -fpermissive allows non-conforming code to compile which is necessary @@ -2275,6 +2284,8 @@ class ClangCompiler(): return gnulike_buildtype_args[buildtype] def get_buildtype_linker_args(self, buildtype): + if self.clang_type == CLANG_OSX: + return apple_buildtype_linker_args[buildtype] return gnulike_buildtype_linker_args[buildtype] def get_pch_suffix(self): @@ -2284,7 +2295,7 @@ class ClangCompiler(): # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136 # This flag is internal to Clang (or at least not documented on the man page) # so it might change semantics at any time. - return ['-include-pch', os.path.join (pch_dir, self.get_pch_name (header))] + return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): if self.clang_type == CLANG_STANDARD: @@ -2322,15 +2333,16 @@ class ClangCCompiler(ClangCompiler, CCompiler): def __init__(self, exelist, version, clang_type, is_cross, exe_wrapper=None): CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) ClangCompiler.__init__(self, clang_type) - self.warn_args = {'1': ['-Wall', '-Winvalid-pch'], - '2': ['-Wall', '-Wextra', '-Winvalid-pch'], - '3' : ['-Wall', '-Wpedantic', '-Wextra', '-Winvalid-pch']} + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} def get_options(self): - return {'c_std' : coredata.UserComboOption('c_std', 'C language standard to use', - ['none', 'c89', 'c99', 'c11', - 'gnu89', 'gnu99', 'gnu11',], - 'none')} + return {'c_std': coredata.UserComboOption('c_std', 'C language standard to use', + ['none', 'c89', 'c99', 'c11', + 'gnu89', 'gnu99', 'gnu11'], + 'none')} def get_option_compile_args(self, options): args = [] @@ -2347,15 +2359,16 @@ class ClangCPPCompiler(ClangCompiler, CPPCompiler): def __init__(self, exelist, version, cltype, is_cross, exe_wrapper=None): CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) ClangCompiler.__init__(self, cltype) - self.warn_args = {'1': ['-Wall', '-Winvalid-pch', '-Wnon-virtual-dtor'], - '2': ['-Wall', '-Wextra', '-Winvalid-pch', '-Wnon-virtual-dtor'], - '3': ['-Wall', '-Wpedantic', '-Wextra', '-Winvalid-pch', '-Wnon-virtual-dtor']} + default_warn_args = ['-Wall', 
'-Winvalid-pch', '-Wnon-virtual-dtor'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} def get_options(self): - return {'cpp_std' : coredata.UserComboOption('cpp_std', 'C++ language standard to use', - ['none', 'c++03', 'c++11', 'c++14', 'c++1z', - 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++1z'], - 'none')} + return {'cpp_std': coredata.UserComboOption('cpp_std', 'C++ language standard to use', + ['none', 'c++03', 'c++11', 'c++14', 'c++1z', + 'gnu++11', 'gnu++14', 'gnu++1z'], + 'none')} def get_option_compile_args(self, options): args = [] @@ -2379,6 +2392,140 @@ class ClangObjCPPCompiler(ClangCompiler, GnuObjCPPCompiler): ClangCompiler.__init__(self, cltype) self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage'] + +# Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1 +class IntelCompiler: + def __init__(self, icc_type): + self.id = 'intel' + self.icc_type = icc_type + self.lang_header = 'none' + self.base_options = ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', + 'b_colorout', 'b_ndebug', 'b_staticpic', 'b_lundef', 'b_asneeded'] + # Assembly + self.can_compile_suffixes.add('s') + + def get_pic_args(self): + return ['-fPIC'] + + def get_buildtype_args(self, buildtype): + return gnulike_buildtype_args[buildtype] + + def get_buildtype_linker_args(self, buildtype): + return gnulike_buildtype_linker_args[buildtype] + + def get_pch_suffix(self): + return 'pchi' + + def get_pch_use_args(self, pch_dir, header): + return ['-pch', '-pch_dir', os.path.join(pch_dir), '-x', + self.lang_header, '-include', header, '-x', 'none'] + + def get_pch_name(self, header_name): + return os.path.split(header_name)[-1] + '.' + self.get_pch_suffix() + + def split_shlib_to_parts(self, fname): + return (os.path.split(fname)[0], fname) + + def get_soname_args(self, prefix, shlib_name, suffix, path, soversion, is_shared_module): + if self.icc_type == ICC_STANDARD: + gcc_type = GCC_STANDARD + elif self.icc_type == ICC_OSX: + gcc_type = GCC_OSX + elif self.icc_type == ICC_WIN: + gcc_type = GCC_MINGW + else: + raise MesonException('Unreachable code when converting icc type to gcc type.') + return get_gcc_soname_args(gcc_type, prefix, shlib_name, suffix, path, soversion, is_shared_module) + + def get_std_shared_lib_link_args(self): + # FIXME: Don't know how icc works on OSX + # if self.icc_type == ICC_OSX: + # return ['-bundle'] + return ['-shared'] + + +class IntelCCompiler(IntelCompiler, CCompiler): + def __init__(self, exelist, version, icc_type, is_cross, exe_wrapper=None): + CCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) + IntelCompiler.__init__(self, icc_type) + self.lang_header = 'c-header' + default_warn_args = ['-Wall', '-w3', '-diag-disable:remark', '-Wpch-messages'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self): + c_stds = ['c89', 'c99'] + g_stds = ['gnu89', 'gnu99'] + if mesonlib.version_compare(self.version, '>=16.0.0'): + c_stds += ['c11'] + opts = {'c_std': coredata.UserComboOption('c_std', 'C language standard to use', + ['none'] + c_stds + g_stds, + 'none')} + return opts + + def get_option_compile_args(self, options): + args = [] + std = options['c_std'] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_std_shared_lib_link_args(self): + return ['-shared'] + + def has_multi_arguments(self, args, env): + return 
super(IntelCCompiler, self).has_multi_arguments(args + ['-diag-error', '10006'], env) + + +class IntelCPPCompiler(IntelCompiler, CPPCompiler): + def __init__(self, exelist, version, icc_type, is_cross, exe_wrap): + CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap) + IntelCompiler.__init__(self, icc_type) + self.lang_header = 'c++-header' + default_warn_args = ['-Wall', '-w3', '-diag-disable:remark', + '-Wpch-messages', '-Wnon-virtual-dtor'] + self.warn_args = {'1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic']} + + def get_options(self): + c_stds = [] + g_stds = ['gnu++98'] + if mesonlib.version_compare(self.version, '>=15.0.0'): + c_stds += ['c++11', 'c++14'] + g_stds += ['gnu++11'] + if mesonlib.version_compare(self.version, '>=16.0.0'): + c_stds += ['c++17'] + if mesonlib.version_compare(self.version, '>=17.0.0'): + g_stds += ['gnu++14'] + opts = {'cpp_std': coredata.UserComboOption('cpp_std', 'C++ language standard to use', + ['none'] + c_stds + g_stds, + 'none'), + 'cpp_debugstl': coredata.UserBooleanOption('cpp_debugstl', + 'STL debug mode', + False)} + return opts + + def get_option_compile_args(self, options): + args = [] + std = options['cpp_std'] + if std.value != 'none': + args.append('-std=' + std.value) + if options['cpp_debugstl'].value: + args.append('-D_GLIBCXX_DEBUG=1') + return args + + def get_option_link_args(self, options): + return [] + + def get_compiler_check_args(self): + return self.get_no_optimization_args() + + def has_multi_arguments(self, args, env): + return super(IntelCPPCompiler, self).has_multi_arguments(args + ['-diag-error', '10006'], env) + + class FortranCompiler(Compiler): def __init__(self, exelist, version, is_cross, exe_wrapper=None): self.language = 'fortran' @@ -2435,6 +2582,8 @@ end program prog return gnulike_buildtype_args[buildtype] def get_buildtype_linker_args(self, buildtype): + if mesonlib.is_osx(): + return apple_buildtype_linker_args[buildtype] return gnulike_buildtype_linker_args[buildtype] def split_shlib_to_parts(self, fname): @@ -2446,7 +2595,7 @@ end program prog def get_dependency_gen_args(self, outtarget, outfile): # Disabled until this is fixed: # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=62162 - #return ['-cpp', '-MMD', '-MQ', outtarget] + # return ['-cpp', '-MMD', '-MQ', outtarget] return [] def get_output_args(self, target): @@ -2523,7 +2672,7 @@ class G95FortranCompiler(FortranCompiler): self.id = 'g95' def get_module_outdir_args(self, path): - return ['-fmod='+path] + return ['-fmod=' + path] def get_always_args(self): return ['-pipe'] @@ -2551,18 +2700,21 @@ class SunFortranCompiler(FortranCompiler): def get_always_args(self): return [] - def get_warn_args(self): + def get_warn_args(self, level): return [] def get_module_outdir_args(self, path): - return ['-moddir='+path] + return ['-moddir=' + path] -class IntelFortranCompiler(FortranCompiler): +class IntelFortranCompiler(IntelCompiler, FortranCompiler): std_warn_args = ['-warn', 'all'] def __init__(self, exelist, version, is_cross, exe_wrapper=None): - self.file_suffixes = ('f', 'f90') - super().__init__(exelist, version, is_cross, exe_wrapper=None) + self.file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp') + FortranCompiler.__init__(self, exelist, version, is_cross, exe_wrapper) + # FIXME: Add support for OS X and Windows in detect_fortran_compiler so + # we are sent the type of compiler + IntelCompiler.__init__(self, ICC_STANDARD) self.id = 'intel' def get_module_outdir_args(self, path): 
@@ -2624,15 +2776,13 @@ class NAGFortranCompiler(FortranCompiler): def get_module_outdir_args(self, path): return ['-mdir', path] - def get_always_args(self): - return [] - def get_warn_args(self, level): return NAGFortranCompiler.std_warn_args class VisualStudioLinker(): always_args = ['/NOLOGO'] + def __init__(self, exelist): self.exelist = exelist diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py index 4b0d0c4..cc242f4 100644 --- a/mesonbuild/coredata.py +++ b/mesonbuild/coredata.py @@ -158,8 +158,8 @@ def load(filename): if not isinstance(obj, CoreData): raise MesonException(load_fail_msg) if obj.version != version: - raise MesonException('Build directory has been generated with Meson version %s, which is incompatible with current version %s.\nPlease delete this build directory AND create a new one.'% - (obj.version, version)) + raise MesonException('Build directory has been generated with Meson version %s, which is incompatible with current version %s.\nPlease delete this build directory AND create a new one.' % + (obj.version, version)) return obj def save(obj, filename): @@ -201,19 +201,19 @@ def get_builtin_option_default(optname): raise RuntimeError('Tried to get the default value for an unknown builtin option \'%s\'.' % optname) builtin_options = { - 'buildtype' : [ UserComboOption, 'Build type to use.', [ 'plain', 'debug', 'debugoptimized', 'release', 'minsize' ], 'debug' ], - 'strip' : [ UserBooleanOption, 'Strip targets on install.', False ], - 'unity' : [ UserBooleanOption, 'Unity build.', False ], - 'prefix' : [ UserStringOption, 'Installation prefix.', default_prefix() ], - 'libdir' : [ UserStringOption, 'Library directory.', default_libdir() ], - 'libexecdir' : [ UserStringOption, 'Library executable directory.', default_libexecdir() ], - 'bindir' : [ UserStringOption, 'Executable directory.', 'bin' ], - 'sbindir' : [ UserStringOption, 'System executable directory.', 'sbin' ], - 'includedir' : [ UserStringOption, 'Header file directory.', 'include' ], - 'datadir' : [ UserStringOption, 'Data file directory.', 'share' ], - 'mandir' : [ UserStringOption, 'Manual page directory.', 'share/man' ], - 'infodir' : [ UserStringOption, 'Info page directory.', 'share/info' ], - 'localedir' : [ UserStringOption, 'Locale data directory.', 'share/locale' ], + 'buildtype': [UserComboOption, 'Build type to use.', ['plain', 'debug', 'debugoptimized', 'release', 'minsize'], 'debug'], + 'strip': [UserBooleanOption, 'Strip targets on install.', False], + 'unity': [UserBooleanOption, 'Unity build.', False], + 'prefix': [UserStringOption, 'Installation prefix.', default_prefix()], + 'libdir': [UserStringOption, 'Library directory.', default_libdir()], + 'libexecdir': [UserStringOption, 'Library executable directory.', default_libexecdir()], + 'bindir': [UserStringOption, 'Executable directory.', 'bin'], + 'sbindir': [UserStringOption, 'System executable directory.', 'sbin'], + 'includedir': [UserStringOption, 'Header file directory.', 'include'], + 'datadir': [UserStringOption, 'Data file directory.', 'share'], + 'mandir': [UserStringOption, 'Manual page directory.', 'share/man'], + 'infodir': [UserStringOption, 'Info page directory.', 'share/info'], + 'localedir': [UserStringOption, 'Locale data directory.', 'share/locale'], # sysconfdir, localstatedir and sharedstatedir are a bit special. These defaults to ${prefix}/etc, # ${prefix}/var and ${prefix}/com but nobody uses that. Instead they always set it # manually to /etc, /var and /var/lib. 
This default values is thus pointless and not really used @@ -223,16 +223,16 @@ builtin_options = { # to set the following in project(): # # default_options : ['sysconfdir=/etc', 'localstatedir=/var', 'sharedstatedir=/var/lib'] - 'sysconfdir' : [ UserStringOption, 'Sysconf data directory.', 'etc' ], - 'localstatedir' : [ UserStringOption, 'Localstate data directory.', 'var' ], - 'sharedstatedir' : [ UserStringOption, 'Architecture-independent data directory.', 'com' ], - 'werror' : [ UserBooleanOption, 'Treat warnings as errors.', False ], - 'warning_level' : [ UserComboOption, 'Compiler warning level to use.', [ '1', '2', '3' ], '1'], - 'layout' : [ UserComboOption, 'Build directory layout.', ['mirror', 'flat' ], 'mirror' ], - 'default_library' : [ UserComboOption, 'Default library type.', [ 'shared', 'static' ], 'shared' ], - 'backend' : [ UserComboOption, 'Backend to use.', backendlist, 'ninja' ], - 'stdsplit' : [ UserBooleanOption, 'Split stdout and stderr in test logs.', True ], - 'errorlogs' : [ UserBooleanOption, "Whether to print the logs from failing tests.", True ], + 'sysconfdir': [UserStringOption, 'Sysconf data directory.', 'etc'], + 'localstatedir': [UserStringOption, 'Localstate data directory.', 'var'], + 'sharedstatedir': [UserStringOption, 'Architecture-independent data directory.', 'com'], + 'werror': [UserBooleanOption, 'Treat warnings as errors.', False], + 'warning_level': [UserComboOption, 'Compiler warning level to use.', [ '1', '2', '3' ], '1'], + 'layout': [UserComboOption, 'Build directory layout.', ['mirror', 'flat'], 'mirror'], + 'default_library': [UserComboOption, 'Default library type.', ['shared', 'static'], 'shared'], + 'backend': [UserComboOption, 'Backend to use.', backendlist, 'ninja'], + 'stdsplit': [UserBooleanOption, 'Split stdout and stderr in test logs.', True], + 'errorlogs': [UserBooleanOption, "Whether to print the logs from failing tests.", True], } forbidden_target_names = {'clean': None, @@ -249,7 +249,8 @@ forbidden_target_names = {'clean': None, 'test:': None, 'benchmark': None, 'install': None, + 'uninstall': None, 'build.ninja': None, 'scan-build': None, 'reconfigure': None, - } + } diff --git a/mesonbuild/dependencies.py b/mesonbuild/dependencies.py index 9d6fb6c..4daa296 100644 --- a/mesonbuild/dependencies.py +++ b/mesonbuild/dependencies.py @@ -529,7 +529,7 @@ class BoostDependency(Dependency): # Some boost libraries have different names for # their sources and libraries. This dict maps # between the two. 
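        # For example, the 'test' sources below correspond to the unit_test_framework
        # library (libboost_unit_test_framework on disk).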
- name2lib = {'test' : 'unit_test_framework'} + name2lib = {'test': 'unit_test_framework'} def __init__(self, environment, kwargs): Dependency.__init__(self, 'boost') @@ -965,7 +965,7 @@ class QtBaseDependency(Dependency): if not self.qmake.found(): continue # Check that the qmake is for qt5 - pc, stdo = Popen_safe(self.qmake.fullpath + ['-v'])[0:2] + pc, stdo = Popen_safe(self.qmake.fullpath + ['-v'])[0:2] if pc.returncode != 0: continue if not 'Qt version ' + self.qtver in stdo: @@ -978,7 +978,7 @@ class QtBaseDependency(Dependency): return self.version = re.search(self.qtver + '(\.\d+)+', stdo).group(0) # Query library path, header path, and binary path - stdo = Popen_safe(self.qmake.fullpath + ['-query'])[1] + stdo = Popen_safe(self.qmake.fullpath + ['-query'])[1] qvars = {} for line in stdo.split('\n'): line = line.strip() @@ -1458,9 +1458,9 @@ packages = {'boost': BoostDependency, 'qt4': Qt4Dependency, 'gnustep': GnuStepDependency, 'appleframeworks': AppleFrameworks, - 'wxwidgets' : WxDependency, - 'sdl2' : SDL2Dependency, - 'gl' : GLDependency, - 'threads' : ThreadDependency, - 'python3' : Python3Dependency, - } + 'wxwidgets': WxDependency, + 'sdl2': SDL2Dependency, + 'gl': GLDependency, + 'threads': ThreadDependency, + 'python3': Python3Dependency, + } diff --git a/mesonbuild/environment.py b/mesonbuild/environment.py index 311b11c..48f5865 100644 --- a/mesonbuild/environment.py +++ b/mesonbuild/environment.py @@ -386,7 +386,7 @@ class Environment(): version = self.get_gnu_version_from_defines(defines) return GnuCCompiler(ccache + [compiler], version, gtype, is_cross, exe_wrap, defines) if 'clang' in out: - if 'Apple' in out: + if 'Apple' in out or for_darwin(want_cross, self): cltype = CLANG_OSX else: cltype = CLANG_STANDARD @@ -396,6 +396,10 @@ class Environment(): # everything else to stdout. Why? Lord only knows. 
version = search_version(err) return VisualStudioCCompiler([compiler], version, is_cross, exe_wrap) + if '(ICC)' in out: + # TODO: add microsoft add check OSX + inteltype = ICC_STANDARD + return IntelCCompiler(ccache + [compiler], version, inteltype, is_cross, exe_wrap) errmsg = 'Unknown compiler(s): "' + ', '.join(compilers) + '"' if popen_exceptions: errmsg += '\nThe follow exceptions were encountered:' @@ -525,6 +529,10 @@ class Environment(): if 'Microsoft' in out or 'Microsoft' in err: version = search_version(err) return VisualStudioCPPCompiler([compiler], version, is_cross, exe_wrap) + if '(ICC)' in out: + # TODO: add microsoft add check OSX + inteltype = ICC_STANDARD + return IntelCPPCompiler(ccache + [compiler], version, inteltype, is_cross, exe_wrap) errmsg = 'Unknown compiler(s): "' + ', '.join(compilers) + '"' if popen_exceptions: errmsg += '\nThe follow exceptions were encountered:' @@ -680,7 +688,7 @@ class Environment(): if evar in os.environ: linker = os.environ[evar].strip() elif isinstance(compiler, VisualStudioCCompiler): - linker= self.vs_static_linker + linker = self.vs_static_linker else: linker = self.default_static_linker basename = os.path.basename(linker).lower() @@ -793,9 +801,12 @@ def get_args_from_envvars(compiler): return ([], []) # Compile flags - cflags_mapping = {'c': 'CFLAGS', 'cpp': 'CXXFLAGS', - 'objc': 'OBJCFLAGS', 'objcpp': 'OBJCXXFLAGS', - 'fortran': 'FFLAGS', 'd': 'DFLAGS'} + cflags_mapping = {'c': 'CFLAGS', + 'cpp': 'CXXFLAGS', + 'objc': 'OBJCFLAGS', + 'objcpp': 'OBJCXXFLAGS', + 'fortran': 'FFLAGS', + 'd': 'DFLAGS'} compile_flags = os.environ.get(cflags_mapping[lang], '') log_var(cflags_mapping[lang], compile_flags) compile_flags = compile_flags.split() @@ -845,7 +856,7 @@ class CrossBuildInfo(): if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry: raise EnvironmentException('Malformed variable name %s in cross file..' % entry) try: - res = eval(value, {'true' : True, 'false' : False}) + res = eval(value, {'true': True, 'false': False}) except Exception: raise EnvironmentException('Malformed value in cross file variable %s.' 
% entry) if self.ok_type(res): diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py index 92f8af8..ec82ec9 100644 --- a/mesonbuild/interpreter.py +++ b/mesonbuild/interpreter.py @@ -30,6 +30,7 @@ from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode from .interpreterbase import InterpreterObject, MutableInterpreterObject import os, sys, shutil, uuid +import re import importlib @@ -49,11 +50,11 @@ class TryRunResultHolder(InterpreterObject): def __init__(self, res): super().__init__() self.res = res - self.methods.update({'returncode' : self.returncode_method, - 'compiled' : self.compiled_method, - 'stdout' : self.stdout_method, - 'stderr' : self.stderr_method, - }) + self.methods.update({'returncode': self.returncode_method, + 'compiled': self.compiled_method, + 'stdout': self.stdout_method, + 'stderr': self.stderr_method, + }) def returncode_method(self, args, kwargs): return self.res.returncode @@ -73,16 +74,16 @@ class RunProcess(InterpreterObject): super().__init__() pc, self.stdout, self.stderr = self.run_command(command_array, source_dir, build_dir, subdir, in_builddir) self.returncode = pc.returncode - self.methods.update({'returncode' : self.returncode_method, - 'stdout' : self.stdout_method, - 'stderr' : self.stderr_method, - }) + self.methods.update({'returncode': self.returncode_method, + 'stdout': self.stdout_method, + 'stderr': self.stderr_method, + }) def run_command(self, command_array, source_dir, build_dir, subdir, in_builddir): cmd_name = command_array[0] - env = {'MESON_SOURCE_ROOT' : source_dir, - 'MESON_BUILD_ROOT' : build_dir, - 'MESON_SUBDIR' : subdir} + env = {'MESON_SOURCE_ROOT': source_dir, + 'MESON_BUILD_ROOT': build_dir, + 'MESON_SUBDIR': subdir} if in_builddir: cwd = os.path.join(build_dir, subdir) else: @@ -129,8 +130,8 @@ class EnvironmentVariablesHolder(MutableInterpreterObject): self.held_object = build.EnvironmentVariables() self.methods.update({'set': self.set_method, 'append': self.append_method, - 'prepend' : self.prepend_method, - }) + 'prepend': self.prepend_method, + }) @stringArgs def add_var(self, method, args, kwargs): @@ -161,8 +162,8 @@ class ConfigurationDataHolder(MutableInterpreterObject): self.methods.update({'set': self.set_method, 'set10': self.set10_method, 'set_quoted': self.set_quoted_method, - 'has' : self.has_method, - }) + 'has': self.has_method, + }) def is_used(self): return self.used @@ -219,7 +220,7 @@ class DependencyHolder(InterpreterObject): def __init__(self, dep): InterpreterObject.__init__(self) self.held_object = dep - self.methods.update({'found' : self.found_method, + self.methods.update({'found': self.found_method, 'type_name': self.type_name_method, 'version': self.version_method, 'get_pkgconfig_variable': self.pkgconfig_method, @@ -251,7 +252,7 @@ class InternalDependencyHolder(InterpreterObject): def __init__(self, dep): InterpreterObject.__init__(self) self.held_object = dep - self.methods.update({'found' : self.found_method, + self.methods.update({'found': self.found_method, 'version': self.version_method, }) @@ -315,7 +316,7 @@ class GeneratorHolder(InterpreterObject): super().__init__() self.interpreter = interpreter self.held_object = build.Generator(args, kwargs) - self.methods.update({'process' : self.process_method}) + self.methods.update({'process': self.process_method}) def process_method(self, args, kwargs): extras = mesonlib.stringlistify(kwargs.get('extra_args', [])) @@ -342,11 +343,11 @@ class BuildMachine(InterpreterObject): def __init__(self, compilers): 
self.compilers = compilers InterpreterObject.__init__(self) - self.methods.update({'system' : self.system_method, - 'cpu_family' : self.cpu_family_method, - 'cpu' : self.cpu_method, - 'endian' : self.endian_method, - }) + self.methods.update({'system': self.system_method, + 'cpu_family': self.cpu_family_method, + 'cpu': self.cpu_method, + 'endian': self.endian_method, + }) def cpu_family_method(self, args, kwargs): return environment.detect_cpu_family(self.compilers) @@ -371,11 +372,11 @@ class CrossMachineInfo(InterpreterObject): 'Machine info is currently {}\n'.format(cross_info) + 'but is missing {}.'.format(minimum_cross_info - set(cross_info))) self.info = cross_info - self.methods.update({'system' : self.system_method, - 'cpu' : self.cpu_method, - 'cpu_family' : self.cpu_family_method, - 'endian' : self.endian_method, - }) + self.methods.update({'system': self.system_method, + 'cpu': self.cpu_method, + 'cpu_family': self.cpu_family_method, + 'endian': self.endian_method, + }) def system_method(self, args, kwargs): return self.info['system'] @@ -481,12 +482,12 @@ class BuildTargetHolder(InterpreterObject): super().__init__() self.held_object = target self.interpreter = interp - self.methods.update({'extract_objects' : self.extract_objects_method, - 'extract_all_objects' : self.extract_all_objects_method, + self.methods.update({'extract_objects': self.extract_objects_method, + 'extract_all_objects': self.extract_all_objects_method, 'get_id': self.get_id_method, - 'outdir' : self.outdir_method, - 'full_path' : self.full_path_method, - 'private_dir_include' : self.private_dir_include_method, + 'outdir': self.outdir_method, + 'full_path': self.full_path_method, + 'private_dir_include': self.private_dir_include_method, }) def __repr__(self): @@ -499,7 +500,7 @@ class BuildTargetHolder(InterpreterObject): def private_dir_include_method(self, args, kwargs): return IncludeDirsHolder(build.IncludeDirs('', [], False, - [self.interpreter.backend.get_target_private_dir(self.held_object)])) + [self.interpreter.backend.get_target_private_dir(self.held_object)])) def full_path_method(self, args, kwargs): return self.interpreter.backend.get_target_filename_abs(self.held_object) @@ -543,7 +544,7 @@ class CustomTargetHolder(InterpreterObject): super().__init__() self.held_object = object_to_hold self.interpreter = interp - self.methods.update({'full_path' : self.full_path_method, + self.methods.update({'full_path': self.full_path_method, }) def __repr__(self): @@ -587,8 +588,8 @@ class SubprojectHolder(InterpreterObject): def __init__(self, subinterpreter): super().__init__() self.held_object = subinterpreter - self.methods.update({'get_variable' : self.get_variable_method, - }) + self.methods.update({'get_variable': self.get_variable_method, + }) def get_variable_method(self, args, kwargs): if len(args) != 1: @@ -609,21 +610,21 @@ class CompilerHolder(InterpreterObject): 'sizeof': self.sizeof_method, 'has_header': self.has_header_method, 'has_header_symbol': self.has_header_symbol_method, - 'run' : self.run_method, - 'has_function' : self.has_function_method, - 'has_member' : self.has_member_method, - 'has_members' : self.has_members_method, - 'has_type' : self.has_type_method, - 'alignment' : self.alignment_method, - 'version' : self.version_method, - 'cmd_array' : self.cmd_array_method, + 'run': self.run_method, + 'has_function': self.has_function_method, + 'has_member': self.has_member_method, + 'has_members': self.has_members_method, + 'has_type': self.has_type_method, + 'alignment': 
self.alignment_method, + 'version': self.version_method, + 'cmd_array': self.cmd_array_method, 'find_library': self.find_library_method, - 'has_argument' : self.has_argument_method, - 'has_multi_arguments' : self.has_multi_arguments_method, - 'first_supported_argument' : self.first_supported_argument_method, - 'unittest_args' : self.unittest_args_method, + 'has_argument': self.has_argument_method, + 'has_multi_arguments': self.has_multi_arguments_method, + 'first_supported_argument': self.first_supported_argument_method, + 'unittest_args': self.unittest_args_method, 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method, - }) + }) def version_method(self, args, kwargs): return self.compiler.version @@ -701,7 +702,7 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO (%d)' % result.returncode) - mlog.log('Checking if "', mlog.bold(testname), '" runs : ', h, sep='') + mlog.log('Checking if "', mlog.bold(testname), '" runs: ', h, sep='') return TryRunResultHolder(result) def get_id_method(self, args, kwargs): @@ -831,7 +832,7 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if "', mlog.bold(testname), '" compiles : ', h, sep='') + mlog.log('Checking if "', mlog.bold(testname), '" compiles: ', h, sep='') return result def links_method(self, args, kwargs): @@ -854,7 +855,7 @@ class CompilerHolder(InterpreterObject): h = mlog.green('YES') else: h = mlog.red('NO') - mlog.log('Checking if "', mlog.bold(testname), '" links : ', h, sep='') + mlog.log('Checking if "', mlog.bold(testname), '" links: ', h, sep='') return result def has_header_method(self, args, kwargs): @@ -996,23 +997,23 @@ class MesonMain(InterpreterObject): self.interpreter = interpreter self._found_source_scripts = {} self.methods.update({'get_compiler': self.get_compiler_method, - 'is_cross_build' : self.is_cross_build_method, - 'has_exe_wrapper' : self.has_exe_wrapper_method, - 'is_unity' : self.is_unity_method, - 'is_subproject' : self.is_subproject_method, - 'current_source_dir' : self.current_source_dir_method, - 'current_build_dir' : self.current_build_dir_method, - 'source_root' : self.source_root_method, - 'build_root' : self.build_root_method, - 'add_install_script' : self.add_install_script_method, - 'add_postconf_script' : self.add_postconf_script_method, + 'is_cross_build': self.is_cross_build_method, + 'has_exe_wrapper': self.has_exe_wrapper_method, + 'is_unity': self.is_unity_method, + 'is_subproject': self.is_subproject_method, + 'current_source_dir': self.current_source_dir_method, + 'current_build_dir': self.current_build_dir_method, + 'source_root': self.source_root_method, + 'build_root': self.build_root_method, + 'add_install_script': self.add_install_script_method, + 'add_postconf_script': self.add_postconf_script_method, 'install_dependency_manifest': self.install_dependency_manifest_method, 'project_version': self.project_version_method, 'version': self.version_method, - 'project_name' : self.project_name_method, + 'project_name': self.project_name_method, 'get_cross_property': self.get_cross_property_method, - 'backend' : self.backend_method, - }) + 'backend': self.backend_method, + }) def _find_source_script(self, name, args): # Prefer scripts in the current source directory @@ -1177,53 +1178,54 @@ class Interpreter(InterpreterBase): self.build_def_files = [os.path.join(self.subdir, environment.build_filename)] def build_func_dict(self): - self.funcs.update({'project' : 
self.func_project, - 'message' : self.func_message, - 'error' : self.func_error, - 'executable': self.func_executable, - 'dependency' : self.func_dependency, - 'static_library' : self.func_static_lib, - 'shared_library' : self.func_shared_lib, - 'shared_module' : self.func_shared_module, - 'library' : self.func_library, - 'jar' : self.func_jar, - 'build_target': self.func_build_target, - 'custom_target' : self.func_custom_target, - 'run_target' : self.func_run_target, - 'generator' : self.func_generator, - 'test' : self.func_test, - 'benchmark' : self.func_benchmark, - 'install_headers' : self.func_install_headers, - 'install_man' : self.func_install_man, - 'subdir' : self.func_subdir, - 'install_data' : self.func_install_data, - 'install_subdir' : self.func_install_subdir, - 'configure_file' : self.func_configure_file, - 'include_directories' : self.func_include_directories, - 'add_global_arguments' : self.func_add_global_arguments, - 'add_project_arguments' : self.func_add_project_arguments, - 'add_global_link_arguments' : self.func_add_global_link_arguments, - 'add_project_link_arguments' : self.func_add_project_link_arguments, - 'add_languages' : self.func_add_languages, - 'find_program' : self.func_find_program, - 'find_library' : self.func_find_library, - 'configuration_data' : self.func_configuration_data, - 'run_command' : self.func_run_command, - 'gettext' : self.func_gettext, - 'option' : self.func_option, - 'get_option' : self.func_get_option, - 'subproject' : self.func_subproject, - 'vcs_tag' : self.func_vcs_tag, - 'set_variable' : self.func_set_variable, - 'is_variable' : self.func_is_variable, - 'get_variable' : self.func_get_variable, - 'import' : self.func_import, - 'files' : self.func_files, - 'declare_dependency': self.func_declare_dependency, - 'assert': self.func_assert, - 'environment' : self.func_environment, - 'join_paths' : self.func_join_paths, - }) + self.funcs.update({'project': self.func_project, + 'message': self.func_message, + 'error': self.func_error, + 'executable': self.func_executable, + 'dependency': self.func_dependency, + 'static_library': self.func_static_lib, + 'shared_library': self.func_shared_lib, + 'shared_module': self.func_shared_module, + 'library': self.func_library, + 'jar': self.func_jar, + 'build_target': self.func_build_target, + 'custom_target': self.func_custom_target, + 'run_target': self.func_run_target, + 'generator': self.func_generator, + 'test': self.func_test, + 'benchmark': self.func_benchmark, + 'install_headers': self.func_install_headers, + 'install_man': self.func_install_man, + 'subdir': self.func_subdir, + 'install_data': self.func_install_data, + 'install_subdir': self.func_install_subdir, + 'configure_file': self.func_configure_file, + 'include_directories': self.func_include_directories, + 'add_global_arguments': self.func_add_global_arguments, + 'add_project_arguments': self.func_add_project_arguments, + 'add_global_link_arguments': self.func_add_global_link_arguments, + 'add_project_link_arguments': self.func_add_project_link_arguments, + 'add_test_setup': self.func_add_test_setup, + 'add_languages': self.func_add_languages, + 'find_program': self.func_find_program, + 'find_library': self.func_find_library, + 'configuration_data': self.func_configuration_data, + 'run_command': self.func_run_command, + 'gettext': self.func_gettext, + 'option': self.func_option, + 'get_option': self.func_get_option, + 'subproject': self.func_subproject, + 'vcs_tag': self.func_vcs_tag, + 'set_variable': self.func_set_variable, + 
'is_variable': self.func_is_variable, + 'get_variable': self.func_get_variable, + 'import': self.func_import, + 'files': self.func_files, + 'declare_dependency': self.func_declare_dependency, + 'assert': self.func_assert, + 'environment': self.func_environment, + 'join_paths': self.func_join_paths, + }) def module_method_callback(self, invalues): unwrap_single = False @@ -1811,8 +1813,8 @@ requirements use the version keyword argument instead.''') # exception; let the caller handle things. except: mlog.log('Also couldn\'t find a fallback subproject in', - mlog.bold(os.path.join(self.subproject_dir, dirname)), - 'for the dependency', mlog.bold(name)) + mlog.bold(os.path.join(self.subproject_dir, dirname)), + 'for the dependency', mlog.bold(name)) return None try: dep = self.subprojects[dirname].get_variable_method([varname], {}) @@ -1822,15 +1824,14 @@ requirements use the version keyword argument instead.''') if not isinstance(dep, DependencyHolder): raise InvalidCode('Fallback variable {!r} in the subproject {!r} is ' 'not a dependency object.'.format(varname, dirname)) - return None # Check if the version of the declared dependency matches what we want if 'version' in kwargs: wanted = kwargs['version'] found = dep.version_method([], {}) if found == 'undefined' or not mesonlib.version_compare(found, wanted): mlog.log('Subproject', mlog.bold(dirname), 'dependency', - mlog.bold(varname), 'version is', mlog.bold(found), - 'but', mlog.bold(wanted), 'is required.') + mlog.bold(varname), 'version is', mlog.bold(found), + 'but', mlog.bold(wanted), 'is required.') return None mlog.log('Found a', mlog.green('fallback'), 'subproject', mlog.bold(os.path.join(self.subproject_dir, dirname)), 'for', @@ -1979,22 +1980,7 @@ requirements use the version keyword argument instead.''') def func_test(self, node, args, kwargs): self.add_test(node, args, kwargs, True) - def add_test(self, node, args, kwargs, is_base_test): - if len(args) != 2: - raise InterpreterException('Incorrect number of arguments') - if not isinstance(args[0], str): - raise InterpreterException('First argument of test must be a string.') - if not isinstance(args[1], (ExecutableHolder, JarHolder, ExternalProgramHolder)): - raise InterpreterException('Second argument must be executable.') - par = kwargs.get('is_parallel', True) - if not isinstance(par, bool): - raise InterpreterException('Keyword argument is_parallel must be a boolean.') - cmd_args = kwargs.get('args', []) - if not isinstance(cmd_args, list): - cmd_args = [cmd_args] - for i in cmd_args: - if not isinstance(i, (str, mesonlib.File)): - raise InterpreterException('Command line arguments must be strings') + def unpack_env_kwarg(self, kwargs): envlist = kwargs.get('env', []) if isinstance(envlist, EnvironmentVariablesHolder): env = envlist.held_object @@ -2011,8 +1997,25 @@ requirements use the version keyword argument instead.''') if ' ' in k: raise InterpreterException('Env var key must not have spaces in it.') env[k] = val - if not isinstance(envlist, list): - envlist = [envlist] + return env + + def add_test(self, node, args, kwargs, is_base_test): + if len(args) != 2: + raise InterpreterException('Incorrect number of arguments') + if not isinstance(args[0], str): + raise InterpreterException('First argument of test must be a string.') + if not isinstance(args[1], (ExecutableHolder, JarHolder, ExternalProgramHolder)): + raise InterpreterException('Second argument must be executable.') + par = kwargs.get('is_parallel', True) + if not isinstance(par, bool): + raise 
InterpreterException('Keyword argument is_parallel must be a boolean.') + cmd_args = kwargs.get('args', []) + if not isinstance(cmd_args, list): + cmd_args = [cmd_args] + for i in cmd_args: + if not isinstance(i, (str, mesonlib.File)): + raise InterpreterException('Command line arguments must be strings') + env = self.unpack_env_kwarg(kwargs) should_fail = kwargs.get('should_fail', False) if not isinstance(should_fail, bool): raise InterpreterException('Keyword argument should_fail must be a boolean.') @@ -2176,6 +2179,47 @@ requirements use the version keyword argument instead.''') return i @stringArgs + def func_add_test_setup(self, node, args, kwargs): + if len(args) != 1: + raise InterpreterException('Add_test_setup needs one argument for the setup name.') + setup_name = args[0] + if re.fullmatch('[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None: + raise InterpreterException('Setup name may only contain alphanumeric characters.') + try: + inp = kwargs.get('exe_wrapper', []) + if not isinstance(inp, list): + inp = [inp] + exe_wrapper = [] + for i in inp: + if hasattr(i, 'held_object'): + i = i.held_object + if isinstance(i, str): + exe_wrapper.append(i) + elif isinstance(i, dependencies.ExternalProgram): + if not i.found(): + raise InterpreterException('Tried to use non-found external executable.') + exe_wrapper += i.get_command() + else: + raise InterpreterException('Exe wrapper can only contain strings or external binaries.') + except KeyError: + exe_wrapper = None + gdb = kwargs.get('gdb', False) + if not isinstance(gdb, bool): + raise InterpreterException('Gdb option must be a boolean') + timeout_multiplier = kwargs.get('timeout_multiplier', 1) + if not isinstance(timeout_multiplier, int): + raise InterpreterException('Timeout multiplier must be a number.') + env = self.unpack_env_kwarg(kwargs) + setupobj = build.TestSetup(exe_wrapper=exe_wrapper, + gdb=gdb, + timeout_multiplier=timeout_multiplier, + env=env) + if self.subproject == '': + # Dunno what we should do with subprojects really. Let's start simple + # and just use the master project ones. 
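            # An illustrative meson.build call that ends up registered here (the setup
            # name and wrapper are examples, not taken from this patch):
            #   add_test_setup('valgrind',
            #                  exe_wrapper : ['valgrind', '--error-exitcode=1'],
            #                  timeout_multiplier : 10)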
+ self.build.test_setups[setup_name] = setupobj + + @stringArgs def func_add_global_arguments(self, node, args, kwargs): if self.subproject != '': msg = 'Global arguments can not be set in subprojects because ' \ diff --git a/mesonbuild/interpreterbase.py b/mesonbuild/interpreterbase.py index d660f4c..db694c0 100644 --- a/mesonbuild/interpreterbase.py +++ b/mesonbuild/interpreterbase.py @@ -610,7 +610,7 @@ class InterpreterBase: # @noKwargs def func_get_variable(self, node, args, kwargs): - if len(args)<1 or len(args)>2: + if len(args) < 1 or len(args) > 2: raise InvalidCode('Get_variable takes one or two arguments.') varname = args[0] if not isinstance(varname, str): diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py index 027dd58..8ddf4fe 100644 --- a/mesonbuild/mconf.py +++ b/mesonbuild/mconf.py @@ -67,8 +67,8 @@ class Conf: if longest_possible_value > 0: titles[3] = 'Possible Values' - print(' %s%s %s%s %s%s %s' % (titles[0], ' '*(longest_name - len(titles[0])), titles[1], ' '*(longest_descr - len(titles[1])), titles[2], ' '*(longest_value - len(titles[2])), titles[3])) - print(' %s%s %s%s %s%s %s' % ('-'*len(titles[0]), ' '*(longest_name - len(titles[0])), '-'*len(titles[1]), ' '*(longest_descr - len(titles[1])), '-'*len(titles[2]), ' '*(longest_value - len(titles[2])), '-'*len(titles[3]))) + print(' %s%s %s%s %s%s %s' % (titles[0], ' ' * (longest_name - len(titles[0])), titles[1], ' ' * (longest_descr - len(titles[1])), titles[2], ' ' * (longest_value - len(titles[2])), titles[3])) + print(' %s%s %s%s %s%s %s' % ('-' * len(titles[0]), ' ' * (longest_name - len(titles[0])), '-' * len(titles[1]), ' ' * (longest_descr - len(titles[1])), '-' * len(titles[2]), ' ' * (longest_value - len(titles[2])), '-' * len(titles[3]))) for i in arr: name = i[0] descr = i[1] @@ -80,9 +80,9 @@ class Conf: possible_values = '[%s]' % ', '.join(map(str, i[3])) elif i[3]: possible_values = i[3] if isinstance(i[3], str) else str(i[3]).lower() - namepad = ' '*(longest_name - len(name)) - descrpad = ' '*(longest_descr - len(descr)) - valuepad = ' '*(longest_value - len(str(value))) + namepad = ' ' * (longest_name - len(name)) + descrpad = ' ' * (longest_descr - len(descr)) + valuepad = ' ' * (longest_value - len(str(value))) f = ' %s%s %s%s %s%s %s' % (name, namepad, descr, descrpad, value, valuepad, possible_values) print(f) @@ -130,7 +130,7 @@ class Conf: carr = [] for key in [ 'buildtype', 'warning_level', 'werror', 'strip', 'unity', 'default_library' ]: carr.append([key, coredata.get_builtin_option_description(key), - self.coredata.get_builtin_option(key), coredata.get_builtin_option_choices(key)]) + self.coredata.get_builtin_option(key), coredata.get_builtin_option_choices(key)]) self.print_aligned(carr) print('') print('Base options:') @@ -178,9 +178,9 @@ class Conf: 'sysconfdir', 'localstatedir', 'sharedstatedir', - ]: + ]: parr.append([key, coredata.get_builtin_option_description(key), - self.coredata.get_builtin_option(key), coredata.get_builtin_option_choices(key)]) + self.coredata.get_builtin_option(key), coredata.get_builtin_option_choices(key)]) self.print_aligned(parr) print('') print('Project options:') @@ -195,10 +195,10 @@ class Conf: opt = options[key] if (opt.choices is None) or (len(opt.choices) == 0): # Zero length list or string - choices = ''; + choices = '' else: # A non zero length list or string, convert to string - choices = str(opt.choices); + choices = str(opt.choices) optarr.append([key, opt.description, opt.value, choices]) self.print_aligned(optarr) print('') @@ -206,7 +206,7 
@@ class Conf: tarr = [] for key in [ 'stdsplit', 'errorlogs' ]: tarr.append([key, coredata.get_builtin_option_description(key), - self.coredata.get_builtin_option(key), coredata.get_builtin_option_choices(key)]) + self.coredata.get_builtin_option(key), coredata.get_builtin_option_choices(key)]) self.print_aligned(tarr) def run(args): diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index e85ef17..98d072b 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -237,6 +237,9 @@ def run_script_command(args): elif cmdname == 'yelphelper': import mesonbuild.scripts.yelphelper as abc cmdfunc = abc.run + elif cmdname == 'uninstall': + import mesonbuild.scripts.uninstall as abc + cmdfunc = abc.run else: raise MesonException('Unknown internal command {}.'.format(cmdname)) return cmdfunc(cmdargs) diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py index 3f1d209..3c462c7 100644 --- a/mesonbuild/mintro.py +++ b/mesonbuild/mintro.py @@ -111,19 +111,19 @@ def list_target_files(target_name, coredata, builddata): print(json.dumps(sources)) def list_buildoptions(coredata, builddata): - buildtype= {'choices': ['plain', 'debug', 'debugoptimized', 'release', 'minsize'], - 'type' : 'combo', - 'value' : coredata.get_builtin_option('buildtype'), - 'description' : 'Build type', - 'name' : 'type'} - strip = {'value' : coredata.get_builtin_option('strip'), - 'type' : 'boolean', - 'description' : 'Strip on install', - 'name' : 'strip'} - unity = {'value' : coredata.get_builtin_option('unity'), - 'type' : 'boolean', - 'description' : 'Unity build', - 'name' : 'unity'} + buildtype = {'choices': ['plain', 'debug', 'debugoptimized', 'release', 'minsize'], + 'type': 'combo', + 'value': coredata.get_builtin_option('buildtype'), + 'description': 'Build type', + 'name': 'type'} + strip = {'value': coredata.get_builtin_option('strip'), + 'type': 'boolean', + 'description': 'Strip on install', + 'name': 'strip'} + unity = {'value': coredata.get_builtin_option('unity'), + 'type': 'boolean', + 'description': 'Unity build', + 'name': 'unity'} optlist = [buildtype, strip, unity] add_keys(optlist, coredata.user_options) add_keys(optlist, coredata.compiler_options) @@ -198,8 +198,8 @@ def list_projinfo(builddata): result['version'] = builddata.project_version subprojects = [] for k, v in builddata.subprojects.items(): - c = {'name' : k, - 'version' : v} + c = {'name': k, + 'version': v} subprojects.append(c) result['subprojects'] = subprojects print(json.dumps(result)) diff --git a/mesonbuild/modules/__init__.py b/mesonbuild/modules/__init__.py index 085b4dd..184d85a 100644 --- a/mesonbuild/modules/__init__.py +++ b/mesonbuild/modules/__init__.py @@ -1,3 +1,5 @@ +import os + from .. import build from .. import dependencies from ..mesonlib import MesonException @@ -14,6 +16,36 @@ def find_program(program_name, target_name): _found_programs[program_name] = program return program + +def get_include_args(environment, include_dirs, prefix='-I'): + if not include_dirs: + return [] + + dirs_str = [] + for incdirs in include_dirs: + if hasattr(incdirs, "held_object"): + dirs = incdirs.held_object + else: + dirs = incdirs + + if isinstance(dirs, str): + dirs_str += ['%s%s' % (prefix, dirs)] + continue + + # Should be build.IncludeDirs object. 
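        # Each IncludeDirs entry is expanded relative to both trees, so a single 'inc'
        # directory yields '-I<builddir>/<subdir>/inc' and '-I<sourcedir>/<subdir>/inc'.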
+ basedir = dirs.get_curdir() + for d in dirs.get_incdirs(): + expdir = os.path.join(basedir, d) + srctreedir = os.path.join(environment.get_source_dir(), expdir) + buildtreedir = os.path.join(environment.get_build_dir(), expdir) + dirs_str += ['%s%s' % (prefix, buildtreedir), + '%s%s' % (prefix, srctreedir)] + for d in dirs.get_extra_build_dirs(): + dirs_str += ['%s%s' % (prefix, d)] + + return dirs_str + + class GResourceTarget(build.CustomTarget): def __init__(self, name, subdir, kwargs): super().__init__(name, subdir, kwargs) diff --git a/mesonbuild/modules/gnome.py b/mesonbuild/modules/gnome.py index 62f4415..3f88585 100644 --- a/mesonbuild/modules/gnome.py +++ b/mesonbuild/modules/gnome.py @@ -26,7 +26,9 @@ from .. import mlog from .. import mesonlib from .. import compilers from .. import interpreter -from . import find_program, GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget +from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget +from . import find_program, get_include_args + # gresource compilation is broken due to the way # the resource compiler and Ninja clash about it @@ -258,42 +260,13 @@ can not be used with the current version of glib-compiled-resources, due to link_command = ['-l%s' % lib.name] if isinstance(lib, build.SharedLibrary): libdir = os.path.join(state.environment.get_build_dir(), lib.subdir) - link_command += ['-L%s' %libdir] + link_command += ['-L%s' % libdir] if include_rpath: - link_command += ['-Wl,-rpath %s' %libdir] + link_command += ['-Wl,-rpath %s' % libdir] if depends: depends.append(lib) return link_command - @staticmethod - def _get_include_args(state, include_dirs, prefix='-I'): - if not include_dirs: - return [] - - dirs_str = [] - for incdirs in include_dirs: - if hasattr(incdirs, "held_object"): - dirs = incdirs.held_object - else: - dirs = incdirs - - if isinstance(dirs, str): - dirs_str += ['%s%s' % (prefix, dirs)] - continue - - # Should be build.IncludeDirs object. - basedir = dirs.get_curdir() - for d in dirs.get_incdirs(): - expdir = os.path.join(basedir, d) - srctreedir = os.path.join(state.environment.get_source_dir(), expdir) - buildtreedir = os.path.join(state.environment.get_build_dir(), expdir) - dirs_str += ['%s%s' % (prefix, buildtreedir), - '%s%s' % (prefix, srctreedir)] - for d in dirs.get_extra_build_dirs(): - dirs_str += ['%s%s' % (prefix, d)] - - return dirs_str - def _get_dependencies_flags(self, deps, state, depends=None, include_rpath=False): cflags = set() ldflags = set() @@ -305,7 +278,7 @@ can not be used with the current version of glib-compiled-resources, due to if hasattr(dep, 'held_object'): dep = dep.held_object if isinstance(dep, InternalDependency): - cflags.update(self._get_include_args(state, dep.include_directories)) + cflags.update(get_include_args(state.environment, dep.include_directories)) for lib in dep.libraries: ldflags.update(self._get_link_args(state, lib.held_object, depends, include_rpath)) libdepflags = self._get_dependencies_flags(lib.held_object.get_external_deps(), state, depends, include_rpath) @@ -319,7 +292,7 @@ can not be used with the current version of glib-compiled-resources, due to for source in dep.sources: if hasattr(source, 'held_object') and isinstance(source.held_object, GirTarget): gi_includes.update([os.path.join(state.environment.get_build_dir(), - source.held_object.get_subdir())]) + source.held_object.get_subdir())]) # This should be any dependency other than an internal one. 
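            # External dependencies (e.g. ones discovered via pkg-config) contribute the
            # compile flags they report directly.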
elif isinstance(dep, Dependency): cflags.update(dep.get_compile_args()) @@ -389,14 +362,14 @@ can not be used with the current version of glib-compiled-resources, due to scan_command = giscanner.get_command() + ['@INPUT@'] scan_command += pkgargs - scan_command += ['--no-libtool', '--namespace='+ns, '--nsversion=' + nsversion, '--warn-all', + scan_command += ['--no-libtool', '--namespace=' + ns, '--nsversion=' + nsversion, '--warn-all', '--output', '@OUTPUT@'] extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', [])) scan_command += extra_args scan_command += ['-I' + os.path.join(state.environment.get_source_dir(), state.subdir), '-I' + os.path.join(state.environment.get_build_dir(), state.subdir)] - scan_command += self._get_include_args(state, girtarget.get_include_dirs()) + scan_command += get_include_args(state.environment, girtarget.get_include_dirs()) if 'link_with' in kwargs: link_with = kwargs.pop('link_with') @@ -521,30 +494,29 @@ can not be used with the current version of glib-compiled-resources, due to if not isinstance(incd.held_object, (str, build.IncludeDirs)): raise MesonException( 'Gir include dirs should be include_directories().') - scan_command += self._get_include_args(state, inc_dirs) - scan_command += self._get_include_args(state, gir_inc_dirs + inc_dirs, - prefix='--add-include-path=') + scan_command += get_include_args(state.environment, inc_dirs) + scan_command += get_include_args(state.environment, gir_inc_dirs + inc_dirs, + prefix='--add-include-path=') if isinstance(girtarget, build.Executable): scan_command += ['--program', girtarget] elif isinstance(girtarget, build.SharedLibrary): libname = girtarget.get_basename() scan_command += ['--library', libname] - scankwargs = {'output' : girfile, - 'input' : libsources, - 'command' : scan_command, - 'depends' : depends, - } + scankwargs = {'output': girfile, + 'input': libsources, + 'command': scan_command, + 'depends': depends} if kwargs.get('install'): scankwargs['install'] = kwargs['install'] scankwargs['install_dir'] = kwargs.get('install_dir_gir', - os.path.join(state.environment.get_datadir(), 'gir-1.0')) + os.path.join(state.environment.get_datadir(), 'gir-1.0')) scan_target = GirTarget(girfile, state.subdir, scankwargs) typelib_output = '%s-%s.typelib' % (ns, nsversion) typelib_cmd = gicompiler.get_command() + [scan_target, '--output', '@OUTPUT@'] - typelib_cmd += self._get_include_args(state, gir_inc_dirs, - prefix='--includedir=') + typelib_cmd += get_include_args(state.environment, gir_inc_dirs, + prefix='--includedir=') for incdir in typelib_includes: typelib_cmd += ["--includedir=" + incdir] @@ -555,7 +527,7 @@ can not be used with the current version of glib-compiled-resources, due to if kwargs.get('install'): typelib_kwargs['install'] = kwargs['install'] typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib', - os.path.join(state.environment.get_libdir(), 'girepository-1.0')) + os.path.join(state.environment.get_libdir(), 'girepository-1.0')) typelib_target = TypelibTarget(typelib_output, state.subdir, typelib_kwargs) return [scan_target, typelib_target] @@ -602,13 +574,12 @@ can not be used with the current version of glib-compiled-resources, due to script = [sys.executable, state.environment.get_build_command()] args = ['--internal', - 'yelphelper', - 'install', - '--subdir=' + state.subdir, - '--id=' + project_id, - '--installdir=' + os.path.join(state.environment.get_datadir(), 'help'), - '--sources=' + source_str, - ] + 'yelphelper', + 'install', + '--subdir=' + state.subdir, + 
'--id=' + project_id, + '--installdir=' + os.path.join(state.environment.get_datadir(), 'help'), + '--sources=' + source_str] if symlinks: args.append('--symlinks=true') if media: @@ -712,7 +683,7 @@ can not be used with the current version of glib-compiled-resources, due to if not isinstance(incd.held_object, (str, build.IncludeDirs)): raise MesonException( 'Gir include dirs should be include_directories().') - cflags.update(self._get_include_args(state, inc_dirs)) + cflags.update(get_include_args(state.environment, inc_dirs)) if cflags: args += ['--cflags=%s' % ' '.join(cflags)] if ldflags: @@ -766,9 +737,9 @@ can not be used with the current version of glib-compiled-resources, due to cmd += ['--c-namespace', kwargs.pop('namespace')] cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@'] outputs = [namebase + '.c', namebase + '.h'] - custom_kwargs = {'input' : xml_file, - 'output' : outputs, - 'command' : cmd + custom_kwargs = {'input': xml_file, + 'output': outputs, + 'command': cmd } return build.CustomTarget(target_name, state.subdir, custom_kwargs) @@ -1062,8 +1033,8 @@ can not be used with the current version of glib-compiled-resources, due to 'depends': vapi_depends, } install_dir = kwargs.get('install_dir', - os.path.join(state.environment.coredata.get_builtin_option('datadir'), - 'vala', 'vapi')) + os.path.join(state.environment.coredata.get_builtin_option('datadir'), + 'vala', 'vapi')) if kwargs.get('install'): custom_kwargs['install'] = kwargs['install'] custom_kwargs['install_dir'] = install_dir diff --git a/mesonbuild/modules/i18n.py b/mesonbuild/modules/i18n.py index c4e4814..097eedd 100644 --- a/mesonbuild/modules/i18n.py +++ b/mesonbuild/modules/i18n.py @@ -16,6 +16,7 @@ from os import path from .. import coredata, mesonlib, build from ..mesonlib import MesonException import sys +import shutil PRESET_ARGS = { 'glib': [ @@ -63,6 +64,8 @@ class I18nModule: def gettext(self, state, args, kwargs): if len(args) != 1: raise coredata.MesonException('Gettext requires one positional argument (package name).') + if not shutil.which('xgettext'): + raise coredata.MesonException('Can not do gettext because xgettext is not installed.') packagename = args[0] languages = mesonlib.stringlistify(kwargs.get('languages', [])) datadirs = mesonlib.stringlistify(kwargs.get('data_dirs', [])) diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py index 6c59f52..aaf0746 100644 --- a/mesonbuild/modules/pkgconfig.py +++ b/mesonbuild/modules/pkgconfig.py @@ -48,7 +48,7 @@ class PkgConfigModule: # '${prefix}' is ignored if the second path is absolute (see # 'os.path.join' for details) ofile.write('libdir=%s\n' % os.path.join('${prefix}', coredata.get_builtin_option('libdir'))) - ofile.write('includedir=%s\n' % os.path.join('${prefix}', coredata.get_builtin_option('includedir'))) + ofile.write('includedir=%s\n' % os.path.join('${prefix}', coredata.get_builtin_option('includedir'))) ofile.write('\n') ofile.write('Name: %s\n' % name) if len(description) > 0: @@ -130,7 +130,7 @@ class PkgConfigModule: priv_reqs = mesonlib.stringlistify(kwargs.get('requires_private', [])) conflicts = mesonlib.stringlistify(kwargs.get('conflicts', [])) pcfile = filebase + '.pc' - pkgroot = kwargs.get('install_dir',None) + pkgroot = kwargs.get('install_dir', None) if pkgroot is None: pkgroot = os.path.join(state.environment.coredata.get_builtin_option('libdir'), 'pkgconfig') if not isinstance(pkgroot, str): diff --git a/mesonbuild/modules/qt4.py b/mesonbuild/modules/qt4.py index 
8df4f3e..8670533 100644 --- a/mesonbuild/modules/qt4.py +++ b/mesonbuild/modules/qt4.py @@ -127,30 +127,29 @@ class Qt4Module(): else: basename = os.path.split(rcc_files[0])[1] name = 'qt4-' + basename.replace('.', '_') - rcc_kwargs = {'input' : rcc_files, - 'output' : name + '.cpp', - 'command' : [self.rcc, '-o', '@OUTPUT@', '@INPUT@'], - 'depend_files' : qrc_deps, - } + rcc_kwargs = {'input': rcc_files, + 'output': name + '.cpp', + 'command': [self.rcc, '-o', '@OUTPUT@', '@INPUT@'], + 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, rcc_kwargs) sources.append(res_target) if len(ui_files) > 0: if not self.uic.found(): raise MesonException(err_msg.format('UIC', 'uic-qt4')) - ui_kwargs = {'output' : 'ui_@BASENAME@.h', - 'arguments' : ['-o', '@OUTPUT@', '@INPUT@']} + ui_kwargs = {'output': 'ui_@BASENAME@.h', + 'arguments': ['-o', '@OUTPUT@', '@INPUT@']} ui_gen = build.Generator([self.uic], ui_kwargs) ui_output = ui_gen.process_files('Qt4 ui', ui_files, state) sources.append(ui_output) if len(moc_headers) > 0: - moc_kwargs = {'output' : 'moc_@BASENAME@.cpp', - 'arguments' : ['@INPUT@', '-o', '@OUTPUT@']} + moc_kwargs = {'output': 'moc_@BASENAME@.cpp', + 'arguments': ['@INPUT@', '-o', '@OUTPUT@']} moc_gen = build.Generator([self.moc], moc_kwargs) moc_output = moc_gen.process_files('Qt4 moc header', moc_headers, state) sources.append(moc_output) if len(moc_sources) > 0: - moc_kwargs = {'output' : '@BASENAME@.moc', - 'arguments' : ['@INPUT@', '-o', '@OUTPUT@']} + moc_kwargs = {'output': '@BASENAME@.moc', + 'arguments': ['@INPUT@', '-o', '@OUTPUT@']} moc_gen = build.Generator([self.moc], moc_kwargs) moc_output = moc_gen.process_files('Qt4 moc source', moc_sources, state) sources.append(moc_output) diff --git a/mesonbuild/modules/qt5.py b/mesonbuild/modules/qt5.py index d4cd261..7a9f8f5 100644 --- a/mesonbuild/modules/qt5.py +++ b/mesonbuild/modules/qt5.py @@ -133,30 +133,29 @@ class Qt5Module(): else: basename = os.path.split(rcc_files[0])[1] name = 'qt5-' + basename.replace('.', '_') - rcc_kwargs = {'input' : rcc_files, - 'output' : name + '.cpp', - 'command' : [self.rcc, '-o', '@OUTPUT@', '@INPUT@'], - 'depend_files' : qrc_deps, - } + rcc_kwargs = {'input': rcc_files, + 'output': name + '.cpp', + 'command': [self.rcc, '-o', '@OUTPUT@', '@INPUT@'], + 'depend_files': qrc_deps} res_target = build.CustomTarget(name, state.subdir, rcc_kwargs) sources.append(res_target) if len(ui_files) > 0: if not self.uic.found(): raise MesonException(err_msg.format('UIC', 'uic-qt5')) - ui_kwargs = {'output' : 'ui_@BASENAME@.h', - 'arguments' : ['-o', '@OUTPUT@', '@INPUT@']} + ui_kwargs = {'output': 'ui_@BASENAME@.h', + 'arguments': ['-o', '@OUTPUT@', '@INPUT@']} ui_gen = build.Generator([self.uic], ui_kwargs) ui_output = ui_gen.process_files('Qt5 ui', ui_files, state) sources.append(ui_output) if len(moc_headers) > 0: - moc_kwargs = {'output' : 'moc_@BASENAME@.cpp', - 'arguments' : ['@INPUT@', '-o', '@OUTPUT@']} + moc_kwargs = {'output': 'moc_@BASENAME@.cpp', + 'arguments': ['@INPUT@', '-o', '@OUTPUT@']} moc_gen = build.Generator([self.moc], moc_kwargs) moc_output = moc_gen.process_files('Qt5 moc header', moc_headers, state) sources.append(moc_output) if len(moc_sources) > 0: - moc_kwargs = {'output' : '@BASENAME@.moc', - 'arguments' : ['@INPUT@', '-o', '@OUTPUT@']} + moc_kwargs = {'output': '@BASENAME@.moc', + 'arguments': ['@INPUT@', '-o', '@OUTPUT@']} moc_gen = build.Generator([self.moc], moc_kwargs) moc_output = moc_gen.process_files('Qt5 moc source', moc_sources, state) 
sources.append(moc_output) diff --git a/mesonbuild/modules/windows.py b/mesonbuild/modules/windows.py index 012f4d0..56bad2d 100644 --- a/mesonbuild/modules/windows.py +++ b/mesonbuild/modules/windows.py @@ -14,6 +14,7 @@ from .. import mesonlib, dependencies, build from ..mesonlib import MesonException +from . import get_include_args import os class WindowsModule: @@ -26,7 +27,16 @@ class WindowsModule: def compile_resources(self, state, args, kwargs): comp = self.detect_compiler(state.compilers) + extra_args = mesonlib.stringlistify(kwargs.get('args', [])) + inc_dirs = kwargs.pop('include_directories', []) + if not isinstance(inc_dirs, list): + inc_dirs = [inc_dirs] + for incd in inc_dirs: + if not isinstance(incd.held_object, (str, build.IncludeDirs)): + raise MesonException('Resource include dirs should be include_directories().') + extra_args += get_include_args(state.environment, inc_dirs) + if comp.id == 'msvc': rescomp = dependencies.ExternalProgram('rc', silent=True) res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@'] @@ -40,7 +50,7 @@ class WindowsModule: suffix = 'o' if not rescomp.found(): raise MesonException('Could not find Windows resource compiler %s.' % ' '.join(rescomp.get_command())) - res_kwargs = {'output' : '@BASENAME@.' + suffix, + res_kwargs = {'output': '@BASENAME@.' + suffix, 'arguments': res_args} res_gen = build.Generator([rescomp], res_kwargs) res_output = res_gen.process_files('Windows resource', args, state) diff --git a/mesonbuild/mparser.py b/mesonbuild/mparser.py index ad1fedd..6b1c8ee 100644 --- a/mesonbuild/mparser.py +++ b/mesonbuild/mparser.py @@ -76,7 +76,7 @@ class Lexer: def lex(self, code, subdir): lineno = 1 line_start = 0 - loc = 0; + loc = 0 par_count = 0 bracket_count = 0 col = 0 @@ -87,7 +87,7 @@ class Lexer: mo = reg.match(code, loc) if mo: curline = lineno - col = mo.start()-line_start + col = mo.start() - line_start matched = True span_start = loc loc = mo.end() @@ -177,7 +177,7 @@ class ArrayNode: class EmptyNode: def __init__(self): - self.subdir ='' + self.subdir = '' self.lineno = 0 self.colno = 0 self.value = None @@ -207,7 +207,7 @@ class ComparisonNode: self.ctype = ctype class ArithmeticNode: - def __init__(self,operation, left, right): + def __init__(self, operation, left, right): self.subdir = left.subdir self.lineno = left.lineno self.colno = left.colno @@ -519,7 +519,7 @@ class Parser: def e9(self): t = self.current if self.accept('true'): - return BooleanNode(t, True); + return BooleanNode(t, True) if self.accept('false'): return BooleanNode(t, False) if self.accept('id'): diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py index 2ba7b99..089eb2c 100644 --- a/mesonbuild/optinterpreter.py +++ b/mesonbuild/optinterpreter.py @@ -62,10 +62,10 @@ def ComboParser(name, description, kwargs): raise OptionException('Combo choice elements must be strings.') return coredata.UserComboOption(name, description, choices, kwargs.get('value', choices[0])) -option_types = {'string' : StringParser, - 'boolean' : BooleanParser, - 'combo' : ComboParser, - } +option_types = {'string': StringParser, + 'boolean': BooleanParser, + 'combo': ComboParser, + } class OptionInterpreter: def __init__(self, subproject, command_line_options): diff --git a/mesonbuild/scripts/commandrunner.py b/mesonbuild/scripts/commandrunner.py index 63b5bb1..cf2770d 100644 --- a/mesonbuild/scripts/commandrunner.py +++ b/mesonbuild/scripts/commandrunner.py @@ -18,10 +18,10 @@ what to run, sets up the environment and executes the command.""" import 
sys, os, subprocess, shutil def run_command(source_dir, build_dir, subdir, command, arguments): - env = {'MESON_SOURCE_ROOT' : source_dir, - 'MESON_BUILD_ROOT' : build_dir, - 'MESON_SUBDIR' : subdir - } + env = {'MESON_SOURCE_ROOT': source_dir, + 'MESON_BUILD_ROOT': build_dir, + 'MESON_SUBDIR': subdir, + } cwd = os.path.join(source_dir, subdir) child_env = os.environ.copy() child_env.update(env) @@ -35,7 +35,7 @@ def run_command(source_dir, build_dir, subdir, command, arguments): fullpath = os.path.join(source_dir, subdir, command) command_array = [fullpath] + arguments try: - return subprocess.Popen(command_array,env=child_env, cwd=cwd) + return subprocess.Popen(command_array, env=child_env, cwd=cwd) except FileNotFoundError: print('Could not execute command "%s".' % command) sys.exit(1) diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py index baa401e..193d5b6 100644 --- a/mesonbuild/scripts/depfixer.py +++ b/mesonbuild/scripts/depfixer.py @@ -29,25 +29,25 @@ class DataSizes(): p = '<' else: p = '>' - self.Half = p+'h' + self.Half = p + 'h' self.HalfSize = 2 - self.Word = p+'I' + self.Word = p + 'I' self.WordSize = 4 - self.Sword = p+'i' + self.Sword = p + 'i' self.SwordSize = 4 if ptrsize == 64: - self.Addr = p+'Q' + self.Addr = p + 'Q' self.AddrSize = 8 - self.Off = p+'Q' + self.Off = p + 'Q' self.OffSize = 8 - self.XWord = p+'Q' + self.XWord = p + 'Q' self.XWordSize = 8 - self.Sxword = p+'q' + self.Sxword = p + 'q' self.SxwordSize = 8 else: - self.Addr = p+'I' + self.Addr = p + 'I' self.AddrSize = 4 - self.Off = p+'I' + self.Off = p + 'I' self.OffSize = 4 class DynamicEntry(DataSizes): @@ -55,8 +55,8 @@ class DynamicEntry(DataSizes): super().__init__(ptrsize, is_le) self.ptrsize = ptrsize if ptrsize == 64: - self.d_tag = struct.unpack(self.Sxword, ifile.read(self.SxwordSize))[0]; - self.val = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]; + self.d_tag = struct.unpack(self.Sxword, ifile.read(self.SxwordSize))[0] + self.val = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] else: self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0] self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0] @@ -76,29 +76,29 @@ class SectionHeader(DataSizes): is_64 = True else: is_64 = False -#Elf64_Word - self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0]; -#Elf64_Word +# Elf64_Word + self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Word self.sh_type = struct.unpack(self.Word, ifile.read(self.WordSize))[0] -#Elf64_Xword +# Elf64_Xword if is_64: self.sh_flags = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] else: self.sh_flags = struct.unpack(self.Word, ifile.read(self.WordSize))[0] -#Elf64_Addr - self.sh_addr = struct.unpack(self.Addr, ifile.read(self.AddrSize))[0]; -#Elf64_Off +# Elf64_Addr + self.sh_addr = struct.unpack(self.Addr, ifile.read(self.AddrSize))[0] +# Elf64_Off self.sh_offset = struct.unpack(self.Off, ifile.read(self.OffSize))[0] -#Elf64_Xword +# Elf64_Xword if is_64: self.sh_size = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] else: self.sh_size = struct.unpack(self.Word, ifile.read(self.WordSize))[0] -#Elf64_Word - self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0]; -#Elf64_Word - self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0]; -#Elf64_Xword +# Elf64_Word + self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Word + self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0] 
+# Elf64_Xword if is_64: self.sh_addralign = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] else: @@ -220,7 +220,7 @@ class Elf(DataSizes): soname = i if i.d_tag == DT_STRTAB: strtab = i - else: + if soname is None or strtab is None: print("This file does not have a soname") return self.bf.seek(strtab.val + soname.val) @@ -273,7 +273,7 @@ class Elf(DataSizes): name = self.read_str() if name.startswith(prefix): basename = name.split(b'/')[-1] - padding = b'\0'*(len(name) - len(basename)) + padding = b'\0' * (len(name) - len(basename)) newname = basename + padding assert(len(newname) == len(name)) self.bf.seek(offset) @@ -299,7 +299,7 @@ class Elf(DataSizes): sys.exit("New rpath must not be longer than the old one.") self.bf.seek(rp_off) self.bf.write(new_rpath) - self.bf.write(b'\0'*(len(old_rpath) - len(new_rpath) + 1)) + self.bf.write(b'\0' * (len(old_rpath) - len(new_rpath) + 1)) if len(new_rpath) == 0: self.remove_rpath_entry(entrynum) @@ -311,8 +311,8 @@ class Elf(DataSizes): if entry.d_tag == entrynum: rpentry = self.dynamic[i] rpentry.d_tag = 0 - self.dynamic = self.dynamic[:i] + self.dynamic[i+1:] + [rpentry] - break; + self.dynamic = self.dynamic[:i] + self.dynamic[i + 1:] + [rpentry] + break # DT_MIPS_RLD_MAP_REL is relative to the offset of the tag. Adjust it consequently. for entry in self.dynamic[i:]: if entry.d_tag == DT_MIPS_RLD_MAP_REL: diff --git a/mesonbuild/scripts/gtkdochelper.py b/mesonbuild/scripts/gtkdochelper.py index 734f80b..6486e28 100644 --- a/mesonbuild/scripts/gtkdochelper.py +++ b/mesonbuild/scripts/gtkdochelper.py @@ -108,7 +108,9 @@ def build_gtkdoc(source_root, build_root, doc_subdir, src_subdirs, if gobject_typesfile: scanobjs_cmd = ['gtkdoc-scangobj'] + scanobjs_args + [gobject_typesfile, - '--module=' + module, '--cflags=' + cflags, '--ldflags=' + ldflags] + '--module=' + module, + '--cflags=' + cflags, + '--ldflags=' + ldflags] gtkdoc_run_check(scanobjs_cmd, abs_out) diff --git a/mesonbuild/scripts/meson_install.py b/mesonbuild/scripts/meson_install.py index c749b4f..676a1e5 100644 --- a/mesonbuild/scripts/meson_install.py +++ b/mesonbuild/scripts/meson_install.py @@ -51,7 +51,7 @@ def do_copydir(src_prefix, src_dir, dst_dir): for root, dirs, files in os.walk(src_prefix): for d in dirs: abs_src = os.path.join(src_dir, root, d) - filepart = abs_src[len(src_dir)+1:] + filepart = abs_src[len(src_dir) + 1:] abs_dst = os.path.join(dst_dir, filepart) if os.path.isdir(abs_dst): continue @@ -62,7 +62,7 @@ def do_copydir(src_prefix, src_dir, dst_dir): shutil.copystat(abs_src, abs_dst) for f in files: abs_src = os.path.join(src_dir, root, f) - filepart = abs_src[len(src_dir)+1:] + filepart = abs_src[len(src_dir) + 1:] abs_dst = os.path.join(dst_dir, filepart) if os.path.isdir(abs_dst): print('Tried to copy file %s but a directory of that name already exists.' 
% abs_dst) @@ -142,11 +142,11 @@ def install_headers(d): do_copyfile(fullfilename, outfilename) def run_install_script(d): - env = {'MESON_SOURCE_ROOT' : d.source_dir, - 'MESON_BUILD_ROOT' : d.build_dir, - 'MESON_INSTALL_PREFIX' : d.prefix, - 'MESON_INSTALL_DESTDIR_PREFIX' : d.fullprefix, - } + env = {'MESON_SOURCE_ROOT': d.source_dir, + 'MESON_BUILD_ROOT': d.build_dir, + 'MESON_INSTALL_PREFIX': d.prefix, + 'MESON_INSTALL_DESTDIR_PREFIX': d.fullprefix, + } child_env = os.environ.copy() child_env.update(env) diff --git a/mesonbuild/scripts/scanbuild.py b/mesonbuild/scripts/scanbuild.py index e17d2ad..728214f 100644 --- a/mesonbuild/scripts/scanbuild.py +++ b/mesonbuild/scripts/scanbuild.py @@ -16,11 +16,12 @@ import os import subprocess import shutil import tempfile +from ..environment import detect_ninja def scanbuild(exename, srcdir, blddir, privdir, logdir, args): with tempfile.TemporaryDirectory(dir=privdir) as scandir: meson_cmd = [exename] + args - build_cmd = [exename, '-o', logdir, 'ninja'] + build_cmd = [exename, '-o', logdir, detect_ninja(), '-C', scandir] rc = subprocess.call(meson_cmd + [srcdir, scandir]) if rc != 0: return rc diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py index bfd629f..976d2f0 100644 --- a/mesonbuild/scripts/symbolextractor.py +++ b/mesonbuild/scripts/symbolextractor.py @@ -80,7 +80,7 @@ def osx_syms(libfilename, outfilename): if 'LC_ID_DYLIB' in val: match = i break - result = [arr[match+2], arr[match+5]] # Libreoffice stores all 5 lines but the others seem irrelevant. + result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant. pnm, output = Popen_safe(['nm', '-g', '-P', libfilename])[0:2] if pnm.returncode != 0: raise RuntimeError('nm does not work.') diff --git a/mesonbuild/scripts/uninstall.py b/mesonbuild/scripts/uninstall.py new file mode 100644 index 0000000..85c4bba --- /dev/null +++ b/mesonbuild/scripts/uninstall.py @@ -0,0 +1,46 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +logfile = 'meson-logs/install-log.txt' + +def do_uninstall(log): + failures = 0 + successes = 0 + for line in open(log): + if line.startswith('#'): + continue + fname = line.strip() + try: + os.unlink(fname) + print('Deleted:', fname) + successes += 1 + except Exception as e: + print('Could not delete %s: %s.' 
% (fname, e)) + failures += 1 + print('\nUninstall finished.\n') + print('Deleted:', successes) + print('Failed:', failures) + print('\nRemember that files created by custom scripts have not been removed.') + +def run(args): + if len(args) != 0: + print('Weird error.') + return 1 + if not os.path.exists(logfile): + print('Log file does not exist, no installation has been done.') + return 0 + do_uninstall(logfile) + return 0 diff --git a/mesonbuild/scripts/yelphelper.py b/mesonbuild/scripts/yelphelper.py index 76366a4..85d89b8 100644 --- a/mesonbuild/scripts/yelphelper.py +++ b/mesonbuild/scripts/yelphelper.py @@ -33,7 +33,7 @@ def build_pot(srcdir, project_id, sources): # Must be relative paths sources = [os.path.join('C', source) for source in sources] outfile = os.path.join(srcdir, project_id + '.pot') - subprocess.call(['itstool', '-o', outfile]+sources) + subprocess.call(['itstool', '-o', outfile] + sources) def update_po(srcdir, project_id, langs): potfile = os.path.join(srcdir, project_id + '.pot') @@ -55,7 +55,7 @@ def merge_translations(blddir, sources, langs): subprocess.call([ 'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'), '-o', os.path.join(blddir, lang) - ]+sources) + ] + sources) def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, project_id, symlinks): c_install_dir = os.path.join(install_dir, 'C', project_id) @@ -65,7 +65,7 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr for source in sources: infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source) outfile = os.path.join(indir, source) - mlog.log('Installing %s to %s.' %(infile, outfile)) + mlog.log('Installing %s to %s.' % (infile, outfile)) shutil.copyfile(infile, outfile) shutil.copystat(infile, outfile) for m in media: @@ -73,15 +73,15 @@ def install_help(srcdir, blddir, sources, media, langs, install_dir, destdir, pr outfile = os.path.join(indir, m) if not os.path.exists(infile): if lang == 'C': - mlog.warning('Media file "%s" did not exist in C directory' %m) + mlog.warning('Media file "%s" did not exist in C directory' % m) elif symlinks: srcfile = os.path.join(c_install_dir, m) - mlog.log('Symlinking %s to %s.' %(outfile, srcfile)) + mlog.log('Symlinking %s to %s.' % (outfile, srcfile)) if '/' in m or '\\' in m: os.makedirs(os.path.dirname(outfile), exist_ok=True) os.symlink(srcfile, outfile) continue - mlog.log('Installing %s to %s.' %(infile, outfile)) + mlog.log('Installing %s to %s.' % (infile, outfile)) if '/' in m or '\\' in m: os.makedirs(os.path.dirname(outfile), exist_ok=True) shutil.copyfile(infile, outfile) diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py index 3db3f9b..dead6fb 100644 --- a/mesonbuild/wrap/wrap.py +++ b/mesonbuild/wrap/wrap.py @@ -99,7 +99,7 @@ class Resolver: return packagename else: mlog.warning('Subproject directory %s is empty, possibly because of an unfinished' - 'checkout, removing to reclone' % dirname) + 'checkout, removing to reclone' % dirname) os.rmdir(dirname) except NotADirectoryError: raise RuntimeError('%s is not a directory, can not use as subproject.' 
% dirname) @@ -181,7 +181,7 @@ class Resolver: cwd=checkoutdir) def get_data(self, url): - blocksize = 10*1024 + blocksize = 10 * 1024 if url.startswith('https://wrapdb.mesonbuild.com'): resp = open_wrapdburl(url) else: @@ -206,7 +206,7 @@ class Resolver: break downloaded += len(block) blocks.append(block) - ratio = int(downloaded/dlsize * 10) + ratio = int(downloaded / dlsize * 10) while printed_dots < ratio: print('.', end='') sys.stdout.flush() diff --git a/mesontest.py b/mesontest.py index 36f7334..af4ae2a 100755 --- a/mesontest.py +++ b/mesontest.py @@ -69,7 +69,7 @@ parser.add_argument('--suite', default=None, dest='suite', parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false', help='Do not split stderr and stdout in test logs.') parser.add_argument('--print-errorlogs', default=False, action='store_true', - help="Whether to print faling tests' logs.") + help="Whether to print failing tests' logs.") parser.add_argument('--benchmark', default=False, action='store_true', help="Run benchmarks instead of tests.") parser.add_argument('--logbase', default='testlog', @@ -82,6 +82,8 @@ parser.add_argument('-t', '--timeout-multiplier', type=float, default=1.0, help='Define a multiplier for test timeout, for example ' ' when running tests in particular conditions they might take' ' more time to execute.') +parser.add_argument('--setup', default=None, dest='setup', + help='Which test setup to use.') parser.add_argument('args', nargs='*') class TestRun(): @@ -101,9 +103,7 @@ class TestRun(): if self.cmd is None: res += 'NONE\n' else: - res += "\n%s %s\n" %(' '.join( - ["%s='%s'" % (k, v) for k, v in self.env.items()]), - ' ' .join(self.cmd)) + res += "%s%s\n" % (''.join(["%s='%s' " % (k, v) for k, v in self.env.items()]), ' ' .join(self.cmd)) if self.stdo: res += '--- stdout ---\n' res += self.stdo @@ -126,13 +126,12 @@ def decode(stream): return stream.decode('iso-8859-1', errors='ignore') def write_json_log(jsonlogfile, test_name, result): - jresult = {'name' : test_name, - 'stdout' : result.stdo, - 'result' : result.res, - 'duration' : result.duration, - 'returncode' : result.returncode, - 'command' : result.cmd, - } + jresult = {'name': test_name, + 'stdout': result.stdo, + 'result': result.res, + 'duration': result.duration, + 'returncode': result.returncode, + 'command': result.cmd} if isinstance(result.env, dict): jresult['env'] = result.env else: @@ -150,8 +149,10 @@ class TestHarness: def __init__(self, options): self.options = options self.collected_logs = [] - self.failed_tests = [] - self.error_count = 0 + self.fail_count = 0 + self.success_count = 0 + self.skip_count = 0 + self.timeout_count = 0 self.is_run = False self.cant_rebuild = False if self.options.benchmark: @@ -181,7 +182,6 @@ class TestHarness: return True def run_single_test(self, wrap, test): - failling = False if test.fname[0].endswith('.jar'): cmd = ['java', '-jar'] + test.fname elif not test.is_cross and run_with_mono(test.fname[0]): @@ -207,6 +207,7 @@ class TestHarness: cmd = wrap + cmd + test.cmd_args starttime = time.time() child_env = os.environ.copy() + child_env.update(self.options.global_env.get_env(child_env)) if isinstance(test.env, build.EnvironmentVariables): test.env = test.env.get_env(child_env) @@ -256,48 +257,47 @@ class TestHarness: stde = decode(stde) if timed_out: res = 'TIMEOUT' - failling = True + self.timeout_count += 1 if p.returncode == GNU_SKIP_RETURNCODE: res = 'SKIP' - elif (not test.should_fail and p.returncode == 0) or \ - (test.should_fail and p.returncode 
!= 0): + self.skip_count += 1 + elif test.should_fail == bool(p.returncode): res = 'OK' + self.success_count += 1 else: res = 'FAIL' - failling = True + self.fail_count += 1 returncode = p.returncode result = TestRun(res, returncode, test.should_fail, duration, stdo, stde, cmd, test.env) - if failling: - self.failed_tests.append(result) - return result def print_stats(self, numlen, tests, name, result, i, logfile, jsonlogfile): - startpad = ' '*(numlen - len('%d' % (i+1))) - num = '%s%d/%d' % (startpad, i+1, len(tests)) - padding1 = ' '*(38-len(name)) - padding2 = ' '*(8-len(result.res)) + startpad = ' ' * (numlen - len('%d' % (i + 1))) + num = '%s%d/%d' % (startpad, i + 1, len(tests)) + padding1 = ' ' * (38 - len(name)) + padding2 = ' ' * (8 - len(result.res)) result_str = '%s %s %s%s%s%5.2f s' % \ (num, name, padding1, result.res, padding2, result.duration) print(result_str) result_str += "\n\n" + result.get_log() if (result.returncode != GNU_SKIP_RETURNCODE) and \ (result.returncode != 0) != result.should_fail: - self.error_count += 1 if self.options.print_errorlogs: self.collected_logs.append(result_str) - logfile.write(result_str) - write_json_log(jsonlogfile, name, result) - - def doit(self): - if self.is_run: - raise RuntimeError('Test harness object can only be used once.') - if not os.path.isfile(self.datafile): - print('Test data file. Probably this means that you did not run this in the build directory.') - return 1 - self.is_run = True - logfilename = self.run_tests(self.options.logbase) + if logfile: + logfile.write(result_str) + if jsonlogfile: + write_json_log(jsonlogfile, name, result) + + def print_summary(self, logfile, jsonlogfile): + msg = 'Test summary: %d OK, %d FAIL, %d SKIP, %d TIMEOUT' \ + % (self.success_count, self.fail_count, self.skip_count, self.timeout_count) + print(msg) + if logfile: + logfile.write(msg) + + def print_collected_logs(self): if len(self.collected_logs) > 0: if len(self.collected_logs) > 10: print('\nThe output from 10 first failed tests:\n') @@ -305,54 +305,100 @@ class TestHarness: print('\nThe output from the failed tests:\n') for log in self.collected_logs[:10]: lines = log.splitlines() - if len(lines) > 100: - print(lines[0]) + if len(lines) > 104: + print('\n'.join(lines[0:4])) print('--- Listing only the last 100 lines from a long log. ---') - lines = lines[-99:] + lines = lines[-100:] for line in lines: print(line) - print('Full log written to %s.' % logfilename) - return self.error_count + + def doit(self): + if self.is_run: + raise RuntimeError('Test harness object can only be used once.') + if not os.path.isfile(self.datafile): + print('Test data file. 
Probably this means that you did not run this in the build directory.') + return 1 + self.is_run = True + tests = self.get_tests() + if not tests: + return 0 + self.run_tests(tests) + return self.fail_count def get_tests(self): with open(self.datafile, 'rb') as f: tests = pickle.load(f) + + if not tests: + print('No tests defined.') + return [] + + if self.options.suite: + tests = [t for t in tests if self.options.suite in t.suite] + + if self.options.args: + tests = [t for t in tests if t.name in self.options.args] + + if not tests: + print('No suitable tests defined.') + return [] + for test in tests: test.rebuilt = False return tests - def run_tests(self, log_base): - logfile_base = os.path.join(self.options.wd, 'meson-logs', log_base) + def open_log_files(self): + if not self.options.logbase or self.options.verbose: + return (None, None, None, None) + + logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase) + if self.options.wrapper is None: - wrap = [] logfilename = logfile_base + '.txt' - jsonlogfilename = logfile_base+ '.json' + jsonlogfilename = logfile_base + '.json' else: - wrap = self.options.wrapper.split() - namebase = wrap[0] + namebase = os.path.split(self.get_wrapper()[0])[1] logfilename = logfile_base + '-' + namebase.replace(' ', '_') + '.txt' jsonlogfilename = logfile_base + '-' + namebase.replace(' ', '_') + '.json' - tests = self.get_tests() - if len(tests) == 0: - print('No tests defined.') - return - numlen = len('%d' % len(tests)) - executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes) - futures = [] - filtered_tests = filter_tests(self.options.suite, tests) - - jsonlogfile = None - logfile = None + + jsonlogfile = open(jsonlogfilename, 'w') + logfile = open(logfilename, 'w') + + logfile.write('Log of Meson test suite run on %s.\n\n' + % datetime.datetime.now().isoformat()) + + return (logfile, logfilename, jsonlogfile, jsonlogfilename) + + def get_wrapper(self): + wrap = [] + if self.options.gdb: + wrap = ['gdb', '--quiet', '--nh'] + if self.options.repeat > 1: + wrap += ['-ex', 'run', '-ex', 'quit'] + elif self.options.wrapper: + if isinstance(self.options.wrapper, str): + wrap = self.options.wrapper.split() + else: + wrap = self.options.wrapper + assert(isinstance(wrap, list)) + return wrap + + def get_suites(self, tests): + return set([test.suite[0] for test in tests]) + + def run_tests(self, tests): try: - if not self.options.verbose: - jsonlogfile = open(jsonlogfilename, 'w') - logfile = open(logfilename, 'w') - logfile.write('Log of Meson test suite run on %s.\n\n' % - datetime.datetime.now().isoformat()) + executor = None + logfile = None + jsonlogfile = None + futures = [] + numlen = len('%d' % len(tests)) + (logfile, logfilename, jsonlogfile, jsonlogfilename) = self.open_log_files() + wrap = self.get_wrapper() for i in range(self.options.repeat): - for i, test in enumerate(filtered_tests): + for i, test in enumerate(tests): if test.suite[0] == '': visible_name = test.name else: @@ -361,112 +407,113 @@ class TestHarness: else: visible_name = test.suite[0] + ' / ' + test.name - if not test.is_parallel: - self.drain_futures(futures) + if self.options.gdb: + test.timeout = None + if len(test.cmd_args): + wrap.append('--args') + + if not test.is_parallel or self.options.gdb: + self.drain_futures(futures, logfile, jsonlogfile) futures = [] res = self.run_single_test(wrap, test) - if not self.options.verbose: - self.print_stats(numlen, filtered_tests, visible_name, res, i, - logfile, jsonlogfile) + 
self.print_stats(numlen, tests, visible_name, res, i, logfile, jsonlogfile) else: + if not executor: + executor = conc.ThreadPoolExecutor(max_workers=self.options.num_processes) f = executor.submit(self.run_single_test, wrap, test) - if not self.options.verbose: - futures.append((f, numlen, filtered_tests, visible_name, i, - logfile, jsonlogfile)) + futures.append((f, numlen, tests, visible_name, i, logfile, jsonlogfile)) + if self.options.repeat > 1 and self.fail_count: + break + if self.options.repeat > 1 and self.fail_count: + break + self.drain_futures(futures, logfile, jsonlogfile) + self.print_summary(logfile, jsonlogfile) + self.print_collected_logs() + + if logfilename: + print('Full log written to %s.' % logfilename) finally: if jsonlogfile: jsonlogfile.close() if logfile: logfile.close() - return logfilename - - def drain_futures(self, futures, logfile, jsonlogfile): for i in futures: (result, numlen, tests, name, i, logfile, jsonlogfile) = i - if self.options.repeat > 1 and self.failed_tests: + if self.options.repeat > 1 and self.fail_count: result.cancel() - elif not self.options.verbose: - self.print_stats(numlen, tests, name, result.result(), i, logfile, jsonlogfile) - else: + if self.options.verbose: result.result() - - if self.options.repeat > 1 and self.failed_tests: - if not self.options.verbose: - for res in self.failed_tests: - print('Test failed:\n\n-- stdout --\n') - print(res.stdo) - print('\n-- stderr --\n') - print(res.stde) - return 1 - - return + self.print_stats(numlen, tests, name, result.result(), i, logfile, jsonlogfile) def run_special(self): 'Tests run by the user, usually something like "under gdb 1000 times".' if self.is_run: raise RuntimeError('Can not use run_special after a full run.') - if self.options.wrapper is not None: - wrap = self.options.wrapper.split(' ') - else: - wrap = [] - if self.options.gdb and len(wrap) > 0: - print('Can not specify both a wrapper and gdb.') - return 1 if os.path.isfile('build.ninja'): subprocess.check_call([environment.detect_ninja(), 'all']) tests = self.get_tests() - if self.options.list: - for i in tests: - print(i.name) - return 0 - for t in tests: - if t.name in self.options.args: - for i in range(self.options.repeat): - print('Running: %s %d/%d' % (t.name, i+1, self.options.repeat)) - if self.options.gdb: - wrap = ['gdb', '--quiet'] - if len(t.cmd_args) > 0: - wrap.append('--args') - if self.options.repeat > 1: - # The user wants to debug interactively, so no timeout. 
- t.timeout = None - wrap += ['-ex', 'run', '-ex', 'quit'] - - res = self.run_single_test(wrap, t) - else: - res = self.run_single_test(wrap, t) - if (res.returncode == 0 and res.should_fail) or \ - (res.returncode != 0 and not res.should_fail): - if not self.options.verbose: - print('Test failed:\n\n-- stdout --\n') - print(res.stdo) - print('\n-- stderr --\n') - print(res.stde) - return 1 - return 0 + self.run_tests(tests) + return self.fail_count -def filter_tests(suite, tests): - if suite is None: - return tests - return [x for x in tests if suite in x.suite] +def list_tests(th): + tests = th.get_tests() + print_suites = True if len(th.get_suites(tests)) != 1 else False + for i in tests: + if print_suites: + print("%s / %s" % (i.suite[0], i.name)) + else: + print("%s" % i.name) + + +def merge_suite_options(options): + buildfile = os.path.join(options.wd, 'meson-private/build.dat') + with open(buildfile, 'rb') as f: + build = pickle.load(f) + setups = build.test_setups + if options.setup not in setups: + sys.exit('Unknown test setup: %s' % options.setup) + current = setups[options.setup] + if not options.gdb: + options.gdb = current.gdb + if options.timeout_multiplier is None: + options.timeout_multiplier = current.timeout_multiplier +# if options.env is None: +# options.env = current.env # FIXME, should probably merge options here. + if options.wrapper is not None and current.exe_wrapper is not None: + sys.exit('Conflict: both test setup and command line specify an exe wrapper.') + if options.wrapper is None: + options.wrapper = current.exe_wrapper + return current.env def run(args): options = parser.parse_args(args) + if options.benchmark: options.num_processes = 1 + if options.setup is not None: + global_env = merge_suite_options(options) + else: + global_env = build.EnvironmentVariables() + + setattr(options, 'global_env', global_env) + if options.gdb: options.verbose = True + if options.wrapper: + print('Must not specify both a wrapper and gdb at the same time.') + return 1 options.wd = os.path.abspath(options.wd) th = TestHarness(options) if options.list: - return th.run_special() + list_tests(th) + return 0 if not options.no_rebuild: if not th.rebuild_all(): sys.exit(-1) diff --git a/run_project_tests.py b/run_project_tests.py index 61177ff..18988fb 100755 --- a/run_project_tests.py +++ b/run_project_tests.py @@ -58,9 +58,15 @@ class AutoDeletedDir(): try: shutil.rmtree(self.dir) return - except OSError: - if i == retries-1: - raise + # Sometimes we get: ValueError: I/O operation on closed file. 
+ except ValueError: + return + # Deleting can raise OSError or PermissionError on Windows + # (most likely because of anti-virus locking the file) + except (OSError, PermissionError): + if i == retries - 1: + mlog.warning('Could not delete temporary directory.') + return time.sleep(0.1 * (2**i)) failing_logs = [] @@ -84,7 +90,7 @@ def stop_handler(signal, frame): signal.signal(signal.SIGINT, stop_handler) signal.signal(signal.SIGTERM, stop_handler) -#unity_flags = ['--unity'] +# unity_flags = ['--unity'] unity_flags = [] backend_flags = None @@ -395,7 +401,7 @@ def run_tests(all_tests, log_name_base, extra_args): executor = conc.ProcessPoolExecutor(max_workers=num_workers) for name, test_cases, skipped in all_tests: - current_suite = ET.SubElement(junit_root, 'testsuite', {'name' : name, 'tests' : str(len(test_cases))}) + current_suite = ET.SubElement(junit_root, 'testsuite', {'name': name, 'tests': str(len(test_cases))}) if skipped: print('\nNot running %s tests.\n' % name) else: @@ -415,8 +421,8 @@ def run_tests(all_tests, log_name_base, extra_args): result = result.result() if result is None or 'MESON_SKIP_TEST' in result.stdo: print('Skipping:', t) - current_test = ET.SubElement(current_suite, 'testcase', {'name' : testname, - 'classname' : name}) + current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, + 'classname': name}) ET.SubElement(current_test, 'skipped', {}) skipped_tests += 1 else: @@ -435,11 +441,11 @@ def run_tests(all_tests, log_name_base, extra_args): test_time += result.testtime total_time = conf_time + build_time + test_time log_text_file(logfile, t, result.stdo, result.stde) - current_test = ET.SubElement(current_suite, 'testcase', {'name' : testname, - 'classname' : name, - 'time' : '%.3f' % total_time}) + current_test = ET.SubElement(current_suite, 'testcase', {'name': testname, + 'classname': name, + 'time': '%.3f' % total_time}) if result.msg != '': - ET.SubElement(current_test, 'failure', {'message' : result.msg}) + ET.SubElement(current_test, 'failure', {'message': result.msg}) stdoel = ET.SubElement(current_test, 'system-out') stdoel.text = result.stdo stdeel = ET.SubElement(current_test, 'system-err') @@ -472,7 +478,7 @@ def check_format(): def pbcompile(compiler, source, objectfile): if compiler == 'cl': - cmd = [compiler, '/nologo', '/Fo'+objectfile, '/c', source] + cmd = [compiler, '/nologo', '/Fo' + objectfile, '/c', source] else: cmd = [compiler, '-c', source, '-o', objectfile] subprocess.check_call(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) @@ -518,7 +524,7 @@ def generate_prebuilt(): if __name__ == '__main__': parser = argparse.ArgumentParser(description="Run the test suite of Meson.") parser.add_argument('extra_args', nargs='*', - help='arguments that are passed directly to Meson (remember to have -- before these).') + help='arguments that are passed directly to Meson (remember to have -- before these).') parser.add_argument('--backend', default=None, dest='backend', choices = backendlist) options = parser.parse_args() @@ -554,7 +560,7 @@ if __name__ == '__main__': print('\nTotal passed tests:', passing_tests) print('Total failed tests:', failing_tests) print('Total skipped tests:', skipped_tests) - if failing_tests > 0 and ('TRAVIS' in os.environ or 'APPVEYOR' in os.environ): + if failing_tests > 0: print('\nMesonlogs of failing tests\n') for l in failing_logs: print(l, '\n') diff --git a/run_tests.py b/run_tests.py index 5ba41bd..2dfbaff 100755 --- a/run_tests.py +++ b/run_tests.py @@ -15,6 +15,7 @@ # limitations 
under the License. import subprocess, sys, shutil +import platform from mesonbuild import mesonlib if __name__ == '__main__': @@ -22,7 +23,8 @@ if __name__ == '__main__': if mesonlib.is_linux(): print('Running unittests.\n') returncode += subprocess.call([sys.executable, 'run_unittests.py', '-v']) - if shutil.which('arm-linux-gnueabihf-gcc-6'): # Ubuntu packages do not have a binary without -6 suffix. + # Ubuntu packages do not have a binary without -6 suffix. + if shutil.which('arm-linux-gnueabihf-gcc-6') and not platform.machine().startswith('arm'): print('Running cross compilation tests.\n') returncode += subprocess.call([sys.executable, 'run_cross_test.py', 'cross/ubuntu-armhf.txt']) returncode += subprocess.call([sys.executable, 'run_project_tests.py'] + sys.argv[1:]) diff --git a/run_unittests.py b/run_unittests.py index 3d5a237..d1c192f 100755 --- a/run_unittests.py +++ b/run_unittests.py @@ -19,7 +19,7 @@ import re, json import tempfile from glob import glob import mesonbuild.environment -from mesonbuild.environment import detect_ninja +from mesonbuild.environment import detect_ninja, Environment from mesonbuild.dependencies import PkgConfigDependency def get_soname(fname): @@ -33,6 +33,12 @@ def get_soname(fname): return m.group(1) raise RuntimeError('Could not determine soname:\n\n' + raw_out) +def get_fake_options(): + import argparse + opts = argparse.Namespace() + opts.cross_file = None + return opts + class FakeEnvironment(object): def __init__(self): self.cross_info = None @@ -57,12 +63,14 @@ class LinuxlikeTests(unittest.TestCase): src_root = os.path.dirname(__file__) src_root = os.path.join(os.getcwd(), src_root) self.builddir = tempfile.mkdtemp() + self.logdir = os.path.join(self.builddir, 'meson-logs') self.prefix = '/usr' self.libdir = os.path.join(self.prefix, 'lib') self.installdir = os.path.join(self.builddir, 'install') self.meson_command = [sys.executable, os.path.join(src_root, 'meson.py')] self.mconf_command = [sys.executable, os.path.join(src_root, 'mesonconf.py')] self.mintro_command = [sys.executable, os.path.join(src_root, 'mesonintrospect.py')] + self.mtest_command = [sys.executable, os.path.join(src_root, 'mesontest.py'), '-C', self.builddir] self.ninja_command = [detect_ninja(), '-C', self.builddir] self.common_test_dir = os.path.join(src_root, 'test cases/common') self.vala_test_dir = os.path.join(src_root, 'test cases/vala') @@ -79,25 +87,37 @@ class LinuxlikeTests(unittest.TestCase): def _run(self, command): self.output += subprocess.check_output(command, env=os.environ.copy()) - def init(self, srcdir): + def init(self, srcdir, extra_args=None): + if extra_args is None: + extra_args = [] args = [srcdir, self.builddir, '--prefix', self.prefix, '--libdir', self.libdir] - self._run(self.meson_command + args) + self._run(self.meson_command + args + extra_args) + self.privatedir = os.path.join(self.builddir, 'meson-private') def build(self): self._run(self.ninja_command) + def run_tests(self): + self._run(self.ninja_command + ['test']) + def install(self): os.environ['DESTDIR'] = self.installdir self._run(self.ninja_command + ['install']) + def uninstall(self): + self._run(self.ninja_command + ['uninstall']) + def run_target(self, target): self.output += subprocess.check_output(self.ninja_command + [target]) def setconf(self, arg): self._run(self.mconf_command + [arg, self.builddir]) + def wipe(self): + shutil.rmtree(self.builddir) + def get_compdb(self): with open(os.path.join(self.builddir, 'compile_commands.json')) as ifile: return json.load(ifile) @@ 
-158,7 +178,7 @@ class LinuxlikeTests(unittest.TestCase): testdir = os.path.join(self.common_test_dir, '3 static') self.init(testdir) compdb = self.get_compdb() - self.assertTrue('-fPIC' in compdb[0]['command']) + self.assertIn('-fPIC', compdb[0]['command']) # This is needed to increase the difference between build.ninja's # timestamp and coredata.dat's timestamp due to a Ninja bug. # https://github.com/ninja-build/ninja/issues/371 @@ -167,7 +187,7 @@ class LinuxlikeTests(unittest.TestCase): # Regenerate build self.build() compdb = self.get_compdb() - self.assertTrue('-fPIC' not in compdb[0]['command']) + self.assertNotIn('-fPIC', compdb[0]['command']) def test_pkgconfig_gen(self): ''' @@ -180,11 +200,11 @@ class LinuxlikeTests(unittest.TestCase): self.init(testdir) env = FakeEnvironment() kwargs = {'required': True, 'silent': True} - os.environ['PKG_CONFIG_LIBDIR'] = os.path.join(self.builddir, 'meson-private') + os.environ['PKG_CONFIG_LIBDIR'] = self.privatedir simple_dep = PkgConfigDependency('libfoo', env, kwargs) self.assertTrue(simple_dep.found()) self.assertEqual(simple_dep.get_version(), '1.0') - self.assertTrue('-lfoo' in simple_dep.get_link_args()) + self.assertIn('-lfoo', simple_dep.get_link_args()) def test_vala_c_warnings(self): ''' @@ -209,15 +229,15 @@ class LinuxlikeTests(unittest.TestCase): self.assertIsNotNone(vala_command) self.assertIsNotNone(c_command) # -w suppresses all warnings, should be there in Vala but not in C - self.assertTrue('-w' in vala_command) - self.assertFalse('-w' in c_command) + self.assertIn("'-w'", vala_command) + self.assertNotIn("'-w'", c_command) # -Wall enables all warnings, should be there in C but not in Vala - self.assertFalse('-Wall' in vala_command) - self.assertTrue('-Wall' in c_command) + self.assertNotIn("'-Wall'", vala_command) + self.assertIn("'-Wall'", c_command) # -Werror converts warnings to errors, should always be there since it's # injected by an unrelated piece of code and the project has werror=true - self.assertTrue('-Werror' in vala_command) - self.assertTrue('-Werror' in c_command) + self.assertIn("'-Werror'", vala_command) + self.assertIn("'-Werror'", c_command) def test_static_compile_order(self): ''' @@ -361,6 +381,96 @@ class LinuxlikeTests(unittest.TestCase): Oargs = [arg for arg in cmd if arg.startswith('-O')] self.assertEqual(Oargs, [Oflag, '-O0']) + def test_uninstall(self): + exename = os.path.join(self.installdir, 'usr/bin/prog') + testdir = os.path.join(self.common_test_dir, '8 install') + self.init(testdir) + self.assertFalse(os.path.exists(exename)) + self.install() + self.assertTrue(os.path.exists(exename)) + self.uninstall() + self.assertFalse(os.path.exists(exename)) + + def test_custom_target_exe_data_deterministic(self): + testdir = os.path.join(self.common_test_dir, '117 custom target capture') + self.init(testdir) + meson_exe_dat1 = glob(os.path.join(self.privatedir, 'meson_exe*.dat')) + self.wipe() + self.init(testdir) + meson_exe_dat2 = glob(os.path.join(self.privatedir, 'meson_exe*.dat')) + self.assertListEqual(meson_exe_dat1, meson_exe_dat2) + + def test_testsetups(self): + if not shutil.which('valgrind'): + raise unittest.SkipTest('Valgrind not installed.') + testdir = os.path.join(self.unit_test_dir, '2 testsetups') + self.init(testdir) + self.build() + self.run_tests() + with open(os.path.join(self.logdir, 'testlog.txt')) as f: + basic_log = f.read() + self.assertRaises(subprocess.CalledProcessError, + self._run, self.mtest_command + ['--setup=valgrind']) + with open(os.path.join(self.logdir, 
'testlog-valgrind.txt')) as f: + vg_log = f.read() + self.assertFalse('TEST_ENV is set' in basic_log) + self.assertFalse('Memcheck' in basic_log) + self.assertTrue('TEST_ENV is set' in vg_log) + self.assertTrue('Memcheck' in vg_log) + + def _test_stds_impl(self, testdir, compiler, p): + lang_std = p + '_std' + # Check that all the listed -std=xxx options for this compiler work + # just fine when used + for v in compiler.get_options()[lang_std].choices: + std_opt = '{}={}'.format(lang_std, v) + self.init(testdir, ['-D' + std_opt]) + cmd = self.get_compdb()[0]['command'] + if v != 'none': + cmd_std = "'-std={}'".format(v) + self.assertIn(cmd_std, cmd) + try: + self.build() + except: + print('{} was {!r}'.format(lang_std, v)) + raise + self.wipe() + # Check that an invalid std option in CFLAGS/CPPFLAGS fails + # Needed because by default ICC ignores invalid options + cmd_std = '-std=FAIL' + env_flags = p.upper() + 'FLAGS' + os.environ[env_flags] = cmd_std + self.init(testdir) + cmd = self.get_compdb()[0]['command'] + qcmd_std = "'{}'".format(cmd_std) + self.assertIn(qcmd_std, cmd) + with self.assertRaises(subprocess.CalledProcessError, + msg='{} should have failed'.format(qcmd_std)): + self.build() + + def test_compiler_c_stds(self): + ''' + Test that C stds specified for this compiler can all be used. Can't be + an ordinary test because it requires passing options to meson. + ''' + testdir = os.path.join(self.common_test_dir, '1 trivial') + env = Environment(testdir, self.builddir, self.meson_command, + get_fake_options(), []) + cc = env.detect_c_compiler(False) + self._test_stds_impl(testdir, cc, 'c') + + def test_compiler_cpp_stds(self): + ''' + Test that C++ stds specified for this compiler can all be used. Can't + be an ordinary test because it requires passing options to meson. 
+ ''' + testdir = os.path.join(self.common_test_dir, '2 cpp') + env = Environment(testdir, self.builddir, self.meson_command, + get_fake_options(), []) + cpp = env.detect_cpp_compiler(False) + self._test_stds_impl(testdir, cpp, 'cpp') + + class RewriterTests(unittest.TestCase): def setUp(self): @@ -419,5 +529,6 @@ class RewriterTests(unittest.TestCase): self.assertEqual(top, self.read_contents('meson.build')) self.assertEqual(s2, self.read_contents('sub2/meson.build')) + if __name__ == '__main__': unittest.main() diff --git a/test cases/common/1 trivial/meson.build b/test cases/common/1 trivial/meson.build index 1f7b375..a93de75 100644 --- a/test cases/common/1 trivial/meson.build +++ b/test cases/common/1 trivial/meson.build @@ -6,6 +6,11 @@ project('trivial test', #this is a comment sources = 'trivial.c' +if meson.get_compiler('c').get_id() == 'intel' + # Error out if the -std=xxx option is incorrect + add_project_arguments('-diag-error', '10159', language : 'c') +endif + exe = executable('trivialprog', sources : sources) test('runtest', exe) # This is a comment diff --git a/test cases/common/129 object only target/obj_generator.py b/test cases/common/129 object only target/obj_generator.py index a1f7421..f0239b4 100755 --- a/test cases/common/129 object only target/obj_generator.py +++ b/test cases/common/129 object only target/obj_generator.py @@ -12,7 +12,7 @@ if __name__ == '__main__': ifile = sys.argv[2] ofile = sys.argv[3] if compiler.endswith('cl'): - cmd = [compiler, '/nologo', '/MDd', '/Fo'+ofile, '/c', ifile] + cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile] else: cmd = [compiler, '-c', ifile, '-o', ofile] sys.exit(subprocess.call(cmd)) diff --git a/test cases/common/132 dependency file generation/main .c b/test cases/common/132 dependency file generation/main .c new file mode 100644 index 0000000..0fb4389 --- /dev/null +++ b/test cases/common/132 dependency file generation/main .c @@ -0,0 +1,3 @@ +int main(int argc, char *argv[]) { + return 0; +} diff --git a/test cases/common/132 dependency file generation/meson.build b/test cases/common/132 dependency file generation/meson.build new file mode 100644 index 0000000..dcfdcd9 --- /dev/null +++ b/test cases/common/132 dependency file generation/meson.build @@ -0,0 +1,12 @@ +project('dep file gen', 'c') + +cc_id = meson.get_compiler('c').get_id() +if cc_id == 'intel' + # ICC does not escape spaces in paths in the dependency file, so Ninja + # (correctly) thinks that the rule has multiple outputs and errors out: + # 'depfile has multiple output paths' + error('MESON_SKIP_TEST: Skipping test with Intel compiler because it generates broken dependency files') +endif + +e = executable('main file', 'main .c') +test('test it', e) diff --git a/test cases/common/2 cpp/meson.build b/test cases/common/2 cpp/meson.build index 9c6f71a..6398382 100644 --- a/test cases/common/2 cpp/meson.build +++ b/test cases/common/2 cpp/meson.build @@ -1,3 +1,9 @@ project('c++ test', 'cpp') + +if meson.get_compiler('cpp').get_id() == 'intel' + # Error out if the -std=xxx option is incorrect + add_project_arguments('-diag-error', '10159', language : 'cpp') +endif + exe = executable('trivialprog', 'trivial.cc', extra_files : 'something.txt') test('runtest', exe) diff --git a/test cases/common/25 object extraction/meson.build b/test cases/common/25 object extraction/meson.build index c76b0db..d99ec84 100644 --- a/test cases/common/25 object extraction/meson.build +++ b/test cases/common/25 object extraction/meson.build @@ -9,8 +9,8 @@ else obj1 = 
lib1.extract_objects('src/lib.c')
   obj2 = lib2.extract_objects(['lib.c'])
-  e1 = executable('main 1', 'main.c', objects : obj1)
-  e2 = executable('main 2', 'main.c', objects : obj2)
+  e1 = executable('main1', 'main.c', objects : obj1)
+  e2 = executable('main2', 'main.c', objects : obj2)
   test('extraction test 1', e1)
   test('extraction test 2', e2)
diff --git a/test cases/common/59 object generator/obj_generator.py b/test cases/common/59 object generator/obj_generator.py
index d028156..a33872a 100755
--- a/test cases/common/59 object generator/obj_generator.py
+++ b/test cases/common/59 object generator/obj_generator.py
@@ -12,7 +12,7 @@ if __name__ == '__main__':
     ifile = sys.argv[2]
     ofile = sys.argv[3]
     if compiler.endswith('cl'):
-        cmd = [compiler, '/nologo', '/MDd', '/Fo'+ofile, '/c', ifile]
+        cmd = [compiler, '/nologo', '/MDd', '/Fo' + ofile, '/c', ifile]
     else:
         cmd = [compiler, '-c', ifile, '-o', ofile]
     sys.exit(subprocess.call(cmd))
diff --git a/test cases/common/94 default options/meson.build b/test cases/common/94 default options/meson.build
index a718bcc..a9176e0 100644
--- a/test cases/common/94 default options/meson.build
+++ b/test cases/common/94 default options/meson.build
@@ -1,20 +1,20 @@
 project('default options', 'cpp', 'c', default_options : [
   'buildtype=debugoptimized',
-  'cpp_std=c++03',
+  'cpp_std=c++11',
   'cpp_eh=none',
   'warning_level=3',
   ])

-cpp = meson.get_compiler('cpp')
+cpp_id = meson.get_compiler('cpp').get_id()

 assert(get_option('buildtype') == 'debugoptimized', 'Build type default value wrong.')

-if cpp.get_id() == 'msvc'
+if cpp_id == 'msvc'
   cpp_eh = get_option('cpp_eh')
   assert(cpp_eh == 'none', 'MSVC eh value is "' + cpp_eh + '" instead of "none"')
 else
   cpp_std = get_option('cpp_std')
-  assert(cpp_std == 'c++03', 'C++ std value is "' + cpp_std + '" instead of c++03.')
+  assert(cpp_std == 'c++11', 'C++ std value is "' + cpp_std + '" instead of c++11.')
 endif

 w_level = get_option('warning_level')
diff --git a/test cases/d/3 shared library/meson.build b/test cases/d/3 shared library/meson.build
index 5dae66b..1f45109 100644
--- a/test cases/d/3 shared library/meson.build
+++ b/test cases/d/3 shared library/meson.build
@@ -1,12 +1,9 @@
 project('D Shared Library', 'd')

-if meson.get_compiler('d').get_id() != 'gcc'
-
-  ldyn = shared_library('stuff', 'libstuff.d', install : true)
-  ed = executable('app_d', 'app.d', link_with : ldyn, install : true)
-  test('linktest_dyn', ed)
-
-else
-  message('GDC can not build shared libraries. Test skipped.')
-  install_data('no-installed-files', install_dir : '')
+if meson.get_compiler('d').get_id() == 'gcc'
+  error('MESON_SKIP_TEST: GDC can not build shared libraries')
 endif
+
+ldyn = shared_library('stuff', 'libstuff.d', install : true)
+ed = executable('app_d', 'app.d', link_with : ldyn, install : true)
+test('linktest_dyn', ed)
diff --git a/test cases/d/3 shared library/no-installed-files b/test cases/d/3 shared library/no-installed-files
deleted file mode 100644
index e69de29..0000000
--- a/test cases/d/3 shared library/no-installed-files
+++ /dev/null
diff --git a/test cases/d/4 library versions/meson.build b/test cases/d/4 library versions/meson.build
index 26cc38a..f680651 100644
--- a/test cases/d/4 library versions/meson.build
+++ b/test cases/d/4 library versions/meson.build
@@ -1,25 +1,22 @@
 project('D library versions', 'd')

 if meson.get_compiler('d').get_id() == 'gcc'
-  message('GDC can not build shared libraries. Test skipped.')
-  install_data('no-installed-files', install_dir : '')
-else
-
-  shared_library('some', 'lib.d',
-    version : '1.2.3',
-    soversion : '0',
-    install : true)
+  error('MESON_SKIP_TEST: GDC can not build shared libraries')
+endif

-  shared_library('noversion', 'lib.d',
-    install : true)
+shared_library('some', 'lib.d',
+  version : '1.2.3',
+  soversion : '0',
+  install : true)

-  shared_library('onlyversion', 'lib.d',
-    version : '1.4.5',
-    install : true)
+shared_library('noversion', 'lib.d',
+  install : true)

-  shared_library('onlysoversion', 'lib.d',
-    # Also test that int soversion is acceptable
-    soversion : 5,
-    install : true)
+shared_library('onlyversion', 'lib.d',
+  version : '1.4.5',
+  install : true)

-endif
+shared_library('onlysoversion', 'lib.d',
+  # Also test that int soversion is acceptable
+  soversion : 5,
+  install : true)
diff --git a/test cases/d/4 library versions/no-installed-files b/test cases/d/4 library versions/no-installed-files
deleted file mode 100644
index e69de29..0000000
--- a/test cases/d/4 library versions/no-installed-files
+++ /dev/null
diff --git a/test cases/failing build/1 vala c werror/unused-var.c b/test cases/failing build/1 vala c werror/unused-var.c
index e11d64c..6b85078 100644
--- a/test cases/failing build/1 vala c werror/unused-var.c
+++ b/test cases/failing build/1 vala c werror/unused-var.c
@@ -1,3 +1,5 @@
+#warning "something"
+
 int somelib(void)
 {
diff --git a/test cases/fortran/1 basic/meson.build b/test cases/fortran/1 basic/meson.build
index 9c40951..833a177 100644
--- a/test cases/fortran/1 basic/meson.build
+++ b/test cases/fortran/1 basic/meson.build
@@ -2,6 +2,6 @@ project('simple fortran', 'fortran')

 add_global_arguments('-fbounds-check', language : 'fortran')

-e = executable('simple', 'simple.f95',
+e = executable('simple', 'simple.f90',
   fortran_args : '-ffree-form')
 test('Simple Fortran', e)
diff --git a/test cases/fortran/1 basic/simple.f95 b/test cases/fortran/1 basic/simple.f90
index e0fb1d8..e0fb1d8 100644
--- a/test cases/fortran/1 basic/simple.f95
+++ b/test cases/fortran/1 basic/simple.f90
diff --git a/test cases/fortran/2 modules/meson.build b/test cases/fortran/2 modules/meson.build
index 0087c26..030f255 100644
--- a/test cases/fortran/2 modules/meson.build
+++ b/test cases/fortran/2 modules/meson.build
@@ -1,4 +1,4 @@
 project('modules', 'fortran')

-e = executable('modprog', 'stuff.f95', 'prog.f95')
+e = executable('modprog', 'stuff.f90', 'prog.f90')
 test('moduletest', e)
diff --git a/test cases/fortran/2 modules/prog.f95 b/test cases/fortran/2 modules/prog.f90
index c3998cc..c3998cc 100644
--- a/test cases/fortran/2 modules/prog.f95
+++ b/test cases/fortran/2 modules/prog.f90
diff --git a/test cases/fortran/2 modules/stuff.f95 b/test cases/fortran/2 modules/stuff.f90
index 4a6399b..4a6399b 100644
--- a/test cases/fortran/2 modules/stuff.f95
+++ b/test cases/fortran/2 modules/stuff.f90
diff --git a/test cases/fortran/5 static/main.f95 b/test cases/fortran/5 static/main.f90
index dc6454c..dc6454c 100644
--- a/test cases/fortran/5 static/main.f95
+++ b/test cases/fortran/5 static/main.f90
diff --git a/test cases/fortran/5 static/meson.build b/test cases/fortran/5 static/meson.build
index d6f922b..bd74a29 100644
--- a/test cases/fortran/5 static/meson.build
+++ b/test cases/fortran/5 static/meson.build
@@ -1,5 +1,5 @@
 project('try-static-library', 'fortran')

-static_hello = static_library('static_hello', 'static_hello.f95')
+static_hello = static_library('static_hello', 'static_hello.f90')

-executable('test_exe', 'main.f95', link_with : static_hello)
+executable('test_exe', 'main.f90', link_with : static_hello)
diff --git a/test cases/fortran/5 static/static_hello.f95 b/test cases/fortran/5 static/static_hello.f90
index 63415b0..63415b0 100644
--- a/test cases/fortran/5 static/static_hello.f95
+++ b/test cases/fortran/5 static/static_hello.f90
diff --git a/test cases/fortran/6 dynamic/dynamic.f95 b/test cases/fortran/6 dynamic/dynamic.f90
index e78a406..e78a406 100644
--- a/test cases/fortran/6 dynamic/dynamic.f95
+++ b/test cases/fortran/6 dynamic/dynamic.f90
diff --git a/test cases/fortran/6 dynamic/main.f95 b/test cases/fortran/6 dynamic/main.f90
index cb3a53f..cb3a53f 100644
--- a/test cases/fortran/6 dynamic/main.f95
+++ b/test cases/fortran/6 dynamic/main.f90
diff --git a/test cases/fortran/6 dynamic/meson.build b/test cases/fortran/6 dynamic/meson.build
index 53edaf6..c791dac 100644
--- a/test cases/fortran/6 dynamic/meson.build
+++ b/test cases/fortran/6 dynamic/meson.build
@@ -1,4 +1,4 @@
 project('dynamic_fortran', 'fortran')

-dynamic = shared_library('dynamic', 'dynamic.f95')
-executable('test_exe', 'main.f95', link_with : dynamic)
+dynamic = shared_library('dynamic', 'dynamic.f90')
+executable('test_exe', 'main.f90', link_with : dynamic)
diff --git a/test cases/frameworks/11 gir subproject/gir/meson.build b/test cases/frameworks/11 gir subproject/gir/meson.build
index e92c641..48e0a47 100644
--- a/test cases/frameworks/11 gir subproject/gir/meson.build
+++ b/test cases/frameworks/11 gir subproject/gir/meson.build
@@ -28,8 +28,10 @@ gnome.generate_gir(

 message('TEST: ' + girsubproject.outdir())

+envdata = environment()
+envdata.append('GI_TYPELIB_PATH', girsubproject.outdir(), 'subprojects/mesongir', separator : ':')
+envdata.append('LD_LIBRARY_PATH', girsubproject.outdir(), 'subprojects/mesongir')
+
 test('gobject introspection/subproject/c', girexe)
 test('gobject introspection/subproject/py', find_program('prog.py'),
-  env : ['GI_TYPELIB_PATH=' + girsubproject.outdir() + ':subprojects/mesongir',
-    'LD_LIBRARY_PATH=' + girsubproject.outdir() + ':subprojects/mesongir',
-  ])
+  env : envdata)
diff --git a/test cases/frameworks/7 gnome/gir/meson.build b/test cases/frameworks/7 gnome/gir/meson.build
index beddc81..f3a4534 100644
--- a/test cases/frameworks/7 gnome/gir/meson.build
+++ b/test cases/frameworks/7 gnome/gir/meson.build
@@ -33,7 +33,8 @@ gnome.generate_gir(
 test('gobject introspection/c', girexe)

 gir_paths = ':'.join([girlib.outdir(), dep1lib.outdir(), dep2lib.outdir()])
+envdata = environment()
+envdata.append('GI_TYPELIB_PATH', gir_paths, separator : ':')
+envdata.append('LD_LIBRARY_PATH', gir_paths)
 test('gobject introspection/py', find_program('prog.py'),
-  env : ['GI_TYPELIB_PATH=' + gir_paths,
-    'LD_LIBRARY_PATH=' + gir_paths,
-  ])
+  env : envdata)
diff --git a/test cases/frameworks/7 gnome/meson.build b/test cases/frameworks/7 gnome/meson.build
index a771e71..c75c049 100644
--- a/test cases/frameworks/7 gnome/meson.build
+++ b/test cases/frameworks/7 gnome/meson.build
@@ -1,5 +1,14 @@
 project('gobject-introspection', 'c')

+cc = meson.get_compiler('c')
+
+add_global_arguments('-DMESON_TEST', language : 'c')
+if cc.get_id() == 'intel'
+  # Ignore invalid GCC pragma warnings from glib
+  # https://bugzilla.gnome.org/show_bug.cgi?id=776562
+  add_global_arguments('-wd2282', language : 'c')
+endif
+
 gnome = import('gnome')
 gio = dependency('gio-2.0')
 giounix = dependency('gio-unix-2.0')
@@ -7,7 +16,6 @@ glib = dependency('glib-2.0')
 gobj = dependency('gobject-2.0')
 gir = dependency('gobject-introspection-1.0')
 gmod = dependency('gmodule-2.0')
-add_global_arguments('-DMESON_TEST', language : 'c')

 subdir('resources-data')
 subdir('resources')
diff --git a/test cases/unit/2 testsetups/buggy.c b/test cases/unit/2 testsetups/buggy.c
new file mode 100644
index 0000000..5d20a24
--- /dev/null
+++ b/test cases/unit/2 testsetups/buggy.c
@@ -0,0 +1,14 @@
+#include<stdio.h>
+#include<stdlib.h>
+
+#include<impl.h>
+
+int main(int argc, char **argv) {
+    char *ten = malloc(10);
+    do_nasty(ten);
+    free(ten);
+    if(getenv("TEST_ENV")) {
+        printf("TEST_ENV is set.\n");
+    }
+    return 0;
+}
diff --git a/test cases/unit/2 testsetups/impl.c b/test cases/unit/2 testsetups/impl.c
new file mode 100644
index 0000000..d87f3de
--- /dev/null
+++ b/test cases/unit/2 testsetups/impl.c
@@ -0,0 +1,5 @@
+/* Write past the end. */
+
+void do_nasty(char *ptr) {
+    ptr[10] = 'n';
+}
diff --git a/test cases/unit/2 testsetups/impl.h b/test cases/unit/2 testsetups/impl.h
new file mode 100644
index 0000000..7a08cb3
--- /dev/null
+++ b/test cases/unit/2 testsetups/impl.h
@@ -0,0 +1,3 @@
+#pragma once
+
+void do_nasty(char *ptr);
diff --git a/test cases/unit/2 testsetups/meson.build b/test cases/unit/2 testsetups/meson.build
new file mode 100644
index 0000000..a65548e
--- /dev/null
+++ b/test cases/unit/2 testsetups/meson.build
@@ -0,0 +1,16 @@
+project('testsetups', 'c')
+
+vg = find_program('valgrind')
+
+# This is only set when running under Valgrind test setup.
+env = environment()
+env.set('TEST_ENV', '1')
+
+add_test_setup('valgrind',
+  exe_wrapper : [vg, '--error-exitcode=1', '--leak-check=full'],
+  timeout_multiplier : 100,
+  env : env)
+
+buggy = executable('buggy', 'buggy.c', 'impl.c')
+test('Test buggy', buggy)
+
diff --git a/test cases/windows/5 resources/inc/resource.h b/test cases/windows/5 resources/inc/resource.h
new file mode 100644
index 0000000..dbdd509
--- /dev/null
+++ b/test cases/windows/5 resources/inc/resource.h
@@ -0,0 +1 @@
+#define ICON_ID 1
diff --git a/test cases/windows/5 resources/meson.build b/test cases/windows/5 resources/meson.build
index fe75719..3c13634 100644
--- a/test cases/windows/5 resources/meson.build
+++ b/test cases/windows/5 resources/meson.build
@@ -1,9 +1,12 @@
 project('winmain', 'c')

 win = import('windows')
+res = win.compile_resources('myres.rc',
+  include_directories : include_directories('inc')
+)

 exe = executable('prog', 'prog.c',
-  win.compile_resources('myres.rc'),
+  res,
   gui_app : true)

 test('winmain', exe)
diff --git a/test cases/windows/5 resources/myres.rc b/test cases/windows/5 resources/myres.rc
index 12838ae..802bc7b 100644
--- a/test cases/windows/5 resources/myres.rc
+++ b/test cases/windows/5 resources/myres.rc
@@ -1,3 +1,4 @@
 #include<windows.h>
+#include"resource.h"

-1 ICON "sample.ico"
+ICON_ID ICON "sample.ico"
diff --git a/test cases/windows/7 mingw dll versioning/meson.build b/test cases/windows/7 mingw dll versioning/meson.build
index 23a3343..2f6035e 100644
--- a/test cases/windows/7 mingw dll versioning/meson.build
+++ b/test cases/windows/7 mingw dll versioning/meson.build
@@ -2,16 +2,16 @@ project('mingw dll versioning', 'c')

 cc = meson.get_compiler('c')

+if cc.get_id() == 'msvc'
+  error('MESON_SKIP_TEST: test is only for MinGW')
+endif
+
 # Test that MinGW/GCC creates correctly-named dll files and dll.a files,
 # and also installs them in the right place
-if cc.get_id() != 'msvc'
-  shared_library('some', 'lib.c',
-    version : '1.2.3',
-    soversion : '0',
-    install : true)
+shared_library('some', 'lib.c',
+  version : '1.2.3',
+  soversion : '0',
+  install : true)

-  shared_library('noversion', 'lib.c',
-    install : true)
-else
-  install_data('no-installed-files', install_dir : '')
-endif
+shared_library('noversion', 'lib.c',
+  install : true)
diff --git a/test cases/windows/7 mingw dll versioning/no-installed-files b/test cases/windows/7 mingw dll versioning/no-installed-files
deleted file mode 100644
index e69de29..0000000
--- a/test cases/windows/7 mingw dll versioning/no-installed-files
+++ /dev/null
diff --git a/test cases/windows/8 msvc dll versioning/meson.build b/test cases/windows/8 msvc dll versioning/meson.build
index 0c36173..d6aecb6 100644
--- a/test cases/windows/8 msvc dll versioning/meson.build
+++ b/test cases/windows/8 msvc dll versioning/meson.build
@@ -2,15 +2,15 @@ project('msvc dll versioning', 'c')

 cc = meson.get_compiler('c')

+if cc.get_id() != 'msvc'
+  error('MESON_SKIP_TEST: test is only for msvc')
+endif
+
 # Test that MSVC creates foo-0.dll and bar.dll
-if cc.get_id() == 'msvc'
-  shared_library('some', 'lib.c',
-    version : '1.2.3',
-    soversion : '0',
-    install : true)
+shared_library('some', 'lib.c',
+  version : '1.2.3',
+  soversion : '0',
+  install : true)

-  shared_library('noversion', 'lib.c',
-    install : true)
-else
-  install_data('no-installed-files', install_dir : '')
-endif
+shared_library('noversion', 'lib.c',
+  install : true)
diff --git a/test cases/windows/8 msvc dll versioning/no-installed-files b/test cases/windows/8 msvc dll versioning/no-installed-files
deleted file mode 100644
index e69de29..0000000
--- a/test cases/windows/8 msvc dll versioning/no-installed-files
+++ /dev/null
diff --git a/tools/ac_converter.py b/tools/ac_converter.py
index 4f284af..0531e98 100755
--- a/tools/ac_converter.py
+++ b/tools/ac_converter.py
@@ -28,36 +28,36 @@ import sys

 # Add stuff here as it is encountered.
 function_data = \
-    {'HAVE_FEENABLEEXCEPT' : ('feenableexcept', 'fenv.h'),
-     'HAVE_FECLEAREXCEPT' : ('feclearexcept', 'fenv.h'),
-     'HAVE_FEDISABLEEXCEPT' : ('fedisableexcept', 'fenv.h'),
-     'HAVE_MMAP' : ('mmap', 'sys/mman.h'),
-     'HAVE_GETPAGESIZE' : ('getpagesize', 'unistd.h'),
-     'HAVE_GETISAX' : ('getisax', 'sys/auxv.h'),
-     'HAVE_GETTIMEOFDAY' : ('gettimeofday', 'sys/time.h'),
-     'HAVE_MPROTECT' : ('mprotect', 'sys/mman.h'),
-     'HAVE_POSIX_MEMALIGN' : ('posix_memalign', 'stdlib.h'),
-     'HAVE_SIGACTION' : ('sigaction', 'signal.h'),
-     'HAVE_ALARM' : ('alarm', 'unistd.h'),
-     'HAVE_CLOCK_GETTIME' : ('clock_gettime', 'time.h'),
-     'HAVE_CTIME_R' : ('ctime_r', 'time.h'),
-     'HAVE_DRAND48' : ('drand48', 'stdlib.h'),
-     'HAVE_FLOCKFILE' : ('flockfile', 'stdio.h'),
-     'HAVE_FORK' : ('fork', 'unistd.h'),
-     'HAVE_FUNLOCKFILE' : ('funlockfile', 'stdio.h'),
-     'HAVE_GETLINE' : ('getline', 'stdio.h'),
-     'HAVE_LINK' : ('link', 'unistd.h'),
-     'HAVE_RAISE' : ('raise', 'signal.h'),
-     'HAVE_STRNDUP' : ('strndup', 'string.h'),
-     'HAVE_SCHED_GETAFFINITY' : ('sched_getaffinity', 'sched.h'),
-     'HAVE_WAITPID' : ('waitpid', 'sys/wait.h'),
-     'HAVE_XRENDERCREATECONICALGRADIENT' : ('XRenderCreateConicalGradient', 'xcb/render.h'),
-     'HAVE_XRENDERCREATELINEARGRADIENT' : ('XRenderCreateLinearGradient', 'xcb/render.h'),
-     'HAVE_XRENDERCREATERADIALGRADIENT' : ('XRenderCreateRadialGradient', 'xcb/render.h'),
-     'HAVE_XRENDERCREATESOLIDFILL' : ('XRenderCreateSolidFill', 'xcb/render.h'),
+    {'HAVE_FEENABLEEXCEPT': ('feenableexcept', 'fenv.h'),
+     'HAVE_FECLEAREXCEPT': ('feclearexcept', 'fenv.h'),
+     'HAVE_FEDISABLEEXCEPT': ('fedisableexcept', 'fenv.h'),
+     'HAVE_MMAP': ('mmap', 'sys/mman.h'),
+     'HAVE_GETPAGESIZE': ('getpagesize', 'unistd.h'),
+     'HAVE_GETISAX': ('getisax', 'sys/auxv.h'),
+     'HAVE_GETTIMEOFDAY': ('gettimeofday', 'sys/time.h'),
+     'HAVE_MPROTECT': ('mprotect', 'sys/mman.h'),
+     'HAVE_POSIX_MEMALIGN': ('posix_memalign', 'stdlib.h'),
+     'HAVE_SIGACTION': ('sigaction', 'signal.h'),
+     'HAVE_ALARM': ('alarm', 'unistd.h'),
+     'HAVE_CLOCK_GETTIME': ('clock_gettime', 'time.h'),
+     'HAVE_CTIME_R': ('ctime_r', 'time.h'),
+     'HAVE_DRAND48': ('drand48', 'stdlib.h'),
+     'HAVE_FLOCKFILE': ('flockfile', 'stdio.h'),
+     'HAVE_FORK': ('fork', 'unistd.h'),
+     'HAVE_FUNLOCKFILE': ('funlockfile', 'stdio.h'),
+     'HAVE_GETLINE': ('getline', 'stdio.h'),
+     'HAVE_LINK': ('link', 'unistd.h'),
+     'HAVE_RAISE': ('raise', 'signal.h'),
+     'HAVE_STRNDUP': ('strndup', 'string.h'),
+     'HAVE_SCHED_GETAFFINITY': ('sched_getaffinity', 'sched.h'),
+     'HAVE_WAITPID': ('waitpid', 'sys/wait.h'),
+     'HAVE_XRENDERCREATECONICALGRADIENT': ('XRenderCreateConicalGradient', 'xcb/render.h'),
+     'HAVE_XRENDERCREATELINEARGRADIENT': ('XRenderCreateLinearGradient', 'xcb/render.h'),
+     'HAVE_XRENDERCREATERADIALGRADIENT': ('XRenderCreateRadialGradient', 'xcb/render.h'),
+     'HAVE_XRENDERCREATESOLIDFILL': ('XRenderCreateSolidFill', 'xcb/render.h'),
      'HAVE_DCGETTEXT': ('dcgettext', 'libintl.h'),
      'HAVE_ENDMNTENT': ('endmntent', 'mntent.h'),
-     'HAVE_ENDSERVENT' : ('endservent', 'netdb.h'),
+     'HAVE_ENDSERVENT': ('endservent', 'netdb.h'),
      'HAVE_EVENTFD': ('eventfd', 'sys/eventfd.h'),
      'HAVE_FALLOCATE': ('fallocate', 'fcntl.h'),
      'HAVE_FCHMOD': ('fchmod', 'sys/stat.h'),
@@ -68,9 +68,9 @@ function_data = \
     'HAVE_GETFSSTAT': ('getfsstat', 'sys/mount.h'),
     'HAVE_GETMNTENT_R': ('getmntent_r', 'mntent.h'),
     'HAVE_GETPROTOBYNAME_R': ('getprotobyname_r', 'netdb.h'),
-    'HAVE_GETRESUID' : ('getresuid', 'unistd.h'),
-    'HAVE_GETVFSSTAT' : ('getvfsstat', 'sys/statvfs.h'),
-    'HAVE_GMTIME_R' : ('gmtime_r', 'time.h'),
+    'HAVE_GETRESUID': ('getresuid', 'unistd.h'),
+    'HAVE_GETVFSSTAT': ('getvfsstat', 'sys/statvfs.h'),
+    'HAVE_GMTIME_R': ('gmtime_r', 'time.h'),
     'HAVE_HASMNTOPT': ('hasmntopt', 'mntent.h'),
     'HAVE_IF_INDEXTONAME': ('if_indextoname', 'net/if.h'),
     'HAVE_IF_NAMETOINDEX': ('if_nametoindex', 'net/if.h'),
@@ -94,7 +94,7 @@ function_data = \
     'HAVE_READLINK': ('readlink', 'unistd.h'),
     'HAVE_RES_INIT': ('res_init', 'resolv.h'),
     'HAVE_SENDMMSG': ('sendmmsg', 'sys/socket.h'),
-    'HAVE_SOCKET' : ('socket', 'sys/socket.h'),
+    'HAVE_SOCKET': ('socket', 'sys/socket.h'),
     'HAVE_GETENV': ('getenv', 'stdlib.h'),
     'HAVE_SETENV': ('setenv', 'stdlib.h'),
     'HAVE_PUTENV': ('putenv', 'stdlib.h'),
@@ -217,7 +217,7 @@ function_data = \
     'HAVE_SETJMP': ('setjmp', 'setjmp.h'),
     'HAVE_PTHREAD_SETNAME_NP': ('pthread_setname_np', 'pthread.h'),
     'HAVE_PTHREAD_SET_NAME_NP': ('pthread_set_name_np', 'pthread.h'),
-  }
+     }

 headers = []
 functions = []
diff --git a/tools/cmake2meson.py b/tools/cmake2meson.py
index db405f7..7d6d02a 100755
--- a/tools/cmake2meson.py
+++ b/tools/cmake2meson.py
@@ -46,14 +46,14 @@ class Lexer:
     def lex(self, code):
         lineno = 1
         line_start = 0
-        loc = 0;
+        loc = 0
         col = 0
         while(loc < len(code)):
             matched = False
             for (tid, reg) in self.token_specification:
                 mo = reg.match(code, loc)
                 if mo:
-                    col = mo.start()-line_start
+                    col = mo.start() - line_start
                     matched = True
                     loc = mo.end()
                     match_text = mo.group()
@@ -70,7 +70,7 @@ class Lexer:
                     elif tid == 'id':
                         yield(Token('id', match_text))
                     elif tid == 'eol':
-                        #yield('eol')
+                        # yield('eol')
                         lineno += 1
                         col = 1
                         line_start = mo.end()
@@ -133,9 +133,9 @@ class Parser():
             yield(self.statement())

 class Converter:
-    ignored_funcs = {'cmake_minimum_required' : True,
-                     'enable_testing' : True,
-                     'include' : True}
+    ignored_funcs = {'cmake_minimum_required': True,
+                     'enable_testing': True,
+                     'include': True}
     def __init__(self, cmake_root):
         self.cmake_root = cmake_root
         self.indent_unit = '  '
@@ -240,7 +240,7 @@ class Converter:
         else:
             line = '''# %s(%s)''' % (t.name, self.convert_args(t.args))
         self.indent_level += preincrement
-        indent = self.indent_level*self.indent_unit
+        indent = self.indent_level * self.indent_unit
         outfile.write(indent)
         outfile.write(line)
         if not(line.endswith('\n')):