author     Jussi Pakkanen <jpakkane@gmail.com>  2016-09-01 23:12:06 +0300
committer  Jussi Pakkanen <jpakkane@gmail.com>  2016-09-01 23:12:06 +0300
commit     cdf0c4f1a945f1262ae604047fd240b25cf44050 (patch)
tree       efb861fa017c1f8663b75570fe61fa644c2bd3d0
parent     389259c229b30d38ec9de503dff965973b24ee26 (diff)
parent     859c5e28df90851838aacc4b9ad49d3630e4992a (diff)
Merge branch 'QuLogic-context-managers'
-rwxr-xr-x  ghwt.py  4
-rw-r--r--  mesonbuild/backend/backends.py  59
-rw-r--r--  mesonbuild/backend/ninjabackend.py  141
-rw-r--r--  mesonbuild/backend/vs2010backend.py  120
-rw-r--r--  mesonbuild/backend/xcodebackend.py  36
-rw-r--r--  mesonbuild/compilers.py  73
-rw-r--r--  mesonbuild/coredata.py  6
-rw-r--r--  mesonbuild/dependencies.py  25
-rw-r--r--  mesonbuild/interpreter.py  6
-rw-r--r--  mesonbuild/mconf.py  9
-rw-r--r--  mesonbuild/mesonlib.py  16
-rw-r--r--  mesonbuild/mesonmain.py  3
-rw-r--r--  mesonbuild/mintro.py  12
-rw-r--r--  mesonbuild/modules/pkgconfig.py  62
-rw-r--r--  mesonbuild/modules/rpm.py  147
-rw-r--r--  mesonbuild/optinterpreter.py  3
-rw-r--r--  mesonbuild/scripts/depfixer.py  34
-rw-r--r--  mesonbuild/scripts/meson_benchmark.py  43
-rw-r--r--  mesonbuild/scripts/meson_exe.py  3
-rw-r--r--  mesonbuild/scripts/meson_install.py  11
-rw-r--r--  mesonbuild/scripts/meson_test.py  45
-rw-r--r--  mesonbuild/scripts/regen_checker.py  6
-rw-r--r--  mesonbuild/scripts/symbolextractor.py  9
-rw-r--r--  mesonbuild/scripts/vcstagger.py  13
-rw-r--r--  mesonbuild/wrap/wrap.py  80
-rwxr-xr-x  mesonbuild/wrap/wraptool.py  6
-rwxr-xr-x  run_tests.py  12
-rwxr-xr-x  test cases/common/103 manygen/subdir/manygen.py  15
-rw-r--r--  test cases/common/107 postconf/postconf.py  9
-rw-r--r--  test cases/common/108 postconf with args/postconf.py  9
-rwxr-xr-x  test cases/common/113 generatorcustom/catter.py  3
-rwxr-xr-x  test cases/common/113 generatorcustom/gen.py  6
-rwxr-xr-x  test cases/common/117 custom target capture/my_compiler.py  3
-rwxr-xr-x  test cases/common/16 configure file/generator.py  8
-rwxr-xr-x  test cases/common/48 test args/tester.py  5
-rwxr-xr-x  test cases/common/56 custom target/depfile/dep.py  6
-rwxr-xr-x  test cases/common/56 custom target/my_compiler.py  7
-rwxr-xr-x  test cases/common/57 custom target chain/my_compiler.py  7
-rwxr-xr-x  test cases/common/57 custom target chain/my_compiler2.py  7
-rwxr-xr-x  test cases/common/57 custom target chain/usetarget/subcomp.py  4
-rw-r--r--  test cases/common/58 run target/converter.py  3
-rwxr-xr-x  test cases/common/58 run target/fakeburner.py  3
-rwxr-xr-x  test cases/common/61 custom target source output/generator.py  6
-rw-r--r--  test cases/common/64 custom header generator/makeheader.py  6
-rwxr-xr-x  test cases/common/65 multiple generators/mygen.py  9
-rwxr-xr-x  test cases/common/72 build always/version_gen.py  9
-rw-r--r--  test cases/common/76 configure file in custom target/src/mycompiler.py  9
-rwxr-xr-x  test cases/common/78 ctarget dependency/gen1.py  6
-rwxr-xr-x  test cases/common/78 ctarget dependency/gen2.py  3
-rwxr-xr-x  test cases/common/93 private include/stlib/compiler.py  6
-rwxr-xr-x  test cases/common/98 gen extra/srcgen.py  6
-rwxr-xr-x  tools/ac_converter.py  107
-rwxr-xr-x  tools/cmake2meson.py  55
53 files changed, 736 insertions, 565 deletions
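
The change repeated throughout this merge is the same: file handles that were opened and closed by hand are now managed with the with statement, so they are closed even when an exception is raised between open() and close(). A minimal sketch of the before/after shape (illustrative only; the filename and function names are not from this commit):

    import json

    # Before: if write() raises, the handle is never closed.
    def save_manifest_old(mfobj, path='manifest.json'):  # hypothetical path
        f = open(path, 'w')
        f.write(json.dumps(mfobj))
        f.close()

    # After: the with block closes the file on success and on error.
    def save_manifest_new(mfobj, path='manifest.json'):
        with open(path, 'w') as f:
            f.write(json.dumps(mfobj))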
diff --git a/ghwt.py b/ghwt.py
index 493b1e2..bb0be70 100755
--- a/ghwt.py
+++ b/ghwt.py
@@ -52,8 +52,8 @@ def unpack(sproj, branch, outdir):
return 1
spdir = os.path.split(outdir)[0]
ofilename = os.path.join(spdir, config['wrap-file']['source_filename'])
- ofile = open(ofilename, 'wb')
- ofile.write(us)
+ with open(ofilename, 'wb') as ofile:
+ ofile.write(us)
if 'lead_directory_missing' in config['wrap-file']:
os.mkdir(outdir)
shutil.unpack_archive(ofilename, outdir)
diff --git a/mesonbuild/backend/backends.py b/mesonbuild/backend/backends.py
index 82b387d..54be8ec 100644
--- a/mesonbuild/backend/backends.py
+++ b/mesonbuild/backend/backends.py
@@ -139,24 +139,34 @@ class Backend():
langlist = {}
abs_files = []
result = []
- for src in unity_src:
- comp = self.get_compiler_for_source(src, target.is_cross)
- language = comp.get_language()
- suffix = '.' + comp.get_default_suffix()
- if language not in langlist:
- outfilename = os.path.join(self.get_target_private_dir_abs(target), target.name + '-unity' + suffix)
- outfileabs = os.path.join(self.environment.get_build_dir(), outfilename)
- outfileabs_tmp = outfileabs + '.tmp'
- abs_files.append(outfileabs)
- outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp)
- if not os.path.exists(outfileabs_tmp_dir):
- os.makedirs(outfileabs_tmp_dir)
- outfile = open(outfileabs_tmp, 'w')
- langlist[language] = outfile
- result.append(outfilename)
- ofile = langlist[language]
- ofile.write('#include<%s>\n' % src)
- [x.close() for x in langlist.values()]
+
+ def init_language_file(language, suffix):
+ outfilename = os.path.join(self.get_target_private_dir_abs(target),
+ target.name + '-unity' + suffix)
+ outfileabs = os.path.join(self.environment.get_build_dir(),
+ outfilename)
+ outfileabs_tmp = outfileabs + '.tmp'
+ abs_files.append(outfileabs)
+ outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp)
+ if not os.path.exists(outfileabs_tmp_dir):
+ os.makedirs(outfileabs_tmp_dir)
+ result.append(outfilename)
+ return open(outfileabs_tmp, 'w')
+
+ try:
+ for src in unity_src:
+ comp = self.get_compiler_for_source(src, target.is_cross)
+ language = comp.get_language()
+ try:
+ ofile = langlist[language]
+ except KeyError:
+ suffix = '.' + comp.get_default_suffix()
+ ofile = langlist[language] = init_language_file(language,
+ suffix)
+ ofile.write('#include<%s>\n' % src)
+ finally:
+ for x in langlist.values():
+ x.close()
[mesonlib.replace_if_different(x, x + '.tmp') for x in abs_files]
return result
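
The unity-file hunk above also swaps the old list comprehension of close() calls for a try/finally block: the per-language output files collected in langlist are now closed even if writing one of the #include lines fails. The same cleanup shape in isolation (a simplified sketch; the names are made up for illustration):

    def write_unity_sources(sources):
        # sources: iterable of (language, include_line) pairs
        open_files = {}
        try:
            for language, line in sources:
                if language not in open_files:
                    # hypothetical per-language temp file name
                    open_files[language] = open(language + '-unity.tmp', 'w')
                open_files[language].write(line + '\n')
        finally:
            # close every handle, whether or not the loop finished
            for f in open_files.values():
                f.close()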
@@ -215,13 +225,11 @@ class Backend():
def serialise_tests(self):
test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
- datafile = open(test_data, 'wb')
- self.write_test_file(datafile)
- datafile.close()
+ with open(test_data, 'wb') as datafile:
+ self.write_test_file(datafile)
benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
- datafile = open(benchmark_data, 'wb')
- self.write_benchmark_file(datafile)
- datafile.close()
+ with open(benchmark_data, 'wb') as datafile:
+ self.write_benchmark_file(datafile)
return (test_data, benchmark_data)
def has_source_suffix(self, target, suffix):
@@ -442,7 +450,8 @@ class Backend():
mfobj = {'type': 'dependency manifest',
'version': '1.0'}
mfobj['projects'] = self.build.dep_manifest
- open(ifilename, 'w').write(json.dumps(mfobj))
+ with open(ifilename, 'w') as f:
+ f.write(json.dumps(mfobj))
d.data.append([ifilename, ofilename])
def get_regen_filelist(self):
diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py
index 595fedd..e81c407 100644
--- a/mesonbuild/backend/ninjabackend.py
+++ b/mesonbuild/backend/ninjabackend.py
@@ -133,17 +133,18 @@ class NinjaBackend(backends.Backend):
self.all_outputs = {}
self.valgrind = environment.find_valgrind()
- def detect_vs_dep_prefix(self, outfile, tempfilename):
+ def detect_vs_dep_prefix(self, tempfilename):
'''VS writes its dependency in a locale dependent format.
Detect the search prefix to use.'''
# Of course there is another program called 'cl' on
# some platforms. Let's just require that on Windows
# cl points to msvc.
if not mesonlib.is_windows() or shutil.which('cl') is None:
- return outfile
- outfile.close()
- open(os.path.join(self.environment.get_scratch_dir(), 'incdetect.c'),
- 'w').write('''#include<stdio.h>
+ return open(tempfilename, 'a')
+ filename = os.path.join(self.environment.get_scratch_dir(),
+ 'incdetect.c')
+ with open(filename, 'w') as f:
+ f.write('''#include<stdio.h>
int dummy;
''')
@@ -157,9 +158,8 @@ int dummy;
for line in stdo.split(b'\r\n'):
if line.endswith(b'stdio.h'):
matchstr = b':'.join(line.split(b':')[0:2]) + b':'
- binfile = open(tempfilename, 'ab')
- binfile.write(b'msvc_deps_prefix = ' + matchstr + b'\r\n')
- binfile.close()
+ with open(tempfilename, 'ab') as binfile:
+ binfile.write(b'msvc_deps_prefix = ' + matchstr + b'\r\n')
return open(tempfilename, 'a')
raise MesonException('Could not determine vs dep dependency prefix string.')
@@ -167,30 +167,31 @@ int dummy;
self.interpreter = interp
outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename)
tempfilename = outfilename + '~'
- outfile = open(tempfilename, 'w')
- outfile.write('# This is the build file for project "%s"\n' % self.build.get_project())
- outfile.write('# It is autogenerated by the Meson build system.\n')
- outfile.write('# Do not edit by hand.\n\n')
- outfile.write('ninja_required_version = 1.5.1\n\n')
- outfile = self.detect_vs_dep_prefix(outfile, tempfilename)
- self.generate_rules(outfile)
- self.generate_phony(outfile)
- outfile.write('# Build rules for targets\n\n')
- [self.generate_target(t, outfile) for t in self.build.get_targets().values()]
- outfile.write('# Test rules\n\n')
- self.generate_tests(outfile)
- outfile.write('# Install rules\n\n')
- self.generate_install(outfile)
- if 'b_coverage' in self.environment.coredata.base_options and \
- self.environment.coredata.base_options['b_coverage'].value:
- outfile.write('# Coverage rules\n\n')
- self.generate_coverage_rules(outfile)
- outfile.write('# Suffix\n\n')
- self.generate_utils(outfile)
- self.generate_ending(outfile)
+ with open(tempfilename, 'w') as outfile:
+ outfile.write('# This is the build file for project "%s"\n' %
+ self.build.get_project())
+ outfile.write('# It is autogenerated by the Meson build system.\n')
+ outfile.write('# Do not edit by hand.\n\n')
+ outfile.write('ninja_required_version = 1.5.1\n\n')
+ with self.detect_vs_dep_prefix(tempfilename) as outfile:
+ self.generate_rules(outfile)
+ self.generate_phony(outfile)
+ outfile.write('# Build rules for targets\n\n')
+ for t in self.build.get_targets().values():
+ self.generate_target(t, outfile)
+ outfile.write('# Test rules\n\n')
+ self.generate_tests(outfile)
+ outfile.write('# Install rules\n\n')
+ self.generate_install(outfile)
+ if 'b_coverage' in self.environment.coredata.base_options and \
+ self.environment.coredata.base_options['b_coverage'].value:
+ outfile.write('# Coverage rules\n\n')
+ self.generate_coverage_rules(outfile)
+ outfile.write('# Suffix\n\n')
+ self.generate_utils(outfile)
+ self.generate_ending(outfile)
# Only ovewrite the old build file after the new one has been
# fully created.
- outfile.close()
os.replace(tempfilename, outfilename)
self.generate_compdb()
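
In the hunk above, detect_vs_dep_prefix() no longer receives an already-open handle; it returns a freshly opened file, and the caller uses that return value directly in a second with statement. This works because the object returned by open() is itself a context manager, so any function that returns one can sit in the with header. A rough sketch of that shape (illustrative names, not the backend's real logic):

    def reopen_for_append(tempfilename):
        # may probe or rewrite the file first, then hand back an open handle
        return open(tempfilename, 'a')

    def generate(tempfilename):
        with open(tempfilename, 'w') as outfile:
            outfile.write('# header\n')
        # the handle returned by reopen_for_append() is closed when this
        # block exits, even if one of the writes below raises
        with reopen_for_append(tempfilename) as outfile:
            outfile.write('# body\n')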
@@ -202,7 +203,8 @@ int dummy;
jsondb = subprocess.check_output([ninja_exe, '-t', 'compdb', 'c_COMPILER', 'cpp_COMPILER'], cwd=builddir)
except Exception:
raise MesonException('Could not create compilation database.')
- open(os.path.join(builddir, 'compile_commands.json'), 'wb').write(jsondb)
+ with open(os.path.join(builddir, 'compile_commands.json'), 'wb') as f:
+ f.write(jsondb)
# Get all generated headers. Any source file might need them so
# we need to add an order dependency to them.
@@ -505,8 +507,8 @@ int dummy;
self.generate_subdir_install(d)
elem.write(outfile)
- ofile = open(install_data_file, 'wb')
- pickle.dump(d, ofile)
+ with open(install_data_file, 'wb') as ofile:
+ pickle.dump(d, ofile)
def generate_target_install(self, d):
should_strip = self.environment.coredata.get_builtin_option('strip')
@@ -1416,16 +1418,22 @@ rule FORTRAN_DEP_HACK
# but those are really rare. I hope.
if not compiler.can_compile(s):
continue
- for line in open(os.path.join(self.environment.get_source_dir(), s.subdir, s.fname)):
- modmatch = modre.match(line)
- if modmatch is not None:
- modname = modmatch.group(1)
- if modname.lower() == 'procedure': # MODULE PROCEDURE construct
- continue
- if modname in module_files:
- raise InvalidArguments('Namespace collision: module %s defined in two files %s and %s.' %
- (modname, module_files[modname], s))
- module_files[modname] = s
+ filename = os.path.join(self.environment.get_source_dir(),
+ s.subdir, s.fname)
+ with open(filename) as f:
+ for line in f:
+ modmatch = modre.match(line)
+ if modmatch is not None:
+ modname = modmatch.group(1)
+ if modname.lower() == 'procedure':
+ # MODULE PROCEDURE construct
+ continue
+ if modname in module_files:
+ raise InvalidArguments(
+ 'Namespace collision: module %s defined in '
+ 'two files %s and %s.' %
+ (modname, module_files[modname], s))
+ module_files[modname] = s
self.fortran_deps[target.get_basename()] = module_files
def get_fortran_deps(self, compiler, src, target):
@@ -1433,27 +1441,32 @@ rule FORTRAN_DEP_HACK
usere = re.compile(r"\s*use\s+(\w+)", re.IGNORECASE)
dirname = self.get_target_private_dir(target)
tdeps= self.fortran_deps[target.get_basename()]
- for line in open(src):
- usematch = usere.match(line)
- if usematch is not None:
- usename = usematch.group(1)
- if usename not in tdeps:
- # The module is not provided by any source file. This is due to
- # a) missing file/typo/etc
- # b) using a module provided by the compiler, such as OpenMP
- # There's no easy way to tell which is which (that I know of)
- # so just ignore this and go on. Ideally we would print a
- # warning message to the user but this is a common occurrance,
- # which would lead to lots of distracting noise.
- continue
- mod_source_file = tdeps[usename]
- # Check if a source uses a module it exports itself.
- # Potential bug if multiple targets have a file with
- # the same name.
- if mod_source_file.fname == os.path.split(src)[1]:
- continue
- mod_name = compiler.module_name_to_filename(usematch.group(1))
- mod_files.append(os.path.join(dirname, mod_name))
+ with open(src) as f:
+ for line in f:
+ usematch = usere.match(line)
+ if usematch is not None:
+ usename = usematch.group(1)
+ if usename not in tdeps:
+ # The module is not provided by any source file. This
+ # is due to:
+ # a) missing file/typo/etc
+ # b) using a module provided by the compiler, such as
+ # OpenMP
+ # There's no easy way to tell which is which (that I
+ # know of) so just ignore this and go on. Ideally we
+ # would print a warning message to the user but this is
+ # a common occurrence, which would lead to lots of
+ # distracting noise.
+ continue
+ mod_source_file = tdeps[usename]
+ # Check if a source uses a module it exports itself.
+ # Potential bug if multiple targets have a file with
+ # the same name.
+ if mod_source_file.fname == os.path.split(src)[1]:
+ continue
+ mod_name = compiler.module_name_to_filename(
+ usematch.group(1))
+ mod_files.append(os.path.join(dirname, mod_name))
return mod_files
def get_cross_stdlib_args(self, target, compiler):
diff --git a/mesonbuild/backend/vs2010backend.py b/mesonbuild/backend/vs2010backend.py
index 669bcf8..eca7473 100644
--- a/mesonbuild/backend/vs2010backend.py
+++ b/mesonbuild/backend/vs2010backend.py
@@ -175,14 +175,18 @@ class Vs2010Backend(backends.Backend):
@staticmethod
def touch_regen_timestamp(build_dir):
- open(Vs2010Backend.get_regen_stampfile(build_dir), 'w').close()
+ with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w'):
+ pass
def generate_regen_info(self):
deps = self.get_regen_filelist()
regeninfo = RegenInfo(self.environment.get_source_dir(),
self.environment.get_build_dir(),
deps)
- pickle.dump(regeninfo, open(os.path.join(self.environment.get_scratch_dir(), 'regeninfo.dump'), 'wb'))
+ filename = os.path.join(self.environment.get_scratch_dir(),
+ 'regeninfo.dump')
+ with open(filename, 'wb') as f:
+ pickle.dump(regeninfo, f)
def get_obj_target_deps(self, obj_list):
result = {}
@@ -217,57 +221,66 @@ class Vs2010Backend(backends.Backend):
return all_deps
def generate_solution(self, sln_filename, projlist):
- ofile = open(sln_filename, 'w')
- ofile.write('Microsoft Visual Studio Solution File, Format Version 11.00\n')
- ofile.write('# Visual Studio ' + self.vs_version + '\n')
- prj_templ = prj_line = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
- for p in projlist:
- prj_line = prj_templ % (self.environment.coredata.guid, p[0], p[1], p[2])
- ofile.write(prj_line)
- all_deps = self.determine_deps(p)
- ofile.write('\tProjectSection(ProjectDependencies) = postProject\n')
- regen_guid = self.environment.coredata.regen_guid
- ofile.write('\t\t{%s} = {%s}\n' % (regen_guid, regen_guid))
- for dep in all_deps.keys():
- guid = self.environment.coredata.target_guids[dep]
- ofile.write('\t\t{%s} = {%s}\n' % (guid, guid))
- ofile.write('EndProjectSection\n')
+ with open(sln_filename, 'w') as ofile:
+ ofile.write('Microsoft Visual Studio Solution File, Format '
+ 'Version 11.00\n')
+ ofile.write('# Visual Studio ' + self.vs_version + '\n')
+ prj_templ = prj_line = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
+ for p in projlist:
+ prj_line = prj_templ % (self.environment.coredata.guid,
+ p[0], p[1], p[2])
+ ofile.write(prj_line)
+ all_deps = self.determine_deps(p)
+ ofile.write('\tProjectSection(ProjectDependencies) = '
+ 'postProject\n')
+ regen_guid = self.environment.coredata.regen_guid
+ ofile.write('\t\t{%s} = {%s}\n' % (regen_guid, regen_guid))
+ for dep in all_deps.keys():
+ guid = self.environment.coredata.target_guids[dep]
+ ofile.write('\t\t{%s} = {%s}\n' % (guid, guid))
+ ofile.write('EndProjectSection\n')
+ ofile.write('EndProject\n')
+ test_line = prj_templ % (self.environment.coredata.guid,
+ 'RUN_TESTS', 'RUN_TESTS.vcxproj',
+ self.environment.coredata.test_guid)
+ ofile.write(test_line)
ofile.write('EndProject\n')
- test_line = prj_templ % (self.environment.coredata.guid,
- 'RUN_TESTS', 'RUN_TESTS.vcxproj', self.environment.coredata.test_guid)
- ofile.write(test_line)
- ofile.write('EndProject\n')
- regen_line = prj_templ % (self.environment.coredata.guid,
- 'REGEN', 'REGEN.vcxproj', self.environment.coredata.regen_guid)
- ofile.write(regen_line)
- ofile.write('EndProject\n')
- ofile.write('Global\n')
- ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\n')
- ofile.write('\t\t%s|%s = %s|%s\n' % (self.buildtype, self.platform, self.buildtype, self.platform))
- ofile.write('\tEndGlobalSection\n')
- ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\n')
- ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
- (self.environment.coredata.regen_guid, self.buildtype, self.platform,
- self.buildtype, self.platform))
- ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
- (self.environment.coredata.regen_guid, self.buildtype, self.platform,
- self.buildtype, self.platform))
- for p in projlist:
+ regen_line = prj_templ % (self.environment.coredata.guid,
+ 'REGEN', 'REGEN.vcxproj',
+ self.environment.coredata.regen_guid)
+ ofile.write(regen_line)
+ ofile.write('EndProject\n')
+ ofile.write('Global\n')
+ ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = '
+ 'preSolution\n')
+ ofile.write('\t\t%s|%s = %s|%s\n' %
+ (self.buildtype, self.platform, self.buildtype,
+ self.platform))
+ ofile.write('\tEndGlobalSection\n')
+ ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = '
+ 'postSolution\n')
ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
- (p[2], self.buildtype, self.platform,
- self.buildtype, self.platform))
- if not isinstance(self.build.targets[p[0]], build.RunTarget):
- ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
+ (self.environment.coredata.regen_guid, self.buildtype,
+ self.platform, self.buildtype, self.platform))
+ ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
+ (self.environment.coredata.regen_guid, self.buildtype,
+ self.platform, self.buildtype, self.platform))
+ for p in projlist:
+ ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
(p[2], self.buildtype, self.platform,
self.buildtype, self.platform))
- ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
- (self.environment.coredata.test_guid, self.buildtype, self.platform,
- self.buildtype, self.platform))
- ofile.write('\tEndGlobalSection\n')
- ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n')
- ofile.write('\t\tHideSolutionNode = FALSE\n')
- ofile.write('\tEndGlobalSection\n')
- ofile.write('EndGlobal\n')
+ if not isinstance(self.build.targets[p[0]], build.RunTarget):
+ ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
+ (p[2], self.buildtype, self.platform,
+ self.buildtype, self.platform))
+ ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
+ (self.environment.coredata.test_guid, self.buildtype,
+ self.platform, self.buildtype, self.platform))
+ ofile.write('\tEndGlobalSection\n')
+ ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n')
+ ofile.write('\t\tHideSolutionNode = FALSE\n')
+ ofile.write('\tEndGlobalSection\n')
+ ofile.write('EndGlobal\n')
def generate_projects(self):
projlist = []
@@ -862,12 +875,15 @@ class Vs2010Backend(backends.Backend):
tree.write(ofname, encoding='utf-8', xml_declaration=True)
# ElementTree can not do prettyprinting so do it manually
doc = xml.dom.minidom.parse(ofname)
- open(ofname, 'w').write(doc.toprettyxml())
+ with open(ofname, 'w') as of:
+ of.write(doc.toprettyxml())
# World of horror! Python insists on not quoting quotes and
# fixing the escaped &quot; into &amp;quot; whereas MSVS
# requires quoted but not fixed elements. Enter horrible hack.
- txt = open(ofname, 'r').read()
- open(ofname, 'w').write(txt.replace('&amp;quot;', '&quot;'))
+ with open(ofname, 'r') as of:
+ txt = of.read()
+ with open(ofname, 'w') as of:
+ of.write(txt.replace('&amp;quot;', '&quot;'))
def gen_regenproj(self, project_name, ofname):
root = ET.Element('Project', {'DefaultTargets': 'Build',
diff --git a/mesonbuild/backend/xcodebackend.py b/mesonbuild/backend/xcodebackend.py
index e64866d..b157741 100644
--- a/mesonbuild/backend/xcodebackend.py
+++ b/mesonbuild/backend/xcodebackend.py
@@ -82,26 +82,22 @@ class XCodeBackend(backends.Backend):
self.proj_dir = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.xcodeproj')
os.makedirs(self.proj_dir, exist_ok=True)
self.proj_file = os.path.join(self.proj_dir, 'project.pbxproj')
- self.ofile = open(self.proj_file, 'w')
- self.generate_prefix()
- self.generate_pbx_aggregate_target()
- self.generate_pbx_build_file()
- self.generate_pbx_build_style()
- self.generate_pbx_container_item_proxy()
- self.generate_pbx_file_reference()
- self.generate_pbx_group()
- self.generate_pbx_native_target()
- self.generate_pbx_project()
- self.generate_pbx_shell_build_phase(test_data)
- self.generate_pbx_sources_build_phase()
- self.generate_pbx_target_dependency()
- self.generate_xc_build_configuration()
- self.generate_xc_configurationList()
- self.generate_suffix()
-
- # for some reason, the entire file was not being flushed to the disk.
- # closing it explicitly forces a flush and fixes the issue
- self.ofile.close()
+ with open(self.proj_file, 'w') as self.ofile:
+ self.generate_prefix()
+ self.generate_pbx_aggregate_target()
+ self.generate_pbx_build_file()
+ self.generate_pbx_build_style()
+ self.generate_pbx_container_item_proxy()
+ self.generate_pbx_file_reference()
+ self.generate_pbx_group()
+ self.generate_pbx_native_target()
+ self.generate_pbx_project()
+ self.generate_pbx_shell_build_phase(test_data)
+ self.generate_pbx_sources_build_phase()
+ self.generate_pbx_target_dependency()
+ self.generate_xc_build_configuration()
+ self.generate_xc_configurationList()
+ self.generate_suffix()
def get_xcodetype(self, fname):
return self.xcodetypemap[fname.split('.')[-1]]
diff --git a/mesonbuild/compilers.py b/mesonbuild/compilers.py
index 1c6c1da..4088adc 100644
--- a/mesonbuild/compilers.py
+++ b/mesonbuild/compilers.py
@@ -538,9 +538,8 @@ class CCompiler(Compiler):
binname += '.exe'
# Write binary check source
binary_name = os.path.join(work_dir, binname)
- ofile = open(source_name, 'w')
- ofile.write(code)
- ofile.close()
+ with open(source_name, 'w') as ofile:
+ ofile.write(code)
# Compile sanity check
cmdlist = self.exelist + extra_flags + [source_name] + self.get_output_args(binary_name)
pc = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=work_dir)
@@ -617,9 +616,8 @@ int main () {{ {1}; }}'''
suflen = len(self.default_suffix)
(fd, srcname) = tempfile.mkstemp(suffix='.'+self.default_suffix)
os.close(fd)
- ofile = open(srcname, 'w')
- ofile.write(code)
- ofile.close()
+ with open(srcname, 'w') as ofile:
+ ofile.write(code)
# Convert flags to the native type of the selected compiler
args = self.unix_link_flags_to_native(extra_args)
# Read c_args/cpp_args/etc from the cross-info file (if needed)
@@ -647,9 +645,8 @@ int main () {{ {1}; }}'''
os.close(fd)
(fd, dstname) = tempfile.mkstemp()
os.close(fd)
- ofile = open(srcname, 'w')
- ofile.write(code)
- ofile.close()
+ with open(srcname, 'w') as ofile:
+ ofile.write(code)
# Convert flags to the native type of the selected compiler
args = self.unix_link_flags_to_native(extra_args)
# Select a CRT if needed since we're linking
@@ -672,9 +669,8 @@ int main () {{ {1}; }}'''
raise CrossNoRunException('Can not run test applications in this cross environment.')
(fd, srcname) = tempfile.mkstemp(suffix='.'+self.default_suffix)
os.close(fd)
- ofile = open(srcname, 'w')
- ofile.write(code)
- ofile.close()
+ with open(srcname, 'w') as ofile:
+ ofile.write(code)
# Convert flags to the native type of the selected compiler
args = self.unix_link_flags_to_native(extra_args)
# Select a CRT if needed since we're linking
@@ -997,9 +993,9 @@ class ObjCCompiler(CCompiler):
extra_flags = self.get_cross_extra_flags(environment, compile=True, link=False)
if self.is_cross:
extra_flags += self.get_compile_only_args()
- ofile = open(source_name, 'w')
- ofile.write('#import<stdio.h>\nint main(int argc, char **argv) { return 0; }\n')
- ofile.close()
+ with open(source_name, 'w') as ofile:
+ ofile.write('#import<stdio.h>\n'
+ 'int main(int argc, char **argv) { return 0; }\n')
pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
pc.wait()
if pc.returncode != 0:
@@ -1031,9 +1027,10 @@ class ObjCPPCompiler(CPPCompiler):
extra_flags = self.get_cross_extra_flags(environment, compile=True, link=False)
if self.is_cross:
extra_flags += self.get_compile_only_args()
- ofile = open(source_name, 'w')
- ofile.write('#import<stdio.h>\nclass MyClass;int main(int argc, char **argv) { return 0; }\n')
- ofile.close()
+ with open(source_name, 'w') as ofile:
+ ofile.write('#import<stdio.h>\n'
+ 'class MyClass;'
+ 'int main(int argc, char **argv) { return 0; }\n')
pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
pc.wait()
if pc.returncode != 0:
@@ -1133,13 +1130,12 @@ class MonoCompiler(Compiler):
src = 'sanity.cs'
obj = 'sanity.exe'
source_name = os.path.join(work_dir, src)
- ofile = open(source_name, 'w')
- ofile.write('''public class Sanity {
+ with open(source_name, 'w') as ofile:
+ ofile.write('''public class Sanity {
static public void Main () {
}
}
''')
- ofile.close()
pc = subprocess.Popen(self.exelist + [src], cwd=work_dir)
pc.wait()
if pc.returncode != 0:
@@ -1245,14 +1241,13 @@ class JavaCompiler(Compiler):
src = 'SanityCheck.java'
obj = 'SanityCheck'
source_name = os.path.join(work_dir, src)
- ofile = open(source_name, 'w')
- ofile.write('''class SanityCheck {
+ with open(source_name, 'w') as ofile:
+ ofile.write('''class SanityCheck {
public static void main(String[] args) {
int i;
}
}
''')
- ofile.close()
pc = subprocess.Popen(self.exelist + [src], cwd=work_dir)
pc.wait()
if pc.returncode != 0:
@@ -1292,11 +1287,10 @@ class ValaCompiler(Compiler):
def sanity_check(self, work_dir, environment):
src = 'valatest.vala'
source_name = os.path.join(work_dir, src)
- ofile = open(source_name, 'w')
- ofile.write('''class SanityCheck : Object {
+ with open(source_name, 'w') as ofile:
+ ofile.write('''class SanityCheck : Object {
}
''')
- ofile.close()
extra_flags = self.get_cross_extra_flags(environment, compile=True, link=False)
pc = subprocess.Popen(self.exelist + extra_flags + ['-C', '-c', src], cwd=work_dir)
pc.wait()
@@ -1336,11 +1330,10 @@ class RustCompiler(Compiler):
def sanity_check(self, work_dir, environment):
source_name = os.path.join(work_dir, 'sanity.rs')
output_name = os.path.join(work_dir, 'rusttest')
- ofile = open(source_name, 'w')
- ofile.write('''fn main() {
+ with open(source_name, 'w') as ofile:
+ ofile.write('''fn main() {
}
''')
- ofile.close()
pc = subprocess.Popen(self.exelist + ['-o', output_name, source_name], cwd=work_dir)
pc.wait()
if pc.returncode != 0:
@@ -1435,10 +1428,9 @@ class SwiftCompiler(Compiler):
src = 'swifttest.swift'
source_name = os.path.join(work_dir, src)
output_name = os.path.join(work_dir, 'swifttest')
- ofile = open(source_name, 'w')
- ofile.write('''1 + 2
+ with open(source_name, 'w') as ofile:
+ ofile.write('''1 + 2
''')
- ofile.close()
extra_flags = self.get_cross_extra_flags(environment, compile=True, link=True)
pc = subprocess.Popen(self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
pc.wait()
@@ -1461,11 +1453,10 @@ class DCompiler(Compiler):
def sanity_check(self, work_dir, environment):
source_name = os.path.join(work_dir, 'sanity.d')
output_name = os.path.join(work_dir, 'dtest')
- ofile = open(source_name, 'w')
- ofile.write('''void main() {
+ with open(source_name, 'w') as ofile:
+ ofile.write('''void main() {
}
''')
- ofile.close()
pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + [source_name], cwd=work_dir)
pc.wait()
if pc.returncode != 0:
@@ -1872,9 +1863,8 @@ class VisualStudioCCompiler(CCompiler):
code = 'int i;\n'
(fd, srcname) = tempfile.mkstemp(suffix='.'+self.default_suffix)
os.close(fd)
- ofile = open(srcname, 'w')
- ofile.write(code)
- ofile.close()
+ with open(srcname, 'w') as ofile:
+ ofile.write(code)
# Read c_args/cpp_args/etc from the cross-info file (if needed)
extra_args = self.get_cross_extra_flags(env, compile=True, link=False)
extra_args += self.get_compile_only_args()
@@ -2286,12 +2276,11 @@ class FortranCompiler(Compiler):
def sanity_check(self, work_dir, environment):
source_name = os.path.join(work_dir, 'sanitycheckf.f90')
binary_name = os.path.join(work_dir, 'sanitycheckf')
- ofile = open(source_name, 'w')
- ofile.write('''program prog
+ with open(source_name, 'w') as ofile:
+ ofile.write('''program prog
print *, "Fortran compilation is working."
end program prog
''')
- ofile.close()
extra_flags = self.get_cross_extra_flags(environment, compile=True, link=True)
pc = subprocess.Popen(self.exelist + extra_flags + [source_name, '-o', binary_name])
pc.wait()
diff --git a/mesonbuild/coredata.py b/mesonbuild/coredata.py
index 2f1875f..9beb294 100644
--- a/mesonbuild/coredata.py
+++ b/mesonbuild/coredata.py
@@ -154,7 +154,8 @@ class CoreData():
raise RuntimeError('Tried to set unknown builtin option %s.' % optname)
def load(filename):
- obj = pickle.load(open(filename, 'rb'))
+ with open(filename, 'rb') as f:
+ obj = pickle.load(f)
if not isinstance(obj, CoreData):
raise RuntimeError('Core data file is corrupted.')
if obj.version != version:
@@ -165,7 +166,8 @@ def load(filename):
def save(obj, filename):
if obj.version != version:
raise RuntimeError('Fatal version mismatch corruption.')
- pickle.dump(obj, open(filename, 'wb'))
+ with open(filename, 'wb') as f:
+ pickle.dump(obj, f)
def get_builtin_options():
return list(builtin_options.keys())
diff --git a/mesonbuild/dependencies.py b/mesonbuild/dependencies.py
index d123f77..679440d 100644
--- a/mesonbuild/dependencies.py
+++ b/mesonbuild/dependencies.py
@@ -228,10 +228,11 @@ class PkgConfigDependency(Dependency):
return self.is_found
def extract_field(self, la_file, fieldname):
- for line in open(la_file):
- arr = line.strip().split('=')
- if arr[0] == fieldname:
- return arr[1][1:-1]
+ with open(la_file) as f:
+ for line in f:
+ arr = line.strip().split('=')
+ if arr[0] == fieldname:
+ return arr[1][1:-1]
return None
def extract_dlname_field(self, la_file):
@@ -374,7 +375,8 @@ class ExternalProgram():
shebang and manually parse it to figure out the interpreter to use
"""
try:
- first_line = open(script).readline().strip()
+ with open(script) as f:
+ first_line = f.readline().strip()
if first_line.startswith('#!'):
commands = first_line[2:].split('#')[0].strip().split()
if mesonlib.is_windows():
@@ -552,12 +554,13 @@ class BoostDependency(Dependency):
except FileNotFoundError:
self.version = None
return
- for line in ifile:
- if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
- ver = line.split()[-1]
- ver = ver[1:-1]
- self.version = ver.replace('_', '.')
- return
+ with ifile:
+ for line in ifile:
+ if line.startswith("#define") and 'BOOST_LIB_VERSION' in line:
+ ver = line.split()[-1]
+ ver = ver[1:-1]
+ self.version = ver.replace('_', '.')
+ return
self.version = None
def detect_src_modules(self):
diff --git a/mesonbuild/interpreter.py b/mesonbuild/interpreter.py
index 5e31a87..ac95e15 100644
--- a/mesonbuild/interpreter.py
+++ b/mesonbuild/interpreter.py
@@ -1032,7 +1032,8 @@ class Interpreter():
mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
if not os.path.isfile(mesonfile):
raise InvalidArguments('Missing Meson file in %s' % mesonfile)
- code = open(mesonfile, encoding='utf8').read()
+ with open(mesonfile, encoding='utf8') as mf:
+ code = mf.read()
if len(code.strip()) == 0:
raise InvalidCode('Builder file is empty.')
assert(isinstance(code, str))
@@ -2006,7 +2007,8 @@ class Interpreter():
if not os.path.isfile(absname):
self.subdir = prev_subdir
raise InterpreterException('Nonexistant build def file %s.' % buildfilename)
- code = open(absname, encoding='utf8').read()
+ with open(absname, encoding='utf8') as f:
+ code = f.read()
assert(isinstance(code, str))
try:
codeblock = mparser.Parser(code).parse()
diff --git a/mesonbuild/mconf.py b/mesonbuild/mconf.py
index 4b11c10..afabc62 100644
--- a/mesonbuild/mconf.py
+++ b/mesonbuild/mconf.py
@@ -36,15 +36,18 @@ class Conf:
self.build_file = os.path.join(build_dir, 'meson-private/build.dat')
if not os.path.isfile(self.coredata_file) or not os.path.isfile(self.build_file):
raise ConfException('Directory %s does not seem to be a Meson build directory.' % build_dir)
- self.coredata = pickle.load(open(self.coredata_file, 'rb'))
- self.build = pickle.load(open(self.build_file, 'rb'))
+ with open(self.coredata_file, 'rb') as f:
+ self.coredata = pickle.load(f)
+ with open(self.build_file, 'rb') as f:
+ self.build = pickle.load(f)
if self.coredata.version != coredata.version:
raise ConfException('Version mismatch (%s vs %s)' %
(coredata.version, self.coredata.version))
def save(self):
# Only called if something has changed so overwrite unconditionally.
- pickle.dump(self.coredata, open(self.coredata_file, 'wb'))
+ with open(self.coredata_file, 'wb') as f:
+ pickle.dump(self.coredata, f)
# We don't write the build file because any changes to it
# are erased when Meson is executed the nex time, i.e. the next
# time Ninja is run.
diff --git a/mesonbuild/mesonlib.py b/mesonbuild/mesonlib.py
index b5b5339..007aea8 100644
--- a/mesonbuild/mesonlib.py
+++ b/mesonbuild/mesonlib.py
@@ -96,7 +96,8 @@ def is_32bit():
def is_debianlike():
try:
- open('/etc/debian_version', 'r')
+ with open('/etc/debian_version', 'r'):
+ pass
return True
except FileNotFoundError:
return False
@@ -262,7 +263,8 @@ def do_mesondefine(line, confdata):
def do_conf_file(src, dst, confdata):
try:
- data = open(src).readlines()
+ with open(src) as f:
+ data = f.readlines()
except Exception:
raise MesonException('Could not read input file %s.' % src)
# Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
@@ -276,7 +278,8 @@ def do_conf_file(src, dst, confdata):
line = do_replacement(regex, line, confdata)
result.append(line)
dst_tmp = dst + '~'
- open(dst_tmp, 'w').writelines(result)
+ with open(dst_tmp, 'w') as f:
+ f.writelines(result)
shutil.copymode(src, dst_tmp)
replace_if_different(dst, dst_tmp)
@@ -306,9 +309,10 @@ def replace_if_different(dst, dst_tmp):
# If contents are identical, don't touch the file to prevent
# unnecessary rebuilds.
try:
- if open(dst, 'r').read() == open(dst_tmp, 'r').read():
- os.unlink(dst_tmp)
- return
+ with open(dst, 'r') as f1, open(dst_tmp, 'r') as f2:
+ if f1.read() == f2.read():
+ os.unlink(dst_tmp)
+ return
except FileNotFoundError:
pass
os.replace(dst_tmp, dst)
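
For readability, here is replace_if_different() reassembled from the added lines of the hunk above: both files are opened in a single with statement, so they are guaranteed to be closed before os.unlink() or os.replace() touches them.

    import os

    def replace_if_different(dst, dst_tmp):
        # If contents are identical, don't touch the file to prevent
        # unnecessary rebuilds.
        try:
            with open(dst, 'r') as f1, open(dst_tmp, 'r') as f2:
                if f1.read() == f2.read():
                    os.unlink(dst_tmp)
                    return
        except FileNotFoundError:
            pass
        os.replace(dst_tmp, dst)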
diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py
index 8ed1720..f35d821 100644
--- a/mesonbuild/mesonmain.py
+++ b/mesonbuild/mesonmain.py
@@ -169,7 +169,8 @@ itself as required.'''
g.generate(intr)
g.run_postconf_scripts()
dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
- pickle.dump(b, open(dumpfile, 'wb'))
+ with open(dumpfile, 'wb') as f:
+ pickle.dump(b, f)
# Write this last since we use the existence of this file to check if
# we generated the build file successfully, so we don't want an error
# that pops up during generation, post-conf scripts, etc to cause us to
diff --git a/mesonbuild/mintro.py b/mesonbuild/mintro.py
index 629b0fc..2086c37 100644
--- a/mesonbuild/mintro.py
+++ b/mesonbuild/mintro.py
@@ -177,10 +177,14 @@ def run(args):
buildfile = os.path.join(bdir, 'meson-private/build.dat')
testfile = os.path.join(bdir, 'meson-private/meson_test_setup.dat')
benchmarkfile = os.path.join(bdir, 'meson-private/meson_benchmark_setup.dat')
- coredata = pickle.load(open(corefile, 'rb'))
- builddata = pickle.load(open(buildfile, 'rb'))
- testdata = pickle.load(open(testfile, 'rb'))
- benchmarkdata = pickle.load(open(benchmarkfile, 'rb'))
+ with open(corefile, 'rb') as f:
+ coredata = pickle.load(f)
+ with open(buildfile, 'rb') as f:
+ builddata = pickle.load(f)
+ with open(testfile, 'rb') as f:
+ testdata = pickle.load(f)
+ with open(benchmarkfile, 'rb') as f:
+ benchmarkdata = pickle.load(f)
if options.list_targets:
list_targets(coredata, builddata)
elif options.target_files is not None:
diff --git a/mesonbuild/modules/pkgconfig.py b/mesonbuild/modules/pkgconfig.py
index b01b587..29e2847 100644
--- a/mesonbuild/modules/pkgconfig.py
+++ b/mesonbuild/modules/pkgconfig.py
@@ -23,37 +23,41 @@ class PkgConfigModule:
def generate_pkgconfig_file(self, state, libraries, subdirs, name, description, version, filebase,
pub_reqs, priv_reqs, priv_libs):
+ coredata = state.environment.get_coredata()
outdir = state.environment.scratch_dir
fname = os.path.join(outdir, filebase + '.pc')
- ofile = open(fname, 'w')
- coredata = state.environment.get_coredata()
- ofile.write('prefix=%s\n' % coredata.get_builtin_option('prefix'))
- ofile.write('libdir=${prefix}/%s\n' % coredata.get_builtin_option('libdir'))
- ofile.write('includedir=${prefix}/%s\n\n' % coredata.get_builtin_option('includedir'))
- ofile.write('Name: %s\n' % name)
- if len(description) > 0:
- ofile.write('Description: %s\n' % description)
- if len(version) > 0:
- ofile.write('Version: %s\n' % version)
- if len(pub_reqs) > 0:
- ofile.write('Requires: {}\n'.format(' '.join(pub_reqs)))
- if len(priv_reqs) > 0:
- ofile.write('Requires.private: {}\n'.format(' '.join(priv_reqs)))
- if len(priv_libs) > 0:
- ofile.write('Libraries.private: {}\n'.format(' '.join(priv_libs)))
- ofile.write('Libs: -L${libdir} ')
- for l in libraries:
- if l.custom_install_dir:
- ofile.write('-L${prefix}/%s ' % l.custom_install_dir)
- ofile.write('-l%s ' % l.name)
- ofile.write('\n')
- ofile.write('CFlags: ')
- for h in subdirs:
- if h == '.':
- h = ''
- ofile.write(os.path.join('-I${includedir}', h))
- ofile.write(' ')
- ofile.write('\n')
+ with open(fname, 'w') as ofile:
+ ofile.write('prefix=%s\n' % coredata.get_builtin_option('prefix'))
+ ofile.write('libdir=${prefix}/%s\n' %
+ coredata.get_builtin_option('libdir'))
+ ofile.write('includedir=${prefix}/%s\n\n' %
+ coredata.get_builtin_option('includedir'))
+ ofile.write('Name: %s\n' % name)
+ if len(description) > 0:
+ ofile.write('Description: %s\n' % description)
+ if len(version) > 0:
+ ofile.write('Version: %s\n' % version)
+ if len(pub_reqs) > 0:
+ ofile.write('Requires: {}\n'.format(' '.join(pub_reqs)))
+ if len(priv_reqs) > 0:
+ ofile.write(
+ 'Requires.private: {}\n'.format(' '.join(priv_reqs)))
+ if len(priv_libs) > 0:
+ ofile.write(
+ 'Libraries.private: {}\n'.format(' '.join(priv_libs)))
+ ofile.write('Libs: -L${libdir} ')
+ for l in libraries:
+ if l.custom_install_dir:
+ ofile.write('-L${prefix}/%s ' % l.custom_install_dir)
+ ofile.write('-l%s ' % l.name)
+ ofile.write('\n')
+ ofile.write('CFlags: ')
+ for h in subdirs:
+ if h == '.':
+ h = ''
+ ofile.write(os.path.join('-I${includedir}', h))
+ ofile.write(' ')
+ ofile.write('\n')
def generate(self, state, args, kwargs):
if len(args) > 0:
diff --git a/mesonbuild/modules/rpm.py b/mesonbuild/modules/rpm.py
index acad204..89194e9 100644
--- a/mesonbuild/modules/rpm.py
+++ b/mesonbuild/modules/rpm.py
@@ -80,82 +80,87 @@ class RPMModule:
files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file))
if len(files_devel) > 0:
devel_subpkg = True
- fn = open('%s.spec' % os.path.join(state.environment.get_build_dir(), proj), 'w+')
- fn.write('Name: %s\n' % proj)
- fn.write('Version: # FIXME\n')
- fn.write('Release: 1%{?dist}\n')
- fn.write('Summary: # FIXME\n')
- fn.write('License: # FIXME\n')
- fn.write('\n')
- fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
- fn.write('\n')
- for compiler in compiler_deps:
- fn.write('BuildRequires: %s\n' % compiler)
- for dep in state.environment.coredata.deps:
- fn.write('BuildRequires: pkgconfig(%s)\n' % dep)
- for lib in state.environment.coredata.ext_libs.values():
- fn.write('BuildRequires: %s # FIXME\n' % lib.fullpath)
- mlog.log('Warning, replace', mlog.bold(lib.fullpath), 'with real package.',
- 'You can use following command to find package which contains this lib:',
- mlog.bold('dnf provides %s' % lib.fullpath))
- for prog in state.environment.coredata.ext_progs.values():
- if not prog.found():
- fn.write('BuildRequires: /usr/bin/%s # FIXME\n' % prog.get_name())
- else:
- fn.write('BuildRequires: %s\n' % ' '.join(prog.fullpath))
- fn.write('BuildRequires: meson\n')
- fn.write('\n')
- fn.write('%description\n')
- fn.write('\n')
- if devel_subpkg:
- fn.write('%package devel\n')
- fn.write('Summary: Development files for %{name}\n')
- fn.write('Requires: %{name}%{?_isa} = %{version}-%{release}\n')
+ filename = os.path.join(state.environment.get_build_dir(),
+ '%s.spec' % proj)
+ with open(filename, 'w+') as fn:
+ fn.write('Name: %s\n' % proj)
+ fn.write('Version: # FIXME\n')
+ fn.write('Release: 1%{?dist}\n')
+ fn.write('Summary: # FIXME\n')
+ fn.write('License: # FIXME\n')
+ fn.write('\n')
+ fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
+ fn.write('\n')
+ for compiler in compiler_deps:
+ fn.write('BuildRequires: %s\n' % compiler)
+ for dep in state.environment.coredata.deps:
+ fn.write('BuildRequires: pkgconfig(%s)\n' % dep)
+ for lib in state.environment.coredata.ext_libs.values():
+ fn.write('BuildRequires: %s # FIXME\n' % lib.fullpath)
+ mlog.log('Warning, replace', mlog.bold(lib.fullpath),
+ 'with real package.',
+ 'You can use following command to find package which '
+ 'contains this lib:',
+ mlog.bold('dnf provides %s' % lib.fullpath))
+ for prog in state.environment.coredata.ext_progs.values():
+ if not prog.found():
+ fn.write('BuildRequires: /usr/bin/%s # FIXME\n' %
+ prog.get_name())
+ else:
+ fn.write('BuildRequires: %s\n' % ' '.join(prog.fullpath))
+ fn.write('BuildRequires: meson\n')
+ fn.write('\n')
+ fn.write('%description\n')
fn.write('\n')
- fn.write('%description devel\n')
- fn.write('Development files for %{name}.\n')
+ if devel_subpkg:
+ fn.write('%package devel\n')
+ fn.write('Summary: Development files for %{name}\n')
+ fn.write('Requires: %{name}%{?_isa} = %{version}-%{release}\n')
+ fn.write('\n')
+ fn.write('%description devel\n')
+ fn.write('Development files for %{name}.\n')
+ fn.write('\n')
+ fn.write('%prep\n')
+ fn.write('%autosetup\n')
+ fn.write('rm -rf rpmbuilddir && mkdir rpmbuilddir\n')
fn.write('\n')
- fn.write('%prep\n')
- fn.write('%autosetup\n')
- fn.write('rm -rf rpmbuilddir && mkdir rpmbuilddir\n')
- fn.write('\n')
- fn.write('%build\n')
- fn.write('pushd rpmbuilddir\n')
- fn.write(' %meson ..\n')
- fn.write(' ninja-build -v\n')
- fn.write('popd\n')
- fn.write('\n')
- fn.write('%install\n')
- fn.write('pushd rpmbuilddir\n')
- fn.write(' DESTDIR=%{buildroot} ninja-build -v install\n')
- fn.write('popd\n')
- if len(to_delete) > 0:
- fn.write('rm -rf %s\n' % ' '.join(to_delete))
- fn.write('\n')
- fn.write('%check\n')
- fn.write('pushd rpmbuilddir\n')
- fn.write(' ninja-build -v test\n')
- fn.write('popd\n')
- fn.write('\n')
- fn.write('%files\n')
- for f in files:
- fn.write('%s\n' % f)
- fn.write('\n')
- if devel_subpkg:
- fn.write('%files devel\n')
- for f in files_devel:
+ fn.write('%build\n')
+ fn.write('pushd rpmbuilddir\n')
+ fn.write(' %meson ..\n')
+ fn.write(' ninja-build -v\n')
+ fn.write('popd\n')
+ fn.write('\n')
+ fn.write('%install\n')
+ fn.write('pushd rpmbuilddir\n')
+ fn.write(' DESTDIR=%{buildroot} ninja-build -v install\n')
+ fn.write('popd\n')
+ if len(to_delete) > 0:
+ fn.write('rm -rf %s\n' % ' '.join(to_delete))
+ fn.write('\n')
+ fn.write('%check\n')
+ fn.write('pushd rpmbuilddir\n')
+ fn.write(' ninja-build -v test\n')
+ fn.write('popd\n')
+ fn.write('\n')
+ fn.write('%files\n')
+ for f in files:
fn.write('%s\n' % f)
fn.write('\n')
- if so_installed:
- fn.write('%post -p /sbin/ldconfig\n')
+ if devel_subpkg:
+ fn.write('%files devel\n')
+ for f in files_devel:
+ fn.write('%s\n' % f)
+ fn.write('\n')
+ if so_installed:
+ fn.write('%post -p /sbin/ldconfig\n')
+ fn.write('\n')
+ fn.write('%postun -p /sbin/ldconfig\n')
+ fn.write('\n')
+ fn.write('%changelog\n')
+ fn.write('* %s meson <meson@example.com> - \n' %
+ datetime.date.today().strftime('%a %b %d %Y'))
+ fn.write('- \n')
fn.write('\n')
- fn.write('%postun -p /sbin/ldconfig\n')
- fn.write('\n')
- fn.write('%changelog\n')
- fn.write('* %s meson <meson@example.com> - \n' % datetime.date.today().strftime('%a %b %d %Y'))
- fn.write('- \n')
- fn.write('\n')
- fn.close()
mlog.log('RPM spec template written to %s.spec.\n' % proj)
def initialize():
diff --git a/mesonbuild/optinterpreter.py b/mesonbuild/optinterpreter.py
index b355047..9f57fd6 100644
--- a/mesonbuild/optinterpreter.py
+++ b/mesonbuild/optinterpreter.py
@@ -78,7 +78,8 @@ class OptionInterpreter:
def process(self, option_file):
try:
- ast = mparser.Parser(open(option_file, 'r', encoding='utf8').read()).parse()
+ with open(option_file, 'r', encoding='utf8') as f:
+ ast = mparser.Parser(f.read()).parse()
except mesonlib.MesonException as me:
me.file = option_file
raise me
diff --git a/mesonbuild/scripts/depfixer.py b/mesonbuild/scripts/depfixer.py
index cb136f4..7124c6f 100644
--- a/mesonbuild/scripts/depfixer.py
+++ b/mesonbuild/scripts/depfixer.py
@@ -115,11 +115,21 @@ class Elf(DataSizes):
self.bfile = bfile
self.verbose = verbose
self.bf = open(bfile, 'r+b')
- (self.ptrsize, self.is_le) = self.detect_elf_type()
- super().__init__(self.ptrsize, self.is_le)
- self.parse_header()
- self.parse_sections()
- self.parse_dynamic()
+ try:
+ (self.ptrsize, self.is_le) = self.detect_elf_type()
+ super().__init__(self.ptrsize, self.is_le)
+ self.parse_header()
+ self.parse_sections()
+ self.parse_dynamic()
+ except:
+ self.bf.close()
+ raise
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.bf.close()
def detect_elf_type(self):
data = self.bf.read(6)
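
depfixer's Elf object now manages its own lifetime: __enter__ returns the instance and __exit__ closes the underlying binary file, which is what lets run() below hold it in a with block. The context-manager protocol in miniature (a generic sketch, not the Elf class itself):

    class ManagedBinary:
        def __init__(self, path):
            self.bf = open(path, 'r+b')

        def __enter__(self):
            # the value bound by "with ManagedBinary(p) as obj"
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            # runs on normal exit and on exceptions; returning None
            # lets any exception propagate to the caller
            self.bf.close()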
@@ -308,13 +318,13 @@ def run(args):
print('Don\'t run this unless you know what you are doing.')
print('%s: <binary file> <prefix>' % sys.argv[0])
exit(1)
- e = Elf(args[0])
- if len(args) == 1:
- e.print_rpath()
- e.print_runpath()
- else:
- new_rpath = args[1]
- e.fix_rpath(new_rpath)
+ with Elf(args[0]) as e:
+ if len(args) == 1:
+ e.print_rpath()
+ e.print_runpath()
+ else:
+ new_rpath = args[1]
+ e.fix_rpath(new_rpath)
return 0
if __name__ == '__main__':
diff --git a/mesonbuild/scripts/meson_benchmark.py b/mesonbuild/scripts/meson_benchmark.py
index d1107b6..6d138b0 100644
--- a/mesonbuild/scripts/meson_benchmark.py
+++ b/mesonbuild/scripts/meson_benchmark.py
@@ -52,33 +52,34 @@ def run_benchmarks(options, datafile):
failed_tests = 0
logfile_base = 'meson-logs/benchmarklog'
jsonlogfilename = logfile_base+ '.json'
- jsonlogfile = open(jsonlogfilename, 'w')
- tests = pickle.load(open(datafile, 'rb'))
+ with open(datafile, 'rb') as f:
+ tests = pickle.load(f)
num_tests = len(tests)
if num_tests == 0:
print('No benchmarks defined.')
return 0
iteration_count = 5
wrap = [] # Benchmarks on cross builds are pointless so don't support them.
- for i, test in enumerate(tests):
- runs = []
- durations = []
- failed = False
- for _ in range(iteration_count):
- res = meson_test.run_single_test(wrap, test)
- runs.append(res)
- durations.append(res.duration)
- if res.returncode != 0:
- failed = True
- mean = statistics.mean(durations)
- stddev = statistics.stdev(durations)
- if failed:
- resultstr = 'FAIL'
- failed_tests += 1
- else:
- resultstr = 'OK'
- print_stats(3, num_tests, test.name, resultstr, i, mean, stddev)
- print_json_log(jsonlogfile, runs, test.name, i)
+ with open(jsonlogfilename, 'w') as jsonlogfile:
+ for i, test in enumerate(tests):
+ runs = []
+ durations = []
+ failed = False
+ for _ in range(iteration_count):
+ res = meson_test.run_single_test(wrap, test)
+ runs.append(res)
+ durations.append(res.duration)
+ if res.returncode != 0:
+ failed = True
+ mean = statistics.mean(durations)
+ stddev = statistics.stdev(durations)
+ if failed:
+ resultstr = 'FAIL'
+ failed_tests += 1
+ else:
+ resultstr = 'OK'
+ print_stats(3, num_tests, test.name, resultstr, i, mean, stddev)
+ print_json_log(jsonlogfile, runs, test.name, i)
print('\nFull log written to meson-logs/benchmarklog.json.')
return failed_tests
diff --git a/mesonbuild/scripts/meson_exe.py b/mesonbuild/scripts/meson_exe.py
index 1a0fcda..d2ae357 100644
--- a/mesonbuild/scripts/meson_exe.py
+++ b/mesonbuild/scripts/meson_exe.py
@@ -74,7 +74,8 @@ def run(args):
print('Test runner for Meson. Do not run on your own, mmm\'kay?')
print(sys.argv[0] + ' [data file]')
exe_data_file = options.args[0]
- exe = pickle.load(open(exe_data_file, 'rb'))
+ with open(exe_data_file, 'rb') as f:
+ exe = pickle.load(f)
return run_exe(exe)
if __name__ == '__main__':
diff --git a/mesonbuild/scripts/meson_install.py b/mesonbuild/scripts/meson_install.py
index 5c5e72c..5cf02e6 100644
--- a/mesonbuild/scripts/meson_install.py
+++ b/mesonbuild/scripts/meson_install.py
@@ -45,8 +45,8 @@ def get_destdir_path(d, path):
return output
def do_install(datafilename):
- ifile = open(datafilename, 'rb')
- d = pickle.load(ifile)
+ with open(datafilename, 'rb') as ifile:
+ d = pickle.load(ifile)
d.destdir = os.environ.get('DESTDIR', '')
d.fullprefix = destdir_join(d.destdir, d.prefix)
@@ -110,7 +110,9 @@ def install_man(d):
os.makedirs(outdir, exist_ok=True)
print('Installing %s to %s.' % (full_source_filename, outdir))
if outfilename.endswith('.gz') and not full_source_filename.endswith('.gz'):
- open(outfilename, 'wb').write(gzip.compress(open(full_source_filename, 'rb').read()))
+ with open(outfilename, 'wb') as of:
+ with open(full_source_filename, 'rb') as sf:
+ of.write(gzip.compress(sf.read()))
shutil.copystat(full_source_filename, outfilename)
append_to_log(outfilename)
else:
@@ -140,7 +142,8 @@ def run_install_script(d):
print('Running custom install script %s' % script)
suffix = os.path.splitext(script)[1].lower()
if platform.system().lower() == 'windows' and suffix != '.bat':
- first_line = open(script, encoding='latin_1', errors='ignore').readline().strip()
+ with open(script, encoding='latin_1', errors='ignore') as f:
+ first_line = f.readline().strip()
if first_line.startswith('#!'):
if shutil.which(first_line[2:]):
commands = [first_line[2:]]
diff --git a/mesonbuild/scripts/meson_test.py b/mesonbuild/scripts/meson_test.py
index 33b6165..951fb85 100644
--- a/mesonbuild/scripts/meson_test.py
+++ b/mesonbuild/scripts/meson_test.py
@@ -202,10 +202,8 @@ def run_tests(datafilename):
wrap = [options.wrapper]
logfilename = logfile_base + '-' + options.wrapper.replace(' ', '_') + '.txt'
jsonlogfilename = logfile_base + '-' + options.wrapper.replace(' ', '_') + '.json'
- logfile = open(logfilename, 'w')
- jsonlogfile = open(jsonlogfilename, 'w')
- logfile.write('Log of Meson test suite run on %s.\n\n' % datetime.datetime.now().isoformat())
- tests = pickle.load(open(datafilename, 'rb'))
+ with open(datafilename, 'rb') as f:
+ tests = pickle.load(f)
if len(tests) == 0:
print('No tests defined.')
return
@@ -222,24 +220,31 @@ def run_tests(datafilename):
executor = conc.ThreadPoolExecutor(max_workers=num_workers)
futures = []
filtered_tests = filter_tests(options.suite, tests)
- for i, test in enumerate(filtered_tests):
- if test.suite[0] == '':
- visible_name = test.name
- else:
- if options.suite is not None:
- visible_name = options.suite + ' / ' + test.name
+
+ with open(jsonlogfilename, 'w') as jsonlogfile, \
+ open(logfilename, 'w') as logfile:
+ logfile.write('Log of Meson test suite run on %s.\n\n' %
+ datetime.datetime.now().isoformat())
+ for i, test in enumerate(filtered_tests):
+ if test.suite[0] == '':
+ visible_name = test.name
else:
- visible_name = test.suite[0] + ' / ' + test.name
+ if options.suite is not None:
+ visible_name = options.suite + ' / ' + test.name
+ else:
+ visible_name = test.suite[0] + ' / ' + test.name
- if not test.is_parallel:
- drain_futures(futures)
- futures = []
- res = run_single_test(wrap, test)
- print_stats(numlen, filtered_tests, visible_name, res, i, logfile, jsonlogfile)
- else:
- f = executor.submit(run_single_test, wrap, test)
- futures.append((f, numlen, filtered_tests, visible_name, i, logfile, jsonlogfile))
- drain_futures(futures)
+ if not test.is_parallel:
+ drain_futures(futures)
+ futures = []
+ res = run_single_test(wrap, test)
+ print_stats(numlen, filtered_tests, visible_name, res, i,
+ logfile, jsonlogfile)
+ else:
+ f = executor.submit(run_single_test, wrap, test)
+ futures.append((f, numlen, filtered_tests, visible_name, i,
+ logfile, jsonlogfile))
+ drain_futures(futures)
return logfilename
def run(args):
diff --git a/mesonbuild/scripts/regen_checker.py b/mesonbuild/scripts/regen_checker.py
index f65f3bd..ddf4943 100644
--- a/mesonbuild/scripts/regen_checker.py
+++ b/mesonbuild/scripts/regen_checker.py
@@ -48,8 +48,10 @@ def run(args):
private_dir = args[0]
dumpfile = os.path.join(private_dir, 'regeninfo.dump')
coredata = os.path.join(private_dir, 'coredata.dat')
- regeninfo = pickle.load(open(dumpfile, 'rb'))
- coredata = pickle.load(open(coredata, 'rb'))
+ with open(dumpfile, 'rb') as f:
+ regeninfo = pickle.load(f)
+ with open(coredata, 'rb') as f:
+ coredata = pickle.load(f)
mesonscript = coredata.meson_script_file
backend = coredata.get_builtin_option('backend')
regen_timestamp = os.stat(dumpfile).st_mtime
diff --git a/mesonbuild/scripts/symbolextractor.py b/mesonbuild/scripts/symbolextractor.py
index 79c1264..c117301 100644
--- a/mesonbuild/scripts/symbolextractor.py
+++ b/mesonbuild/scripts/symbolextractor.py
@@ -34,16 +34,19 @@ parser.add_argument('args', nargs='+')
def dummy_syms(outfilename):
"""Just touch it so relinking happens always."""
- open(outfilename, 'w').close()
+ with open(outfilename, 'w'):
+ pass
def write_if_changed(text, outfilename):
try:
- oldtext = open(outfilename, 'r').read()
+ with open(outfilename, 'r') as f:
+ oldtext = f.read()
if text == oldtext:
return
except FileNotFoundError:
pass
- open(outfilename, 'w').write(text)
+ with open(outfilename, 'w') as f:
+ f.write(text)
def linux_syms(libfilename, outfilename):
pe = subprocess.Popen(['readelf', '-d', libfilename], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
diff --git a/mesonbuild/scripts/vcstagger.py b/mesonbuild/scripts/vcstagger.py
index 390e37a..3f36e0a 100644
--- a/mesonbuild/scripts/vcstagger.py
+++ b/mesonbuild/scripts/vcstagger.py
@@ -23,9 +23,16 @@ def config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_
except Exception:
new_string = fallback
- new_data = open(infile).read().replace(replace_string, new_string)
- if (not os.path.exists(outfile)) or (open(outfile).read() != new_data):
- open(outfile, 'w').write(new_data)
+ with open(infile) as f:
+ new_data = f.read().replace(replace_string, new_string)
+ if os.path.exists(outfile):
+ with open(outfile) as f:
+ needs_update = (f.read() != new_data)
+ else:
+ needs_update = True
+ if needs_update:
+ with open(outfile, 'w') as f:
+ f.write(new_data)
def run(args):
infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
diff --git a/mesonbuild/wrap/wrap.py b/mesonbuild/wrap/wrap.py
index bfb7ed4..f03af67 100644
--- a/mesonbuild/wrap/wrap.py
+++ b/mesonbuild/wrap/wrap.py
@@ -13,6 +13,7 @@
# limitations under the License.
from .. import mlog
+import contextlib
import urllib.request, os, hashlib, shutil
import subprocess
import sys
@@ -58,23 +59,23 @@ def open_wrapdburl(urlstring):
class PackageDefinition:
def __init__(self, fname):
self.values = {}
- ifile = open(fname)
- first = ifile.readline().strip()
+ with open(fname) as ifile:
+ first = ifile.readline().strip()
- if first == '[wrap-file]':
- self.type = 'file'
- elif first == '[wrap-git]':
- self.type = 'git'
- else:
- raise RuntimeError('Invalid format of package file')
- for line in ifile:
- line = line.strip()
- if line == '':
- continue
- (k, v) = line.split('=', 1)
- k = k.strip()
- v = v.strip()
- self.values[k] = v
+ if first == '[wrap-file]':
+ self.type = 'file'
+ elif first == '[wrap-git]':
+ self.type = 'git'
+ else:
+ raise RuntimeError('Invalid format of package file')
+ for line in ifile:
+ line = line.strip()
+ if line == '':
+ continue
+ (k, v) = line.split('=', 1)
+ k = k.strip()
+ v = v.strip()
+ self.values[k] = v
def get(self, key):
return self.values[key]
@@ -137,26 +138,26 @@ class Resolver:
resp = open_wrapdburl(url)
else:
resp = urllib.request.urlopen(url)
- dlsize = int(resp.info()['Content-Length'])
- print('Download size:', dlsize)
- print('Downloading: ', end='')
- sys.stdout.flush()
- printed_dots = 0
- blocks = []
- downloaded = 0
- while True:
- block = resp.read(blocksize)
- if block == b'':
- break
- downloaded += len(block)
- blocks.append(block)
- ratio = int(downloaded/dlsize * 10)
- while printed_dots < ratio:
- print('.', end='')
- sys.stdout.flush()
- printed_dots += 1
- print('')
- resp.close()
+ with contextlib.closing(resp) as resp:
+ dlsize = int(resp.info()['Content-Length'])
+ print('Download size:', dlsize)
+ print('Downloading: ', end='')
+ sys.stdout.flush()
+ printed_dots = 0
+ blocks = []
+ downloaded = 0
+ while True:
+ block = resp.read(blocksize)
+ if block == b'':
+ break
+ downloaded += len(block)
+ blocks.append(block)
+ ratio = int(downloaded/dlsize * 10)
+ while printed_dots < ratio:
+ print('.', end='')
+ sys.stdout.flush()
+ printed_dots += 1
+ print('')
return b''.join(blocks)
def get_hash(self, data):
@@ -177,7 +178,8 @@ class Resolver:
expected = p.get('source_hash')
if dhash != expected:
raise RuntimeError('Incorrect hash for source %s:\n %s expected\n %s actual.' % (packagename, expected, dhash))
- open(ofname, 'wb').write(srcdata)
+ with open(ofname, 'wb') as f:
+ f.write(srcdata)
if p.has_patch():
purl = p.get('patch_url')
mlog.log('Downloading patch from', mlog.bold(purl))
@@ -186,7 +188,9 @@ class Resolver:
expected = p.get('patch_hash')
if phash != expected:
raise RuntimeError('Incorrect hash for patch %s:\n %s expected\n %s actual' % (packagename, expected, phash))
- open(os.path.join(self.cachedir, p.get('patch_filename')), 'wb').write(pdata)
+ filename = os.path.join(self.cachedir, p.get('patch_filename'))
+ with open(filename, 'wb') as f:
+ f.write(pdata)
else:
mlog.log('Package does not require patch.')
diff --git a/mesonbuild/wrap/wraptool.py b/mesonbuild/wrap/wraptool.py
index c5f8eef..e94a2c5 100755
--- a/mesonbuild/wrap/wraptool.py
+++ b/mesonbuild/wrap/wraptool.py
@@ -92,7 +92,8 @@ def install(name):
(branch, revision) = get_latest_version(name)
u = open_wrapdburl(API_ROOT + 'projects/%s/%s/%s/get_wrap' % (name, branch, revision))
data = u.read()
- open(wrapfile, 'wb').write(data)
+ with open(wrapfile, 'wb') as f:
+ f.write(data)
print('Installed', name, 'branch', branch, 'revision', revision)
def get_current_version(wrapfile):
@@ -129,7 +130,8 @@ def update(name):
os.unlink(os.path.join('subprojects/packagecache', patch_file))
except FileNotFoundError:
pass
- open(wrapfile, 'wb').write(data)
+ with open(wrapfile, 'wb') as f:
+ f.write(data)
print('Updated', name, 'to branch', new_branch, 'revision', new_revision)
def info(name):
diff --git a/run_tests.py b/run_tests.py
index 3e49662..48f1230 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -156,8 +156,9 @@ def validate_install(srcdir, installdir):
if os.path.exists(os.path.join(installdir, noinst_file)):
expected[noinst_file] = False
elif os.path.exists(info_file):
- for line in open(info_file):
- expected[platform_fix_exe_name(line.strip())] = False
+ with open(info_file) as f:
+ for line in f:
+ expected[platform_fix_exe_name(line.strip())] = False
# Check if expected files were found
for fname in expected:
if os.path.exists(os.path.join(installdir, fname)):
@@ -249,7 +250,8 @@ def _run_test(testdir, test_build_dir, install_dir, extra_args, flags, compile_c
(returncode, stdo, stde) = run_configure_inprocess(gen_command)
try:
logfile = os.path.join(test_build_dir, 'meson-logs/meson-log.txt')
- mesonlog = open(logfile, errors='ignore').read()
+ with open(logfile, errors='ignore') as f:
+ mesonlog = f.read()
except Exception:
mesonlog = 'No meson-log.txt found.'
gen_time = time.time() - gen_start
@@ -401,7 +403,9 @@ def run_tests(extra_args):
def check_file(fname):
linenum = 1
- for line in open(fname, 'rb').readlines():
+ with open(fname, 'rb') as f:
+ lines = f.readlines()
+ for line in lines:
if b'\t' in line:
print("File %s contains a literal tab on line %d. Only spaces are permitted." % (fname, linenum))
sys.exit(1)
diff --git a/test cases/common/103 manygen/subdir/manygen.py b/test cases/common/103 manygen/subdir/manygen.py
index 4411183..c233ae6 100755
--- a/test cases/common/103 manygen/subdir/manygen.py
+++ b/test cases/common/103 manygen/subdir/manygen.py
@@ -6,7 +6,8 @@
import sys, os
import shutil, subprocess
-funcname = open(sys.argv[1]).readline().strip()
+with open(sys.argv[1]) as f:
+ funcname = f.readline().strip()
outdir = sys.argv[2]
if not os.path.isdir(outdir):
@@ -44,19 +45,22 @@ outc = os.path.join(outdir, funcname + '.c')
tmpc = 'diibadaaba.c'
tmpo = 'diibadaaba' + objsuffix
-open(outc, 'w').write('''#include"%s.h"
+with open(outc, 'w') as f:
+ f.write('''#include"%s.h"
int %s_in_src() {
return 0;
}
''' % (funcname, funcname))
-open(outh, 'w').write('''#pragma once
+with open(outh, 'w') as f:
+ f.write('''#pragma once
int %s_in_lib();
int %s_in_obj();
int %s_in_src();
''' % (funcname, funcname, funcname))
-open(tmpc, 'w').write('''int %s_in_obj() {
+with open(tmpc, 'w') as f:
+ f.write('''int %s_in_obj() {
return 0;
}
''' % funcname)
@@ -66,7 +70,8 @@ if is_vs:
else:
subprocess.check_call([compiler, '-c', '-o', outo, tmpc])
-open(tmpc, 'w').write('''int %s_in_lib() {
+with open(tmpc, 'w') as f:
+ f.write('''int %s_in_lib() {
return 0;
}
''' % funcname)
diff --git a/test cases/common/107 postconf/postconf.py b/test cases/common/107 postconf/postconf.py
index 209b7af..50c91ca 100644
--- a/test cases/common/107 postconf/postconf.py
+++ b/test cases/common/107 postconf/postconf.py
@@ -7,5 +7,10 @@ template = '''#pragma once
#define THE_NUMBER {}
'''
-data = open(os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat')).readline().strip()
-open(os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h'), 'w').write(template.format(data))
+input_file = os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat')
+output_file = os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h')
+
+with open(input_file) as f:
+ data = f.readline().strip()
+with open(output_file, 'w') as f:
+ f.write(template.format(data))
diff --git a/test cases/common/108 postconf with args/postconf.py b/test cases/common/108 postconf with args/postconf.py
index 4cfbb7c..cef7f79 100644
--- a/test cases/common/108 postconf with args/postconf.py
+++ b/test cases/common/108 postconf with args/postconf.py
@@ -9,5 +9,10 @@ template = '''#pragma once
#define THE_ARG2 {}
'''
-data = open(os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat')).readline().strip()
-open(os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h'), 'w').write(template.format(data, sys.argv[1], sys.argv[2]))
+input_file = os.path.join(os.environ['MESON_SOURCE_ROOT'], 'raw.dat')
+output_file = os.path.join(os.environ['MESON_BUILD_ROOT'], 'generated.h')
+
+with open(input_file) as f:
+ data = f.readline().strip()
+with open(output_file, 'w') as f:
+ f.write(template.format(data, sys.argv[1], sys.argv[2]))
diff --git a/test cases/common/113 generatorcustom/catter.py b/test cases/common/113 generatorcustom/catter.py
index 354d6e0..7a6c085 100755
--- a/test cases/common/113 generatorcustom/catter.py
+++ b/test cases/common/113 generatorcustom/catter.py
@@ -8,6 +8,7 @@ inputs = sys.argv[1:-1]
with open(output, 'w') as ofile:
ofile.write('#pragma once\n')
for i in inputs:
- content = open(i, 'r').read()
+ with open(i, 'r') as ifile:
+ content = ifile.read()
ofile.write(content)
ofile.write('\n')
diff --git a/test cases/common/113 generatorcustom/gen.py b/test cases/common/113 generatorcustom/gen.py
index ba02e3f..c843497 100755
--- a/test cases/common/113 generatorcustom/gen.py
+++ b/test cases/common/113 generatorcustom/gen.py
@@ -5,7 +5,9 @@ import sys, os
ifile = sys.argv[1]
ofile = sys.argv[2]
-resname = open(ifile, 'r').readline().strip()
+with open(ifile, 'r') as f:
+ resname = f.readline().strip()
templ = 'const char %s[] = "%s";\n'
-open(ofile, 'w').write(templ % (resname, resname))
+with open(ofile, 'w') as f:
+ f.write(templ % (resname, resname))
diff --git a/test cases/common/117 custom target capture/my_compiler.py b/test cases/common/117 custom target capture/my_compiler.py
index 3e9ec23..b60722a 100755
--- a/test cases/common/117 custom target capture/my_compiler.py
+++ b/test cases/common/117 custom target capture/my_compiler.py
@@ -6,7 +6,8 @@ if __name__ == '__main__':
if len(sys.argv) != 2:
print(sys.argv[0], 'input_file')
sys.exit(1)
- ifile = open(sys.argv[1]).read()
+ with open(sys.argv[1]) as f:
+ ifile = f.read()
if ifile != 'This is a text only input file.\n':
print('Malformed input')
sys.exit(1)
diff --git a/test cases/common/16 configure file/generator.py b/test cases/common/16 configure file/generator.py
index de9a423..90223f0 100755
--- a/test cases/common/16 configure file/generator.py
+++ b/test cases/common/16 configure file/generator.py
@@ -6,8 +6,8 @@ if len(sys.argv) != 3:
print("Wrong amount of parameters.")
# Just test that it exists.
-ifile = open(sys.argv[1], 'r')
+with open(sys.argv[1], 'r') as ifile:
+ pass
-ofile = open(sys.argv[2], 'w')
-ofile.write("#define ZERO_RESULT 0\n")
-ofile.close()
+with open(sys.argv[2], 'w') as ofile:
+ ofile.write("#define ZERO_RESULT 0\n")
diff --git a/test cases/common/48 test args/tester.py b/test cases/common/48 test args/tester.py
index 36e510d..0b4010a 100755
--- a/test cases/common/48 test args/tester.py
+++ b/test cases/common/48 test args/tester.py
@@ -2,5 +2,6 @@
import sys
-if open(sys.argv[1]).read() != 'contents\n':
- sys.exit(1)
+with open(sys.argv[1]) as f:
+ if f.read() != 'contents\n':
+ sys.exit(1)
diff --git a/test cases/common/56 custom target/depfile/dep.py b/test cases/common/56 custom target/depfile/dep.py
index 3a772ec..585e192 100755
--- a/test cases/common/56 custom target/depfile/dep.py
+++ b/test cases/common/56 custom target/depfile/dep.py
@@ -9,5 +9,7 @@ depfiles = glob(os.path.join(srcdir, '*'))
quoted_depfiles = [x.replace(' ', '\ ') for x in depfiles]
-open(output, 'w').write('I am the result of globbing.')
-open(depfile, 'w').write('%s: %s\n' % (output, ' '.join(quoted_depfiles)))
+with open(output, 'w') as f:
+ f.write('I am the result of globbing.')
+with open(depfile, 'w') as f:
+ f.write('%s: %s\n' % (output, ' '.join(quoted_depfiles)))
diff --git a/test cases/common/56 custom target/my_compiler.py b/test cases/common/56 custom target/my_compiler.py
index 43e7143..d99029b 100755
--- a/test cases/common/56 custom target/my_compiler.py
+++ b/test cases/common/56 custom target/my_compiler.py
@@ -6,9 +6,10 @@ if __name__ == '__main__':
if len(sys.argv) != 3:
print(sys.argv[0], 'input_file output_file')
sys.exit(1)
- ifile = open(sys.argv[1]).read()
+ with open(sys.argv[1]) as f:
+ ifile = f.read()
if ifile != 'This is a text only input file.\n':
print('Malformed input')
sys.exit(1)
- ofile = open(sys.argv[2], 'w')
- ofile.write('This is a binary output file.\n')
+ with open(sys.argv[2], 'w') as ofile:
+ ofile.write('This is a binary output file.\n')
diff --git a/test cases/common/57 custom target chain/my_compiler.py b/test cases/common/57 custom target chain/my_compiler.py
index 43e7143..d99029b 100755
--- a/test cases/common/57 custom target chain/my_compiler.py
+++ b/test cases/common/57 custom target chain/my_compiler.py
@@ -6,9 +6,10 @@ if __name__ == '__main__':
if len(sys.argv) != 3:
print(sys.argv[0], 'input_file output_file')
sys.exit(1)
- ifile = open(sys.argv[1]).read()
+ with open(sys.argv[1]) as f:
+ ifile = f.read()
if ifile != 'This is a text only input file.\n':
print('Malformed input')
sys.exit(1)
- ofile = open(sys.argv[2], 'w')
- ofile.write('This is a binary output file.\n')
+ with open(sys.argv[2], 'w') as ofile:
+ ofile.write('This is a binary output file.\n')
diff --git a/test cases/common/57 custom target chain/my_compiler2.py b/test cases/common/57 custom target chain/my_compiler2.py
index 22a4160..22ec789 100755
--- a/test cases/common/57 custom target chain/my_compiler2.py
+++ b/test cases/common/57 custom target chain/my_compiler2.py
@@ -6,9 +6,10 @@ if __name__ == '__main__':
if len(sys.argv) != 3:
print(sys.argv[0], 'input_file output_file')
sys.exit(1)
- ifile = open(sys.argv[1]).read()
+ with open(sys.argv[1]) as f:
+ ifile = f.read()
if ifile != 'This is a binary output file.\n':
print('Malformed input')
sys.exit(1)
- ofile = open(sys.argv[2], 'w')
- ofile.write('This is a different binary output file.\n')
+ with open(sys.argv[2], 'w') as ofile:
+ ofile.write('This is a different binary output file.\n')
diff --git a/test cases/common/57 custom target chain/usetarget/subcomp.py b/test cases/common/57 custom target chain/usetarget/subcomp.py
index 207a8ad..6f4b686 100755
--- a/test cases/common/57 custom target chain/usetarget/subcomp.py
+++ b/test cases/common/57 custom target chain/usetarget/subcomp.py
@@ -3,5 +3,5 @@
import sys, os
with open(sys.argv[1], 'rb') as ifile:
- open(sys.argv[2], 'w').write('Everything ok.\n')
-
+ with open(sys.argv[2], 'w') as ofile:
+ ofile.write('Everything ok.\n')
diff --git a/test cases/common/58 run target/converter.py b/test cases/common/58 run target/converter.py
index 6acbc84..8dd31fe 100644
--- a/test cases/common/58 run target/converter.py
+++ b/test cases/common/58 run target/converter.py
@@ -2,4 +2,5 @@
import sys
-open(sys.argv[2], 'wb').write(open(sys.argv[1], 'rb').read())
+with open(sys.argv[1], 'rb') as ifile, open(sys.argv[2], 'wb') as ofile:
+ ofile.write(ifile.read())
diff --git a/test cases/common/58 run target/fakeburner.py b/test cases/common/58 run target/fakeburner.py
index a100a6f..5728002 100755
--- a/test cases/common/58 run target/fakeburner.py
+++ b/test cases/common/58 run target/fakeburner.py
@@ -5,7 +5,8 @@ import sys
plain_arg = sys.argv[1]
_, filename, _ = plain_arg.split(':')
try:
- content = open(filename, 'rb').read()
+ with open(filename, 'rb') as f:
+ content = f.read()
except FileNotFoundError:
print('Could not open file. Missing dependency?')
sys.exit(1)
diff --git a/test cases/common/61 custom target source output/generator.py b/test cases/common/61 custom target source output/generator.py
index 4bf5c84..3464b0a 100755
--- a/test cases/common/61 custom target source output/generator.py
+++ b/test cases/common/61 custom target source output/generator.py
@@ -7,8 +7,10 @@ if len(sys.argv) != 2:
odir = sys.argv[1]
-open(os.path.join(odir, 'mylib.h'), 'w').write('int func();\n')
-open(os.path.join(odir, 'mylib.c'), 'w').write('''int func() {
+with open(os.path.join(odir, 'mylib.h'), 'w') as f:
+ f.write('int func();\n')
+with open(os.path.join(odir, 'mylib.c'), 'w') as f:
+ f.write('''int func() {
return 0;
}
''')
diff --git a/test cases/common/64 custom header generator/makeheader.py b/test cases/common/64 custom header generator/makeheader.py
index 9ef2bd5..f156834 100644
--- a/test cases/common/64 custom header generator/makeheader.py
+++ b/test cases/common/64 custom header generator/makeheader.py
@@ -6,5 +6,7 @@
import sys
template = '#define RET_VAL %s\n'
-output = template % (open(sys.argv[1]).readline().strip())
-open(sys.argv[2], 'w').write(output)
+with open(sys.argv[1]) as f:
+ output = template % (f.readline().strip(), )
+with open(sys.argv[2], 'w') as f:
+ f.write(output)
diff --git a/test cases/common/65 multiple generators/mygen.py b/test cases/common/65 multiple generators/mygen.py
index cd786ea..99dc331 100755
--- a/test cases/common/65 multiple generators/mygen.py
+++ b/test cases/common/65 multiple generators/mygen.py
@@ -6,14 +6,17 @@ if len(sys.argv) != 3:
print("You is fail.")
sys.exit(1)
-val = open(sys.argv[1]).read().strip()
+with open(sys.argv[1]) as f:
+ val = f.read().strip()
outdir = sys.argv[2]
outhdr = os.path.join(outdir, 'source%s.h' % val)
outsrc = os.path.join(outdir, 'source%s.cpp' % val)
-open(outhdr, 'w').write('int func%s();\n' % val)
-open(outsrc, 'w').write('''int func%s() {
+with open(outhdr, 'w') as f:
+ f.write('int func%s();\n' % val)
+with open(outsrc, 'w') as f:
+ f.write('''int func%s() {
return 0;
}
''' % val)
diff --git a/test cases/common/72 build always/version_gen.py b/test cases/common/72 build always/version_gen.py
index c82678d..d7b01ca 100755
--- a/test cases/common/72 build always/version_gen.py
+++ b/test cases/common/72 build always/version_gen.py
@@ -14,14 +14,17 @@ def generate(infile, outfile, fallback):
version = stdo.decode().strip()
except:
pass
- newdata = open(infile).read().replace('@VERSION@', version)
+ with open(infile) as f:
+ newdata = f.read().replace('@VERSION@', version)
try:
- olddata = open(outfile).read()
+ with open(outfile) as f:
+ olddata = f.read()
if olddata == newdata:
return
except:
pass
- open(outfile, 'w').write(newdata)
+ with open(outfile, 'w') as f:
+ f.write(newdata)
if __name__ == '__main__':
infile = sys.argv[1]
diff --git a/test cases/common/76 configure file in custom target/src/mycompiler.py b/test cases/common/76 configure file in custom target/src/mycompiler.py
index d5dcab5..b00c862 100644
--- a/test cases/common/76 configure file in custom target/src/mycompiler.py
+++ b/test cases/common/76 configure file in custom target/src/mycompiler.py
@@ -2,7 +2,8 @@
import sys
-ifile = open(sys.argv[1])
-if ifile.readline().strip() != '42':
- print('Incorrect input')
-open(sys.argv[2], 'w').write('Success\n')
+with open(sys.argv[1]) as ifile:
+ if ifile.readline().strip() != '42':
+ print('Incorrect input')
+with open(sys.argv[2], 'w') as ofile:
+ ofile.write('Success\n')
diff --git a/test cases/common/78 ctarget dependency/gen1.py b/test cases/common/78 ctarget dependency/gen1.py
index 64b8e6d..0fa6ea1 100755
--- a/test cases/common/78 ctarget dependency/gen1.py
+++ b/test cases/common/78 ctarget dependency/gen1.py
@@ -6,5 +6,7 @@ import time, sys
# is missing.
time.sleep(0.5)
-contents = open(sys.argv[1], 'r').read()
-open(sys.argv[2], 'w').write(contents)
+with open(sys.argv[1], 'r') as f:
+ contents = f.read()
+with open(sys.argv[2], 'w') as f:
+ f.write(contents)
diff --git a/test cases/common/78 ctarget dependency/gen2.py b/test cases/common/78 ctarget dependency/gen2.py
index 3a8be7d..b087b02 100755
--- a/test cases/common/78 ctarget dependency/gen2.py
+++ b/test cases/common/78 ctarget dependency/gen2.py
@@ -6,4 +6,5 @@ from glob import glob
files = glob(os.path.join(sys.argv[1], '*.tmp'))
assert(len(files) == 1)
-open(sys.argv[2], 'w').write(open(files[0], 'r').read())
+with open(files[0], 'r') as ifile, open(sys.argv[2], 'w') as ofile:
+ ofile.write(ifile.read())
diff --git a/test cases/common/93 private include/stlib/compiler.py b/test cases/common/93 private include/stlib/compiler.py
index 3e74c88..98dbe46 100755
--- a/test cases/common/93 private include/stlib/compiler.py
+++ b/test cases/common/93 private include/stlib/compiler.py
@@ -26,5 +26,7 @@ hfile = os.path.join(outdir, base + '.h')
c_code = c_templ % (base, base)
h_code = h_templ % base
-open(cfile, 'w').write(c_code)
-open(hfile, 'w').write(h_code)
+with open(cfile, 'w') as f:
+ f.write(c_code)
+with open(hfile, 'w') as f:
+ f.write(h_code)
diff --git a/test cases/common/98 gen extra/srcgen.py b/test cases/common/98 gen extra/srcgen.py
index 55e777e..73bc337 100755
--- a/test cases/common/98 gen extra/srcgen.py
+++ b/test cases/common/98 gen extra/srcgen.py
@@ -19,8 +19,10 @@ c_templ = '''int %s() {
options = parser.parse_args(sys.argv[1:])
-funcname = open(options.input).readline().strip()
+with open(options.input) as f:
+ funcname = f.readline().strip()
if options.upper:
funcname = funcname.upper()
-open(options.output, 'w').write(c_templ % funcname)
+with open(options.output, 'w') as f:
+ f.write(c_templ % funcname)
diff --git a/tools/ac_converter.py b/tools/ac_converter.py
index 571481e..e88f2e2 100755
--- a/tools/ac_converter.py
+++ b/tools/ac_converter.py
@@ -25,28 +25,6 @@ that are unrelated to configure checks.
import sys
-print('''cc = meson.get_compiler('c')
-cdata = configuration_data()''')
-
-print('check_headers = [')
-
-for line in open(sys.argv[1]):
- line = line.strip()
- if line.startswith('#mesondefine') and \
- line.endswith('_H'):
- token = line.split()[1]
- tarr = token.split('_')[1:-1]
- tarr = [x.lower() for x in tarr]
- hname = '/'.join(tarr) + '.h'
- print(" ['%s', '%s']," % (token, hname))
-print(']\n')
-
-print('''foreach h : check_headers
- if cc.has_header(h.get(1))
- cdata.set(h.get(0), 1)
- endif
-endforeach
-''')
# Add stuff here as it is encountered.
function_data = \
@@ -242,18 +220,71 @@ function_data = \
'HAVE_PTHREAD_SET_NAME_NP': ('pthread_set_name_np', 'pthread.h'),
}
-print('check_functions = [')
+headers = []
+functions = []
+sizes = []
+with open(sys.argv[1]) as f:
+ for line in f:
+ line = line.strip()
+ arr = line.split()
+
+ # Check for headers.
+ if line.startswith('#mesondefine') and line.endswith('_H'):
+ token = line.split()[1]
+ tarr = token.split('_')[1:-1]
+ tarr = [x.lower() for x in tarr]
+ hname = '/'.join(tarr) + '.h'
+ headers.append((token, hname))
+
+ # Check for functions.
+ try:
+ token = arr[1]
+ if token in function_data:
+ fdata = function_data[token]
+ functions.append((token, fdata[0], fdata[1]))
+ elif token.startswith('HAVE_') and not token.endswith('_H'):
+ functions.append((token, ))
+ except Exception:
+ pass
+
+ # Check for sizeof tests.
+ if len(arr) != 2:
+ continue
+ elem = arr[1]
+ if elem.startswith('SIZEOF_'):
+ typename = elem.split('_', 1)[1] \
+ .replace('_P', '*') \
+ .replace('_', ' ') \
+ .lower() \
+ .replace('size t', 'size_t')
+ sizes.append((elem, typename))
-for line in open(sys.argv[1]):
- try:
- token = line.split()[1]
- if token in function_data:
- fdata = function_data[token]
- print(" ['%s', '%s', '#include<%s>']," % (token, fdata[0], fdata[1]))
- elif token.startswith('HAVE_') and not token.endswith('_H'):
- print('# check token', token)
- except Exception:
- pass
+print('''cc = meson.get_compiler('c')
+cdata = configuration_data()''')
+
+# Convert header checks.
+
+print('check_headers = [')
+for token, hname in headers:
+ print(" ['%s', '%s']," % (token, hname))
+print(']\n')
+
+print('''foreach h : check_headers
+ if cc.has_header(h.get(1))
+ cdata.set(h.get(0), 1)
+ endif
+endforeach
+''')
+
+# Convert function checks.
+
+print('check_functions = [')
+for func in functions:
+    if len(func) == 3:
+        token, fdata0, fdata1 = func
+        print(" ['%s', '%s', '#include<%s>']," % (token, fdata0, fdata1))
+    else:
+        print('# check token', func[0])
print(']\n')
print('''foreach f : check_functions
@@ -265,14 +296,8 @@ endforeach
# Convert sizeof checks.
-for line in open(sys.argv[1]):
- arr = line.strip().split()
- if len(arr) != 2:
- continue
- elem = arr[1]
- if elem.startswith('SIZEOF_'):
- typename = elem.split('_', 1)[1].replace('_P', '*').replace('_', ' ').lower().replace('size t', 'size_t')
- print("cdata.set('%s', cc.sizeof('%s'))" % (elem, typename))
+for elem, typename in sizes:
+ print("cdata.set('%s', cc.sizeof('%s'))" % (elem, typename))
print('''
configure_file(input : 'config.h.meson',
diff --git a/tools/cmake2meson.py b/tools/cmake2meson.py
index 098a6e0..7465d45 100755
--- a/tools/cmake2meson.py
+++ b/tools/cmake2meson.py
@@ -252,39 +252,46 @@ class Converter:
subdir = self.cmake_root
cfile = os.path.join(subdir, 'CMakeLists.txt')
try:
- cmakecode = open(cfile).read()
+ with open(cfile) as f:
+ cmakecode = f.read()
except FileNotFoundError:
print('\nWarning: No CMakeLists.txt in', subdir, '\n')
return
p = Parser(cmakecode)
- outfile = open(os.path.join(subdir, 'meson.build'), 'w')
- for t in p.parse():
- if t.name == 'add_subdirectory':
- #print('\nRecursing to subdir', os.path.join(self.cmake_root, t.args[0].value), '\n')
- self.convert(os.path.join(subdir, t.args[0].value))
- #print('\nReturning to', self.cmake_root, '\n')
- self.write_entry(outfile, t)
+ with open(os.path.join(subdir, 'meson.build'), 'w') as outfile:
+ for t in p.parse():
+ if t.name == 'add_subdirectory':
+ # print('\nRecursing to subdir',
+ # os.path.join(self.cmake_root, t.args[0].value),
+ # '\n')
+ self.convert(os.path.join(subdir, t.args[0].value))
+ # print('\nReturning to', self.cmake_root, '\n')
+ self.write_entry(outfile, t)
if subdir == self.cmake_root and len(self.options) > 0:
self.write_options()
def write_options(self):
- optfile = open(os.path.join(self.cmake_root, 'meson_options.txt'), 'w')
- for o in self.options:
- (optname, description, default) = o
- if default is None:
- defaultstr = ''
- else:
- if default == 'OFF':
- typestr = ' type : boolean,'
- default = 'false'
- elif default == 'ON':
- default = 'true'
- typestr = ' type : boolean,'
+ filename = os.path.join(self.cmake_root, 'meson_options.txt')
+ with open(filename, 'w') as optfile:
+ for o in self.options:
+ (optname, description, default) = o
+ if default is None:
+ defaultstr = ''
else:
- typestr = ' type : string,'
- defaultstr = ' value : %s,' % default
- line = "option(%s,%s%s description : '%s')\n" % (optname, typestr, defaultstr, description)
- optfile.write(line)
+ if default == 'OFF':
+ typestr = ' type : boolean,'
+ default = 'false'
+ elif default == 'ON':
+ default = 'true'
+ typestr = ' type : boolean,'
+ else:
+ typestr = ' type : string,'
+ defaultstr = ' value : %s,' % default
+ line = "option(%s,%s%s description : '%s')\n" % (optname,
+ typestr,
+ defaultstr,
+ description)
+ optfile.write(line)
if __name__ == '__main__':
if len(sys.argv) != 2: