path: root/mesonbuild/backend/backends.py
blob: 31ddfb4571e70c1a115a0f9fc0e59717d37e868a

# Copyright 2012-2016 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os, pickle, re
import textwrap
from .. import build
from .. import dependencies
from .. import mesonlib
from .. import mlog
import json
import subprocess
from ..mesonlib import MachineChoice, MesonException, OrderedSet, OptionOverrideProxy
from ..mesonlib import classify_unity_sources, unholder
from ..mesonlib import File
from ..compilers import CompilerArgs, VisualStudioLikeCompiler
from ..interpreter import Interpreter
from collections import OrderedDict
import shlex
from functools import lru_cache
import typing as T


class CleanTrees:
    '''
    Directories output by custom targets that have to be manually cleaned
    because on Linux `ninja clean` only deletes empty directories.
    '''
    def __init__(self, build_dir, trees):
        self.build_dir = build_dir
        self.trees = trees

class InstallData:
    def __init__(self, source_dir, build_dir, prefix, strip_bin,
                 install_umask, mesonintrospect):
        self.source_dir = source_dir
        self.build_dir = build_dir
        self.prefix = prefix
        self.strip_bin = strip_bin
        self.install_umask = install_umask
        self.targets = []
        self.headers = []
        self.man = []
        self.data = []
        self.po_package_name = ''
        self.po = []
        self.install_scripts = []
        self.install_subdirs = []
        self.mesonintrospect = mesonintrospect

class TargetInstallData:
    def __init__(self, fname, outdir, aliases, strip, install_name_mappings, install_rpath, install_mode, optional=False):
        self.fname = fname
        self.outdir = outdir
        self.aliases = aliases
        self.strip = strip
        self.install_name_mappings = install_name_mappings
        self.install_rpath = install_rpath
        self.install_mode = install_mode
        self.optional = optional

class ExecutableSerialisation:
    def __init__(self, cmd_args, env=None, exe_wrapper=None,
                 workdir=None, extra_paths=None, capture=None):
        self.cmd_args = cmd_args
        self.env = env or {}
        if exe_wrapper is not None:
            assert isinstance(exe_wrapper, dependencies.ExternalProgram)
        self.exe_runner = exe_wrapper
        self.workdir = workdir
        self.extra_paths = extra_paths
        self.capture = capture

class TestSerialisation:
    def __init__(self, name: str, project: str, suite: str, fname: T.List[str],
                 is_cross_built: bool, exe_wrapper: T.Optional[build.Executable],
                 needs_exe_wrapper: bool, is_parallel: bool, cmd_args: T.List[str],
                 env: build.EnvironmentVariables, should_fail: bool,
                 timeout: T.Optional[int], workdir: T.Optional[str],
                 extra_paths: T.List[str], protocol: str, priority: int):
        self.name = name
        self.project_name = project
        self.suite = suite
        self.fname = fname
        self.is_cross_built = is_cross_built
        if exe_wrapper is not None:
            assert isinstance(exe_wrapper, dependencies.ExternalProgram)
        self.exe_runner = exe_wrapper
        self.is_parallel = is_parallel
        self.cmd_args = cmd_args
        self.env = env
        self.should_fail = should_fail
        self.timeout = timeout
        self.workdir = workdir
        self.extra_paths = extra_paths
        self.protocol = protocol
        self.priority = priority
        self.needs_exe_wrapper = needs_exe_wrapper

def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional[Interpreter] = None) -> T.Optional['Backend']:
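    '''Return the backend implementation for the given name, or None if the
    name is not recognized.'''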
    if backend == 'ninja':
        from . import ninjabackend
        return ninjabackend.NinjaBackend(build, interpreter)
    elif backend == 'vs':
        from . import vs2010backend
        return vs2010backend.autodetect_vs_version(build, interpreter)
    elif backend == 'vs2010':
        from . import vs2010backend
        return vs2010backend.Vs2010Backend(build, interpreter)
    elif backend == 'vs2015':
        from . import vs2015backend
        return vs2015backend.Vs2015Backend(build, interpreter)
    elif backend == 'vs2017':
        from . import vs2017backend
        return vs2017backend.Vs2017Backend(build, interpreter)
    elif backend == 'vs2019':
        from . import vs2019backend
        return vs2019backend.Vs2019Backend(build, interpreter)
    elif backend == 'xcode':
        from . import xcodebackend
        return xcodebackend.XCodeBackend(build, interpreter)
    return None

# This class contains the basic functionality that is needed by all backends.
# Feel free to move stuff in and out of it as you see fit.
class Backend:
    def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
        # Make it possible to construct a dummy backend
        # This is used for introspection without a build directory
        if build is None:
            self.environment = None
            return
        self.build = build
        self.interpreter = interpreter
        self.environment = build.environment
        self.processed_targets = {}
        self.build_dir = self.environment.get_build_dir()
        self.source_dir = self.environment.get_source_dir()
        self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(),
                                             self.environment.get_build_dir())

    def get_target_filename(self, t):
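        '''Return the main output filename of a target relative to the build
        root. Custom targets may have several outputs; only the first one is
        used here.'''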
        if isinstance(t, build.CustomTarget):
            if len(t.get_outputs()) != 1:
                mlog.warning('custom_target {!r} has more than one output! '
                             'Using the first one.'.format(t.name))
            filename = t.get_outputs()[0]
        elif isinstance(t, build.CustomTargetIndex):
            filename = t.get_outputs()[0]
        else:
            assert(isinstance(t, build.BuildTarget))
            filename = t.get_filename()
        return os.path.join(self.get_target_dir(t), filename)

    def get_target_filename_abs(self, target):
        return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))

    def get_base_options_for_target(self, target):
        return OptionOverrideProxy(target.option_overrides_base,
                                   self.environment.coredata.builtins,
                                   self.environment.coredata.base_options)

    def get_compiler_options_for_target(self, target):
        comp_reg = self.environment.coredata.compiler_options[target.for_machine]
        comp_override = target.option_overrides_compiler
        return {
            lang: OptionOverrideProxy(comp_override[lang], comp_reg[lang])
            for lang in set(comp_reg.keys()) | set(comp_override.keys())
        }

    def get_option_for_target(self, option_name, target):
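        '''Return the value of a builtin option, giving precedence to a
        per-target override if one is set.'''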
        if option_name in target.option_overrides_base:
            override = target.option_overrides_base[option_name]
            return self.environment.coredata.validate_option_value(option_name, override)
        return self.environment.coredata.get_builtin_option(option_name, target.subproject)

    def get_target_filename_for_linking(self, target):
        # On some platforms (msvc for instance), the file that is used for
        # dynamic linking is not the same as the dynamic library itself. This
        # file is called an import library, and we want to link against that.
        # On all other platforms, we link to the library directly.
        if isinstance(target, build.SharedLibrary):
            link_lib = target.get_import_filename() or target.get_filename()
            return os.path.join(self.get_target_dir(target), link_lib)
        elif isinstance(target, build.StaticLibrary):
            return os.path.join(self.get_target_dir(target), target.get_filename())
        elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)):
            if not target.is_linkable_target():
                raise MesonException('Tried to link against custom target "%s", which is not linkable.' % target.name)
            return os.path.join(self.get_target_dir(target), target.get_filename())
        elif isinstance(target, build.Executable):
            if target.import_filename:
                return os.path.join(self.get_target_dir(target), target.get_import_filename())
            else:
                return None
        raise AssertionError('BUG: Tried to link to {!r} which is not linkable'.format(target))

    @lru_cache(maxsize=None)
    def get_target_dir(self, target):
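        # With the 'mirror' layout the build tree mirrors the source tree;
        # any other layout places every target's output in 'meson-out'.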
        if self.environment.coredata.get_builtin_option('layout') == 'mirror':
            dirname = target.get_subdir()
        else:
            dirname = 'meson-out'
        return dirname

    def get_target_dir_relative_to(self, t, o):
        '''Get a target dir relative to another target's directory'''
        target_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
        othert_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(o))
        return os.path.relpath(target_dir, othert_dir)

    def get_target_source_dir(self, target):
        # if target dir is empty, avoid extraneous trailing / from os.path.join()
        target_dir = self.get_target_dir(target)
        if target_dir:
            return os.path.join(self.build_to_src, target_dir)
        return self.build_to_src

    def get_target_private_dir(self, target):
        return os.path.join(self.get_target_dir(target), target.get_id())

    def get_target_private_dir_abs(self, target):
        return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target))

    @lru_cache(maxsize=None)
    def get_target_generated_dir(self, target, gensrc, src):
        """
        Takes a BuildTarget, a generator source (CustomTarget or GeneratedList),
        and a generated source filename.
        Returns the full path of the generated source relative to the build root
        """
        # CustomTarget generators output to the build dir of the CustomTarget
        if isinstance(gensrc, (build.CustomTarget, build.CustomTargetIndex)):
            return os.path.join(self.get_target_dir(gensrc), src)
        # GeneratedList generators output to the private build directory of the
        # target that the GeneratedList is used in
        return os.path.join(self.get_target_private_dir(target), src)

    def get_unity_source_file(self, target, suffix, number):
        # There is a potential conflict here, but it is unlikely that
        # anyone both enables unity builds and has a file called foo-unity.cpp.
        osrc = '{}-unity{}.{}'.format(target.name, number, suffix)
        return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc)

    def generate_unity_files(self, target, unity_src):
        abs_files = []
        result = []
        compsrcs = classify_unity_sources(target.compilers.values(), unity_src)
        unity_size = self.get_option_for_target('unity_size', target)

        def init_language_file(suffix, unity_file_number):
            unity_src = self.get_unity_source_file(target, suffix, unity_file_number)
            outfileabs = unity_src.absolute_path(self.environment.get_source_dir(),
                                                 self.environment.get_build_dir())
            outfileabs_tmp = outfileabs + '.tmp'
            abs_files.append(outfileabs)
            outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp)
            if not os.path.exists(outfileabs_tmp_dir):
                os.makedirs(outfileabs_tmp_dir)
            result.append(unity_src)
            return open(outfileabs_tmp, 'w')

        # For each language, generate unity source files and return the list
        for comp, srcs in compsrcs.items():
            files_in_current = unity_size + 1
            unity_file_number = 0
            ofile = None
            for src in srcs:
                if files_in_current >= unity_size:
                    if ofile:
                        ofile.close()
                    ofile = init_language_file(comp.get_default_suffix(), unity_file_number)
                    unity_file_number += 1
                    files_in_current = 0
                ofile.write('#include<%s>\n' % src)
                files_in_current += 1
            if ofile:
                ofile.close()

        for x in abs_files:
            mesonlib.replace_if_different(x, x + '.tmp')
        return result

    def relpath(self, todir, fromdir):
        return os.path.relpath(os.path.join('dummyprefixdir', todir),
                               os.path.join('dummyprefixdir', fromdir))

    def flatten_object_list(self, target, proj_dir_to_build_root=''):
        obj_list = self._flatten_object_list(target, target.get_objects(), proj_dir_to_build_root)
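        # Drop duplicate object files while keeping their order.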
        return list(dict.fromkeys(obj_list))

    def _flatten_object_list(self, target, objects, proj_dir_to_build_root):
        obj_list = []
        for obj in objects:
            if isinstance(obj, str):
                o = os.path.join(proj_dir_to_build_root,
                                 self.build_to_src, target.get_subdir(), obj)
                obj_list.append(o)
            elif isinstance(obj, mesonlib.File):
                obj_list.append(obj.rel_to_builddir(self.build_to_src))
            elif isinstance(obj, build.ExtractedObjects):
                if obj.recursive:
                    obj_list += self._flatten_object_list(obj.target, obj.objlist, proj_dir_to_build_root)
                obj_list += self.determine_ext_objs(obj, proj_dir_to_build_root)
            else:
                raise MesonException('Unknown data type in object list.')
        return obj_list

    def as_meson_exe_cmdline(self, tname, exe, cmd_args, workdir=None,
                             for_machine=MachineChoice.BUILD,
                             extra_bdeps=None, capture=None, force_serialize=False):
        '''
        Serialize an executable for running with a generator or a custom target
        '''
        import hashlib
        machine = self.environment.machines[for_machine]
        if machine.is_windows() or machine.is_cygwin():
            extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps or [])
        else:
            extra_paths = []

        if isinstance(exe, dependencies.ExternalProgram):
            exe_cmd = exe.get_command()
            exe_for_machine = exe.for_machine
        elif isinstance(exe, (build.BuildTarget, build.CustomTarget)):
            exe_cmd = [self.get_target_filename_abs(exe)]
            exe_for_machine = exe.for_machine
        else:
            exe_cmd = [exe]
            exe_for_machine = MachineChoice.BUILD

        is_cross_built = not self.environment.machines.matches_build_machine(exe_for_machine)
        if is_cross_built and self.environment.need_exe_wrapper():
            exe_wrapper = self.environment.get_exe_wrapper()
            if not exe_wrapper.found():
                msg = 'The exe_wrapper {!r} defined in the cross file is ' \
                      'needed by target {!r}, but was not found. Please ' \
                      'check the command and/or add it to PATH.'
                raise MesonException(msg.format(exe_wrapper.name, tname))
        else:
            if exe_cmd[0].endswith('.jar'):
                exe_cmd = ['java', '-jar'] + exe_cmd
            elif exe_cmd[0].endswith('.exe') and not (mesonlib.is_windows() or mesonlib.is_cygwin()):
                exe_cmd = ['mono'] + exe_cmd
            exe_wrapper = None

        force_serialize = force_serialize or extra_paths or workdir or \
            exe_wrapper or any('\n' in c for c in cmd_args)
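        # When nothing forces serialization, return None (no serialization
        # needed) unless output capture was requested, which
        # 'meson --internal exe --capture' handles without a pickled data file.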
        if not force_serialize:
            if not capture:
                return None
            return (self.environment.get_build_command() +
                    ['--internal', 'exe', '--capture', capture, '--'] + exe_cmd + cmd_args)

        workdir = workdir or self.environment.get_build_dir()
        env = {}
        if isinstance(exe, (dependencies.ExternalProgram,
                            build.BuildTarget, build.CustomTarget)):
            basename = exe.name
        else:
            basename = os.path.basename(exe)

        # Can't just use exe.name here; it will likely be run more than once
        # Take a digest of the cmd args, env, workdir, and capture. This avoids
        # collisions and also makes the name deterministic over regenerations
        # which avoids a rebuild by Ninja because the cmdline stays the same.
        data = bytes(str(sorted(env.items())) + str(cmd_args) + str(workdir) + str(capture),
                     encoding='utf-8')
        digest = hashlib.sha1(data).hexdigest()
        scratch_file = 'meson_exe_{0}_{1}.dat'.format(basename, digest)
        exe_data = os.path.join(self.environment.get_scratch_dir(), scratch_file)
        with open(exe_data, 'wb') as f:
            es = ExecutableSerialisation(exe_cmd + cmd_args, env,
                                         exe_wrapper, workdir,
                                         extra_paths, capture)
            pickle.dump(es, f)
        return self.environment.get_build_command() + ['--internal', 'exe', '--unpickle', exe_data]

    def serialize_tests(self):
        test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat')
        with open(test_data, 'wb') as datafile:
            self.write_test_file(datafile)
        benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat')
        with open(benchmark_data, 'wb') as datafile:
            self.write_benchmark_file(datafile)
        return test_data, benchmark_data

    def determine_linker_and_stdlib_args(self, target):
        '''
        If we're building a static library, there is only one static linker.
        Otherwise, we query the target for the dynamic linker.
        '''
        if isinstance(target, build.StaticLibrary):
            return self.build.static_linker[target.for_machine], []
        l, stdlib_args = target.get_clink_dynamic_linker_and_stdlibs()
        return l, stdlib_args

    @staticmethod
    def _libdir_is_system(libdir, compilers, env):
        libdir = os.path.normpath(libdir)
        for cc in compilers.values():
            if libdir in cc.get_library_dirs(env):
                return True
        return False

    def rpaths_for_bundled_shared_libraries(self, target, exclude_system=True):
        paths = []
        for dep in target.external_deps:
            if not isinstance(dep, (dependencies.ExternalLibrary, dependencies.PkgConfigDependency)):
                continue
            la = dep.link_args
            if len(la) != 1 or not os.path.isabs(la[0]):
                continue
            # The only link argument is an absolute path to a library file.
            libpath = la[0]
            libdir = os.path.dirname(libpath)
            if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment):
                # No point in adding system paths.
                continue
            # Windows doesn't support rpaths, but we use this function to
            # emulate rpaths by setting PATH, so also accept DLLs here
            if os.path.splitext(libpath)[1] not in ['.dll', '.lib', '.so', '.dylib']:
                continue
            if libdir.startswith(self.environment.get_source_dir()):
                rel_to_src = libdir[len(self.environment.get_source_dir()) + 1:]
                assert not os.path.isabs(rel_to_src), 'rel_to_src: {} is absolute'.format(rel_to_src)
                paths.append(os.path.join(self.build_to_src, rel_to_src))
            else:
                paths.append(libdir)
        return paths

    def determine_rpath_dirs(self, target):
        if self.environment.coredata.get_builtin_option('layout') == 'mirror':
            result = target.get_link_dep_subdirs()
        else:
            result = OrderedSet()
            result.add('meson-out')
        result.update(self.rpaths_for_bundled_shared_libraries(target))
        return tuple(result)

    def object_filename_from_source(self, target, source):
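        '''Map a source file to the name of the object file it compiles to.
        Path separators are flattened to underscores so the object can live
        directly in the target's private directory.'''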
        assert isinstance(source, mesonlib.File)
        build_dir = self.environment.get_build_dir()
        rel_src = source.rel_to_builddir(self.build_to_src)

        # foo.vala files compile down to foo.c and then foo.c.o, not foo.vala.o
        if rel_src.endswith(('.vala', '.gs')):
            # See description in generate_vala_compile for this logic.
            if source.is_built:
                if os.path.isabs(rel_src):
                    rel_src = rel_src[len(build_dir) + 1:]
                rel_src = os.path.relpath(rel_src, self.get_target_private_dir(target))
            else:
                rel_src = os.path.basename(rel_src)
            # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
            source = 'meson-generated_' + rel_src[:-5] + '.c'
        elif source.is_built:
            if os.path.isabs(rel_src):
                rel_src = rel_src[len(build_dir) + 1:]
            targetdir = self.get_target_private_dir(target)
            # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
            source = 'meson-generated_' + os.path.relpath(rel_src, targetdir)
        else:
            if os.path.isabs(rel_src):
                # Use the absolute path directly to avoid file name conflicts
                source = rel_src
            else:
                source = os.path.relpath(os.path.join(build_dir, rel_src),
                                         os.path.join(self.environment.get_source_dir(), target.get_subdir()))
        machine = self.environment.machines[target.for_machine]
        return source.replace('/', '_').replace('\\', '_') + '.' + machine.get_object_suffix()

    def determine_ext_objs(self, extobj, proj_dir_to_build_root):
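        '''Return the paths of the object files produced for the sources
        covered by this ExtractedObjects instance.'''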
        result = []

        # Merge sources and generated sources
        sources = list(extobj.srclist)
        for gensrc in extobj.genlist:
            for s in gensrc.get_outputs():
                path = self.get_target_generated_dir(extobj.target, gensrc, s)
                dirpart, fnamepart = os.path.split(path)
                sources.append(File(True, dirpart, fnamepart))

        # Filter out headers and all non-source files
        filtered_sources = []
        for s in sources:
            if self.environment.is_source(s) and not self.environment.is_header(s):
                filtered_sources.append(s)
            elif self.environment.is_object(s):
                result.append(s.relative_name())
        sources = filtered_sources

        # extobj could contain only objects and no sources
        if not sources:
            return result

        targetdir = self.get_target_private_dir(extobj.target)

        # With unity builds, sources don't map directly to objects,
        # we only support extracting all the objects in this mode,
        # so just return all object files.
        if self.is_unity(extobj.target):
            compsrcs = classify_unity_sources(extobj.target.compilers.values(), sources)
            sources = []
            unity_size = self.get_option_for_target('unity_size', extobj.target)
            for comp, srcs in compsrcs.items():
                for i in range(len(srcs) // unity_size + 1):
                    osrc = self.get_unity_source_file(extobj.target,
                                                      comp.get_default_suffix(), i)
                    sources.append(osrc)

        for osrc in sources:
            objname = self.object_filename_from_source(extobj.target, osrc)
            objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
            result.append(objpath)

        return result

    def get_pch_include_args(self, compiler, target):
        args = []
        pchpath = self.get_target_private_dir(target)
        includeargs = compiler.get_include_args(pchpath, False)
        p = target.get_pch(compiler.get_language())
        if p:
            args += compiler.get_pch_use_args(pchpath, p[0])
        return includeargs + args

    def create_msvc_pch_implementation(self, target, lang, pch_header):
        # We have to include the language in the file name, otherwise
        # pch.c and pch.cpp will both end up as pch.obj in VS backends.
        impl_name = 'meson_pch-%s.%s' % (lang, lang)
        pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name)
        # Make sure to prepend the build dir, since the working directory is
        # not defined. Otherwise, we might create the file in the wrong path.
        pch_file = os.path.join(self.build_dir, pch_rel_to_build)
        os.makedirs(os.path.dirname(pch_file), exist_ok=True)

        content = '#include "%s"' % os.path.basename(pch_header)
        pch_file_tmp = pch_file + '.tmp'
        with open(pch_file_tmp, 'w') as f:
            f.write(content)
        mesonlib.replace_if_different(pch_file, pch_file_tmp)
        return pch_rel_to_build

    @staticmethod
    def escape_extra_args(compiler, args):
        # No extra escaping/quoting needed when not running on Windows
        if not mesonlib.is_windows():
            return args
        extra_args = []
        # Compiler-specific escaping is needed for -D args but not for any others
        if isinstance(compiler, VisualStudioLikeCompiler):
            # MSVC needs escaping when a -D argument ends in \ or \"
            for arg in args:
                if arg.startswith('-D') or arg.startswith('/D'):
                    # Without extra escaping for these two, the next character
                    # gets eaten
                    if arg.endswith('\\'):
                        arg += '\\'
                    elif arg.endswith('\\"'):
                        arg = arg[:-2] + '\\\\"'
                extra_args.append(arg)
        else:
            # MinGW GCC needs all backslashes in defines to be doubly-escaped
            # FIXME: Not sure about Cygwin or Clang
            for arg in args:
                if arg.startswith('-D') or arg.startswith('/D'):
                    arg = arg.replace('\\', '\\\\')
                extra_args.append(arg)
        return extra_args

    def generate_basic_compiler_args(self, target, compiler, no_warn_args=False):
        # Create an empty commands list, and start adding arguments from
        # various sources in the order in which they must override each other
        # starting from hard-coded defaults followed by build options and so on.
        commands = CompilerArgs(compiler)

        copt_proxy = self.get_compiler_options_for_target(target)[compiler.language]
        # First, the trivial ones that are impossible to override.
        #
        # Add -nostdinc/-nostdinc++ if needed; can't be overridden
        commands += self.get_cross_stdlib_args(target, compiler)
        # Add things like /NOLOGO or -pipe; usually can't be overridden
        commands += compiler.get_always_args()
        # Only add warning-flags by default if the buildtype enables it, and if
        # we weren't explicitly asked to not emit warnings (for Vala, f.ex)
        if no_warn_args:
            commands += compiler.get_no_warn_args()
        elif self.get_option_for_target('buildtype', target) != 'plain':
            commands += compiler.get_warn_args(self.get_option_for_target('warning_level', target))
        # Add -Werror if werror=true is set in the build options set on the
        # command-line or default_options inside project(). This only sets the
        # action to be done for warnings if/when they are emitted, so it's ok
        # to set it after get_no_warn_args() or get_warn_args().
        if self.get_option_for_target('werror', target):
            commands += compiler.get_werror_args()
        # Add compile args for c_* or cpp_* build options set on the
        # command-line or default_options inside project().
        commands += compiler.get_option_compile_args(copt_proxy)
        # Add buildtype args: optimization level, debugging, etc.
        commands += compiler.get_buildtype_args(self.get_option_for_target('buildtype', target))
        commands += compiler.get_optimization_args(self.get_option_for_target('optimization', target))
        commands += compiler.get_debug_args(self.get_option_for_target('debug', target))
        # Add compile args added using add_project_arguments()
        commands += self.build.get_project_args(compiler, target.subproject, target.for_machine)
        # Add compile args added using add_global_arguments()
        # These override per-project arguments
        commands += self.build.get_global_args(compiler, target.for_machine)
        # Compile args added from the env: CFLAGS/CXXFLAGS, etc, or the cross
        # file. We want these to override all the defaults, but not the
        # per-target compile args.
        commands += self.environment.coredata.get_external_args(target.for_machine, compiler.get_language())
        # Always set -fPIC for shared libraries
        if isinstance(target, build.SharedLibrary):
            commands += compiler.get_pic_args()
        # Set -fPIC for static libraries by default unless explicitly disabled
        if isinstance(target, build.StaticLibrary) and target.pic:
            commands += compiler.get_pic_args()
        if isinstance(target, build.Executable) and target.pie:
            commands += compiler.get_pie_args()
        # Add compile args needed to find external dependencies. Link args are
        # added while generating the link command.
        # NOTE: We must preserve the order in which external deps are
        # specified, so we reverse the list before iterating over it.
        for dep in reversed(target.get_external_deps()):
            if not dep.found():
                continue

            if compiler.language == 'vala':
                if isinstance(dep, dependencies.PkgConfigDependency):
                    if dep.name == 'glib-2.0' and dep.version_reqs is not None:
                        for req in dep.version_reqs:
                            if req.startswith(('>=', '==')):
                                commands += ['--target-glib', req[2:]]
                                break
                    commands += ['--pkg', dep.name]
                elif isinstance(dep, dependencies.ExternalLibrary):
                    commands += dep.get_link_args('vala')
            else:
                commands += compiler.get_dependency_compile_args(dep)
            # Qt needs -fPIC for executables
            # XXX: We should move to -fPIC for all executables
            if isinstance(target, build.Executable):
                commands += dep.get_exe_args(compiler)
            # For 'automagic' deps: Boost and GTest. Also dependency('threads').
            # pkg-config puts the thread flags itself via `Cflags:`
        # Fortran requires extra include directives.
        if compiler.language == 'fortran':
            for lt in target.link_targets:
                priv_dir = self.get_target_private_dir(lt)
                commands += compiler.get_include_args(priv_dir, False)
        return commands

    def build_target_link_arguments(self, compiler, deps):
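        '''Convert link dependencies into linker arguments, skipping any
        dependency that has nothing to link against (e.g. an executable
        without an import library).'''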
        args = []
        for d in deps:
            if not (d.is_linkable_target()):
                raise RuntimeError('Tried to link with a non-library target "%s".' % d.get_basename())
            arg = self.get_target_filename_for_linking(d)
            if not arg:
                continue
            if compiler.get_language() == 'd':
                arg = '-Wl,' + arg
            else:
                arg = compiler.get_linker_lib_prefix() + arg
            args.append(arg)
        return args

    def get_mingw_extra_paths(self, target):
        paths = OrderedSet()
        # The cross bindir
        root = self.environment.properties[target.for_machine].get_root()
        if root:
            paths.add(os.path.join(root, 'bin'))
        # The toolchain bindir
        sys_root = self.environment.properties[target.for_machine].get_sys_root()
        if sys_root:
            paths.add(os.path.join(sys_root, 'bin'))
        # Get program and library dirs from all target compilers
        if isinstance(target, build.BuildTarget):
            for cc in target.compilers.values():
                paths.update(cc.get_program_dirs(self.environment))
                paths.update(cc.get_library_dirs(self.environment))
        return list(paths)

    def determine_windows_extra_paths(self, target: T.Union[build.BuildTarget, str], extra_bdeps):
        '''On Windows there is no such thing as an rpath.
        We must determine all locations of DLLs that this exe
        links to and return them so they can be used in unit
        tests.'''
        result = set()
        prospectives = set()
        if isinstance(target, build.BuildTarget):
            prospectives.update(target.get_transitive_link_deps())
            # External deps
            for deppath in self.rpaths_for_bundled_shared_libraries(target, exclude_system=False):
                result.add(os.path.normpath(os.path.join(self.environment.get_build_dir(), deppath)))
        for bdep in extra_bdeps:
            prospectives.update(bdep.get_transitive_link_deps())
        # Internal deps
        for ld in prospectives:
            if ld == '' or ld == '.':
                continue
            dirseg = os.path.join(self.environment.get_build_dir(), self.get_target_dir(ld))
            result.add(dirseg)
        if (isinstance(target, build.BuildTarget) and
                not self.environment.machines.matches_build_machine(target.for_machine)):
            result.update(self.get_mingw_extra_paths(target))
        return list(result)

    def write_benchmark_file(self, datafile):
        self.write_test_serialisation(self.build.get_benchmarks(), datafile)

    def write_test_file(self, datafile):
        self.write_test_serialisation(self.build.get_tests(), datafile)

    def create_test_serialisation(self, tests):
        arr = []
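        # Higher-priority tests come first in the serialized list.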
        for t in sorted(tests, key=lambda tst: -1 * tst.priority):
            exe = t.get_exe()
            if isinstance(exe, dependencies.ExternalProgram):
                cmd = exe.get_command()
            else:
                cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(t.get_exe()))]
            if isinstance(exe, (build.BuildTarget, dependencies.ExternalProgram)):
                test_for_machine = exe.for_machine
            else:
                # E.g. an external verifier or simulator program run on a generated executable.
                # Can always be run without a wrapper.
                test_for_machine = MachineChoice.BUILD
            is_cross = not self.environment.machines.matches_build_machine(test_for_machine)
            if is_cross and self.environment.need_exe_wrapper():
                exe_wrapper = self.environment.get_exe_wrapper()
            else:
                exe_wrapper = None
            machine = self.environment.machines[exe.for_machine]
            if machine.is_windows() or machine.is_cygwin():
                extra_bdeps = []
                if isinstance(exe, build.CustomTarget):
                    extra_bdeps = exe.get_transitive_build_target_deps()
                extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps)
            else:
                extra_paths = []
            cmd_args = []
            for a in unholder(t.cmd_args):
                if isinstance(a, build.BuildTarget):
                    extra_paths += self.determine_windows_extra_paths(a, [])
                if isinstance(a, mesonlib.File):
                    a = os.path.join(self.environment.get_build_dir(), a.rel_to_builddir(self.build_to_src))
                    cmd_args.append(a)
                elif isinstance(a, str):
                    cmd_args.append(a)
                elif isinstance(a, build.Target):
                    cmd_args.append(self.construct_target_rel_path(a, t.workdir))
                else:
                    raise MesonException('Bad object in test command.')
            ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross,
                                   exe_wrapper, self.environment.need_exe_wrapper(),
                                   t.is_parallel, cmd_args, t.env,
                                   t.should_fail, t.timeout, t.workdir,
                                   extra_paths, t.protocol, t.priority)
            arr.append(ts)
        return arr

    def write_test_serialisation(self, tests, datafile):
        pickle.dump(self.create_test_serialisation(tests), datafile)

    def construct_target_rel_path(self, a, workdir):
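        '''Return the target's filename relative to the given workdir, or
        relative to the build root if no workdir is set.'''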
        if workdir is None:
            return self.get_target_filename(a)
        assert os.path.isabs(workdir)
        abs_path = self.get_target_filename_abs(a)
        return os.path.relpath(abs_path, workdir)

    def generate_depmf_install(self, d):
        if self.build.dep_manifest_name is None:
            return
        ifilename = os.path.join(self.environment.get_build_dir(), 'depmf.json')
        ofilename = os.path.join(self.environment.get_prefix(), self.build.dep_manifest_name)
        mfobj = {'type': 'dependency manifest', 'version': '1.0', 'projects': self.build.dep_manifest}
        with open(ifilename, 'w') as f:
            f.write(json.dumps(mfobj))
        # Copy file from, to, and with mode unchanged
        d.data.append([ifilename, ofilename, None])

    def get_regen_filelist(self):
        '''List of all files whose alteration means that the build
        definition needs to be regenerated.'''
        deps = [os.path.join(self.build_to_src, df)
                for df in self.interpreter.get_build_def_files()]
        if self.environment.is_cross_build():
            deps.extend(self.environment.coredata.cross_files)
        deps.extend(self.environment.coredata.config_files)
        deps.append('meson-private/coredata.dat')
        self.check_clock_skew(deps)
        return deps

    def check_clock_skew(self, file_list):
        # If a file that leads to reconfiguration has a time
        # stamp in the future, it will trigger an eternal reconfigure
        # loop.
        import time
        now = time.time()
        for f in file_list:
            absf = os.path.join(self.environment.get_build_dir(), f)
            ftime = os.path.getmtime(absf)
            delta = ftime - now
            # On Windows disk time stamps sometimes point
            # to the future by a minuscule amount, less than
            # 0.001 seconds. I don't know why.
            if delta > 0.001:
                raise MesonException('Clock skew detected. File {} has a time stamp {:.4f}s in the future.'.format(absf, delta))

    def build_target_to_cmd_array(self, bt):
        if isinstance(bt, build.BuildTarget):
            if isinstance(bt, build.Executable) and bt.for_machine is not MachineChoice.BUILD:
                if (self.environment.is_cross_build() and
                        self.environment.exe_wrapper is None and
                        self.environment.need_exe_wrapper()):
                    s = textwrap.dedent('''
                        Cannot use target {} as a generator because it is built for the
                        host machine and no exe wrapper is defined or needs_exe_wrapper is
                        true. You might want to set `native: true` instead to build it for
                        the build machine.'''.format(bt.name))
                    raise MesonException(s)
            arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(bt))]
        else:
            arr = bt.get_command()
        return arr

    def replace_extra_args(self, args, genlist):
        final_args = []
        for a in args:
            if a == '@EXTRA_ARGS@':
                final_args += genlist.get_extra_args()
            else:
                final_args.append(a)
        return final_args

    def replace_outputs(self, args, private_dir, output_list):
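        '''Replace @OUTPUT0@, @OUTPUT1@, ... placeholders with the matching
        entries of output_list, prefixed with private_dir.'''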
        newargs = []
        regex = re.compile(r'@OUTPUT(\d+)@')
        for arg in args:
            m = regex.search(arg)
            while m is not None:
                index = int(m.group(1))
                src = '@OUTPUT%d@' % index
                arg = arg.replace(src, os.path.join(private_dir, output_list[index]))
                m = regex.search(arg)
            newargs.append(arg)
        return newargs

    def get_build_by_default_targets(self):
        result = OrderedDict()
        # Get all build and custom targets that must be built by default
        for name, t in self.build.get_targets().items():
            if t.build_by_default:
                result[name] = t
        # Get all targets used as test executables and arguments. These must
        # also be built by default. XXX: Sometime in the future these should be
        # built only before running tests.
        for t in self.build.get_tests():
            exe = unholder(t.exe)
            if isinstance(exe, (build.CustomTarget, build.BuildTarget)):
                result[exe.get_id()] = exe
            for arg in unholder(t.cmd_args):
                if not isinstance(arg, (build.CustomTarget, build.BuildTarget)):
                    continue
                result[arg.get_id()] = arg
            for dep in t.depends:
                assert isinstance(dep, (build.CustomTarget, build.BuildTarget))
                result[dep.get_id()] = dep
        return result

    @lru_cache(maxsize=None)
    def get_custom_target_provided_by_generated_source(self, generated_source):
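        '''Return those outputs of a generated source (a CustomTarget) that
        are recognized as library files.'''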
        libs = []
        for f in generated_source.get_outputs():
            if self.environment.is_library(f):
                libs.append(os.path.join(self.get_target_dir(generated_source), f))
        return libs

    @lru_cache(maxsize=None)
    def get_custom_target_provided_libraries(self, target):
        libs = []
        for t in target.get_generated_sources():
            if not isinstance(t, build.CustomTarget):
                continue
            l = self.get_custom_target_provided_by_generated_source(t)
            libs = libs + l
        return libs

    def is_unity(self, target):
        optval = self.get_option_for_target('unity', target)
        if optval == 'on' or (optval == 'subprojects' and target.subproject != ''):
            return True
        return False

    def get_custom_target_sources(self, target):
        '''
        Custom target sources can be of various object types: strings, File,
        BuildTarget, even other CustomTargets.
        Returns their paths relative to the build root directory.
        '''
        srcs = []
        for i in unholder(target.get_sources()):
            if isinstance(i, str):
                fname = [os.path.join(self.build_to_src, target.subdir, i)]
            elif isinstance(i, build.BuildTarget):
                fname = [self.get_target_filename(i)]
            elif isinstance(i, (build.CustomTarget, build.CustomTargetIndex)):
                fname = [os.path.join(self.get_target_dir(i), p) for p in i.get_outputs()]
            elif isinstance(i, build.GeneratedList):
                fname = [os.path.join(self.get_target_private_dir(target), p) for p in i.get_outputs()]
            elif isinstance(i, build.ExtractedObjects):
                fname = [os.path.join(self.get_target_private_dir(i.target), p) for p in i.get_outputs(self)]
            else:
                fname = [i.rel_to_builddir(self.build_to_src)]
            if target.absolute_paths:
                fname = [os.path.join(self.environment.get_build_dir(), f) for f in fname]
            srcs += fname
        return srcs

    def get_custom_target_depend_files(self, target, absolute_paths=False):
        deps = []
        for i in target.depend_files:
            if isinstance(i, mesonlib.File):
                if absolute_paths:
                    deps.append(i.absolute_path(self.environment.get_source_dir(),
                                                self.environment.get_build_dir()))
                else:
                    deps.append(i.rel_to_builddir(self.build_to_src))
            else:
                if absolute_paths:
                    deps.append(os.path.join(self.environment.get_source_dir(), target.subdir, i))
                else:
                    deps.append(os.path.join(self.build_to_src, target.subdir, i))
        return deps

    def eval_custom_target_command(self, target, absolute_outputs=False):
        # We want the outputs to be absolute only when using the VS backend
        # XXX: Maybe allow the vs backend to use relative paths too?
        source_root = self.build_to_src
        build_root = '.'
        outdir = self.get_target_dir(target)
        if absolute_outputs:
            source_root = self.environment.get_source_dir()
            build_root = self.environment.get_build_dir()
            outdir = os.path.join(self.environment.get_build_dir(), outdir)
        outputs = []
        for i in target.get_outputs():
            outputs.append(os.path.join(outdir, i))
        inputs = self.get_custom_target_sources(target)
        # Evaluate the command list
        cmd = []
        for i in target.command:
            if isinstance(i, build.BuildTarget):
                cmd += self.build_target_to_cmd_array(i)
                continue
            elif isinstance(i, build.CustomTarget):
                # GIR scanner will attempt to execute this binary but
                # it assumes that it is in path, so always give it a full path.
                tmp = i.get_outputs()[0]
                i = os.path.join(self.get_target_dir(i), tmp)
            elif isinstance(i, mesonlib.File):
                i = i.rel_to_builddir(self.build_to_src)
                if target.absolute_paths:
                    i = os.path.join(self.environment.get_build_dir(), i)
            # FIXME: str types are blindly added ignoring 'target.absolute_paths'
            # because we can't know if they refer to a file or just a string
            elif not isinstance(i, str):
                err_msg = 'Argument {0} is of unknown type {1}'
                raise RuntimeError(err_msg.format(str(i), str(type(i))))
            elif '@SOURCE_ROOT@' in i:
                i = i.replace('@SOURCE_ROOT@', source_root)
            elif '@BUILD_ROOT@' in i:
                i = i.replace('@BUILD_ROOT@', build_root)
            elif '@DEPFILE@' in i:
                if target.depfile is None:
                    msg = 'Custom target {!r} has @DEPFILE@ but no depfile ' \
                          'keyword argument.'.format(target.name)
                    raise MesonException(msg)
                dfilename = os.path.join(outdir, target.depfile)
                i = i.replace('@DEPFILE@', dfilename)
            elif '@PRIVATE_DIR@' in i:
                if target.absolute_paths:
                    pdir = self.get_target_private_dir_abs(target)
                else:
                    pdir = self.get_target_private_dir(target)
                i = i.replace('@PRIVATE_DIR@', pdir)
            elif '@PRIVATE_OUTDIR_' in i:
                match = re.search(r'@PRIVATE_OUTDIR_(ABS_)?([^/\s*]*)@', i)
                if not match:
                    msg = 'Custom target {!r} has an invalid argument {!r}' \
                          ''.format(target.name, i)
                    raise MesonException(msg)
                source = match.group(0)
                if match.group(1) is None and not target.absolute_paths:
                    lead_dir = ''
                else:
                    lead_dir = self.environment.get_build_dir()
                i = i.replace(source, os.path.join(lead_dir, outdir))
            cmd.append(i)
        # Substitute the rest of the template strings
        values = mesonlib.get_filenames_templates_dict(inputs, outputs)
        cmd = mesonlib.substitute_values(cmd, values)
        # This should not be necessary but removing it breaks
        # building GStreamer on Windows. The underlying issue
        # is problems with quoting backslashes on Windows
        # which is the seventh circle of hell. The downside is
        # that this breaks custom targets whose command lines
        # have backslashes. If you try to fix this be sure to
        # check that it does not break GST.
        #
        # The bug causes file paths such as c:\foo to get escaped
        # into c:\\foo.
        #
        # Unfortunately we have not been able to come up with an
        # isolated test case for this so unless you manage to come up
        # with one, the only way is to test the building with Gst's
        # setup. Note this in your MR or ping us and we will get it
        # fixed.
        #
        # https://github.com/mesonbuild/meson/pull/737
        cmd = [i.replace('\\', '/') for i in cmd]
        return inputs, outputs, cmd

    def run_postconf_scripts(self):
        env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(),
               'MESON_BUILD_ROOT': self.environment.get_build_dir(),
               'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in self.environment.get_build_command() + ['introspect']]),
               }
        child_env = os.environ.copy()
        child_env.update(env)

        for s in self.build.postconf_scripts:
            cmd = s['exe'] + s['args']
            subprocess.check_call(cmd, env=child_env)

    def create_install_data(self):
        strip_bin = self.environment.lookup_binary_entry(MachineChoice.HOST, 'strip')
        if strip_bin is None:
            if self.environment.is_cross_build():
                mlog.warning('Cross file does not specify strip binary, result will not be stripped.')
            else:
                # TODO go through all candidates, like others
                strip_bin = [self.environment.default_strip[0]]
        d = InstallData(self.environment.get_source_dir(),
                        self.environment.get_build_dir(),
                        self.environment.get_prefix(),
                        strip_bin,
                        self.environment.coredata.get_builtin_option('install_umask'),
                        self.environment.get_build_command() + ['introspect'])
        self.generate_depmf_install(d)
        self.generate_target_install(d)
        self.generate_header_install(d)
        self.generate_man_install(d)
        self.generate_data_install(d)
        self.generate_custom_install_script(d)
        self.generate_subdir_install(d)
        return d

    def create_install_data_files(self):
        install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat')
        with open(install_data_file, 'wb') as ofile:
            pickle.dump(self.create_install_data(), ofile)

    def generate_target_install(self, d):
        for t in self.build.get_targets().values():
            if not t.should_install():
                continue
            outdirs, custom_install_dir = t.get_install_dir(self.environment)
            # Sanity-check the outputs and install_dirs
            num_outdirs, num_out = len(outdirs), len(t.get_outputs())
            if num_outdirs != 1 and num_outdirs != num_out:
                m = 'Target {!r} has {} outputs: {!r}, but only {} "install_dir"s were found.\n' \
                    "Pass 'false' for outputs that should not be installed and 'true' to\n" \
                    'use the default installation directory for an output.'
                raise MesonException(m.format(t.name, num_out, t.get_outputs(), num_outdirs))
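            # Illustrative example: a target with outputs ['libfoo.so', 'foo.vapi']
            # and install_dir: [true, false] installs only the library, into its
            # default directory.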
            install_mode = t.get_custom_install_mode()
            # Install the target output(s)
            if isinstance(t, build.BuildTarget):
                # In general, stripping static archives is tricky and full of pitfalls.
                # Wholesale stripping of static archives with a command such as
                #
                #   strip libfoo.a
                #
                # is broken, as GNU's strip will remove *every* symbol in a static
                # archive. One solution to this nonintuitive behaviour would be
                # to only strip local/debug symbols. Unfortunately, strip arguments
                # are not specified by POSIX and therefore not portable. GNU's `-g`
                # option (i.e. remove debug symbols) is equivalent to Apple's `-S`.
                #
                # TODO: Create GNUStrip/AppleStrip/etc. hierarchy for more
                #       fine-grained stripping of static archives.
                should_strip = not isinstance(t, build.StaticLibrary) and self.get_option_for_target('strip', t)
                # Install primary build output (library/executable/jar, etc)
                # Done separately because of strip/aliases/rpath
                if outdirs[0] is not False:
                    mappings = t.get_link_deps_mapping(d.prefix, self.environment)
                    i = TargetInstallData(self.get_target_filename(t), outdirs[0],
                                          t.get_aliases(), should_strip, mappings,
                                          t.install_rpath, install_mode)
                    d.targets.append(i)

                    if isinstance(t, (build.SharedLibrary, build.SharedModule, build.Executable)):
                        # On toolchains/platforms that use an import library for
                        # linking (separate from the shared library with all the
                        # code), we need to install that too (dll.a/.lib).
                        if t.get_import_filename():
                            if custom_install_dir:
                                # If the DLL is installed into a custom directory,
                                # install the import library into the same place so
                                # it doesn't go into a surprising place
                                implib_install_dir = outdirs[0]
                            else:
                                implib_install_dir = self.environment.get_import_lib_dir()
                            # Install the import library; may not exist for shared modules
                            i = TargetInstallData(self.get_target_filename_for_linking(t),
                                                  implib_install_dir, {}, False, {}, '', install_mode,
                                                  optional=isinstance(t, build.SharedModule))
                            d.targets.append(i)

                        if not should_strip and t.get_debug_filename():
                            debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename())
                            i = TargetInstallData(debug_file, outdirs[0],
                                                  {}, False, {}, '',
                                                  install_mode, optional=True)
                            d.targets.append(i)
                # Install secondary outputs. Only used for Vala right now.
                if num_outdirs > 1:
                    for output, outdir in zip(t.get_outputs()[1:], outdirs[1:]):
                        # User requested that we not install this output
                        if outdir is False:
                            continue
                        f = os.path.join(self.get_target_dir(t), output)
                        i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode)
                        d.targets.append(i)
            elif isinstance(t, build.CustomTarget):
                # If only one install_dir is specified, assume that all
                # outputs will be installed into it. This is for
                # backwards-compatibility and because it makes sense to
                # avoid repetition since this is a common use-case.
                #
                # To selectively install only some outputs, pass `false` as
                # the install_dir for the corresponding output by index
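                #
                # Illustrative meson.build snippet:
                #   custom_target('gen', output : ['foo.h', 'foo.c'], ...,
                #                 install : true,
                #                 install_dir : [get_option('includedir'), false])
                # installs only foo.h.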
                if num_outdirs == 1 and num_out > 1:
                    for output in t.get_outputs():
                        f = os.path.join(self.get_target_dir(t), output)
                        i = TargetInstallData(f, outdirs[0], {}, False, {}, None, install_mode,
                                              optional=not t.build_by_default)
                        d.targets.append(i)
                else:
                    for output, outdir in zip(t.get_outputs(), outdirs):
                        # User requested that we not install this output
                        if outdir is False:
                            continue
                        f = os.path.join(self.get_target_dir(t), output)
                        i = TargetInstallData(f, outdir, {}, False, {}, None, install_mode,
                                              optional=not t.build_by_default)
                        d.targets.append(i)

    def generate_custom_install_script(self, d):
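        # Scripts added with meson.add_install_script(); only @SOURCE_ROOT@ and
        # @BUILD_ROOT@ are expanded here, any other @...@ strings are passed
        # through verbatim.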
        result = []
        srcdir = self.environment.get_source_dir()
        builddir = self.environment.get_build_dir()
        for i in self.build.install_scripts:
            exe = i['exe']
            args = i['args']
            fixed_args = []
            for a in args:
                a = a.replace('@SOURCE_ROOT@', srcdir)
                a = a.replace('@BUILD_ROOT@', builddir)
                fixed_args.append(a)
            result.append(build.RunScript(exe, fixed_args))
        d.install_scripts = result

    def generate_header_install(self, d):
        incroot = self.environment.get_includedir()
        headers = self.build.get_headers()

        srcdir = self.environment.get_source_dir()
        builddir = self.environment.get_build_dir()
        for h in headers:
            outdir = h.get_custom_install_dir()
            if outdir is None:
                outdir = os.path.join(incroot, h.get_install_subdir())
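            # e.g. install_headers('foo.h', subdir : 'mylib') ends up as
            # <includedir>/mylib/foo.h.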
            for f in h.get_sources():
                if not isinstance(f, File):
                    msg = 'Invalid header type {!r}: it cannot be installed'
                    raise MesonException(msg.format(f))
                abspath = f.absolute_path(srcdir, builddir)
                i = [abspath, outdir, h.get_custom_install_mode()]
                d.headers.append(i)

    def generate_man_install(self, d):
        manroot = self.environment.get_mandir()
        man = self.build.get_man()
        for m in man:
            for f in m.get_sources():
                num = f.split('.')[-1]
                subdir = m.get_custom_install_dir()
                if subdir is None:
                    subdir = os.path.join(manroot, 'man' + num)
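                # e.g. 'foo.3' is installed as <mandir>/man3/foo.3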
                srcabs = f.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())
                dstabs = os.path.join(subdir, os.path.basename(f.fname))
                i = [srcabs, dstabs, m.get_custom_install_mode()]
                d.man.append(i)

    def generate_data_install(self, d):
        data = self.build.get_data()
        srcdir = self.environment.get_source_dir()
        builddir = self.environment.get_build_dir()
        for de in data:
            assert isinstance(de, build.Data)
            subdir = de.install_dir
            if not subdir:
                subdir = os.path.join(self.environment.get_datadir(), self.interpreter.build.project_name)
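            # de.rename holds one destination file name per source (typically
            # the source's basename unless install_data(... rename : [...]) was
            # used), so the two lists are zipped pairwise.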
            for src_file, dst_name in zip(de.sources, de.rename):
                assert isinstance(src_file, mesonlib.File)
                dst_abs = os.path.join(subdir, dst_name)
                i = [src_file.absolute_path(srcdir, builddir), dst_abs, de.install_mode]
                d.data.append(i)

    def generate_subdir_install(self, d):
        for sd in self.build.get_install_subdirs():
            src_dir = os.path.join(self.environment.get_source_dir(),
                                   sd.source_subdir,
                                   sd.installable_subdir).rstrip('/')
            dst_dir = os.path.join(self.environment.get_prefix(),
                                   sd.install_dir)
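            # e.g. install_subdir('data', install_dir : 'share') installs into
            # <prefix>/share/data; with strip_directory : true the contents of
            # 'data' land directly in <prefix>/share.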
            if not sd.strip_directory:
                dst_dir = os.path.join(dst_dir, os.path.basename(src_dir))
            d.install_subdirs.append([src_dir, dst_dir, sd.install_mode,
                                      sd.exclude])

    def get_introspection_data(self, target_id, target):
        '''
        Returns a list of source dicts with the following format for a given target:
        [
            {
                "language": "<LANG>",
                "compiler": ["result", "of", "comp.get_exelist()"],
                "parameters": ["list", "of", "compiler", "parameters],
                "sources": ["list", "of", "all", "<LANG>", "source", "files"],
                "generated_sources": ["list", "of", "generated", "source", "files"]
            }
        ]

        This is a limited fallback / reference implementation. The backend should override this method.
        '''
        if isinstance(target, (build.CustomTarget, build.BuildTarget)):
            source_list_raw = target.sources + target.extra_files
            source_list = []
            for j in source_list_raw:
                if isinstance(j, mesonlib.File):
                    source_list += [j.absolute_path(self.source_dir, self.build_dir)]
                elif isinstance(j, str):
                    source_list += [os.path.join(self.source_dir, j)]
            source_list = [os.path.normpath(s) for s in source_list]

            compiler = []
            if isinstance(target, build.CustomTarget):
                tmp_compiler = target.command
                if not isinstance(tmp_compiler, list):
                    tmp_compiler = [tmp_compiler]
                for j in tmp_compiler:
                    if isinstance(j, mesonlib.File):
                        compiler += [j.absolute_path(self.source_dir, self.build_dir)]
                    elif isinstance(j, str):
                        compiler += [j]
                    elif isinstance(j, (build.BuildTarget, build.CustomTarget)):
                        compiler += j.get_outputs()
                    else:
                        raise RuntimeError('Type "{}" is not supported in get_introspection_data. This is a bug'.format(type(j).__name__))

            return [{
                'language': 'unknown',
                'compiler': compiler,
                'parameters': [],
                'sources': source_list,
                'generated_sources': []
            }]

        return []
n> @permittedKwargs({}) def as_system_method(self, args, kwargs): args = listify(args) new_is_system = 'system' if len(args) > 1: raise InterpreterException('as_system takes only one optional value') if len(args) == 1: new_is_system = args[0] new_dep = self.held_object.generate_system_dependency(new_is_system) return DependencyHolder(new_dep, self.subproject) @FeatureNew('dep.as_link_whole', '0.56.0') @permittedKwargs({}) @noPosargs def as_link_whole_method(self, args, kwargs): if not isinstance(self.held_object, InternalDependency): raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects') new_dep = self.held_object.generate_link_whole_dependency() return DependencyHolder(new_dep, self.subproject) class ExternalProgramHolder(InterpreterObject, ObjectHolder): def __init__(self, ep, subproject, backend=None): InterpreterObject.__init__(self) ObjectHolder.__init__(self, ep) self.subproject = subproject self.backend = backend self.methods.update({'found': self.found_method, 'path': self.path_method, 'full_path': self.full_path_method}) self.cached_version = None @noPosargs @permittedKwargs({}) def found_method(self, args, kwargs): return self.found() @noPosargs @permittedKwargs({}) @FeatureDeprecated('ExternalProgram.path', '0.55.0', 'use ExternalProgram.full_path() instead') def path_method(self, args, kwargs): return self._full_path() @noPosargs @permittedKwargs({}) @FeatureNew('ExternalProgram.full_path', '0.55.0') def full_path_method(self, args, kwargs): return self._full_path() def _full_path(self): exe = self.held_object if isinstance(exe, build.Executable): return self.backend.get_target_filename_abs(exe) return exe.get_path() def found(self): return isinstance(self.held_object, build.Executable) or self.held_object.found() def get_command(self): return self.held_object.get_command() def get_name(self): exe = self.held_object if isinstance(exe, build.Executable): return exe.name return exe.get_name() def get_version(self, interpreter): if isinstance(self.held_object, build.Executable): return self.held_object.project_version if not self.cached_version: raw_cmd = self.get_command() + ['--version'] cmd = [self, '--version'] res = interpreter.run_command_impl(interpreter.current_node, cmd, {}, True) if res.returncode != 0: m = 'Running {!r} failed' raise InterpreterException(m.format(raw_cmd)) output = res.stdout.strip() if not output: output = res.stderr.strip() match = re.search(r'([0-9][0-9\.]+)', output) if not match: m = 'Could not find a version number in output of {!r}' raise InterpreterException(m.format(raw_cmd)) self.cached_version = match.group(1) return self.cached_version class ExternalLibraryHolder(InterpreterObject, ObjectHolder): def __init__(self, el, pv): InterpreterObject.__init__(self) ObjectHolder.__init__(self, el, pv) self.methods.update({'found': self.found_method, 'type_name': self.type_name_method, 'partial_dependency': self.partial_dependency_method, }) def found(self): return self.held_object.found() @noPosargs @permittedKwargs({}) def type_name_method(self, args, kwargs): return self.held_object.type_name @noPosargs @permittedKwargs({}) def found_method(self, args, kwargs): return self.found() def get_name(self): return self.held_object.name def get_compile_args(self): return self.held_object.get_compile_args() def get_link_args(self): return self.held_object.get_link_args() def get_exe_args(self): return self.held_object.get_exe_args() @FeatureNew('dep.partial_dependency', '0.46.0') @noPosargs 
@permittedKwargs(permitted_method_kwargs['partial_dependency']) def partial_dependency_method(self, args, kwargs): pdep = self.held_object.get_partial_dependency(**kwargs) return DependencyHolder(pdep, self.subproject) class GeneratorHolder(InterpreterObject, ObjectHolder): @FeatureNewKwargs('generator', '0.43.0', ['capture']) def __init__(self, interp, args, kwargs): self.interpreter = interp InterpreterObject.__init__(self) ObjectHolder.__init__(self, build.Generator(args, kwargs), interp.subproject) self.methods.update({'process': self.process_method}) @FeatureNewKwargs('generator.process', '0.45.0', ['preserve_path_from']) @permittedKwargs({'extra_args', 'preserve_path_from'}) def process_method(self, args, kwargs): extras = mesonlib.stringlistify(kwargs.get('extra_args', [])) if 'preserve_path_from' in kwargs: preserve_path_from = kwargs['preserve_path_from'] if not isinstance(preserve_path_from, str): raise InvalidArguments('Preserve_path_from must be a string.') preserve_path_from = os.path.normpath(preserve_path_from) if not os.path.isabs(preserve_path_from): # This is a bit of a hack. Fix properly before merging. raise InvalidArguments('Preserve_path_from must be an absolute path for now. Sorry.') else: preserve_path_from = None gl = self.held_object.process_files('Generator', args, self.interpreter, preserve_path_from, extra_args=extras) return GeneratedListHolder(gl) class GeneratedListHolder(InterpreterObject, ObjectHolder): def __init__(self, arg1, extra_args=None): InterpreterObject.__init__(self) if isinstance(arg1, GeneratorHolder): ObjectHolder.__init__(self, build.GeneratedList(arg1.held_object, extra_args if extra_args is not None else [])) else: ObjectHolder.__init__(self, arg1) def __repr__(self): r = '<{}: {!r}>' return r.format(self.__class__.__name__, self.held_object.get_outputs()) def add_file(self, a): self.held_object.add_file(a) # A machine that's statically known from the cross file class MachineHolder(InterpreterObject, ObjectHolder): def __init__(self, machine_info): InterpreterObject.__init__(self) ObjectHolder.__init__(self, machine_info) self.methods.update({'system': self.system_method, 'cpu': self.cpu_method, 'cpu_family': self.cpu_family_method, 'endian': self.endian_method, }) @noPosargs @permittedKwargs({}) def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str: return self.held_object.cpu_family @noPosargs @permittedKwargs({}) def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str: return self.held_object.cpu @noPosargs @permittedKwargs({}) def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str: return self.held_object.system @noPosargs @permittedKwargs({}) def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_nkwargs) -> str: return self.held_object.endian class IncludeDirsHolder(InterpreterObject, ObjectHolder): def __init__(self, idobj): InterpreterObject.__init__(self) ObjectHolder.__init__(self, idobj) class Headers(InterpreterObject): def __init__(self, sources, kwargs): InterpreterObject.__init__(self) self.sources = sources self.install_subdir = kwargs.get('subdir', '') if os.path.isabs(self.install_subdir): mlog.deprecation('Subdir keyword must not be an absolute path. 
This will be a hard error in the next release.') self.custom_install_dir = kwargs.get('install_dir', None) self.custom_install_mode = kwargs.get('install_mode', None) if self.custom_install_dir is not None: if not isinstance(self.custom_install_dir, str): raise InterpreterException('Custom_install_dir must be a string.') def set_install_subdir(self, subdir): self.install_subdir = subdir def get_install_subdir(self): return self.install_subdir def get_sources(self): return self.sources def get_custom_install_dir(self): return self.custom_install_dir def get_custom_install_mode(self): return self.custom_install_mode class DataHolder(InterpreterObject, ObjectHolder): def __init__(self, data): InterpreterObject.__init__(self) ObjectHolder.__init__(self, data) def get_source_subdir(self): return self.held_object.source_subdir def get_sources(self): return self.held_object.sources def get_install_dir(self): return self.held_object.install_dir class InstallDir(InterpreterObject): def __init__(self, src_subdir, inst_subdir, install_dir, install_mode, exclude, strip_directory, from_source_dir=True): InterpreterObject.__init__(self) self.source_subdir = src_subdir self.installable_subdir = inst_subdir self.install_dir = install_dir self.install_mode = install_mode self.exclude = exclude self.strip_directory = strip_directory self.from_source_dir = from_source_dir class Man(InterpreterObject): def __init__(self, sources, kwargs): InterpreterObject.__init__(self) self.sources = sources self.validate_sources() self.custom_install_dir = kwargs.get('install_dir', None) self.custom_install_mode = kwargs.get('install_mode', None) if self.custom_install_dir is not None and not isinstance(self.custom_install_dir, str): raise InterpreterException('Custom_install_dir must be a string.') def validate_sources(self): for s in self.sources: try: num = int(s.split('.')[-1]) except (IndexError, ValueError): num = 0 if num < 1 or num > 8: raise InvalidArguments('Man file must have a file extension of a number between 1 and 8') def get_custom_install_dir(self): return self.custom_install_dir def get_custom_install_mode(self): return self.custom_install_mode def get_sources(self): return self.sources class GeneratedObjectsHolder(InterpreterObject, ObjectHolder): def __init__(self, held_object): InterpreterObject.__init__(self) ObjectHolder.__init__(self, held_object) class TargetHolder(InterpreterObject, ObjectHolder): def __init__(self, target, interp): InterpreterObject.__init__(self) ObjectHolder.__init__(self, target, interp.subproject) self.interpreter = interp class BuildTargetHolder(TargetHolder): def __init__(self, target, interp): super().__init__(target, interp) self.methods.update({'extract_objects': self.extract_objects_method, 'extract_all_objects': self.extract_all_objects_method, 'name': self.name_method, 'get_id': self.get_id_method, 'outdir': self.outdir_method, 'full_path': self.full_path_method, 'private_dir_include': self.private_dir_include_method, }) def __repr__(self): r = '<{} {}: {}>' h = self.held_object return r.format(self.__class__.__name__, h.get_id(), h.filename) def is_cross(self): return not self.held_object.environment.machines.matches_build_machine(self.held_object.for_machine) @noPosargs @permittedKwargs({}) def private_dir_include_method(self, args, kwargs): return IncludeDirsHolder(build.IncludeDirs('', [], False, [self.interpreter.backend.get_target_private_dir(self.held_object)])) @noPosargs @permittedKwargs({}) def full_path_method(self, args, kwargs): return 
self.interpreter.backend.get_target_filename_abs(self.held_object) @noPosargs @permittedKwargs({}) def outdir_method(self, args, kwargs): return self.interpreter.backend.get_target_dir(self.held_object) @permittedKwargs({}) def extract_objects_method(self, args, kwargs): gobjs = self.held_object.extract_objects(args) return GeneratedObjectsHolder(gobjs) @FeatureNewKwargs('extract_all_objects', '0.46.0', ['recursive']) @noPosargs @permittedKwargs({'recursive'}) def extract_all_objects_method(self, args, kwargs): recursive = kwargs.get('recursive', False) gobjs = self.held_object.extract_all_objects(recursive) if gobjs.objlist and 'recursive' not in kwargs: mlog.warning('extract_all_objects called without setting recursive ' 'keyword argument. Meson currently defaults to ' 'non-recursive to maintain backward compatibility but ' 'the default will be changed in the future.', location=self.current_node) return GeneratedObjectsHolder(gobjs) @noPosargs @permittedKwargs({}) def get_id_method(self, args, kwargs): return self.held_object.get_id() @FeatureNew('name', '0.54.0') @noPosargs @permittedKwargs({}) def name_method(self, args, kwargs): return self.held_object.name class ExecutableHolder(BuildTargetHolder): def __init__(self, target, interp): super().__init__(target, interp) class StaticLibraryHolder(BuildTargetHolder): def __init__(self, target, interp): super().__init__(target, interp) class SharedLibraryHolder(BuildTargetHolder): def __init__(self, target, interp): super().__init__(target, interp) # Set to True only when called from self.func_shared_lib(). target.shared_library_only = False class BothLibrariesHolder(BuildTargetHolder): def __init__(self, shared_holder, static_holder, interp): # FIXME: This build target always represents the shared library, but # that should be configurable. 
super().__init__(shared_holder.held_object, interp) self.shared_holder = shared_holder self.static_holder = static_holder self.methods.update({'get_shared_lib': self.get_shared_lib_method, 'get_static_lib': self.get_static_lib_method, }) def __repr__(self): r = '<{} {}: {}, {}: {}>' h1 = self.shared_holder.held_object h2 = self.static_holder.held_object return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename) @noPosargs @permittedKwargs({}) def get_shared_lib_method(self, args, kwargs): return self.shared_holder @noPosargs @permittedKwargs({}) def get_static_lib_method(self, args, kwargs): return self.static_holder class SharedModuleHolder(BuildTargetHolder): def __init__(self, target, interp): super().__init__(target, interp) class JarHolder(BuildTargetHolder): def __init__(self, target, interp): super().__init__(target, interp) class CustomTargetIndexHolder(TargetHolder): def __init__(self, target, interp): super().__init__(target, interp) self.methods.update({'full_path': self.full_path_method, }) @FeatureNew('custom_target[i].full_path', '0.54.0') @noPosargs @permittedKwargs({}) def full_path_method(self, args, kwargs): return self.interpreter.backend.get_target_filename_abs(self.held_object) class CustomTargetHolder(TargetHolder): def __init__(self, target, interp): super().__init__(target, interp) self.methods.update({'full_path': self.full_path_method, 'to_list': self.to_list_method, }) def __repr__(self): r = '<{} {}: {}>' h = self.held_object return r.format(self.__class__.__name__, h.get_id(), h.command) @noPosargs @permittedKwargs({}) def full_path_method(self, args, kwargs): return self.interpreter.backend.get_target_filename_abs(self.held_object) @FeatureNew('custom_target.to_list', '0.54.0') @noPosargs @permittedKwargs({}) def to_list_method(self, args, kwargs): result = [] for i in self.held_object: result.append(CustomTargetIndexHolder(i, self.interpreter)) return result def __getitem__(self, index): return CustomTargetIndexHolder(self.held_object[index], self.interpreter) def __setitem__(self, index, value): # lgtm[py/unexpected-raise-in-special-method] raise InterpreterException('Cannot set a member of a CustomTarget') def __delitem__(self, index): # lgtm[py/unexpected-raise-in-special-method] raise InterpreterException('Cannot delete a member of a CustomTarget') def outdir_include(self): return IncludeDirsHolder(build.IncludeDirs('', [], False, [os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(self.held_object))])) class RunTargetHolder(TargetHolder): def __init__(self, target, interp): super().__init__(target, interp) def __repr__(self): r = '<{} {}: {}>' h = self.held_object return r.format(self.__class__.__name__, h.get_id(), h.command) class Test(InterpreterObject): def __init__(self, name: str, project: str, suite: T.List[str], exe: build.Executable, depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]], is_parallel: bool, cmd_args: T.List[str], env: build.EnvironmentVariables, should_fail: bool, timeout: int, workdir: T.Optional[str], protocol: str, priority: int): InterpreterObject.__init__(self) self.name = name self.suite = suite self.project_name = project self.exe = exe self.depends = depends self.is_parallel = is_parallel self.cmd_args = cmd_args self.env = env self.should_fail = should_fail self.timeout = timeout self.workdir = workdir self.protocol = TestProtocol.from_str(protocol) self.priority = priority def get_exe(self): return self.exe def get_name(self): return self.name class 
SubprojectHolder(InterpreterObject, ObjectHolder): def __init__(self, subinterpreter, subdir, warnings=0, disabled_feature=None, exception=None): InterpreterObject.__init__(self) ObjectHolder.__init__(self, subinterpreter) self.warnings = warnings self.disabled_feature = disabled_feature self.exception = exception self.subdir = PurePath(subdir).as_posix() self.methods.update({'get_variable': self.get_variable_method, 'found': self.found_method, }) @noPosargs @permittedKwargs({}) def found_method(self, args, kwargs): return self.found() def found(self): return self.held_object is not None @permittedKwargs({}) @noArgsFlattening def get_variable_method(self, args, kwargs): if len(args) < 1 or len(args) > 2: raise InterpreterException('Get_variable takes one or two arguments.') if not self.found(): raise InterpreterException('Subproject "%s" disabled can\'t get_variable on it.' % (self.subdir)) varname = args[0] if not isinstance(varname, str): raise InterpreterException('Get_variable first argument must be a string.') try: return self.held_object.variables[varname] except KeyError: pass if len(args) == 2: return args[1] raise InvalidArguments('Requested variable "{0}" not found.'.format(varname)) header_permitted_kwargs = set([ 'required', 'prefix', 'no_builtin_args', 'include_directories', 'args', 'dependencies', ]) find_library_permitted_kwargs = set([ 'has_headers', 'required', 'dirs', 'static', ]) find_library_permitted_kwargs |= set(['header_' + k for k in header_permitted_kwargs]) class CompilerHolder(InterpreterObject): def __init__(self, compiler, env, subproject): InterpreterObject.__init__(self) self.compiler = compiler self.environment = env self.subproject = subproject self.methods.update({'compiles': self.compiles_method, 'links': self.links_method, 'get_id': self.get_id_method, 'get_linker_id': self.get_linker_id_method, 'compute_int': self.compute_int_method, 'sizeof': self.sizeof_method, 'get_define': self.get_define_method, 'check_header': self.check_header_method, 'has_header': self.has_header_method, 'has_header_symbol': self.has_header_symbol_method, 'run': self.run_method, 'has_function': self.has_function_method, 'has_member': self.has_member_method, 'has_members': self.has_members_method, 'has_type': self.has_type_method, 'alignment': self.alignment_method, 'version': self.version_method, 'cmd_array': self.cmd_array_method, 'find_library': self.find_library_method, 'has_argument': self.has_argument_method, 'has_function_attribute': self.has_func_attribute_method, 'get_supported_function_attributes': self.get_supported_function_attributes_method, 'has_multi_arguments': self.has_multi_arguments_method, 'get_supported_arguments': self.get_supported_arguments_method, 'first_supported_argument': self.first_supported_argument_method, 'has_link_argument': self.has_link_argument_method, 'has_multi_link_arguments': self.has_multi_link_arguments_method, 'get_supported_link_arguments': self.get_supported_link_arguments_method, 'first_supported_link_argument': self.first_supported_link_argument_method, 'unittest_args': self.unittest_args_method, 'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method, 'get_argument_syntax': self.get_argument_syntax_method, }) def _dep_msg(self, deps, endl): msg_single = 'with dependency {}' msg_many = 'with dependencies {}' if not deps: return endl if endl is None: endl = '' tpl = msg_many if len(deps) > 1 else msg_single names = [] for d in deps: if isinstance(d, dependencies.ExternalLibrary): name = '-l' + d.name else: name = 
d.name names.append(name) return tpl.format(', '.join(names)) + endl @noPosargs @permittedKwargs({}) def version_method(self, args, kwargs): return self.compiler.version @noPosargs @permittedKwargs({}) def cmd_array_method(self, args, kwargs): return self.compiler.exelist def determine_args(self, kwargs, mode='link'): nobuiltins = kwargs.get('no_builtin_args', False) if not isinstance(nobuiltins, bool): raise InterpreterException('Type of no_builtin_args not a boolean.') args = [] incdirs = extract_as_list(kwargs, 'include_directories') for i in incdirs: if not isinstance(i, IncludeDirsHolder): raise InterpreterException('Include directories argument must be an include_directories object.') for idir in i.held_object.get_incdirs(): idir = os.path.join(self.environment.get_source_dir(), i.held_object.get_curdir(), idir) args += self.compiler.get_include_args(idir, False) if not nobuiltins: for_machine = Interpreter.machine_from_native_kwarg(kwargs) opts = self.environment.coredata.compiler_options[for_machine][self.compiler.language] args += self.compiler.get_option_compile_args(opts) if mode == 'link': args += self.compiler.get_option_link_args(opts) args += mesonlib.stringlistify(kwargs.get('args', [])) return args def determine_dependencies(self, kwargs, endl=':'): deps = kwargs.get('dependencies', None) if deps is not None: deps = listify(deps) final_deps = [] for d in deps: try: d = d.held_object except Exception: pass if isinstance(d, InternalDependency) or not isinstance(d, Dependency): raise InterpreterException('Dependencies must be external dependencies') final_deps.append(d) deps = final_deps return deps, self._dep_msg(deps, endl) @permittedKwargs({ 'prefix', 'args', 'dependencies', }) def alignment_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Alignment method takes exactly one positional argument.') check_stringlist(args) typename = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of alignment must be a string.') extra_args = mesonlib.stringlistify(kwargs.get('args', [])) deps, msg = self.determine_dependencies(kwargs) result = self.compiler.alignment(typename, prefix, self.environment, extra_args=extra_args, dependencies=deps) mlog.log('Checking for alignment of', mlog.bold(typename, True), msg, result) return result @permittedKwargs({ 'name', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def run_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Run method takes exactly one positional argument.') code = args[0] if isinstance(code, mesonlib.File): code = mesonlib.File.from_absolute_file( code.rel_to_builddir(self.environment.source_dir)) elif not isinstance(code, str): raise InvalidArguments('Argument must be string or file.') testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs, endl=None) result = self.compiler.run(code, self.environment, extra_args=extra_args, dependencies=deps) if len(testname) > 0: if not result.compiled: h = mlog.red('DID NOT COMPILE') elif result.returncode == 0: h = mlog.green('YES') else: h = mlog.red('NO (%d)' % result.returncode) mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h) return TryRunResultHolder(result) @noPosargs @permittedKwargs({}) def get_id_method(self, args, kwargs): return 
self.compiler.get_id() @noPosargs @permittedKwargs({}) @FeatureNew('compiler.get_linker_id', '0.53.0') def get_linker_id_method(self, args, kwargs): return self.compiler.get_linker_id() @noPosargs @permittedKwargs({}) def symbols_have_underscore_prefix_method(self, args, kwargs): ''' Check if the compiler prefixes _ (underscore) to global C symbols See: https://en.wikipedia.org/wiki/Name_mangling#C ''' return self.compiler.symbols_have_underscore_prefix(self.environment) @noPosargs @permittedKwargs({}) def unittest_args_method(self, args, kwargs): ''' This function is deprecated and should not be used. It can be removed in a future version of Meson. ''' if not hasattr(self.compiler, 'get_feature_args'): raise InterpreterException('This {} compiler has no feature arguments.'.format(self.compiler.get_display_language())) build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir()) return self.compiler.get_feature_args({'unittest': 'true'}, build_to_src) @permittedKwargs({ 'prefix', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def has_member_method(self, args, kwargs): if len(args) != 2: raise InterpreterException('Has_member takes exactly two arguments.') check_stringlist(args) typename, membername = args prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_member must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) had, cached = self.compiler.has_members(typename, [membername], prefix, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') mlog.log('Checking whether type', mlog.bold(typename, True), 'has member', mlog.bold(membername, True), msg, hadtxt, cached) return had @permittedKwargs({ 'prefix', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def has_members_method(self, args, kwargs): if len(args) < 2: raise InterpreterException('Has_members needs at least two arguments.') check_stringlist(args) typename, *membernames = args prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_members must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) had, cached = self.compiler.has_members(typename, membernames, prefix, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') members = mlog.bold(', '.join(['"{}"'.format(m) for m in membernames])) mlog.log('Checking whether type', mlog.bold(typename, True), 'has members', members, msg, hadtxt, cached) return had @permittedKwargs({ 'prefix', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def has_function_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Has_function takes exactly one argument.') check_stringlist(args) funcname = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_function must be a string.') extra_args = self.determine_args(kwargs) deps, msg = self.determine_dependencies(kwargs) had, cached = self.compiler.has_function(funcname, prefix, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if 
cached else '' if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt, cached) return had @permittedKwargs({ 'prefix', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def has_type_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Has_type takes exactly one argument.') check_stringlist(args) typename = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_type must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) had, cached = self.compiler.has_type(typename, prefix, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if had: hadtxt = mlog.green('YES') else: hadtxt = mlog.red('NO') mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt, cached) return had @FeatureNew('compiler.compute_int', '0.40.0') @permittedKwargs({ 'prefix', 'low', 'high', 'guess', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def compute_int_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Compute_int takes exactly one argument.') check_stringlist(args) expression = args[0] prefix = kwargs.get('prefix', '') low = kwargs.get('low', None) high = kwargs.get('high', None) guess = kwargs.get('guess', None) if not isinstance(prefix, str): raise InterpreterException('Prefix argument of compute_int must be a string.') if low is not None and not isinstance(low, int): raise InterpreterException('Low argument of compute_int must be an int.') if high is not None and not isinstance(high, int): raise InterpreterException('High argument of compute_int must be an int.') if guess is not None and not isinstance(guess, int): raise InterpreterException('Guess argument of compute_int must be an int.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) res = self.compiler.compute_int(expression, low, high, guess, prefix, self.environment, extra_args=extra_args, dependencies=deps) mlog.log('Computing int of', mlog.bold(expression, True), msg, res) return res @permittedKwargs({ 'prefix', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def sizeof_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Sizeof takes exactly one argument.') check_stringlist(args) element = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of sizeof must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) esize = self.compiler.sizeof(element, prefix, self.environment, extra_args=extra_args, dependencies=deps) mlog.log('Checking for size of', mlog.bold(element, True), msg, esize) return esize @FeatureNew('compiler.get_define', '0.40.0') @permittedKwargs({ 'prefix', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def get_define_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('get_define() takes exactly one argument.') check_stringlist(args) element = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of get_define() must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = 
self.determine_dependencies(kwargs) value, cached = self.compiler.get_define(element, prefix, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' mlog.log('Fetching value of define', mlog.bold(element, True), msg, value, cached) return value @permittedKwargs({ 'name', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def compiles_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('compiles method takes exactly one argument.') code = args[0] if isinstance(code, mesonlib.File): code = mesonlib.File.from_absolute_file( code.rel_to_builddir(self.environment.source_dir)) elif not isinstance(code, str): raise InvalidArguments('Argument must be string or file.') testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs, endl=None) result, cached = self.compiler.compiles(code, self.environment, extra_args=extra_args, dependencies=deps) if len(testname) > 0: if result: h = mlog.green('YES') else: h = mlog.red('NO') cached = mlog.blue('(cached)') if cached else '' mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h, cached) return result @permittedKwargs({ 'name', 'no_builtin_args', 'include_directories', 'args', 'dependencies', }) def links_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('links method takes exactly one argument.') code = args[0] if isinstance(code, mesonlib.File): code = mesonlib.File.from_absolute_file( code.rel_to_builddir(self.environment.source_dir)) elif not isinstance(code, str): raise InvalidArguments('Argument must be string or file.') testname = kwargs.get('name', '') if not isinstance(testname, str): raise InterpreterException('Testname argument must be a string.') extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs, endl=None) result, cached = self.compiler.links(code, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if len(testname) > 0: if result: h = mlog.green('YES') else: h = mlog.red('NO') mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h, cached) return result @FeatureNew('compiler.check_header', '0.47.0') @FeatureNewKwargs('compiler.check_header', '0.50.0', ['required']) @permittedKwargs(header_permitted_kwargs) def check_header_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('check_header method takes exactly one argument.') check_stringlist(args) hname = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header must be a string.') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) if disabled: mlog.log('Check usable header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled') return False extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) haz, cached = self.compiler.check_header(hname, prefix, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if required and not haz: raise InterpreterException('{} header {!r} not usable'.format(self.compiler.get_display_language(), hname)) elif haz: h = mlog.green('YES') else: h = mlog.red('NO') 
mlog.log('Check usable header', mlog.bold(hname, True), msg, h, cached) return haz @FeatureNewKwargs('compiler.has_header', '0.50.0', ['required']) @permittedKwargs(header_permitted_kwargs) def has_header_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('has_header method takes exactly one argument.') check_stringlist(args) hname = args[0] prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header must be a string.') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) if disabled: mlog.log('Has header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled') return False extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) haz, cached = self.compiler.has_header(hname, prefix, self.environment, extra_args=extra_args, dependencies=deps) cached = mlog.blue('(cached)') if cached else '' if required and not haz: raise InterpreterException('{} header {!r} not found'.format(self.compiler.get_display_language(), hname)) elif haz: h = mlog.green('YES') else: h = mlog.red('NO') mlog.log('Has header', mlog.bold(hname, True), msg, h, cached) return haz @FeatureNewKwargs('compiler.has_header_symbol', '0.50.0', ['required']) @permittedKwargs(header_permitted_kwargs) def has_header_symbol_method(self, args, kwargs): if len(args) != 2: raise InterpreterException('has_header_symbol method takes exactly two arguments.') check_stringlist(args) hname, symbol = args prefix = kwargs.get('prefix', '') if not isinstance(prefix, str): raise InterpreterException('Prefix argument of has_header_symbol must be a string.') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False) if disabled: mlog.log('Header <{0}> has symbol'.format(hname), mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled') return False extra_args = functools.partial(self.determine_args, kwargs) deps, msg = self.determine_dependencies(kwargs) haz, cached = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment, extra_args=extra_args, dependencies=deps) if required and not haz: raise InterpreterException('{} symbol {} not found in header {}'.format(self.compiler.get_display_language(), symbol, hname)) elif haz: h = mlog.green('YES') else: h = mlog.red('NO') cached = mlog.blue('(cached)') if cached else '' mlog.log('Header <{0}> has symbol'.format(hname), mlog.bold(symbol, True), msg, h, cached) return haz def notfound_library(self, libname): lib = dependencies.ExternalLibrary(libname, None, self.environment, self.compiler.language, silent=True) return ExternalLibraryHolder(lib, self.subproject) @FeatureNewKwargs('compiler.find_library', '0.51.0', ['static']) @FeatureNewKwargs('compiler.find_library', '0.50.0', ['has_headers']) @FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler']) @disablerIfNotFound @permittedKwargs(find_library_permitted_kwargs) def find_library_method(self, args, kwargs): # TODO add dependencies support? 
if len(args) != 1: raise InterpreterException('find_library method takes one argument.') libname = args[0] if not isinstance(libname, str): raise InterpreterException('Library name not a string.') disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: mlog.log('Library', mlog.bold(libname), 'skipped: feature', mlog.bold(feature), 'disabled') return self.notfound_library(libname) has_header_kwargs = {k[7:]: v for k, v in kwargs.items() if k.startswith('header_')} has_header_kwargs['required'] = required headers = mesonlib.stringlistify(kwargs.get('has_headers', [])) for h in headers: if not self.has_header_method([h], has_header_kwargs): return self.notfound_library(libname) search_dirs = extract_search_dirs(kwargs) libtype = mesonlib.LibType.PREFER_SHARED if 'static' in kwargs: if not isinstance(kwargs['static'], bool): raise InterpreterException('static must be a boolean') libtype = mesonlib.LibType.STATIC if kwargs['static'] else mesonlib.LibType.SHARED linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype) if required and not linkargs: if libtype == mesonlib.LibType.PREFER_SHARED: libtype = 'shared or static' else: libtype = libtype.name.lower() raise InterpreterException('{} {} library {!r} not found' .format(self.compiler.get_display_language(), libtype, libname)) lib = dependencies.ExternalLibrary(libname, linkargs, self.environment, self.compiler.language) return ExternalLibraryHolder(lib, self.subproject) @permittedKwargs({}) def has_argument_method(self, args: T.Sequence[str], kwargs) -> bool: args = mesonlib.stringlistify(args) if len(args) != 1: raise InterpreterException('has_argument takes exactly one argument.') return self.has_multi_arguments_method(args, kwargs) @permittedKwargs({}) def has_multi_arguments_method(self, args: T.Sequence[str], kwargs: dict): args = mesonlib.stringlistify(args) result, cached = self.compiler.has_multi_arguments(args, self.environment) if result: h = mlog.green('YES') else: h = mlog.red('NO') cached = mlog.blue('(cached)') if cached else '' mlog.log( 'Compiler for {} supports arguments {}:'.format( self.compiler.get_display_language(), ' '.join(args)), h, cached) return result @FeatureNew('compiler.get_supported_arguments', '0.43.0') @permittedKwargs({}) def get_supported_arguments_method(self, args, kwargs): args = mesonlib.stringlistify(args) supported_args = [] for arg in args: if self.has_argument_method(arg, kwargs): supported_args.append(arg) return supported_args @permittedKwargs({}) def first_supported_argument_method(self, args: T.Sequence[str], kwargs: dict) -> T.List[str]: for arg in mesonlib.stringlistify(args): if self.has_argument_method(arg, kwargs): mlog.log('First supported argument:', mlog.bold(arg)) return [arg] mlog.log('First supported argument:', mlog.red('None')) return [] @FeatureNew('compiler.has_link_argument', '0.46.0') @permittedKwargs({}) def has_link_argument_method(self, args, kwargs): args = mesonlib.stringlistify(args) if len(args) != 1: raise InterpreterException('has_link_argument takes exactly one argument.') return self.has_multi_link_arguments_method(args, kwargs) @FeatureNew('compiler.has_multi_link_argument', '0.46.0') @permittedKwargs({}) def has_multi_link_arguments_method(self, args, kwargs): args = mesonlib.stringlistify(args) result, cached = self.compiler.has_multi_link_arguments(args, self.environment) cached = mlog.blue('(cached)') if cached else '' if result: h = mlog.green('YES') else: h = mlog.red('NO') mlog.log( 'Compiler for 
{} supports link arguments {}:'.format( self.compiler.get_display_language(), ' '.join(args)), h, cached) return result @FeatureNew('compiler.get_supported_link_arguments_method', '0.46.0') @permittedKwargs({}) def get_supported_link_arguments_method(self, args, kwargs): args = mesonlib.stringlistify(args) supported_args = [] for arg in args: if self.has_link_argument_method(arg, kwargs): supported_args.append(arg) return supported_args @FeatureNew('compiler.first_supported_link_argument_method', '0.46.0') @permittedKwargs({}) def first_supported_link_argument_method(self, args, kwargs): for i in mesonlib.stringlistify(args): if self.has_link_argument_method(i, kwargs): mlog.log('First supported link argument:', mlog.bold(i)) return [i] mlog.log('First supported link argument:', mlog.red('None')) return [] @FeatureNew('compiler.has_function_attribute', '0.48.0') @permittedKwargs({}) def has_func_attribute_method(self, args, kwargs): args = mesonlib.stringlistify(args) if len(args) != 1: raise InterpreterException('has_func_attribute takes exactly one argument.') result, cached = self.compiler.has_func_attribute(args[0], self.environment) cached = mlog.blue('(cached)') if cached else '' h = mlog.green('YES') if result else mlog.red('NO') mlog.log('Compiler for {} supports function attribute {}:'.format(self.compiler.get_display_language(), args[0]), h, cached) return result @FeatureNew('compiler.get_supported_function_attributes', '0.48.0') @permittedKwargs({}) def get_supported_function_attributes_method(self, args, kwargs): args = mesonlib.stringlistify(args) return [a for a in args if self.has_func_attribute_method(a, kwargs)] @FeatureNew('compiler.get_argument_syntax_method', '0.49.0') @noPosargs @noKwargs def get_argument_syntax_method(self, args, kwargs): return self.compiler.get_argument_syntax() ModuleState = collections.namedtuple('ModuleState', [ 'source_root', 'build_to_src', 'subproject', 'subdir', 'current_lineno', 'environment', 'project_name', 'project_version', 'backend', 'targets', 'data', 'headers', 'man', 'global_args', 'project_args', 'build_machine', 'host_machine', 'target_machine', 'current_node']) class ModuleHolder(InterpreterObject, ObjectHolder): def __init__(self, modname, module, interpreter): InterpreterObject.__init__(self) ObjectHolder.__init__(self, module) self.modname = modname self.interpreter = interpreter def method_call(self, method_name, args, kwargs): try: fn = getattr(self.held_object, method_name) except AttributeError: raise InvalidArguments('Module %s does not have method %s.' % (self.modname, method_name)) if method_name.startswith('_'): raise InvalidArguments('Function {!r} in module {!r} is private.'.format(method_name, self.modname)) if not getattr(fn, 'no-args-flattening', False): args = flatten(args) # This is not 100% reliable but we can't use hash() # because the Build object contains dicts and lists. 
num_targets = len(self.interpreter.build.targets) state = ModuleState( source_root = self.interpreter.environment.get_source_dir(), build_to_src=mesonlib.relpath(self.interpreter.environment.get_source_dir(), self.interpreter.environment.get_build_dir()), subproject=self.interpreter.subproject, subdir=self.interpreter.subdir, current_lineno=self.interpreter.current_lineno, environment=self.interpreter.environment, project_name=self.interpreter.build.project_name, project_version=self.interpreter.build.dep_manifest[self.interpreter.active_projectname], # The backend object is under-used right now, but we will need it: # https://github.com/mesonbuild/meson/issues/1419 backend=self.interpreter.backend, targets=self.interpreter.build.targets, data=self.interpreter.build.data, headers=self.interpreter.build.get_headers(), man=self.interpreter.build.get_man(), #global_args_for_build = self.interpreter.build.global_args.build, global_args = self.interpreter.build.global_args.host, #project_args_for_build = self.interpreter.build.projects_args.build.get(self.interpreter.subproject, {}), project_args = self.interpreter.build.projects_args.host.get(self.interpreter.subproject, {}), build_machine=self.interpreter.builtin['build_machine'].held_object, host_machine=self.interpreter.builtin['host_machine'].held_object, target_machine=self.interpreter.builtin['target_machine'].held_object, current_node=self.current_node ) # Many modules do for example self.interpreter.find_program_impl(), # so we have to ensure they use the current interpreter and not the one # that first imported that module, otherwise it will use outdated # overrides. self.held_object.interpreter = self.interpreter if self.held_object.is_snippet(method_name): value = fn(self.interpreter, state, args, kwargs) return self.interpreter.holderify(value) else: value = fn(state, args, kwargs) if num_targets != len(self.interpreter.build.targets): raise InterpreterException('Extension module altered internal state illegally.') return self.interpreter.module_method_callback(value) class Summary: def __init__(self, project_name, project_version): self.project_name = project_name self.project_version = project_version self.sections = collections.defaultdict(dict) self.max_key_len = 0 def add_section(self, section, values, kwargs): bool_yn = kwargs.get('bool_yn', False) if not isinstance(bool_yn, bool): raise InterpreterException('bool_yn keyword argument must be boolean') list_sep = kwargs.get('list_sep') if list_sep is not None and not isinstance(list_sep, str): raise InterpreterException('list_sep keyword argument must be string') for k, v in values.items(): if k in self.sections[section]: raise InterpreterException('Summary section {!r} already have key {!r}'.format(section, k)) formatted_values = [] for i in listify(v): if not isinstance(i, (str, int)): m = 'Summary value in section {!r}, key {!r}, must be string, integer or boolean' raise InterpreterException(m.format(section, k)) if bool_yn and isinstance(i, bool): formatted_values.append(mlog.green('YES') if i else mlog.red('NO')) else: formatted_values.append(str(i)) self.sections[section][k] = (formatted_values, list_sep) self.max_key_len = max(self.max_key_len, len(k)) def text_len(self, v): if isinstance(v, str): return len(v) elif isinstance(v, mlog.AnsiDecorator): return len(v.text) else: raise RuntimeError('Expecting only strings or AnsiDecorator') def dump(self): mlog.log(self.project_name, mlog.normal_cyan(self.project_version)) for section, values in self.sections.items(): 
            mlog.log('')  # newline
            if section:
                mlog.log(' ', mlog.bold(section))
            for k, v in values.items():
                v, list_sep = v
                indent = self.max_key_len - len(k) + 3
                end = ' ' if v else ''
                mlog.log(' ' * indent, k + ':', end=end)
                indent = self.max_key_len + 6
                self.dump_value(v, list_sep, indent)
        mlog.log('')  # newline

    def dump_value(self, arr, list_sep, indent):
        lines_sep = '\n' + ' ' * indent
        if list_sep is None:
            mlog.log(*arr, sep=lines_sep)
            return
        max_len = shutil.get_terminal_size().columns
        line = []
        line_len = indent
        lines_sep = list_sep.rstrip() + lines_sep
        for v in arr:
            v_len = self.text_len(v) + len(list_sep)
            if line and line_len + v_len > max_len:
                mlog.log(*line, sep=list_sep, end=lines_sep)
                line_len = indent
                line = []
            line.append(v)
            line_len += v_len
        mlog.log(*line, sep=list_sep)


class MesonMain(InterpreterObject):
    def __init__(self, build, interpreter):
        InterpreterObject.__init__(self)
        self.build = build
        self.interpreter = interpreter
        self._found_source_scripts = {}
        self.methods.update({'get_compiler': self.get_compiler_method,
                             'is_cross_build': self.is_cross_build_method,
                             'has_exe_wrapper': self.has_exe_wrapper_method,
                             'can_run_host_binaries': self.can_run_host_binaries_method,
                             'is_unity': self.is_unity_method,
                             'is_subproject': self.is_subproject_method,
                             'current_source_dir': self.current_source_dir_method,
                             'current_build_dir': self.current_build_dir_method,
                             'source_root': self.source_root_method,
                             'build_root': self.build_root_method,
                             'project_source_root': self.project_source_root_method,
                             'project_build_root': self.project_build_root_method,
                             'add_install_script': self.add_install_script_method,
                             'add_postconf_script': self.add_postconf_script_method,
                             'add_dist_script': self.add_dist_script_method,
                             'install_dependency_manifest': self.install_dependency_manifest_method,
                             'override_dependency': self.override_dependency_method,
                             'override_find_program': self.override_find_program_method,
                             'project_version': self.project_version_method,
                             'project_license': self.project_license_method,
                             'version': self.version_method,
                             'project_name': self.project_name_method,
                             'get_cross_property': self.get_cross_property_method,
                             'get_external_property': self.get_external_property_method,
                             'backend': self.backend_method,
                             })

    def _find_source_script(self, prog: T.Union[str, ExecutableHolder], args):
        if isinstance(prog, ExecutableHolder):
            prog_path = self.interpreter.backend.get_target_filename(prog.held_object)
            return build.RunScript([prog_path], args)
        elif isinstance(prog, ExternalProgramHolder):
            return build.RunScript(prog.get_command(), args)
        # Prefer scripts in the current source directory
        search_dir = os.path.join(self.interpreter.environment.source_dir,
                                  self.interpreter.subdir)
        key = (prog, search_dir)
        if key in self._found_source_scripts:
            found = self._found_source_scripts[key]
        else:
            found = dependencies.ExternalProgram(prog, search_dir=search_dir)
            if found.found():
                self._found_source_scripts[key] = found
            else:
                m = 'Script or command {!r} not found or not executable'
                raise InterpreterException(m.format(prog))
        return build.RunScript(found.get_command(), args)

    def _process_script_args(
            self, name: str, args: T.List[T.Union[
                str, mesonlib.File, CustomTargetHolder,
                CustomTargetIndexHolder, ConfigureFileHolder,
                ExternalProgramHolder, ExecutableHolder,
            ]], allow_built: bool = False) -> T.List[str]:
        script_args = []  # T.List[str]
        new = False
        for a in args:
            a = unholder(a)
            if isinstance(a, str):
                script_args.append(a)
            elif isinstance(a, mesonlib.File):
                new = True
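                # Non-string arguments set `new`, which triggers the 0.55.0
                # FeatureNew notice after the loop; File objects are converted
                # to paths with rel_to_builddir().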
                script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
            elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
                if not allow_built:
                    raise InterpreterException('Arguments to {} cannot be built'.format(name))
                new = True
                script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
                # This feels really hacky, but I'm not sure how else to fix
                # this without completely rewriting install script handling.
                # This is complicated by the fact that the install target
                # depends on all.
                if isinstance(a, build.CustomTargetIndex):
                    a.target.build_by_default = True
                else:
                    a.build_by_default = True
            elif isinstance(a, build.ConfigureFile):
                new = True
                script_args.append(os.path.join(a.subdir, a.targetname))
            elif isinstance(a, dependencies.ExternalProgram):
                script_args.extend(a.command)
                new = True
            else:
                raise InterpreterException(
                    'Arguments to {} must be strings, Files, CustomTargets, '
                    'Indexes of CustomTargets, or ConfigureFiles'.format(name))
        if new:
            FeatureNew.single_use(
                'Calling "{}" with File, CustomTarget, Index of CustomTarget, '
                'ConfigureFile, Executable, or ExternalProgram'.format(name),
                '0.55.0', self.interpreter.subproject)
        return script_args

    @permittedKwargs(set())
    def add_install_script_method(self, args: 'T.Tuple[T.Union[str, ExecutableHolder], T.Union[str, mesonlib.File, CustomTargetHolder, CustomTargetIndexHolder, ConfigureFileHolder], ...]', kwargs):
        if len(args) < 1:
            raise InterpreterException('add_install_script takes one or more arguments')
        script_args = self._process_script_args('add_install_script', args[1:], allow_built=True)
        script = self._find_source_script(args[0], script_args)
        self.build.install_scripts.append(script)

    @permittedKwargs(set())
    def add_postconf_script_method(self, args, kwargs):
        if len(args) < 1:
            raise InterpreterException('add_postconf_script takes one or more arguments')
        script_args = self._process_script_args('add_postconf_script', args[1:], allow_built=True)
        script = self._find_source_script(args[0], script_args)
        self.build.postconf_scripts.append(script)

    @permittedKwargs(set())
    def add_dist_script_method(self, args, kwargs):
        if len(args) < 1:
            raise InterpreterException('add_dist_script takes one or more arguments')
        if len(args) > 1:
            FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
                                  '0.49.0', self.interpreter.subproject)
        if self.interpreter.subproject != '':
            raise InterpreterException('add_dist_script may not be used in a subproject.')
        script_args = self._process_script_args('add_dist_script', args[1:], allow_built=True)
        script = self._find_source_script(args[0], script_args)
        self.build.dist_scripts.append(script)

    @noPosargs
    @permittedKwargs({})
    def current_source_dir_method(self, args, kwargs):
        src = self.interpreter.environment.source_dir
        sub = self.interpreter.subdir
        if sub == '':
            return src
        return os.path.join(src, sub)

    @noPosargs
    @permittedKwargs({})
    def current_build_dir_method(self, args, kwargs):
        src = self.interpreter.environment.build_dir
        sub = self.interpreter.subdir
        if sub == '':
            return src
        return os.path.join(src, sub)

    @noPosargs
    @permittedKwargs({})
    def backend_method(self, args, kwargs):
        return self.interpreter.backend.name

    @noPosargs
    @permittedKwargs({})
    @FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.current_source_dir instead.')
    def source_root_method(self, args, kwargs):
        return self.interpreter.environment.source_dir

    @noPosargs
    @permittedKwargs({})
    @FeatureDeprecated('meson.build_root', '0.56.0', 'use meson.current_build_dir instead.')
    def
build_root_method(self, args, kwargs): return self.interpreter.environment.build_dir @noPosargs @permittedKwargs({}) @FeatureNew('meson.project_source_root', '0.56.0') def project_source_root_method(self, args, kwargs): src = self.interpreter.environment.source_dir sub = self.interpreter.root_subdir if sub == '': return src return os.path.join(src, sub) @noPosargs @permittedKwargs({}) @FeatureNew('meson.project_build_root', '0.56.0') def project_build_root_method(self, args, kwargs): src = self.interpreter.environment.build_dir sub = self.interpreter.root_subdir if sub == '': return src return os.path.join(src, sub) @noPosargs @permittedKwargs({}) @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.') def has_exe_wrapper_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool: return self.can_run_host_binaries_impl(args, kwargs) @noPosargs @permittedKwargs({}) @FeatureNew('meson.can_run_host_binaries', '0.55.0') def can_run_host_binaries_method(self, args: T.Tuple[object, ...], kwargs: T.Dict[str, object]) -> bool: return self.can_run_host_binaries_impl(args, kwargs) def can_run_host_binaries_impl(self, args, kwargs): if (self.is_cross_build_method(None, None) and self.build.environment.need_exe_wrapper()): if self.build.environment.exe_wrapper is None: return False # We return True when exe_wrap is defined, when it's not needed, and # when we're compiling natively. The last two are semantically confusing. # Need to revisit this. return True @noPosargs @permittedKwargs({}) def is_cross_build_method(self, args, kwargs): return self.build.environment.is_cross_build() @permittedKwargs({'native'}) def get_compiler_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('get_compiler_method must have one and only one argument.') cname = args[0] for_machine = Interpreter.machine_from_native_kwarg(kwargs) clist = self.interpreter.coredata.compilers[for_machine] if cname in clist: return CompilerHolder(clist[cname], self.build.environment, self.interpreter.subproject) raise InterpreterException('Tried to access compiler for language "%s", not specified for %s machine.' % (cname, for_machine.get_lower_case_name())) @noPosargs @permittedKwargs({}) def is_unity_method(self, args, kwargs): optval = self.interpreter.environment.coredata.get_builtin_option('unity') if optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject()): return True return False @noPosargs @permittedKwargs({}) def is_subproject_method(self, args, kwargs): return self.interpreter.is_subproject() @permittedKwargs({}) def install_dependency_manifest_method(self, args, kwargs): if len(args) != 1: raise InterpreterException('Must specify manifest install file name') if not isinstance(args[0], str): raise InterpreterException('Argument must be a string.') self.build.dep_manifest_name = args[0] @FeatureNew('meson.override_find_program', '0.46.0') @permittedKwargs({}) def override_find_program_method(self, args, kwargs): if len(args) != 2: raise InterpreterException('Override needs two arguments') name, exe = args if not isinstance(name, str): raise InterpreterException('First argument must be a string') if hasattr(exe, 'held_object'): exe = exe.held_object if isinstance(exe, mesonlib.File): abspath = exe.absolute_path(self.interpreter.environment.source_dir, self.interpreter.environment.build_dir) if not os.path.exists(abspath): raise InterpreterException('Tried to override %s with a file that does not exist.' 
% name) exe = OverrideProgram(name, abspath) if not isinstance(exe, (dependencies.ExternalProgram, build.Executable)): raise InterpreterException('Second argument must be an external program or executable.') self.interpreter.add_find_program_override(name, exe) @FeatureNew('meson.override_dependency', '0.54.0') @permittedKwargs({'native'}) def override_dependency_method(self, args, kwargs): if len(args) != 2: raise InterpreterException('Override needs two arguments') name = args[0] dep = args[1] if not isinstance(name, str) or not name: raise InterpreterException('First argument must be a string and cannot be empty') if hasattr(dep, 'held_object'): dep = dep.held_object if not isinstance(dep, dependencies.Dependency): raise InterpreterException('Second argument must be a dependency object') identifier = dependencies.get_dep_identifier(name, kwargs) for_machine = self.interpreter.machine_from_native_kwarg(kwargs) override = self.build.dependency_overrides[for_machine].get(identifier) if override: m = 'Tried to override dependency {!r} which has already been resolved or overridden at {}' location = mlog.get_error_location_string(override.node.filename, override.node.lineno) raise InterpreterException(m.format(name, location)) self.build.dependency_overrides[for_machine][identifier] = \ build.DependencyOverride(dep, self.interpreter.current_node) @noPosargs @permittedKwargs({}) def project_version_method(self, args, kwargs): return self.build.dep_manifest[self.interpreter.active_projectname]['version'] @FeatureNew('meson.project_license()', '0.45.0') @noPosargs @permittedKwargs({}) def project_license_method(self, args, kwargs): return self.build.dep_manifest[self.interpreter.active_projectname]['license'] @noPosargs @permittedKwargs({}) def version_method(self, args, kwargs): return MesonVersionString(coredata.version) @noPosargs @permittedKwargs({}) def project_name_method(self, args, kwargs): return self.interpreter.active_projectname @noArgsFlattening @permittedKwargs({}) def get_cross_property_method(self, args, kwargs) -> str: if len(args) < 1 or len(args) > 2: raise InterpreterException('Must have one or two arguments.') propname = args[0] if not isinstance(propname, str): raise InterpreterException('Property name must be string.') try: props = self.interpreter.environment.properties.host return props[propname] except Exception: if len(args) == 2: return args[1] raise InterpreterException('Unknown cross property: %s.' % propname) @noArgsFlattening @permittedKwargs({'native'}) @FeatureNew('meson.get_external_property', '0.54.0') def get_external_property_method(self, args: T.Sequence[str], kwargs: dict) -> str: if len(args) < 1 or len(args) > 2: raise InterpreterException('Must have one or two positional arguments.') propname = args[0] if not isinstance(propname, str): raise InterpreterException('Property name must be string.') def _get_native() -> str: try: props = self.interpreter.environment.properties.build return props[propname] except Exception: if len(args) == 2: return args[1] raise InterpreterException('Unknown native property: %s.' 
% propname) if 'native' in kwargs: if kwargs['native']: return _get_native() else: return self.get_cross_property_method(args, {}) else: # native: not specified if self.build.environment.is_cross_build(): return self.get_cross_property_method(args, kwargs) else: return _get_native() known_library_kwargs = ( build.known_shlib_kwargs | build.known_stlib_kwargs ) known_build_target_kwargs = ( known_library_kwargs | build.known_exe_kwargs | build.known_jar_kwargs | {'target_type'} ) _base_test_args = {'args', 'depends', 'env', 'should_fail', 'timeout', 'workdir', 'suite', 'priority', 'protocol'} permitted_kwargs = {'add_global_arguments': {'language', 'native'}, 'add_global_link_arguments': {'language', 'native'}, 'add_languages': {'required', 'native'}, 'add_project_link_arguments': {'language', 'native'}, 'add_project_arguments': {'language', 'native'}, 'add_test_setup': {'exe_wrapper', 'gdb', 'timeout_multiplier', 'env', 'is_default'}, 'benchmark': _base_test_args, 'build_target': known_build_target_kwargs, 'configure_file': {'input', 'output', 'configuration', 'command', 'copy', 'depfile', 'install_dir', 'install_mode', 'capture', 'install', 'format', 'output_format', 'encoding'}, 'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'install_mode', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default', 'build_always_stale', 'console'}, 'dependency': {'default_options', 'embed', 'fallback', 'language', 'main', 'method', 'modules', 'components', 'cmake_module_path', 'optional_modules', 'native', 'not_found_message', 'required', 'static', 'version', 'private_headers', 'cmake_args', 'include_type', }, 'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'link_whole', 'version', 'variables', }, 'executable': build.known_exe_kwargs, 'find_program': {'required', 'native', 'version', 'dirs'}, 'generator': {'arguments', 'output', 'depends', 'depfile', 'capture', 'preserve_path_from'}, 'include_directories': {'is_system'}, 'install_data': {'install_dir', 'install_mode', 'rename', 'sources'}, 'install_headers': {'install_dir', 'install_mode', 'subdir'}, 'install_man': {'install_dir', 'install_mode'}, 'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'}, 'jar': build.known_jar_kwargs, 'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'}, 'run_command': {'check', 'capture', 'env'}, 'run_target': {'command', 'depends'}, 'shared_library': build.known_shlib_kwargs, 'shared_module': build.known_shmod_kwargs, 'static_library': build.known_stlib_kwargs, 'both_libraries': known_library_kwargs, 'library': known_library_kwargs, 'subdir': {'if_found'}, 'subproject': {'version', 'default_options', 'required'}, 'test': set.union(_base_test_args, {'is_parallel'}), 'vcs_tag': {'input', 'output', 'fallback', 'command', 'replace_string'}, } class Interpreter(InterpreterBase): def __init__( self, build: build.Build, backend: T.Optional[Backend] = None, subproject: str = '', subdir: str = '', subproject_dir: str = 'subprojects', modules: T.Optional[T.Dict[str, ExtensionModule]] = None, default_project_options: T.Optional[T.Dict[str, str]] = None, mock: bool = False, ast: T.Optional[mparser.CodeBlockNode] = None, is_translated: bool = False, ) -> None: super().__init__(build.environment.get_source_dir(), subdir, subproject) self.an_unpicklable_object = mesonlib.an_unpicklable_object self.build = build self.environment = 
build.environment self.coredata = self.environment.get_coredata() self.backend = backend self.summary = {} if modules is None: self.modules = {} else: self.modules = modules # Subproject directory is usually the name of the subproject, but can # be different for dependencies provided by wrap files. self.subproject_directory_name = subdir.split(os.path.sep)[-1] self.subproject_dir = subproject_dir self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt') if not mock and ast is None: self.load_root_meson_file() self.sanity_check_ast() elif ast is not None: self.ast = ast self.sanity_check_ast() self.builtin.update({'meson': MesonMain(build, self)}) self.generators = [] self.visited_subdirs = {} self.project_args_frozen = False self.global_args_frozen = False # implies self.project_args_frozen self.subprojects = {} self.subproject_stack = [] self.configure_file_outputs = {} # Passed from the outside, only used in subprojects. if default_project_options: self.default_project_options = default_project_options.copy() else: self.default_project_options = {} self.project_default_options = {} self.build_func_dict() # build_def_files needs to be defined before parse_project is called # # For non-meson subprojects, we'll be using the ast. Even if it does # exist we don't want to add a dependency on it, it's autogenerated # from the actual build files, and is just for reference. self.build_def_files = [] build_filename = os.path.join(self.subdir, environment.build_filename) if not is_translated: self.build_def_files.append(build_filename) if not mock: self.parse_project() self._redetect_machines() def _redetect_machines(self): # Re-initialize machine descriptions. We can do a better job now because we # have the compilers needed to gain more knowledge, so wipe out old # inference and start over. machines = self.build.environment.machines.miss_defaulting() machines.build = environment.detect_machine_info(self.coredata.compilers.build) self.build.environment.machines = machines.default_missing() assert self.build.environment.machines.build.cpu is not None assert self.build.environment.machines.host.cpu is not None assert self.build.environment.machines.target.cpu is not None self.builtin['build_machine'] = \ MachineHolder(self.build.environment.machines.build) self.builtin['host_machine'] = \ MachineHolder(self.build.environment.machines.host) self.builtin['target_machine'] = \ MachineHolder(self.build.environment.machines.target) # TODO: Why is this in interpreter.py and not CoreData or Environment? 
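The method defined next compares the project's default options against the values the configuration actually ended up with. A minimal standalone sketch of that comparison, using plain dictionaries and hypothetical option names rather than Meson's coredata objects:

from typing import Dict, Iterator, Tuple

def non_matching_defaults(project_defaults: Dict[str, str],
                          current_options: Dict[str, str]) -> Iterator[Tuple[str, str, str]]:
    # Yield (name, default_value, current_value) for every default that
    # differs from what the configuration currently holds.
    for name, default_value in project_defaults.items():
        current_value = current_options.get(name)
        if current_value is not None and current_value != default_value:
            yield (name, default_value, current_value)

# Hypothetical values, for illustration only.
defaults = {'warning_level': '3', 'cpp_std': 'c++17'}
current = {'warning_level': '1', 'cpp_std': 'c++17', 'buildtype': 'debug'}
for mismatch in non_matching_defaults(defaults, current):
    print('default option {!r} is {!r} but the build uses {!r}'.format(*mismatch))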
def get_non_matching_default_options(self) -> T.Iterator[T.Tuple[str, str, coredata.UserOption]]: env = self.environment for def_opt_name, def_opt_value in self.project_default_options.items(): for opts in env.coredata.get_all_options(): cur_opt_value = opts.get(def_opt_name) if cur_opt_value is not None: def_opt_value = env.coredata.validate_option_value(def_opt_name, def_opt_value) if def_opt_value != cur_opt_value.value: yield (def_opt_name, def_opt_value, cur_opt_value) def build_func_dict(self): self.funcs.update({'add_global_arguments': self.func_add_global_arguments, 'add_project_arguments': self.func_add_project_arguments, 'add_global_link_arguments': self.func_add_global_link_arguments, 'add_project_link_arguments': self.func_add_project_link_arguments, 'add_test_setup': self.func_add_test_setup, 'add_languages': self.func_add_languages, 'alias_target': self.func_alias_target, 'assert': self.func_assert, 'benchmark': self.func_benchmark, 'build_target': self.func_build_target, 'configuration_data': self.func_configuration_data, 'configure_file': self.func_configure_file, 'custom_target': self.func_custom_target, 'declare_dependency': self.func_declare_dependency, 'dependency': self.func_dependency, 'disabler': self.func_disabler, 'environment': self.func_environment, 'error': self.func_error, 'executable': self.func_executable, 'generator': self.func_generator, 'gettext': self.func_gettext, 'get_option': self.func_get_option, 'get_variable': self.func_get_variable, 'files': self.func_files, 'find_library': self.func_find_library, 'find_program': self.func_find_program, 'include_directories': self.func_include_directories, 'import': self.func_import, 'install_data': self.func_install_data, 'install_headers': self.func_install_headers, 'install_man': self.func_install_man, 'install_subdir': self.func_install_subdir, 'is_disabler': self.func_is_disabler, 'is_variable': self.func_is_variable, 'jar': self.func_jar, 'join_paths': self.func_join_paths, 'library': self.func_library, 'message': self.func_message, 'warning': self.func_warning, 'option': self.func_option, 'project': self.func_project, 'run_target': self.func_run_target, 'run_command': self.func_run_command, 'set_variable': self.func_set_variable, 'subdir': self.func_subdir, 'subdir_done': self.func_subdir_done, 'subproject': self.func_subproject, 'summary': self.func_summary, 'shared_library': self.func_shared_lib, 'shared_module': self.func_shared_module, 'static_library': self.func_static_lib, 'both_libraries': self.func_both_lib, 'test': self.func_test, 'vcs_tag': self.func_vcs_tag }) if 'MESON_UNIT_TEST' in os.environ: self.funcs.update({'exception': self.func_exception}) def holderify(self, item): if isinstance(item, list): return [self.holderify(x) for x in item] if isinstance(item, dict): return {k: self.holderify(v) for k, v in item.items()} if isinstance(item, build.CustomTarget): return CustomTargetHolder(item, self) elif isinstance(item, (int, str, bool, Disabler, InterpreterObject)) or item is None: return item elif isinstance(item, build.Executable): return ExecutableHolder(item, self) elif isinstance(item, build.GeneratedList): return GeneratedListHolder(item) elif isinstance(item, build.RunTarget): raise RuntimeError('This is not a pipe.') elif isinstance(item, build.RunScript): raise RuntimeError('Do not do this.') elif isinstance(item, build.Data): return DataHolder(item) elif isinstance(item, dependencies.Dependency): return DependencyHolder(item, self.subproject) elif isinstance(item, 
dependencies.ExternalProgram): return ExternalProgramHolder(item, self.subproject) elif hasattr(item, 'held_object'): return item elif isinstance(item, InterpreterObject): return item else: raise InterpreterException('Module returned a value of unknown type.') def process_new_values(self, invalues): invalues = listify(invalues) for v in invalues: if isinstance(v, (RunTargetHolder, CustomTargetHolder, BuildTargetHolder)): v = v.held_object if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)): self.add_target(v.name, v) elif isinstance(v, list): self.module_method_callback(v) elif isinstance(v, build.GeneratedList): pass elif isinstance(v, build.RunScript): self.build.install_scripts.append(v) elif isinstance(v, build.Data): self.build.data.append(v) elif isinstance(v, dependencies.ExternalProgram): return ExternalProgramHolder(v, self.subproject) elif isinstance(v, dependencies.InternalDependency): # FIXME: This is special cased and not ideal: # The first source is our new VapiTarget, the rest are deps self.process_new_values(v.sources[0]) elif isinstance(v, InstallDir): self.build.install_dirs.append(v) elif hasattr(v, 'held_object'): pass elif isinstance(v, (int, str, bool, Disabler)): pass else: raise InterpreterException('Module returned a value of unknown type.') def module_method_callback(self, return_object): if not isinstance(return_object, ModuleReturnValue): raise InterpreterException('Bug in module, it returned an invalid object') invalues = return_object.new_objects self.process_new_values(invalues) return self.holderify(return_object.return_value) def get_build_def_files(self) -> T.List[str]: return self.build_def_files def add_build_def_file(self, f): # Use relative path for files within source directory, and absolute path # for system files. Skip files within build directory. Also skip not regular # files (e.g. /dev/stdout) Normalize the path to avoid duplicates, this # is especially important to convert '/' to '\' on Windows. if isinstance(f, mesonlib.File): if f.is_built: return f = os.path.normpath(f.relative_name()) elif os.path.isfile(f) and not f.startswith('/dev'): srcdir = Path(self.environment.get_source_dir()) builddir = Path(self.environment.get_build_dir()) try: f = Path(f).resolve() except OSError: f = Path(f) s = f.stat() if (hasattr(s, 'st_file_attributes') and s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK): # This is a Windows Store link which we can't # resolve, so just do our best otherwise. 
f = f.parent.resolve() / f.name else: raise if builddir in f.parents: return if srcdir in f.parents: f = f.relative_to(srcdir) f = str(f) else: return if f not in self.build_def_files: self.build_def_files.append(f) def get_variables(self): return self.variables def check_stdlibs(self): machine_choices = [MachineChoice.HOST] if self.coredata.is_cross_build(): machine_choices.append(MachineChoice.BUILD) for for_machine in machine_choices: props = self.build.environment.properties[for_machine] for l in self.coredata.compilers[for_machine].keys(): try: di = mesonlib.stringlistify(props.get_stdlib(l)) except KeyError: continue if len(di) == 1: FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject) kwargs = {'fallback': di, 'native': for_machine is MachineChoice.BUILD, } name = display_name = l + '_stdlib' dep = self.dependency_impl(name, display_name, kwargs, force_fallback=True) self.build.stdlibs[for_machine][l] = dep def import_module(self, modname): if modname in self.modules: return try: module = importlib.import_module('mesonbuild.modules.' + modname) except ImportError: raise InvalidArguments('Module "%s" does not exist' % (modname, )) ext_module = module.initialize(self) assert isinstance(ext_module, ExtensionModule) self.modules[modname] = ext_module @stringArgs @noKwargs def func_import(self, node, args, kwargs): if len(args) != 1: raise InvalidCode('Import takes one argument.') modname = args[0] if modname.startswith('unstable-'): plainname = modname.split('-', 1)[1] try: # check if stable module exists self.import_module(plainname) mlog.warning('Module %s is now stable, please use the %s module instead.' % (modname, plainname)) modname = plainname except InvalidArguments: mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' 
% modname, location=node) modname = 'unstable_' + plainname self.import_module(modname) return ModuleHolder(modname, self.modules[modname], self) @stringArgs @noKwargs def func_files(self, node, args, kwargs): return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in args] # Used by declare_dependency() and pkgconfig.generate() def extract_variables(self, kwargs, argname='variables', list_new=False, dict_new=False): variables = kwargs.get(argname, {}) if isinstance(variables, dict): if dict_new and variables: FeatureNew.single_use('variables as dictionary', '0.56.0', self.subproject) else: varlist = mesonlib.stringlistify(variables) if list_new: FeatureNew.single_use('variables as list of strings', '0.56.0', self.subproject) variables = {} for v in varlist: try: (key, value) = v.split('=', 1) except ValueError: raise InterpreterException('Variable {!r} must have a value separated by equals sign.'.format(v)) variables[key.strip()] = value.strip() for k, v in variables.items(): if not k or not v: raise InterpreterException('Empty variable name or value') if any(c.isspace() for c in k): raise InterpreterException('Invalid whitespace in variable name "{}"'.format(k)) if not isinstance(v, str): raise InterpreterException('variables values must be strings.') return variables @FeatureNewKwargs('declare_dependency', '0.46.0', ['link_whole']) @FeatureNewKwargs('declare_dependency', '0.54.0', ['variables']) @permittedKwargs(permitted_kwargs['declare_dependency']) @noPosargs def func_declare_dependency(self, node, args, kwargs): version = kwargs.get('version', self.project_version) if not isinstance(version, str): raise InterpreterException('Version must be a string.') incs = self.extract_incdirs(kwargs) libs = unholder(extract_as_list(kwargs, 'link_with')) libs_whole = unholder(extract_as_list(kwargs, 'link_whole')) sources = extract_as_list(kwargs, 'sources') sources = unholder(listify(self.source_strings_to_files(sources))) deps = unholder(extract_as_list(kwargs, 'dependencies')) compile_args = mesonlib.stringlistify(kwargs.get('compile_args', [])) link_args = mesonlib.stringlistify(kwargs.get('link_args', [])) variables = self.extract_variables(kwargs, list_new=True) final_deps = [] for d in deps: try: d = d.held_object except Exception: pass if not isinstance(d, (dependencies.Dependency, dependencies.ExternalLibrary, dependencies.InternalDependency)): raise InterpreterException('Dependencies must be external deps') final_deps.append(d) for l in libs: if isinstance(l, dependencies.Dependency): raise InterpreterException('''Entries in "link_with" may only be self-built targets, external dependencies (including libraries) must go to "dependencies".''') dep = dependencies.InternalDependency(version, incs, compile_args, link_args, libs, libs_whole, sources, final_deps, variables) return DependencyHolder(dep, self.subproject) @noKwargs def func_assert(self, node, args, kwargs): if len(args) == 1: FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject) value = args[0] message = None elif len(args) == 2: value, message = args if not isinstance(message, str): raise InterpreterException('Assert message not a string.') else: raise InterpreterException('Assert takes between one and two arguments') if not isinstance(value, bool): raise InterpreterException('Assert value not bool.') if not value: if message is None: from .ast import AstPrinter printer = AstPrinter() node.args.arguments[0].accept(printer) message = printer.result 
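When assert() is called without a message, the failing expression's AST node is pretty-printed to produce one, as seen just above. A rough standalone analogy, using Python's standard ast module (ast.unparse needs Python 3.9+) in place of Meson's mparser and AstPrinter:

import ast

def assert_with_source(expr: str, namespace: dict) -> None:
    # Parse once so the same tree can be both evaluated and, on failure,
    # turned back into source text for the error message.
    tree = ast.parse(expr, mode='eval')
    if not eval(compile(tree, '<assert>', 'eval'), namespace):
        raise AssertionError('Assert failed: ' + ast.unparse(tree.body))

assert_with_source('x > 2', {'x': 3})       # passes silently
try:
    assert_with_source('x > 5', {'x': 3})   # fails; message carries the expression
except AssertionError as e:
    print(e)                                # Assert failed: x > 5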
raise InterpreterException('Assert failed: ' + message) def validate_arguments(self, args, argcount, arg_types): if argcount is not None: if argcount != len(args): raise InvalidArguments('Expected %d arguments, got %d.' % (argcount, len(args))) for actual, wanted in zip(args, arg_types): if wanted is not None: if not isinstance(actual, wanted): raise InvalidArguments('Incorrect argument type.') @FeatureNewKwargs('run_command', '0.50.0', ['env']) @FeatureNewKwargs('run_command', '0.47.0', ['check', 'capture']) @permittedKwargs(permitted_kwargs['run_command']) def func_run_command(self, node, args, kwargs): return self.run_command_impl(node, args, kwargs) def run_command_impl(self, node, args, kwargs, in_builddir=False): if len(args) < 1: raise InterpreterException('Not enough arguments') cmd, *cargs = args capture = kwargs.get('capture', True) srcdir = self.environment.get_source_dir() builddir = self.environment.get_build_dir() check = kwargs.get('check', False) if not isinstance(check, bool): raise InterpreterException('Check must be boolean.') env = self.unpack_env_kwarg(kwargs) m = 'must be a string, or the output of find_program(), files() '\ 'or configure_file(), or a compiler object; not {!r}' expanded_args = [] if isinstance(cmd, ExternalProgramHolder): cmd = cmd.held_object if isinstance(cmd, build.Executable): progname = node.args.arguments[0].value msg = 'Program {!r} was overridden with the compiled executable {!r}'\ ' and therefore cannot be used during configuration' raise InterpreterException(msg.format(progname, cmd.description())) if not cmd.found(): raise InterpreterException('command {!r} not found or not executable'.format(cmd.get_name())) elif isinstance(cmd, CompilerHolder): exelist = cmd.compiler.get_exelist() cmd = exelist[0] prog = ExternalProgram(cmd, silent=True) if not prog.found(): raise InterpreterException('Program {!r} not found ' 'or not executable'.format(cmd)) cmd = prog expanded_args = exelist[1:] else: if isinstance(cmd, mesonlib.File): cmd = cmd.absolute_path(srcdir, builddir) elif not isinstance(cmd, str): raise InterpreterException('First argument ' + m.format(cmd)) # Prefer scripts in the current source directory search_dir = os.path.join(srcdir, self.subdir) prog = ExternalProgram(cmd, silent=True, search_dir=search_dir) if not prog.found(): raise InterpreterException('Program or command {!r} not found ' 'or not executable'.format(cmd)) cmd = prog for a in listify(cargs): if isinstance(a, str): expanded_args.append(a) elif isinstance(a, mesonlib.File): expanded_args.append(a.absolute_path(srcdir, builddir)) elif isinstance(a, ExternalProgramHolder): expanded_args.append(a.held_object.get_path()) else: raise InterpreterException('Arguments ' + m.format(a)) # If any file that was used as an argument to the command # changes, we must re-run the configuration step. self.add_build_def_file(cmd.get_path()) for a in expanded_args: if not os.path.isabs(a): a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a) self.add_build_def_file(a) return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir, self.environment.get_build_command() + ['introspect'], in_builddir=in_builddir, check=check, capture=capture) @stringArgs def func_gettext(self, nodes, args, kwargs): raise InterpreterException('Gettext() function has been moved to module i18n. Import it and use i18n.gettext() instead') def func_option(self, nodes, args, kwargs): raise InterpreterException('Tried to call option() in build description file. 
All options must be in the option file.') @FeatureNewKwargs('subproject', '0.38.0', ['default_options']) @permittedKwargs(permitted_kwargs['subproject']) @stringArgs def func_subproject(self, nodes, args, kwargs): if len(args) != 1: raise InterpreterException('Subproject takes exactly one argument') subp_name = args[0] return self.do_subproject(subp_name, 'meson', kwargs) def disabled_subproject(self, subp_name, disabled_feature=None, exception=None): sub = SubprojectHolder(None, os.path.join(self.subproject_dir, subp_name), disabled_feature=disabled_feature, exception=exception) self.subprojects[subp_name] = sub return sub def get_subproject(self, subp_name): sub = self.subprojects.get(subp_name) if sub and sub.found(): return sub return None def do_subproject(self, subp_name: str, method: str, kwargs): disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) if disabled: mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled') return self.disabled_subproject(subp_name, disabled_feature=feature) default_options = mesonlib.stringlistify(kwargs.get('default_options', [])) default_options = coredata.create_options_dict(default_options) if subp_name == '': raise InterpreterException('Subproject name must not be empty.') if subp_name[0] == '.': raise InterpreterException('Subproject name must not start with a period.') if '..' in subp_name: raise InterpreterException('Subproject name must not contain a ".." path segment.') if os.path.isabs(subp_name): raise InterpreterException('Subproject name must not be an absolute path.') if has_path_sep(subp_name): mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.', location=self.current_node) if subp_name in self.subproject_stack: fullstack = self.subproject_stack + [subp_name] incpath = ' => '.join(fullstack) raise InvalidCode('Recursive include of subprojects: %s.' % incpath) if subp_name in self.subprojects: subproject = self.subprojects[subp_name] if required and not subproject.found(): raise InterpreterException('Subproject "%s" required but not found.' % (subproject.subdir)) return subproject r = self.environment.wrap_resolver try: subdir = r.resolve(subp_name, method, self.subproject) except wrap.WrapException as e: if not required: mlog.log(e) mlog.log('Subproject ', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)') return self.disabled_subproject(subp_name, exception=e) raise e subdir_abs = os.path.join(self.environment.get_source_dir(), subdir) os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True) self.global_args_frozen = True mlog.log() with mlog.nested(): mlog.log('Executing subproject', mlog.bold(subp_name), 'method', mlog.bold(method), '\n') try: if method == 'meson': return self._do_subproject_meson(subp_name, subdir, default_options, kwargs) elif method == 'cmake': return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs) else: raise InterpreterException('The method {} is invalid for the subproject {}'.format(method, subp_name)) # Invalid code is always an error except InvalidCode: raise except Exception as e: if not required: with mlog.nested(): # Suppress the 'ERROR:' prefix because this exception is not # fatal and VS CI treat any logs with "ERROR:" as fatal. 
mlog.exception(e, prefix=mlog.yellow('Exception:')) mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)') return self.disabled_subproject(subp_name, exception=e) raise e def _do_subproject_meson(self, subp_name: str, subdir: str, default_options, kwargs, ast: T.Optional[mparser.CodeBlockNode] = None, build_def_files: T.Optional[T.List[str]] = None, is_translated: bool = False) -> SubprojectHolder: with mlog.nested(): new_build = self.build.copy() subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir, self.modules, default_options, ast=ast, is_translated=is_translated) subi.subprojects = self.subprojects subi.subproject_stack = self.subproject_stack + [subp_name] current_active = self.active_projectname current_warnings_counter = mlog.log_warnings_counter mlog.log_warnings_counter = 0 subi.run() subi_warnings = mlog.log_warnings_counter mlog.log_warnings_counter = current_warnings_counter mlog.log('Subproject', mlog.bold(subp_name), 'finished.') mlog.log() if 'version' in kwargs: pv = subi.project_version wanted = kwargs['version'] if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]: raise InterpreterException('Subproject %s version is %s but %s required.' % (subp_name, pv, wanted)) self.active_projectname = current_active self.subprojects.update(subi.subprojects) self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings) # Duplicates are possible when subproject uses files from project root if build_def_files: self.build_def_files = list(set(self.build_def_files + build_def_files)) # We always need the subi.build_def_files, to propgate sub-sub-projects self.build_def_files = list(set(self.build_def_files + subi.build_def_files)) self.build.merge(subi.build) self.build.subprojects[subp_name] = subi.project_version self.summary.update(subi.summary) return self.subprojects[subp_name] def _do_subproject_cmake(self, subp_name, subdir, subdir_abs, default_options, kwargs): with mlog.nested(): new_build = self.build.copy() prefix = self.coredata.builtins['prefix'].value from .modules.cmake import CMakeSubprojectOptions options = kwargs.get('options', CMakeSubprojectOptions()) if not isinstance(options, CMakeSubprojectOptions): raise InterpreterException('"options" kwarg must be CMakeSubprojectOptions' ' object (created by cmake.subproject_options())') cmake_options = mesonlib.stringlistify(kwargs.get('cmake_options', [])) cmake_options += options.cmake_options cm_int = CMakeInterpreter(new_build, Path(subdir), Path(subdir_abs), Path(prefix), new_build.environment, self.backend) cm_int.initialise(cmake_options) cm_int.analyse() # Generate a meson ast and execute it with the normal do_subproject_meson ast = cm_int.pretend_to_be_meson(options.target_options) mlog.log() with mlog.nested(): mlog.log('Processing generated meson AST') # Debug print the generated meson file from .ast import AstIndentationGenerator, AstPrinter printer = AstPrinter() ast.accept(AstIndentationGenerator()) ast.accept(printer) printer.post_process() meson_filename = os.path.join(self.build.environment.get_build_dir(), subdir, 'meson.build') with open(meson_filename, "w") as f: f.write(printer.result) mlog.log('Build file:', meson_filename) mlog.cmd_ci_include(meson_filename) mlog.log() result = self._do_subproject_meson(subp_name, subdir, default_options, kwargs, ast, cm_int.bs_files, is_translated=True) result.cm_interpreter = cm_int mlog.log() return result def get_option_internal(self, optname): raw_optname = 
optname if self.is_subproject(): optname = self.subproject + ':' + optname for opts in [ self.coredata.base_options, compilers.base_options, self.coredata.builtins, dict(self.coredata.get_prefixed_options_per_machine(self.coredata.builtins_per_machine)), dict(self.coredata.flatten_lang_iterator( self.coredata.get_prefixed_options_per_machine(self.coredata.compiler_options))), ]: v = opts.get(optname) if v is None or v.yielding: v = opts.get(raw_optname) if v is not None: return v try: opt = self.coredata.user_options[optname] if opt.yielding and ':' in optname and raw_optname in self.coredata.user_options: popt = self.coredata.user_options[raw_optname] if type(opt) is type(popt): opt = popt else: # Get class name, then option type as a string opt_type = opt.__class__.__name__[4:][:-6].lower() popt_type = popt.__class__.__name__[4:][:-6].lower() # This is not a hard error to avoid dependency hell, the workaround # when this happens is to simply set the subproject's option directly. mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield ' 'to parent option of type {3!r}, ignoring parent value. ' 'Use -D{2}:{0}=value to set the value for this option manually' '.'.format(raw_optname, opt_type, self.subproject, popt_type), location=self.current_node) return opt except KeyError: pass raise InterpreterException('Tried to access unknown option "%s".' % optname) @stringArgs @noKwargs def func_get_option(self, nodes, args, kwargs): if len(args) != 1: raise InterpreterException('Argument required for get_option.') optname = args[0] if ':' in optname: raise InterpreterException('Having a colon in option name is forbidden, ' 'projects are not allowed to directly access ' 'options of other subprojects.') opt = self.get_option_internal(optname) if isinstance(opt, coredata.UserFeatureOption): return FeatureOptionHolder(self.environment, optname, opt) elif isinstance(opt, coredata.UserOption): return opt.value return opt @noKwargs def func_configuration_data(self, node, args, kwargs): if len(args) > 1: raise InterpreterException('configuration_data takes only one optional positional arguments') elif len(args) == 1: FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject) initial_values = args[0] if not isinstance(initial_values, dict): raise InterpreterException('configuration_data first argument must be a dictionary') else: initial_values = {} return ConfigurationDataHolder(self.subproject, initial_values) def set_backend(self): # The backend is already set when parsing subprojects if self.backend is not None: return backend = self.coredata.get_builtin_option('backend') from .backend import backends self.backend = backends.get_backend_from_name(backend, self.build, self) if self.backend is None: raise InterpreterException('Unknown backend "%s".' % backend) if backend != self.backend.name: if self.backend.name.startswith('vs'): mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
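get_option_internal above resolves a subproject-qualified option name and lets a yielding subproject option fall back to the parent project's value. A simplified, self-contained sketch of that lookup with a hypothetical option store, not the real coredata layout:

from typing import Dict, NamedTuple, Optional

class Option(NamedTuple):
    value: str
    yielding: bool = False

def lookup_option(options: Dict[str, Option], name: str, subproject: str = '') -> Optional[str]:
    # A subproject first looks up its own 'sub:name' entry; a yielding entry
    # defers to the top-level option when the parent defines one.
    scoped = options.get('{}:{}'.format(subproject, name)) if subproject else None
    if scoped is not None and not scoped.yielding:
        return scoped.value
    top = options.get(name)
    if top is not None:
        return top.value
    return scoped.value if scoped is not None else None

opts = {'warning_level': Option('3'),
        'libfoo:warning_level': Option('1', yielding=True)}
print(lookup_option(opts, 'warning_level', 'libfoo'))  # 3: the subproject option yields
print(lookup_option(opts, 'warning_level'))            # 3: top-level lookup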