1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
|
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This provides C++ code generation targeting the Itanium C++ ABI. The class
// in this file generates structures that follow the Itanium C++ ABI, which is
// documented at:
// https://itanium-cxx-abi.github.io/cxx-abi/abi.html
// https://itanium-cxx-abi.github.io/cxx-abi/abi-eh.html
//
// It also supports the closely-related ARM ABI, documented at:
// https://developer.arm.com/documentation/ihi0041/g/
//
//===----------------------------------------------------------------------===//
#include "CIRGenCXXABI.h"
#include "CIRGenFunction.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/GlobalDecl.h"
#include "clang/AST/VTableBuilder.h"
#include "clang/CIR/MissingFeatures.h"
#include "llvm/Support/ErrorHandling.h"
using namespace clang;
using namespace clang::CIRGen;
namespace {
/// Itanium C++ ABI support for ClangIR code generation.
///
/// Implements the ABI-specific pieces of structor emission (the
/// complete/base/deleting variants and the implicit VTT parameter) as well as
/// vtable/VTT emission and address-point lookup, following the Itanium C++
/// ABI.
class CIRGenItaniumCXXABI : public CIRGenCXXABI {
protected:
  /// All the vtables which have been defined.
  llvm::DenseMap<const CXXRecordDecl *, cir::GlobalOp> vtables;

public:
  CIRGenItaniumCXXABI(CIRGenModule &cgm) : CIRGenCXXABI(cgm) {
    // The ARM-flavored method-pointer and guard-variable variants of the
    // Itanium ABI are not implemented yet.
    assert(!cir::MissingFeatures::cxxabiUseARMMethodPtrABI());
    assert(!cir::MissingFeatures::cxxabiUseARMGuardVarABI());
  }

  AddedStructorArgs getImplicitConstructorArgs(CIRGenFunction &cgf,
                                               const CXXConstructorDecl *d,
                                               CXXCtorType type,
                                               bool forVirtualBase,
                                               bool delegating) override;

  bool needsVTTParameter(clang::GlobalDecl gd) override;

  AddedStructorArgCounts
  buildStructorSignature(GlobalDecl gd,
                         llvm::SmallVectorImpl<CanQualType> &argTys) override;

  void emitInstanceFunctionProlog(SourceLocation loc,
                                  CIRGenFunction &cgf) override;

  void addImplicitStructorParams(CIRGenFunction &cgf, QualType &resTy,
                                 FunctionArgList &params) override;

  void emitCXXConstructors(const clang::CXXConstructorDecl *d) override;
  void emitCXXDestructors(const clang::CXXDestructorDecl *d) override;
  void emitCXXStructor(clang::GlobalDecl gd) override;

  void emitDestructorCall(CIRGenFunction &cgf, const CXXDestructorDecl *dd,
                          CXXDtorType type, bool forVirtualBase,
                          bool delegating, Address thisAddr,
                          QualType thisTy) override;
  void emitRethrow(CIRGenFunction &cgf, bool isNoReturn) override;

  bool useThunkForDtorVariant(const CXXDestructorDecl *dtor,
                              CXXDtorType dt) const override {
    // Itanium does not emit any destructor variant as an inline thunk.
    // Delegating may occur as an optimization, but all variants are either
    // emitted with external linkage or as linkonce if they are inline and
    // used.
    return false;
  }

  bool isVirtualOffsetNeededForVTableField(CIRGenFunction &cgf,
                                           CIRGenFunction::VPtr vptr) override;

  cir::GlobalOp getAddrOfVTable(const CXXRecordDecl *rd,
                                CharUnits vptrOffset) override;
  CIRGenCallee getVirtualFunctionPointer(CIRGenFunction &cgf,
                                         clang::GlobalDecl gd, Address thisAddr,
                                         mlir::Type ty,
                                         SourceLocation loc) override;
  mlir::Value getVTableAddressPoint(BaseSubobject base,
                                    const CXXRecordDecl *vtableClass) override;
  /// Address-point lookup through the VTT, used inside structors of classes
  /// with virtual bases (not part of the CIRGenCXXABI interface).
  mlir::Value getVTableAddressPointInStructorWithVTT(
      CIRGenFunction &cgf, const CXXRecordDecl *vtableClass, BaseSubobject base,
      const CXXRecordDecl *nearestVBase);
  mlir::Value getVTableAddressPointInStructor(
      CIRGenFunction &cgf, const clang::CXXRecordDecl *vtableClass,
      clang::BaseSubobject base,
      const clang::CXXRecordDecl *nearestVBase) override;
  void emitVTableDefinitions(CIRGenVTables &cgvt,
                             const CXXRecordDecl *rd) override;
  void emitVirtualInheritanceTables(const CXXRecordDecl *rd) override;

  bool doStructorsInitializeVPtrs(const CXXRecordDecl *vtableClass) override {
    // In the Itanium ABI, constructors and destructors store the vptrs
    // themselves.
    return true;
  }

  mlir::Value
  getVirtualBaseClassOffset(mlir::Location loc, CIRGenFunction &cgf,
                            Address thisAddr, const CXXRecordDecl *classDecl,
                            const CXXRecordDecl *baseClassDecl) override;
};
} // namespace
void CIRGenItaniumCXXABI::emitInstanceFunctionProlog(SourceLocation loc,
                                                     CIRGenFunction &cgf) {
  // Naked functions have no prolog.
  if (cgf.curFuncDecl && cgf.curFuncDecl->hasAttr<NakedAttr>())
    cgf.cgm.errorNYI(cgf.curFuncDecl->getLocation(),
                     "emitInstanceFunctionProlog: Naked");

  // Initialize the 'this' slot. In the Itanium C++ ABI, no prologue
  // adjustments are required, because they are all handled by thunks.
  setCXXABIThisValue(cgf, loadIncomingCXXThis(cgf));

  // Initialize the 'vtt' slot if this structor carries a VTT parameter.
  if (auto *vttDecl = getStructorImplicitParamDecl(cgf)) {
    cir::LoadOp vttVal = cgf.getBuilder().createLoad(
        cgf.getLoc(loc), cgf.getAddrOfLocalVar(vttDecl));
    setStructorImplicitParamValue(cgf, vttVal);
  }

  // If this is a function that the ABI specifies returns 'this', initialize
  // the return slot to 'this' at the start of the function.
  //
  // Unlike the setting of return types, this is done within the ABI
  // implementation instead of by clients of CIRGenCXXBI because:
  // 1) getThisValue is currently protected
  // 2) in theory, an ABI could implement 'this' returns some other way;
  //    HasThisReturn only specifies a contract, not the implementation
  if (hasThisReturn(cgf.curGD))
    cgf.cgm.errorNYI(cgf.curFuncDecl->getLocation(),
                     "emitInstanceFunctionProlog: hasThisReturn");
}
CIRGenCXXABI::AddedStructorArgCounts
CIRGenItaniumCXXABI::buildStructorSignature(
    GlobalDecl gd, llvm::SmallVectorImpl<CanQualType> &argTys) {
  clang::ASTContext &astContext = cgm.getASTContext();

  // All parameters are already in place except VTT, which goes after 'this'.
  // These are clang types, so we don't need to worry about sret yet.

  // Check if we need to add a VTT parameter (which has type void **).
  // Only the base variant of a constructor/destructor of a class with
  // virtual bases receives a VTT.
  if ((isa<CXXConstructorDecl>(gd.getDecl()) ? gd.getCtorType() == Ctor_Base
                                             : gd.getDtorType() == Dtor_Base) &&
      cast<CXXMethodDecl>(gd.getDecl())->getParent()->getNumVBases() != 0) {
    assert(!cir::MissingFeatures::addressSpace());
    // Insert 'void **' immediately after the 'this' parameter (slot 0).
    argTys.insert(argTys.begin() + 1,
                  astContext.getPointerType(
                      CanQualType::CreateUnsafe(astContext.VoidPtrTy)));
    return AddedStructorArgCounts::withPrefix(1);
  }
  return AddedStructorArgCounts{};
}
// Find out how to cirgen the complete destructor and constructor
namespace {
/// Strategies for emitting the complete structor variant relative to the
/// base variant:
///  - Emit:   emit it as a separate function body.
///  - RAUW:   replace all uses of the complete-variant symbol with the base.
///  - Alias:  emit it as an alias of the base variant.
///  - COMDAT: emit an alias placed in its own COMDAT group (ELF/wasm only).
enum class StructorCIRGen { Emit, RAUW, Alias, COMDAT };
} // namespace
/// Decide how the complete structor variant for \p md should be produced
/// relative to its base variant.
static StructorCIRGen getCIRGenToUse(CIRGenModule &cgm,
                                     const CXXMethodDecl *md) {
  // Without the ctor/dtor alias optimization, every variant gets its own
  // body.
  if (!cgm.getCodeGenOpts().CXXCtorDtorAliases)
    return StructorCIRGen::Emit;

  // The complete and base structors are not equivalent if there are any
  // virtual bases, so emit separate functions.
  if (md->getParent()->getNumVBases())
    return StructorCIRGen::Emit;

  GlobalDecl aliasDecl;
  if (const auto *dd = dyn_cast<CXXDestructorDecl>(md))
    aliasDecl = GlobalDecl(dd, Dtor_Complete);
  else
    aliasDecl = GlobalDecl(cast<CXXConstructorDecl>(md), Ctor_Complete);

  cir::GlobalLinkageKind linkage = cgm.getFunctionLinkage(aliasDecl);

  // Discardable or otherwise alias-unfriendly linkages fall back to
  // replacing uses of the complete variant with the base variant.
  // FIXME: Should we allow available_externally aliases?
  if (cir::isDiscardableIfUnused(linkage) || !cir::isValidLinkage(linkage))
    return StructorCIRGen::RAUW;

  if (cir::isWeakForLinker(linkage)) {
    // Only ELF and wasm support COMDATs with arbitrary names (C5/D5).
    const llvm::Triple &triple = cgm.getTarget().getTriple();
    if (triple.isOSBinFormatELF() || triple.isOSBinFormatWasm())
      return StructorCIRGen::COMDAT;
    return StructorCIRGen::Emit;
  }

  return StructorCIRGen::Alias;
}
/// Emit \p aliasDecl as an alias of \p targetDecl (the complete structor
/// aliased to the base structor).
static void emitConstructorDestructorAlias(CIRGenModule &cgm,
                                           GlobalDecl aliasDecl,
                                           GlobalDecl targetDecl) {
  cir::GlobalLinkageKind linkage = cgm.getFunctionLinkage(aliasDecl);

  // Does this function alias already exist? If so, and it already has a
  // definition, there is nothing to do.
  StringRef mangledName = cgm.getMangledName(aliasDecl);
  auto globalValue = dyn_cast_or_null<cir::CIRGlobalValueInterface>(
      cgm.getGlobalValue(mangledName));
  if (globalValue && !globalValue.isDeclaration())
    return;

  // A forward declaration may already exist under this name; it is handed to
  // emitAliasForGlobal so its uses are redirected to the alias.
  auto entry = cast_or_null<cir::FuncOp>(cgm.getGlobalValue(mangledName));

  // Retrieve aliasee info.
  auto aliasee = cast<cir::FuncOp>(cgm.getAddrOfGlobal(targetDecl));

  // Populate actual alias.
  cgm.emitAliasForGlobal(mangledName, entry, aliasDecl, aliasee, linkage);
}
/// Emit the body (or alias/replacement) for the structor variant \p gd.
///
/// For the complete variant, the base variant may be reused instead of
/// emitting a second copy of the body, depending on the strategy chosen by
/// getCIRGenToUse.
void CIRGenItaniumCXXABI::emitCXXStructor(GlobalDecl gd) {
  auto *md = cast<CXXMethodDecl>(gd.getDecl());
  StructorCIRGen cirGenType = getCIRGenToUse(cgm, md);
  const auto *cd = dyn_cast<CXXConstructorDecl>(md);

  if (cd ? gd.getCtorType() == Ctor_Complete
         : gd.getDtorType() == Dtor_Complete) {
    GlobalDecl baseDecl =
        cd ? gd.getWithCtorType(Ctor_Base) : gd.getWithDtorType(Dtor_Base);

    // Alias the complete variant onto the base variant.
    if (cirGenType == StructorCIRGen::Alias ||
        cirGenType == StructorCIRGen::COMDAT) {
      emitConstructorDestructorAlias(cgm, gd, baseDecl);
      return;
    }

    // Or queue a replacement of all uses of the complete-variant symbol with
    // the base variant.
    if (cirGenType == StructorCIRGen::RAUW) {
      StringRef mangledName = cgm.getMangledName(gd);
      mlir::Operation *aliasee = cgm.getAddrOfGlobal(baseDecl);
      cgm.addReplacement(mangledName, aliasee);
      return;
    }
  }

  // Otherwise emit the structor body itself.
  cir::FuncOp fn = cgm.codegenCXXStructor(gd);
  cgm.maybeSetTrivialComdat(*md, fn);
}
void CIRGenItaniumCXXABI::addImplicitStructorParams(CIRGenFunction &cgf,
QualType &resTy,
FunctionArgList ¶ms) {
const auto *md = cast<CXXMethodDecl>(cgf.curGD.getDecl());
assert(isa<CXXConstructorDecl>(md) || isa<CXXDestructorDecl>(md));
// Check if we need a VTT parameter as well.
if (needsVTTParameter(cgf.curGD)) {
ASTContext &astContext = cgm.getASTContext();
// FIXME: avoid the fake decl
assert(!cir::MissingFeatures::addressSpace());
QualType t = astContext.getPointerType(astContext.VoidPtrTy);
auto *vttDecl = ImplicitParamDecl::Create(
astContext, /*DC=*/nullptr, md->getLocation(),
&astContext.Idents.get("vtt"), t, ImplicitParamKind::CXXVTT);
params.insert(params.begin() + 1, vttDecl);
getStructorImplicitParamDecl(cgf) = vttDecl;
}
}
void CIRGenItaniumCXXABI::emitCXXConstructors(const CXXConstructorDecl *d) {
  // Just make sure we're in sync with TargetCXXABI.
  assert(cgm.getTarget().getCXXABI().hasConstructorVariants());

  // The constructor used for constructing this as a base class;
  // ignores virtual bases.
  cgm.emitGlobal(GlobalDecl(d, Ctor_Base));

  // The constructor used for constructing this as a complete class;
  // constructs the virtual bases, then calls the base constructor.
  if (!d->getParent()->isAbstract()) {
    // We don't need to emit the complete ctor if the class is abstract.
    cgm.emitGlobal(GlobalDecl(d, Ctor_Complete));
  }
}
void CIRGenItaniumCXXABI::emitCXXDestructors(const CXXDestructorDecl *d) {
  // The destructor used for destructing this as a base class; ignores
  // virtual bases.
  cgm.emitGlobal(GlobalDecl(d, Dtor_Base));

  // The destructor used for destructing this as a most-derived class;
  // calls the base destructor and then destroys any virtual bases.
  cgm.emitGlobal(GlobalDecl(d, Dtor_Complete));

  // The destructor in a virtual table is always a 'deleting'
  // destructor, which calls the complete destructor and then uses the
  // appropriate operator delete.
  if (d->isVirtual())
    cgm.emitGlobal(GlobalDecl(d, Dtor_Deleting));
}
/// Compute the implicit arguments (currently just the VTT, when required)
/// that a call to constructor \p d with variant \p type must pass.
CIRGenCXXABI::AddedStructorArgs CIRGenItaniumCXXABI::getImplicitConstructorArgs(
    CIRGenFunction &cgf, const CXXConstructorDecl *d, CXXCtorType type,
    bool forVirtualBase, bool delegating) {
  GlobalDecl gd(d, type);
  if (!needsVTTParameter(gd))
    return AddedStructorArgs{};

  // Insert the implicit 'vtt' argument as the second argument. Make sure to
  // correctly reflect its address space, which can differ from generic on
  // some targets.
  assert(!cir::MissingFeatures::addressSpace());
  mlir::Value vtt = cgf.getVTTParameter(gd, forVirtualBase, delegating);
  ASTContext &astContext = cgm.getASTContext();
  QualType vttTy = astContext.getPointerType(astContext.VoidPtrTy);
  return AddedStructorArgs::withPrefix({{vtt, vttTy}});
}
/// Return whether the given global decl needs a VTT (virtual table table)
/// parameter, which it does if it's a base constructor or destructor with
/// virtual bases.
bool CIRGenItaniumCXXABI::needsVTTParameter(GlobalDecl gd) {
  auto *md = cast<CXXMethodDecl>(gd.getDecl());

  // Classes without virtual bases never need a VTT.
  if (!md->getParent()->getNumVBases())
    return false;

  // Only the base variant of a constructor...
  if (isa<CXXConstructorDecl>(md))
    return gd.getCtorType() == Ctor_Base;

  // ...or of a destructor receives a VTT.
  if (isa<CXXDestructorDecl>(md))
    return gd.getDtorType() == Dtor_Base;

  return false;
}
/// Emit the vtable definition for \p rd: build its initializer, set linkage,
/// COMDAT and visibility, and diagnose not-yet-implemented cases.
void CIRGenItaniumCXXABI::emitVTableDefinitions(CIRGenVTables &cgvt,
                                                const CXXRecordDecl *rd) {
  cir::GlobalOp vtable = getAddrOfVTable(rd, CharUnits());
  if (vtable.hasInitializer())
    return;

  ItaniumVTableContext &vtContext = cgm.getItaniumVTableContext();
  const VTableLayout &vtLayout = vtContext.getVTableLayout(rd);
  cir::GlobalLinkageKind linkage = cgm.getVTableLinkage(rd);
  mlir::Attribute rtti =
      cgm.getAddrOfRTTIDescriptor(cgm.getLoc(rd->getBeginLoc()),
                                  cgm.getASTContext().getCanonicalTagType(rd));

  // Classic codegen uses ConstantInitBuilder here, which is a very general
  // and feature-rich class to generate initializers for global values.
  // For now, this is using a simpler approach to create the initializer in
  // CIR.
  cgvt.createVTableInitializer(vtable, vtLayout, rtti,
                               cir::isLocalLinkage(linkage));

  // Set the correct linkage.
  vtable.setLinkage(linkage);

  if (cgm.supportsCOMDAT() && cir::isWeakForLinker(linkage))
    vtable.setComdat(true);

  // Set the right visibility.
  cgm.setGVProperties(vtable, rd);

  // If this is the magic class __cxxabiv1::__fundamental_type_info,
  // we will emit the typeinfo for the fundamental types. This is the
  // same behaviour as GCC.
  // Note: 'dc' follows the file's lower-camel naming convention.
  const DeclContext *dc = rd->getDeclContext();
  if (rd->getIdentifier() &&
      rd->getIdentifier()->isStr("__fundamental_type_info") &&
      isa<NamespaceDecl>(dc) && cast<NamespaceDecl>(dc)->getIdentifier() &&
      cast<NamespaceDecl>(dc)->getIdentifier()->isStr("__cxxabiv1") &&
      dc->getParent()->isTranslationUnit()) {
    cgm.errorNYI(rd->getSourceRange(),
                 "emitVTableDefinitions: __fundamental_type_info");
  }

  auto vtableAsGlobalValue = dyn_cast<cir::CIRGlobalValueInterface>(*vtable);
  assert(vtableAsGlobalValue && "VTable must support CIRGlobalValueInterface");
  // Silence -Wunused-variable in builds where asserts are disabled.
  (void)vtableAsGlobalValue;

  // Always emit type metadata on non-available_externally definitions, and on
  // available_externally definitions if we are performing whole program
  // devirtualization. For WPD we need the type metadata on all vtable
  // definitions to ensure we associate derived classes with base classes
  // defined in headers but with a strong definition only in a shared
  // library.
  assert(!cir::MissingFeatures::vtableEmitMetadata());
  if (cgm.getCodeGenOpts().WholeProgramVTables) {
    cgm.errorNYI(rd->getSourceRange(),
                 "emitVTableDefinitions: WholeProgramVTables");
  }

  assert(!cir::MissingFeatures::vtableRelativeLayout());
  if (vtContext.isRelativeLayout())
    cgm.errorNYI(rd->getSourceRange(), "vtableRelativeLayout");
}
void CIRGenItaniumCXXABI::emitVirtualInheritanceTables(
const CXXRecordDecl *rd) {
CIRGenVTables &vtables = cgm.getVTables();
cir::GlobalOp vtt = vtables.getAddrOfVTT(rd);
vtables.emitVTTDefinition(vtt, cgm.getVTableLinkage(rd), rd);
}
/// Emit a direct call to destructor \p dd (variant \p type) on the object at
/// \p thisAddr.
void CIRGenItaniumCXXABI::emitDestructorCall(
    CIRGenFunction &cgf, const CXXDestructorDecl *dd, CXXDtorType type,
    bool forVirtualBase, bool delegating, Address thisAddr, QualType thisTy) {
  GlobalDecl gd(dd, type);
  if (needsVTTParameter(gd)) {
    // NYI: passing a real VTT argument; 'vtt' below remains null for now.
    cgm.errorNYI(dd->getSourceRange(), "emitDestructorCall: VTT");
  }
  mlir::Value vtt = nullptr;
  ASTContext &astContext = cgm.getASTContext();
  // The VTT parameter type ('void **') is passed even while the value is
  // still null.
  QualType vttTy = astContext.getPointerType(astContext.VoidPtrTy);
  assert(!cir::MissingFeatures::appleKext());
  CIRGenCallee callee =
      CIRGenCallee::forDirect(cgm.getAddrOfCXXStructor(gd), gd);
  cgf.emitCXXDestructorCall(gd, callee, thisAddr.getPointer(), thisTy, vtt,
                            vttTy, nullptr);
}
// The idea here is creating a separate block for the throw with an
// `UnreachableOp` as the terminator. So, we branch from the current block
// to the throw block and create a block for the remaining operations.
static void insertThrowAndSplit(mlir::OpBuilder &builder, mlir::Location loc,
                                mlir::Value exceptionPtr = {},
                                mlir::FlatSymbolRefAttr typeInfo = {},
                                mlir::FlatSymbolRefAttr dtor = {}) {
  mlir::Block *currentBlock = builder.getInsertionBlock();
  mlir::Region *region = currentBlock->getParent();

  if (currentBlock->empty()) {
    // The current block has no operations yet, so the throw can live there
    // directly; no branch is required.
    cir::ThrowOp::create(builder, loc, exceptionPtr, typeInfo, dtor);
    cir::UnreachableOp::create(builder, loc);
  } else {
    // Put the throw + unreachable in a fresh block (createBlock also moves
    // the insertion point into it), then terminate the original block with a
    // branch to that throw block.
    mlir::Block *throwBlock = builder.createBlock(region);
    cir::ThrowOp::create(builder, loc, exceptionPtr, typeInfo, dtor);
    cir::UnreachableOp::create(builder, loc);
    builder.setInsertionPointToEnd(currentBlock);
    cir::BrOp::create(builder, loc, throwBlock);
  }

  // Leave the insertion point in a new block for any (unreachable) code that
  // follows the throw.
  (void)builder.createBlock(region);
}
/// Emit a rethrow of the currently-handled exception.
void CIRGenItaniumCXXABI::emitRethrow(CIRGenFunction &cgf, bool isNoReturn) {
  // void __cxa_rethrow();
  if (!isNoReturn) {
    cgm.errorNYI("emitRethrow with isNoReturn false");
    return;
  }
  CIRGenBuilderTy &builder = cgf.getBuilder();
  assert(cgf.currSrcLoc && "expected source location");
  insertThrowAndSplit(builder, *cgf.currSrcLoc);
}
/// Factory: create the Itanium-family CXX ABI object for the target's ABI
/// kind.
CIRGenCXXABI *clang::CIRGen::CreateCIRGenItaniumCXXABI(CIRGenModule &cgm) {
  switch (cgm.getASTContext().getCXXABIKind()) {
  case TargetCXXABI::AppleARM64:
    // The general Itanium ABI will do until we implement something that
    // requires special handling.
    assert(!cir::MissingFeatures::cxxabiAppleARM64CXXABI());
    [[fallthrough]];
  case TargetCXXABI::GenericItanium:
  case TargetCXXABI::GenericAArch64:
    return new CIRGenItaniumCXXABI(cgm);
  default:
    llvm_unreachable("bad or NYI ABI kind");
  }
}
/// Get (creating on first use) the global holding the vtable for \p rd.
cir::GlobalOp CIRGenItaniumCXXABI::getAddrOfVTable(const CXXRecordDecl *rd,
                                                   CharUnits vptrOffset) {
  assert(vptrOffset.isZero() && "Itanium ABI only supports zero vptr offsets");
  // 'vtable' is a reference into the cache; creating the global below also
  // populates the cache entry.
  cir::GlobalOp &vtable = vtables[rd];
  if (vtable)
    return vtable;

  // Queue up this vtable for possible deferred emission.
  assert(!cir::MissingFeatures::deferredVtables());

  SmallString<256> name;
  llvm::raw_svector_ostream out(name);
  getMangleContext().mangleCXXVTable(rd, out);

  const VTableLayout &vtLayout =
      cgm.getItaniumVTableContext().getVTableLayout(rd);
  mlir::Type vtableType = cgm.getVTables().getVTableType(vtLayout);

  // Use pointer alignment for the vtable. Otherwise we would align them based
  // on the size of the initializer which doesn't make sense as only single
  // values are read.
  unsigned ptrAlign = cgm.getItaniumVTableContext().isRelativeLayout()
                          ? 32
                          : cgm.getTarget().getPointerAlign(LangAS::Default);

  vtable = cgm.createOrReplaceCXXRuntimeVariable(
      cgm.getLoc(rd->getSourceRange()), name, vtableType,
      cir::GlobalLinkageKind::ExternalLinkage,
      cgm.getASTContext().toCharUnitsFromBits(ptrAlign));

  // LLVM codegen handles unnamedAddr
  assert(!cir::MissingFeatures::opGlobalUnnamedAddr());

  // In MS C++ if you have a class with virtual functions in which you are
  // using selective member import/export, then all virtual functions must be
  // exported unless they are inline, otherwise a link error will result. To
  // match this behavior, for such classes, we dllimport the vtable if it is
  // defined externally and all the non-inline virtual methods are marked
  // dllimport, and we dllexport the vtable if it is defined in this TU and all
  // the non-inline virtual methods are marked dllexport.
  if (cgm.getTarget().hasPS4DLLImportExport())
    cgm.errorNYI(rd->getSourceRange(),
                 "getAddrOfVTable: PS4 DLL import/export");

  cgm.setGVProperties(vtable, rd);
  return vtable;
}
/// Build a callee for a virtual call to \p gd: load the vptr from the object
/// at \p thisAddr, index to the method's vtable slot, and load the function
/// pointer.
CIRGenCallee CIRGenItaniumCXXABI::getVirtualFunctionPointer(
    CIRGenFunction &cgf, clang::GlobalDecl gd, Address thisAddr, mlir::Type ty,
    SourceLocation srcLoc) {
  CIRGenBuilderTy &builder = cgm.getBuilder();
  mlir::Location loc = cgf.getLoc(srcLoc);
  cir::PointerType tyPtr = builder.getPointerTo(ty);
  auto *methodDecl = cast<CXXMethodDecl>(gd.getDecl());
  mlir::Value vtable = cgf.getVTablePtr(loc, thisAddr, methodDecl->getParent());

  uint64_t vtableIndex = cgm.getItaniumVTableContext().getMethodVTableIndex(gd);
  mlir::Value vfunc{};
  if (cgf.shouldEmitVTableTypeCheckedLoad(methodDecl->getParent())) {
    cgm.errorNYI(loc, "getVirtualFunctionPointer: emitVTableTypeCheckedLoad");
  } else {
    assert(!cir::MissingFeatures::emitTypeMetadataCodeForVCall());

    mlir::Value vfuncLoad;
    if (cgm.getItaniumVTableContext().isRelativeLayout()) {
      assert(!cir::MissingFeatures::vtableRelativeLayout());
      cgm.errorNYI(loc, "getVirtualFunctionPointer: isRelativeLayout");
    } else {
      // Compute the address of slot 'vtableIndex' in the vtable and load the
      // function pointer from it with pointer alignment.
      auto vtableSlotPtr = cir::VTableGetVirtualFnAddrOp::create(
          builder, loc, builder.getPointerTo(tyPtr), vtable, vtableIndex);
      vfuncLoad = builder.createAlignedLoad(loc, tyPtr, vtableSlotPtr,
                                            cgf.getPointerAlign());
    }

    // Add !invariant.load md to virtual function load to indicate that
    // function didn't change inside vtable.
    // It's safe to add it without -fstrict-vtable-pointers, but it would not
    // help in devirtualization because it will only matter if we will have 2
    // the same virtual function loads from the same vtable load, which won't
    // happen without enabled devirtualization with -fstrict-vtable-pointers.
    if (cgm.getCodeGenOpts().OptimizationLevel > 0 &&
        cgm.getCodeGenOpts().StrictVTablePointers) {
      cgm.errorNYI(loc, "getVirtualFunctionPointer: strictVTablePointers");
    }
    vfunc = vfuncLoad;
  }
  CIRGenCallee callee(gd, vfunc.getDefiningOp());
  return callee;
}
/// Look up the vtable address point for \p base through the structor's VTT
/// parameter (required when virtual bases are involved).
mlir::Value CIRGenItaniumCXXABI::getVTableAddressPointInStructorWithVTT(
    CIRGenFunction &cgf, const CXXRecordDecl *vtableClass, BaseSubobject base,
    const CXXRecordDecl *nearestVBase) {
  assert((base.getBase()->getNumVBases() || nearestVBase != nullptr) &&
         needsVTTParameter(cgf.curGD) && "This class doesn't have VTT");

  // Get the secondary vpointer index.
  uint64_t virtualPointerIndex =
      cgm.getVTables().getSecondaryVirtualPointerIndex(vtableClass, base);

  // Load the VTT.
  mlir::Value vttPtr = cgf.loadCXXVTT();
  mlir::Location loc = cgf.getLoc(vtableClass->getSourceRange());

  // Calculate the address point from the VTT, and the offset may be zero.
  vttPtr = cgf.getBuilder().createVTTAddrPoint(loc, vttPtr.getType(), vttPtr,
                                               virtualPointerIndex);

  // And load the address point from the VTT.
  auto vptrType = cir::VPtrType::get(cgf.getBuilder().getContext());
  return cgf.getBuilder().createAlignedLoad(loc, vptrType, vttPtr,
                                            cgf.getPointerAlign());
}
/// Get the (statically known) vtable address point for \p base within the
/// vtable group of \p vtableClass.
mlir::Value
CIRGenItaniumCXXABI::getVTableAddressPoint(BaseSubobject base,
                                           const CXXRecordDecl *vtableClass) {
  cir::GlobalOp vtable = getAddrOfVTable(vtableClass, CharUnits());

  // Find the appropriate vtable within the vtable group, and the address
  // point within that vtable.
  VTableLayout::AddressPointLocation addressPoint =
      cgm.getItaniumVTableContext()
          .getVTableLayout(vtableClass)
          .getAddressPoint(base);

  mlir::OpBuilder &builder = cgm.getBuilder();
  auto vtablePtrTy = cir::VPtrType::get(builder.getContext());
  // Use the static Op::create form for consistency with the other op
  // constructions in this file.
  return cir::VTableAddrPointOp::create(
      builder, cgm.getLoc(vtableClass->getSourceRange()), vtablePtrTy,
      mlir::FlatSymbolRefAttr::get(vtable.getSymNameAttr()),
      cir::AddressPointAttr::get(builder.getContext(),
                                 addressPoint.VTableIndex,
                                 addressPoint.AddressPointIndex));
}
/// Choose between the VTT-based and the static vtable address-point lookup
/// while emitting a structor.
mlir::Value CIRGenItaniumCXXABI::getVTableAddressPointInStructor(
    CIRGenFunction &cgf, const clang::CXXRecordDecl *vtableClass,
    clang::BaseSubobject base, const clang::CXXRecordDecl *nearestVBase) {
  // The VTT is needed when this subobject lies on a virtual-base path and the
  // current structor actually carries a VTT parameter.
  const bool onVBasePath =
      base.getBase()->getNumVBases() != 0 || nearestVBase != nullptr;
  if (onVBasePath && needsVTTParameter(cgf.curGD))
    return getVTableAddressPointInStructorWithVTT(cgf, vtableClass, base,
                                                  nearestVBase);
  return getVTableAddressPoint(base, vtableClass);
}
/// A vptr field needs a runtime virtual offset only when it is reached
/// through a virtual base and the current structor has a VTT to consult.
bool CIRGenItaniumCXXABI::isVirtualOffsetNeededForVTableField(
    CIRGenFunction &cgf, CIRGenFunction::VPtr vptr) {
  return vptr.nearestVBase != nullptr && needsVTTParameter(cgf.curGD);
}
/// Load the dynamic offset of virtual base \p baseClassDecl within
/// \p classDecl from the vtable of the object at \p thisAddr.
mlir::Value CIRGenItaniumCXXABI::getVirtualBaseClassOffset(
    mlir::Location loc, CIRGenFunction &cgf, Address thisAddr,
    const CXXRecordDecl *classDecl, const CXXRecordDecl *baseClassDecl) {
  CIRGenBuilderTy &builder = cgf.getBuilder();
  // The vbase offset lives in the vtable at a (typically negative) byte
  // offset from the address point the vptr refers to.
  mlir::Value vtablePtr = cgf.getVTablePtr(loc, thisAddr, classDecl);
  mlir::Value vtableBytePtr = builder.createBitcast(vtablePtr, cgm.UInt8PtrTy);
  CharUnits vbaseOffsetOffset =
      cgm.getItaniumVTableContext().getVirtualBaseOffsetOffset(classDecl,
                                                               baseClassDecl);
  mlir::Value offsetVal =
      builder.getSInt64(vbaseOffsetOffset.getQuantity(), loc);
  // Byte-granular pointer arithmetic to reach the vbase-offset slot.
  auto vbaseOffsetPtr = cir::PtrStrideOp::create(builder, loc, cgm.UInt8PtrTy,
                                                 vtableBytePtr, offsetVal);

  mlir::Value vbaseOffset;
  if (cgm.getItaniumVTableContext().isRelativeLayout()) {
    assert(!cir::MissingFeatures::vtableRelativeLayout());
    cgm.errorNYI(loc, "getVirtualBaseClassOffset: relative layout");
  } else {
    // The slot holds a ptrdiff_t-sized value; cast the byte pointer and load
    // it with pointer alignment.
    mlir::Value offsetPtr = builder.createBitcast(
        vbaseOffsetPtr, builder.getPointerTo(cgm.PtrDiffTy));
    vbaseOffset = builder.createLoad(
        loc, Address(offsetPtr, cgm.PtrDiffTy, cgf.getPointerAlign()));
  }
  return vbaseOffset;
}
|