2010-01-21 16:50:45 +00:00
|
|
|
//===--- CGVTT.cpp - Emit LLVM Code for C++ VTTs --------------------------===//
|
|
|
|
|
//
|
2019-01-19 08:50:56 +00:00
|
|
|
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
|
|
|
|
|
// See https://llvm.org/LICENSE.txt for license information.
|
|
|
|
|
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
|
2010-01-21 16:50:45 +00:00
|
|
|
//
|
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
//
|
|
|
|
|
// This contains code dealing with C++ code generation of VTTs (vtable tables).
|
|
|
|
|
//
|
|
|
|
|
//===----------------------------------------------------------------------===//
|
|
|
|
|
|
|
|
|
|
#include "CodeGenModule.h"
|
2010-08-31 07:33:07 +00:00
|
|
|
#include "CGCXXABI.h"
|
2010-01-21 16:50:45 +00:00
|
|
|
#include "clang/AST/RecordLayout.h"
|
2011-09-26 01:56:24 +00:00
|
|
|
#include "clang/AST/VTTBuilder.h"
|
2010-01-21 16:50:45 +00:00
|
|
|
using namespace clang;
|
|
|
|
|
using namespace CodeGen;
|
|
|
|
|
|
2015-04-02 18:55:21 +00:00
|
|
|
/// Return the vtable global that a VTT entry refers to.
///
/// When the entry's base class is the most-derived class itself, that is the
/// class's regular vtable; otherwise it is a construction vtable for the
/// given base subobject, whose address points are recorded in \p AddressPoints.
static llvm::GlobalVariable *
GetAddrOfVTTVTable(CodeGenVTables &CGVT, CodeGenModule &CGM,
                   const CXXRecordDecl *MostDerivedClass,
                   const VTTVTable &VTable,
                   llvm::GlobalVariable::LinkageTypes Linkage,
                   VTableLayout::AddressPointsMapTy &AddressPoints) {
  if (VTable.getBase() != MostDerivedClass)
    // A construction vtable for this base subobject.
    return CGVT.GenerateConstructionVTable(MostDerivedClass,
                                           VTable.getBaseSubobject(),
                                           VTable.isVirtual(), Linkage,
                                           AddressPoints);

  // This is a regular vtable.
  assert(VTable.getBaseOffset().isZero() &&
         "Most derived class vtable must have a zero offset!");
  return CGM.getCXXABI().getAddrOfVTable(MostDerivedClass, CharUnits());
}
|
|
|
|
|
|
2011-01-29 19:16:51 +00:00
|
|
|
void
|
|
|
|
|
CodeGenVTables::EmitVTTDefinition(llvm::GlobalVariable *VTT,
|
|
|
|
|
llvm::GlobalVariable::LinkageTypes Linkage,
|
|
|
|
|
const CXXRecordDecl *RD) {
|
2011-09-26 01:56:10 +00:00
|
|
|
VTTBuilder Builder(CGM.getContext(), RD, /*GenerateDefinition=*/true);
|
[Clang][CodeGen]`vtable`, `typeinfo` et al. are globals
All data structures and values associated with handling virtual functions / inheritance, as well as RTTI, are globals and thus can only reside in the global address space. This was not taken fully taken into account because for most targets, global & generic appear to coincide. However, on targets where global & generic ASes differ (e.g. AMDGPU), this was problematic, since it led to the generation of invalid bitcasts (which would trigger asserts in Debug) and less than optimal code. This patch does two things:
ensures that vtables, vptrs, vtts, typeinfo are generated in the right AS, and populated accordingly;
removes a bunch of bitcasts which look like left-overs from the typed ptr era.
Reviewed By: yxsamliu
Differential Revision: https://reviews.llvm.org/D153092
2023-07-19 18:04:31 +01:00
|
|
|
llvm::ArrayType *ArrayType = llvm::ArrayType::get(
|
|
|
|
|
CGM.GlobalsInt8PtrTy, Builder.getVTTComponents().size());
|
2015-04-02 18:55:21 +00:00
|
|
|
|
|
|
|
|
SmallVector<llvm::GlobalVariable *, 8> VTables;
|
2011-09-26 01:56:06 +00:00
|
|
|
SmallVector<VTableAddressPointsMapTy, 8> VTableAddressPoints;
|
|
|
|
|
for (const VTTVTable *i = Builder.getVTTVTables().begin(),
|
|
|
|
|
*e = Builder.getVTTVTables().end(); i != e; ++i) {
|
|
|
|
|
VTableAddressPoints.push_back(VTableAddressPointsMapTy());
|
2013-09-27 14:48:01 +00:00
|
|
|
VTables.push_back(GetAddrOfVTTVTable(*this, CGM, RD, *i, Linkage,
|
2011-09-26 01:56:06 +00:00
|
|
|
VTableAddressPoints.back()));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
SmallVector<llvm::Constant *, 8> VTTComponents;
|
|
|
|
|
for (const VTTComponent *i = Builder.getVTTComponents().begin(),
|
|
|
|
|
*e = Builder.getVTTComponents().end(); i != e; ++i) {
|
|
|
|
|
const VTTVTable &VTTVT = Builder.getVTTVTables()[i->VTableIndex];
|
2015-04-02 18:55:21 +00:00
|
|
|
llvm::GlobalVariable *VTable = VTables[i->VTableIndex];
|
2016-12-13 20:40:39 +00:00
|
|
|
VTableLayout::AddressPointLocation AddressPoint;
|
2011-09-26 01:56:06 +00:00
|
|
|
if (VTTVT.getBase() == RD) {
|
|
|
|
|
// Just get the address point for the regular vtable.
|
2013-11-05 15:54:58 +00:00
|
|
|
AddressPoint =
|
2013-12-20 23:58:52 +00:00
|
|
|
getItaniumVTableContext().getVTableLayout(RD).getAddressPoint(
|
|
|
|
|
i->VTableBase);
|
2011-09-26 01:56:06 +00:00
|
|
|
} else {
|
|
|
|
|
AddressPoint = VTableAddressPoints[i->VTableIndex].lookup(i->VTableBase);
|
2016-12-13 20:40:39 +00:00
|
|
|
assert(AddressPoint.AddressPointIndex != 0 &&
|
|
|
|
|
"Did not find ctor vtable address point!");
|
2011-09-26 01:56:06 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
llvm::Value *Idxs[] = {
|
2020-07-22 14:41:06 +01:00
|
|
|
llvm::ConstantInt::get(CGM.Int32Ty, 0),
|
|
|
|
|
llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.VTableIndex),
|
|
|
|
|
llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.AddressPointIndex),
|
2011-09-26 01:56:06 +00:00
|
|
|
};
|
|
|
|
|
|
[IR] Change representation of getelementptr inrange (#84341)
As part of the migration to ptradd
(https://discourse.llvm.org/t/rfc-replacing-getelementptr-with-ptradd/68699),
we need to change the representation of the `inrange` attribute, which
is used for vtable splitting.
Currently, inrange is specified as follows:
```
getelementptr inbounds ({ [4 x ptr], [4 x ptr] }, ptr @vt, i64 0, inrange i32 1, i64 2)
```
The `inrange` is placed on a GEP index, and all accesses must be "in
range" of that index. The new representation is as follows:
```
getelementptr inbounds inrange(-16, 16) ({ [4 x ptr], [4 x ptr] }, ptr @vt, i64 0, i32 1, i64 2)
```
This specifies which offsets are "in range" of the GEP result. The new
representation will continue working when canonicalizing to ptradd
representation:
```
getelementptr inbounds inrange(-16, 16) (i8, ptr @vt, i64 48)
```
The inrange offsets are relative to the return value of the GEP. An
alternative design could make them relative to the source pointer
instead. The result-relative format was chosen on the off-chance that we
want to extend support to non-constant GEPs in the future, in which case
this variant is more expressive.
This implementation "upgrades" the old inrange representation in bitcode
by simply dropping it. This is a very niche feature, and I don't think
trying to upgrade it is worthwhile. Let me know if you disagree.
2024-03-20 10:59:45 +01:00
|
|
|
// Add inrange attribute to indicate that only the VTableIndex can be
|
|
|
|
|
// accessed.
|
|
|
|
|
unsigned ComponentSize =
|
|
|
|
|
CGM.getDataLayout().getTypeAllocSize(getVTableComponentType());
|
|
|
|
|
unsigned VTableSize = CGM.getDataLayout().getTypeAllocSize(
|
|
|
|
|
cast<llvm::StructType>(VTable->getValueType())
|
|
|
|
|
->getElementType(AddressPoint.VTableIndex));
|
|
|
|
|
unsigned Offset = ComponentSize * AddressPoint.AddressPointIndex;
|
2024-10-17 08:48:08 +02:00
|
|
|
llvm::ConstantRange InRange(
|
|
|
|
|
llvm::APInt(32, (int)-Offset, true),
|
|
|
|
|
llvm::APInt(32, (int)(VTableSize - Offset), true));
|
2016-12-13 20:50:44 +00:00
|
|
|
llvm::Constant *Init = llvm::ConstantExpr::getGetElementPtr(
|
[IR] Change representation of getelementptr inrange (#84341)
As part of the migration to ptradd
(https://discourse.llvm.org/t/rfc-replacing-getelementptr-with-ptradd/68699),
we need to change the representation of the `inrange` attribute, which
is used for vtable splitting.
Currently, inrange is specified as follows:
```
getelementptr inbounds ({ [4 x ptr], [4 x ptr] }, ptr @vt, i64 0, inrange i32 1, i64 2)
```
The `inrange` is placed on a GEP index, and all accesses must be "in
range" of that index. The new representation is as follows:
```
getelementptr inbounds inrange(-16, 16) ({ [4 x ptr], [4 x ptr] }, ptr @vt, i64 0, i32 1, i64 2)
```
This specifies which offsets are "in range" of the GEP result. The new
representation will continue working when canonicalizing to ptradd
representation:
```
getelementptr inbounds inrange(-16, 16) (i8, ptr @vt, i64 48)
```
The inrange offsets are relative to the return value of the GEP. An
alternative design could make them relative to the source pointer
instead. The result-relative format was chosen on the off-chance that we
want to extend support to non-constant GEPs in the future, in which case
this variant is more expressive.
This implementation "upgrades" the old inrange representation in bitcode
by simply dropping it. This is a very niche feature, and I don't think
trying to upgrade it is worthwhile. Let me know if you disagree.
2024-03-20 10:59:45 +01:00
|
|
|
VTable->getValueType(), VTable, Idxs, /*InBounds=*/true, InRange);
|
2011-09-26 01:56:06 +00:00
|
|
|
|
[clang] Implement pointer authentication for C++ virtual functions, v-tables, and VTTs (#94056)
Virtual function pointer entries in v-tables are signed with address
discrimination in addition to declaration-based discrimination, where an
integer discriminator the string hash (see
`ptrauth_string_discriminator`) of the mangled name of the overridden
method. This notably provides diversity based on the full signature of
the overridden method, including the method name and parameter types.
This patch introduces ItaniumVTableContext logic to find the original
declaration of the overridden method.
On AArch64, these pointers are signed using the `IA` key (the
process-independent code key.)
V-table pointers can be signed with either no discrimination, or a
similar scheme using address and decl-based discrimination. In this
case, the integer discriminator is the string hash of the mangled
v-table identifier of the class that originally introduced the vtable
pointer.
On AArch64, these pointers are signed using the `DA` key (the
process-independent data key.)
Not using discrimination allows attackers to simply copy valid v-table
pointers from one object to another. However, using a uniform
discriminator of 0 does have positive performance and code-size
implications on AArch64, and diversity for the most important v-table
access pattern (virtual dispatch) is already better assured by the
signing schemas used on the virtual functions. It is also known that
some code in practice copies objects containing v-tables with `memcpy`,
and while this is not permitted formally, it is something that may be
invasive to eliminate.
This is controlled by:
```
-fptrauth-vtable-pointer-type-discrimination
-fptrauth-vtable-pointer-address-discrimination
```
In addition, this provides fine-grained controls in the
ptrauth_vtable_pointer attribute, which allows overriding the default
ptrauth schema for vtable pointers on a given class hierarchy, e.g.:
```
[[clang::ptrauth_vtable_pointer(no_authentication, no_address_discrimination,
no_extra_discrimination)]]
[[clang::ptrauth_vtable_pointer(default_key, default_address_discrimination,
custom_discrimination, 0xf00d)]]
```
The override is then mangled as a parametrized vendor extension:
```
"__vtptrauth" I
<key>
<addressDiscriminated>
<extraDiscriminator>
E
```
To support this attribute, this patch adds a small extension to the
attribute-emitter tablegen backend.
Note that there are known areas where signing is either missing
altogether or can be strengthened. Some will be addressed in later
changes (e.g., member function pointers, some RTTI).
`dynamic_cast` in particular is handled by emitting an artificial
v-table pointer load (in a way that always authenticates it) before the
runtime call itself, as the runtime doesn't have enough information
today to properly authenticate it. Instead, the runtime is currently
expected to strip the v-table pointer.
---------
Co-authored-by: John McCall <rjmccall@apple.com>
Co-authored-by: Ahmed Bougacha <ahmed@bougacha.org>
2024-06-26 20:35:10 -05:00
|
|
|
if (const auto &Schema =
|
|
|
|
|
CGM.getCodeGenOpts().PointerAuth.CXXVTTVTablePointers)
|
|
|
|
|
Init = CGM.getConstantSignedPointer(Init, Schema, nullptr, GlobalDecl(),
|
|
|
|
|
QualType());
|
|
|
|
|
|
2011-09-26 01:56:06 +00:00
|
|
|
VTTComponents.push_back(Init);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
llvm::Constant *Init = llvm::ConstantArray::get(ArrayType, VTTComponents);
|
2011-01-29 19:16:51 +00:00
|
|
|
|
|
|
|
|
VTT->setInitializer(Init);
|
2010-01-21 16:50:45 +00:00
|
|
|
|
2011-01-29 19:16:51 +00:00
|
|
|
// Set the correct linkage.
|
|
|
|
|
VTT->setLinkage(Linkage);
|
2011-01-29 19:34:19 +00:00
|
|
|
|
2015-05-09 21:10:07 +00:00
|
|
|
if (CGM.supportsCOMDAT() && VTT->isWeakForLinker())
|
|
|
|
|
VTT->setComdat(CGM.getModule().getOrInsertComdat(VTT->getName()));
|
2023-11-21 19:46:34 +00:00
|
|
|
|
|
|
|
|
// Set the visibility. This will already have been set on the VTT declaration.
|
|
|
|
|
// Set it again, now that we have a definition, as the implicit visibility can
|
|
|
|
|
// apply differently to definitions.
|
|
|
|
|
CGM.setGVProperties(VTT, RD);
|
2011-01-29 19:16:51 +00:00
|
|
|
}
|
2010-01-21 16:50:45 +00:00
|
|
|
|
2011-01-29 19:16:51 +00:00
|
|
|
/// Get (creating a declaration if needed) the VTT global for class \p RD.
/// Only classes with virtual bases have a VTT.
llvm::GlobalVariable *CodeGenVTables::GetAddrOfVTT(const CXXRecordDecl *RD) {
  assert(RD->getNumVBases() && "Only classes with virtual bases need a VTT");

  // Mangle the VTT's symbol name.
  SmallString<256> NameBuffer;
  llvm::raw_svector_ostream Out(NameBuffer);
  cast<ItaniumMangleContext>(CGM.getCXXABI().getMangleContext())
      .mangleCXXVTT(RD, Out);
  StringRef MangledName = NameBuffer.str();

  // This will also defer the definition of the VTT.
  (void) CGM.getCXXABI().getAddrOfVTable(RD, CharUnits());

  // Compute the layout (declaration only) to size the VTT array.
  VTTBuilder Builder(CGM.getContext(), RD, /*GenerateDefinition=*/false);
  llvm::ArrayType *VTTArrayTy = llvm::ArrayType::get(
      CGM.GlobalsInt8PtrTy, Builder.getVTTComponents().size());
  llvm::Align PtrAlign =
      CGM.getDataLayout().getABITypeAlign(CGM.GlobalsInt8PtrTy);

  llvm::GlobalVariable *VTTGV = CGM.CreateOrReplaceCXXRuntimeVariable(
      MangledName, VTTArrayTy, llvm::GlobalValue::ExternalLinkage, PtrAlign);
  VTTGV->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  CGM.setGVProperties(VTTGV, RD);
  return VTTGV;
}
|
|
|
|
|
|
2018-07-30 19:24:48 +00:00
|
|
|
/// Return the index of the sub-VTT for base subobject \p Base within the VTT
/// of class \p RD, computing and caching all of RD's sub-VTT indices on the
/// first query.
uint64_t CodeGenVTables::getSubVTTIndex(const CXXRecordDecl *RD,
                                        BaseSubobject Base) {
  BaseSubobjectPairTy Key(RD, Base);

  SubVTTIndicesMapTy::iterator I = SubVTTIndices.find(Key);
  if (I == SubVTTIndices.end()) {
    // Not cached yet: build the VTT layout (no definition required) and
    // insert every sub-VTT index it records for this class.
    VTTBuilder Builder(CGM.getContext(), RD, /*GenerateDefinition=*/false);
    for (const auto &Entry : Builder.getSubVTTIndices())
      SubVTTIndices.insert(
          std::make_pair(BaseSubobjectPairTy(RD, Entry.first), Entry.second));

    I = SubVTTIndices.find(Key);
    assert(I != SubVTTIndices.end() && "Did not find index!");
  }

  return I->second;
}
|
2010-03-26 04:23:58 +00:00
|
|
|
|
2018-07-30 19:24:48 +00:00
|
|
|
/// Return the index of the secondary virtual pointer for base subobject
/// \p Base of class \p RD, computing and caching all of RD's secondary
/// vpointer indices on the first query.
uint64_t
CodeGenVTables::getSecondaryVirtualPointerIndex(const CXXRecordDecl *RD,
                                                BaseSubobject Base) {
  std::pair<const CXXRecordDecl *, BaseSubobject> Key =
      std::make_pair(RD, Base);

  SecondaryVirtualPointerIndicesMapTy::iterator I =
      SecondaryVirtualPointerIndices.find(Key);
  if (I == SecondaryVirtualPointerIndices.end()) {
    VTTBuilder Builder(CGM.getContext(), RD, /*GenerateDefinition=*/false);

    // Insert all secondary vpointer indices.
    for (const auto &Entry : Builder.getSecondaryVirtualPointerIndices())
      SecondaryVirtualPointerIndices.insert(
          std::make_pair(std::make_pair(RD, Entry.first), Entry.second));

    I = SecondaryVirtualPointerIndices.find(Key);
    assert(I != SecondaryVirtualPointerIndices.end() &&
           "Did not find index!");
  }

  return I->second;
}
|