about summary refs log tree commit diff
path: root/lld/ELF
diff options
context:
space:
mode:
Diffstat (limited to 'lld/ELF')
-rw-r--r--  lld/ELF/Arch/AArch64.cpp       | 13
-rw-r--r--  lld/ELF/Relocations.cpp        | 10
-rw-r--r--  lld/ELF/SyntheticSections.cpp  | 24
-rw-r--r--  lld/ELF/SyntheticSections.h    | 15
-rw-r--r--  lld/ELF/Writer.cpp             | 35
5 files changed, 84 insertions, 13 deletions
diff --git a/lld/ELF/Arch/AArch64.cpp b/lld/ELF/Arch/AArch64.cpp
index 2bf6e2c..b0aa0b9 100644
--- a/lld/ELF/Arch/AArch64.cpp
+++ b/lld/ELF/Arch/AArch64.cpp
@@ -428,6 +428,19 @@ void AArch64::relocate(uint8_t *loc, const Relocation &rel,
case R_AARCH64_PREL64:
write64(loc, val);
break;
+ case R_AARCH64_AUTH_ABS64:
+ // If val is wider than 32 bits, the relocation must have been moved from
+ // .relr.auth.dyn to .rela.dyn, and the addend write is not needed.
+ //
+ // If val fits in 32 bits, we have two potential scenarios:
+ // * True RELR: Write the 32-bit `val`.
+ // * RELA: Even if the value now fits in 32 bits, it might have been
+ // converted from RELR during an iteration in
+ // finalizeAddressDependentContent(). Writing the value is harmless
+ // because dynamic linking ignores it.
+ if (isInt<32>(val))
+ write32(loc, val);
+ break;
case R_AARCH64_ADD_ABS_LO12_NC:
or32AArch64Imm(loc, val);
break;
diff --git a/lld/ELF/Relocations.cpp b/lld/ELF/Relocations.cpp
index 04db413..2c02c2e 100644
--- a/lld/ELF/Relocations.cpp
+++ b/lld/ELF/Relocations.cpp
@@ -898,9 +898,9 @@ static void addRelativeReloc(InputSectionBase &isec, uint64_t offsetInSec,
isec.addReloc({expr, type, offsetInSec, addend, &sym});
if (shard)
part.relrDyn->relocsVec[parallel::getThreadIndex()].push_back(
- {&isec, offsetInSec});
+ {&isec, isec.relocs().size() - 1});
else
- part.relrDyn->relocs.push_back({&isec, offsetInSec});
+ part.relrDyn->relocs.push_back({&isec, isec.relocs().size() - 1});
return;
}
part.relaDyn->addRelativeReloc<shard>(target->relativeRel, isec, offsetInSec,
@@ -1154,6 +1154,12 @@ void RelocationScanner::processAux(RelExpr expr, RelType type, uint64_t offset,
// relative relocation. Use a symbolic relocation instead.
if (sym.isPreemptible) {
part.relaDyn->addSymbolReloc(type, *sec, offset, sym, addend, type);
+ } else if (part.relrAuthDyn && sec->addralign >= 2 && offset % 2 == 0) {
+ // When symbol values are determined in
+ // finalizeAddressDependentContent, some .relr.auth.dyn relocations
+ // may be moved to .rela.dyn.
+ sec->addReloc({expr, type, offset, addend, &sym});
+ part.relrAuthDyn->relocs.push_back({sec, sec->relocs().size() - 1});
} else {
part.relaDyn->addReloc({R_AARCH64_AUTH_RELATIVE, sec, offset,
DynamicReloc::AddendOnlyWithTargetVA, sym,
diff --git a/lld/ELF/SyntheticSections.cpp b/lld/ELF/SyntheticSections.cpp
index 298c714..22bfed0 100644
--- a/lld/ELF/SyntheticSections.cpp
+++ b/lld/ELF/SyntheticSections.cpp
@@ -1420,6 +1420,12 @@ DynamicSection<ELFT>::computeContents() {
addInt(config->useAndroidRelrTags ? DT_ANDROID_RELRENT : DT_RELRENT,
sizeof(Elf_Relr));
}
+ if (part.relrAuthDyn && part.relrAuthDyn->getParent() &&
+ !part.relrAuthDyn->relocs.empty()) {
+ addInSec(DT_AARCH64_AUTH_RELR, *part.relrAuthDyn);
+ addInt(DT_AARCH64_AUTH_RELRSZ, part.relrAuthDyn->getParent()->size);
+ addInt(DT_AARCH64_AUTH_RELRENT, sizeof(Elf_Relr));
+ }
if (isMain && in.relaPlt->isNeeded()) {
addInSec(DT_JMPREL, *in.relaPlt);
entries.emplace_back(DT_PLTRELSZ, addPltRelSz());
@@ -1731,10 +1737,13 @@ template <class ELFT> void RelocationSection<ELFT>::writeTo(uint8_t *buf) {
}
}
-RelrBaseSection::RelrBaseSection(unsigned concurrency)
- : SyntheticSection(SHF_ALLOC,
- config->useAndroidRelrTags ? SHT_ANDROID_RELR : SHT_RELR,
- config->wordsize, ".relr.dyn"),
+RelrBaseSection::RelrBaseSection(unsigned concurrency, bool isAArch64Auth)
+ : SyntheticSection(
+ SHF_ALLOC,
+ isAArch64Auth
+ ? SHT_AARCH64_AUTH_RELR
+ : (config->useAndroidRelrTags ? SHT_ANDROID_RELR : SHT_RELR),
+ config->wordsize, isAArch64Auth ? ".relr.auth.dyn" : ".relr.dyn"),
relocsVec(concurrency) {}
void RelrBaseSection::mergeRels() {
@@ -2002,8 +2011,8 @@ bool AndroidPackedRelocationSection<ELFT>::updateAllocSize() {
}
template <class ELFT>
-RelrSection<ELFT>::RelrSection(unsigned concurrency)
- : RelrBaseSection(concurrency) {
+RelrSection<ELFT>::RelrSection(unsigned concurrency, bool isAArch64Auth)
+ : RelrBaseSection(concurrency, isAArch64Auth) {
this->entsize = config->wordsize;
}
@@ -4774,6 +4783,9 @@ template <class ELFT> void elf::createSyntheticSections() {
if (config->relrPackDynRelocs) {
part.relrDyn = std::make_unique<RelrSection<ELFT>>(threadCount);
add(*part.relrDyn);
+ part.relrAuthDyn = std::make_unique<RelrSection<ELFT>>(
+ threadCount, /*isAArch64Auth=*/true);
+ add(*part.relrAuthDyn);
}
if (!config->relocatable) {
diff --git a/lld/ELF/SyntheticSections.h b/lld/ELF/SyntheticSections.h
index 34949025..eaa09ea 100644
--- a/lld/ELF/SyntheticSections.h
+++ b/lld/ELF/SyntheticSections.h
@@ -548,7 +548,9 @@ public:
static bool classof(const SectionBase *d) {
return SyntheticSection::classof(d) &&
(d->type == llvm::ELF::SHT_RELA || d->type == llvm::ELF::SHT_REL ||
- d->type == llvm::ELF::SHT_RELR);
+ d->type == llvm::ELF::SHT_RELR ||
+ (d->type == llvm::ELF::SHT_AARCH64_AUTH_RELR &&
+ config->emachine == llvm::ELF::EM_AARCH64));
}
int32_t dynamicTag, sizeDynamicTag;
SmallVector<DynamicReloc, 0> relocs;
@@ -596,15 +598,17 @@ private:
};
struct RelativeReloc {
- uint64_t getOffset() const { return inputSec->getVA(offsetInSec); }
+ uint64_t getOffset() const {
+ return inputSec->getVA(inputSec->relocs()[relocIdx].offset);
+ }
const InputSectionBase *inputSec;
- uint64_t offsetInSec;
+ size_t relocIdx;
};
class RelrBaseSection : public SyntheticSection {
public:
- RelrBaseSection(unsigned concurrency);
+ RelrBaseSection(unsigned concurrency, bool isAArch64Auth = false);
void mergeRels();
bool isNeeded() const override {
return !relocs.empty() ||
@@ -622,7 +626,7 @@ template <class ELFT> class RelrSection final : public RelrBaseSection {
using Elf_Relr = typename ELFT::Relr;
public:
- RelrSection(unsigned concurrency);
+ RelrSection(unsigned concurrency, bool isAArch64Auth = false);
bool updateAllocSize() override;
size_t getSize() const override { return relrRelocs.size() * this->entsize; }
@@ -1460,6 +1464,7 @@ struct Partition {
std::unique_ptr<PackageMetadataNote> packageMetadataNote;
std::unique_ptr<RelocationBaseSection> relaDyn;
std::unique_ptr<RelrBaseSection> relrDyn;
+ std::unique_ptr<RelrBaseSection> relrAuthDyn;
std::unique_ptr<VersionDefinitionSection> verDef;
std::unique_ptr<SyntheticSection> verNeed;
std::unique_ptr<VersionTableSection> verSym;
diff --git a/lld/ELF/Writer.cpp b/lld/ELF/Writer.cpp
index e8a7b19..fe2e190 100644
--- a/lld/ELF/Writer.cpp
+++ b/lld/ELF/Writer.cpp
@@ -1454,9 +1454,32 @@ template <class ELFT> void Writer<ELFT>::finalizeAddressDependentContent() {
in.mipsGot->updateAllocSize();
for (Partition &part : partitions) {
+    // The R_AARCH64_AUTH_RELATIVE relocation has a smaller addend field,
+    // since bits [63:32] encode the signing schema. We've put relocations in
+    // .relr.auth.dyn during RelocationScanner::processAux, but the target VA
+    // for some of them might be wider than 32 bits. We can only know the
+    // final VA at this point, so move relocations with large values from
+    // .relr.auth.dyn to .rela.dyn. See also AArch64::relocate.
+ if (part.relrAuthDyn) {
+ auto it = llvm::remove_if(
+ part.relrAuthDyn->relocs, [&part](const RelativeReloc &elem) {
+ const Relocation &reloc = elem.inputSec->relocs()[elem.relocIdx];
+ if (isInt<32>(reloc.sym->getVA(reloc.addend)))
+ return false;
+ part.relaDyn->addReloc({R_AARCH64_AUTH_RELATIVE, elem.inputSec,
+ reloc.offset,
+ DynamicReloc::AddendOnlyWithTargetVA,
+ *reloc.sym, reloc.addend, R_ABS});
+ return true;
+ });
+ changed |= (it != part.relrAuthDyn->relocs.end());
+ part.relrAuthDyn->relocs.erase(it, part.relrAuthDyn->relocs.end());
+ }
changed |= part.relaDyn->updateAllocSize();
if (part.relrDyn)
changed |= part.relrDyn->updateAllocSize();
+ if (part.relrAuthDyn)
+ changed |= part.relrAuthDyn->updateAllocSize();
if (part.memtagGlobalDescriptors)
changed |= part.memtagGlobalDescriptors->updateAllocSize();
}
@@ -1614,6 +1637,14 @@ static void removeUnusedSyntheticSections() {
auto *sec = cast<SyntheticSection>(s);
if (sec->getParent() && sec->isNeeded())
return false;
+ // .relr.auth.dyn relocations may be moved to .rela.dyn in
+ // finalizeAddressDependentContent, making .rela.dyn no longer empty.
+ // Conservatively keep .rela.dyn. .relr.auth.dyn can be made empty, but
+ // we would fail to remove it here.
+ if (config->emachine == EM_AARCH64 && config->relrPackDynRelocs)
+ if (auto *relSec = dyn_cast<RelocationBaseSection>(sec))
+ if (relSec == mainPart->relaDyn.get())
+ return false;
unused.insert(sec);
return true;
});
@@ -1926,6 +1957,10 @@ template <class ELFT> void Writer<ELFT>::finalizeSections() {
part.relrDyn->mergeRels();
finalizeSynthetic(part.relrDyn.get());
}
+ if (part.relrAuthDyn) {
+ part.relrAuthDyn->mergeRels();
+ finalizeSynthetic(part.relrAuthDyn.get());
+ }
finalizeSynthetic(part.dynSymTab.get());
finalizeSynthetic(part.gnuHashTab.get());