Rip out EHCleanupScope.
llvm-svn: 108999
@@ -58,11 +58,6 @@ EHScopeStack::stable_iterator
 EHScopeStack::getEnclosingEHCleanup(iterator it) const {
   assert(it != end());
   do {
-    if (isa<EHCleanupScope>(*it)) {
-      if (cast<EHCleanupScope>(*it).isEHCleanup())
-        return stabilize(it);
-      return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
-    }
     if (isa<EHLazyCleanupScope>(*it)) {
       if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
         return stabilize(it);
@@ -94,36 +89,14 @@ void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
   return Scope->getCleanupBuffer();
 }

-void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
-                               llvm::BasicBlock *NormalExit,
-                               llvm::BasicBlock *EHEntry,
-                               llvm::BasicBlock *EHExit) {
-  char *Buffer = allocate(EHCleanupScope::getSize());
-  new (Buffer) EHCleanupScope(BranchFixups.size(),
-                              InnermostNormalCleanup,
-                              InnermostEHCleanup,
-                              NormalEntry, NormalExit, EHEntry, EHExit);
-  if (NormalEntry)
-    InnermostNormalCleanup = stable_begin();
-  if (EHEntry)
-    InnermostEHCleanup = stable_begin();
-}
-
 void EHScopeStack::popCleanup() {
   assert(!empty() && "popping exception stack when not empty");

-  if (isa<EHLazyCleanupScope>(*begin())) {
-    EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
-    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
-    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
-    StartOfData += Cleanup.getAllocatedSize();
-  } else {
-    assert(isa<EHCleanupScope>(*begin()));
-    EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
-    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
-    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
-    StartOfData += EHCleanupScope::getSize();
-  }
+  assert(isa<EHLazyCleanupScope>(*begin()));
+  EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
+  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
+  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+  StartOfData += Cleanup.getAllocatedSize();

   // Check whether we can shrink the branch-fixups stack.
   if (!BranchFixups.empty()) {
@@ -177,11 +150,7 @@ void EHScopeStack::popNullFixups() {
   assert(hasNormalCleanups());

   EHScopeStack::iterator it = find(InnermostNormalCleanup);
-  unsigned MinSize;
-  if (isa<EHCleanupScope>(*it))
-    MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
-  else
-    MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
+  unsigned MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

   while (BranchFixups.size() > MinSize &&
@@ -666,8 +635,6 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 /// normal-only cleanup scopes.
 static bool isNonEHScope(const EHScope &S) {
   switch (S.getKind()) {
-  case EHScope::Cleanup:
-    return !cast<EHCleanupScope>(S).isEHCleanup();
   case EHScope::LazyCleanup:
     return !cast<EHLazyCleanupScope>(S).isEHCleanup();
   case EHScope::Filter:
@@ -804,12 +771,6 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
       // We otherwise don't care about cleanups.
       continue;

-    case EHScope::Cleanup:
-      if (!HasEHCleanup)
-        HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
-      // We otherwise don't care about cleanups.
-      continue;
-
     case EHScope::Filter: {
       assert(I.next() == EHStack.end() && "EH filter is not end of EH stack");
       assert(!CatchAll.Block && "EH filter reached after catch-all");
@@ -57,13 +57,13 @@ public:
 class EHScope {
   llvm::BasicBlock *CachedLandingPad;

-  unsigned K : 3;
+  unsigned K : 2;

 protected:
-  enum { BitsRemaining = 29 };
+  enum { BitsRemaining = 30 };

 public:
-  enum Kind { Cleanup, LazyCleanup, Catch, Terminate, Filter };
+  enum Kind { LazyCleanup, Catch, Terminate, Filter };

   EHScope(Kind K) : CachedLandingPad(0), K(K) {}

@@ -234,61 +234,6 @@ public:
   }
 };

-/// A scope which needs to execute some code if we try to unwind ---
-/// either normally, via the EH mechanism, or both --- through it.
-class EHCleanupScope : public EHScope {
-  /// The number of fixups required by enclosing scopes (not including
-  /// this one). If this is the top cleanup scope, all the fixups
-  /// from this index onwards belong to this scope.
-  unsigned FixupDepth : BitsRemaining;
-
-  /// The nearest normal cleanup scope enclosing this one.
-  EHScopeStack::stable_iterator EnclosingNormal;
-
-  /// The nearest EH cleanup scope enclosing this one.
-  EHScopeStack::stable_iterator EnclosingEH;
-
-  llvm::BasicBlock *NormalEntry;
-  llvm::BasicBlock *NormalExit;
-  llvm::BasicBlock *EHEntry;
-  llvm::BasicBlock *EHExit;
-
-public:
-  static size_t getSize() { return sizeof(EHCleanupScope); }
-
-  EHCleanupScope(unsigned FixupDepth,
-                 EHScopeStack::stable_iterator EnclosingNormal,
-                 EHScopeStack::stable_iterator EnclosingEH,
-                 llvm::BasicBlock *NormalEntry, llvm::BasicBlock *NormalExit,
-                 llvm::BasicBlock *EHEntry, llvm::BasicBlock *EHExit)
-    : EHScope(Cleanup), FixupDepth(FixupDepth),
-      EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH),
-      NormalEntry(NormalEntry), NormalExit(NormalExit),
-      EHEntry(EHEntry), EHExit(EHExit) {
-    assert((NormalEntry != 0) == (NormalExit != 0));
-    assert((EHEntry != 0) == (EHExit != 0));
-  }
-
-  bool isNormalCleanup() const { return NormalEntry != 0; }
-  bool isEHCleanup() const { return EHEntry != 0; }
-
-  llvm::BasicBlock *getNormalEntry() const { return NormalEntry; }
-  llvm::BasicBlock *getNormalExit() const { return NormalExit; }
-  llvm::BasicBlock *getEHEntry() const { return EHEntry; }
-  llvm::BasicBlock *getEHExit() const { return EHExit; }
-  unsigned getFixupDepth() const { return FixupDepth; }
-  EHScopeStack::stable_iterator getEnclosingNormalCleanup() const {
-    return EnclosingNormal;
-  }
-  EHScopeStack::stable_iterator getEnclosingEHCleanup() const {
-    return EnclosingEH;
-  }
-
-  static bool classof(const EHScope *Scope) {
-    return Scope->getKind() == Cleanup;
-  }
-};
-
 /// An exceptions scope which filters exceptions thrown through it.
 /// Only exceptions matching the filter types will be permitted to be
 /// thrown.
@@ -379,10 +324,6 @@ public:
           ->getAllocatedSize();
       break;

-    case EHScope::Cleanup:
-      Ptr += EHCleanupScope::getSize();
-      break;
-
     case EHScope::Terminate:
       Ptr += EHTerminateScope::getSize();
       break;
@@ -671,35 +671,6 @@ void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
     PopCleanupBlock();
 }

-/// Destroys a cleanup if it was unused.
-static void DestroyCleanup(CodeGenFunction &CGF,
-                           llvm::BasicBlock *Entry,
-                           llvm::BasicBlock *Exit) {
-  assert(Entry->use_empty() && "destroying cleanup with uses!");
-  assert(Exit->getTerminator() == 0 &&
-         "exit has terminator but entry has no predecessors!");
-
-  // This doesn't always remove the entire cleanup, but it's much
-  // safer as long as we don't know what blocks belong to the cleanup.
-  // A *much* better approach if we care about this inefficiency would
-  // be to lazily emit the cleanup.
-
-  // If the exit block is distinct from the entry, give it a branch to
-  // an unreachable destination. This preserves the well-formedness
-  // of the IR.
-  if (Entry != Exit)
-    llvm::BranchInst::Create(CGF.getUnreachableBlock(), Exit);
-
-  assert(!Entry->getParent() && "cleanup entry already positioned?");
-  // We can't just delete the entry; we have to kill any references to
-  // its instructions in other blocks.
-  for (llvm::BasicBlock::iterator I = Entry->begin(), E = Entry->end();
-       I != E; ++I)
-    if (!I->use_empty())
-      I->replaceAllUsesWith(llvm::UndefValue::get(I->getType()));
-  delete Entry;
-}
-
 /// Creates a switch instruction to thread branches out of the given
 /// block (which is the exit block of a cleanup).
 static void CreateCleanupSwitch(CodeGenFunction &CGF,
@@ -984,148 +955,8 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) {
 /// any branch fixups on the cleanup.
 void CodeGenFunction::PopCleanupBlock() {
   assert(!EHStack.empty() && "cleanup stack is empty!");
-  if (isa<EHLazyCleanupScope>(*EHStack.begin()))
-    return PopLazyCleanupBlock(*this);
-
-  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
-  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
-  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());
-
-  // Handle the EH cleanup if (1) there is one and (2) it's different
-  // from the normal cleanup.
-  if (Scope.isEHCleanup() &&
-      Scope.getEHEntry() != Scope.getNormalEntry()) {
-    llvm::BasicBlock *EHEntry = Scope.getEHEntry();
-    llvm::BasicBlock *EHExit = Scope.getEHExit();
-
-    if (EHEntry->use_empty()) {
-      DestroyCleanup(*this, EHEntry, EHExit);
-    } else {
-      // TODO: this isn't really the ideal location to put this EH
-      // cleanup, but lazy emission is a better solution than trying
-      // to pick a better spot.
-      CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
-      EmitBlock(EHEntry);
-      Builder.restoreIP(SavedIP);
-
-      SimplifyCleanupEdges(*this, EHEntry, EHExit);
-    }
-  }
-
-  // If we only have an EH cleanup, we don't really need to do much
-  // here. Branch fixups just naturally drop down to the enclosing
-  // cleanup scope.
-  if (!Scope.isNormalCleanup()) {
-    EHStack.popCleanup();
-    assert(EHStack.getNumBranchFixups() == 0 || EHStack.hasNormalCleanups());
-    return;
-  }
-
-  // Check whether the scope has any fixups that need to be threaded.
-  unsigned FixupDepth = Scope.getFixupDepth();
-  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;
-
-  // Grab the entry and exit blocks.
-  llvm::BasicBlock *Entry = Scope.getNormalEntry();
-  llvm::BasicBlock *Exit = Scope.getNormalExit();
-
-  // Check whether anything's been threaded through the cleanup already.
-  assert((Exit->getTerminator() == 0) == Entry->use_empty() &&
-         "cleanup entry/exit mismatch");
-  bool HasExistingBranches = !Entry->use_empty();
-
-  // Check whether we need to emit a "fallthrough" branch through the
-  // cleanup for the current insertion point.
-  llvm::BasicBlock *FallThrough = Builder.GetInsertBlock();
-  if (FallThrough && FallThrough->getTerminator())
-    FallThrough = 0;
-
-  // If *nothing* is using the cleanup, kill it.
-  if (!FallThrough && !HasFixups && !HasExistingBranches) {
-    EHStack.popCleanup();
-    DestroyCleanup(*this, Entry, Exit);
-    return;
-  }
-
-  // Otherwise, add the block to the function.
-  EmitBlock(Entry);
-
-  if (FallThrough)
-    Builder.SetInsertPoint(Exit);
-  else
-    Builder.ClearInsertionPoint();
-
-  // Fast case: if we don't have to add any fixups, and either
-  // we don't have a fallthrough or the cleanup wasn't previously
-  // used, then the setup above is sufficient.
-  if (!HasFixups) {
-    if (!FallThrough) {
-      assert(HasExistingBranches && "no reason for cleanup but didn't kill before");
-      EHStack.popCleanup();
-      SimplifyCleanupEdges(*this, Entry, Exit);
-      return;
-    } else if (!HasExistingBranches) {
-      assert(FallThrough && "no reason for cleanup but didn't kill before");
-      // We can't simplify the exit edge in this case because we're
-      // already inserting at the end of the exit block.
-      EHStack.popCleanup();
-      SimplifyCleanupEntry(*this, Entry);
-      return;
-    }
-  }
-
-  // Otherwise we're going to have to thread things through the cleanup.
-  llvm::SmallVector<BranchFixup*, 8> Fixups;
-
-  // Synthesize a fixup for the current insertion point.
-  BranchFixup Cur;
-  if (FallThrough) {
-    Cur.Destination = createBasicBlock("cleanup.cont");
-    Cur.LatestBranch = FallThrough->getTerminator();
-    Cur.LatestBranchIndex = 0;
-    Cur.Origin = Cur.LatestBranch;
-
-    // Restore fixup invariant. EmitBlock added a branch to the cleanup
-    // which we need to redirect to the destination.
-    cast<llvm::BranchInst>(Cur.LatestBranch)->setSuccessor(0, Cur.Destination);
-
-    Fixups.push_back(&Cur);
-  } else {
-    Cur.Destination = 0;
-  }
-
-  // Collect any "real" fixups we need to thread.
-  for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
-       I != E; ++I)
-    if (EHStack.getBranchFixup(I).Destination)
-      Fixups.push_back(&EHStack.getBranchFixup(I));
-
-  assert(!Fixups.empty() && "no fixups, invariants broken!");
-
-  // If there's only a single fixup to thread through, do so with
-  // unconditional branches. This only happens if there's a single
-  // branch and no fallthrough.
-  if (Fixups.size() == 1 && !HasExistingBranches) {
-    Fixups[0]->LatestBranch->setSuccessor(Fixups[0]->LatestBranchIndex, Entry);
-    llvm::BranchInst *Br =
-      llvm::BranchInst::Create(Fixups[0]->Destination, Exit);
-    Fixups[0]->LatestBranch = Br;
-    Fixups[0]->LatestBranchIndex = 0;
-
-  // Otherwise, force a switch statement and thread everything through
-  // the switch.
-  } else {
-    CreateCleanupSwitch(*this, Exit);
-    for (unsigned I = 0, E = Fixups.size(); I != E; ++I)
-      ThreadFixupThroughCleanup(*this, *Fixups[I], Entry, Exit);
-  }
-
-  // Emit the fallthrough destination block if necessary.
-  if (Cur.Destination)
-    EmitBlock(Cur.Destination);
-
-  // We're finally done with the cleanup.
-  EHStack.popCleanup();
+  assert(isa<EHLazyCleanupScope>(*EHStack.begin()));
+  return PopLazyCleanupBlock(*this);
 }

 void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
@@ -1159,12 +990,7 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {

   for (EHScopeStack::iterator I = EHStack.begin(),
          E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
-    if (isa<EHCleanupScope>(*I)) {
-      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
-      if (Scope.isNormalCleanup())
-        ThreadFixupThroughCleanup(*this, Fixup, Scope.getNormalEntry(),
-                                  Scope.getNormalExit());
-    } else if (isa<EHLazyCleanupScope>(*I)) {
+    if (isa<EHLazyCleanupScope>(*I)) {
       EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
       if (Scope.isNormalCleanup()) {
         llvm::BasicBlock *Block = Scope.getNormalBlock();
@@ -1208,12 +1034,7 @@ void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) {

   for (EHScopeStack::iterator I = EHStack.begin(),
          E = EHStack.find(Dest.ScopeDepth); I != E; ++I) {
-    if (isa<EHCleanupScope>(*I)) {
-      EHCleanupScope &Scope = cast<EHCleanupScope>(*I);
-      if (Scope.isEHCleanup())
-        ThreadFixupThroughCleanup(*this, Fixup, Scope.getEHEntry(),
-                                  Scope.getEHExit());
-    } else if (isa<EHLazyCleanupScope>(*I)) {
+    if (isa<EHLazyCleanupScope>(*I)) {
       EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I);
       if (Scope.isEHCleanup()) {
         llvm::BasicBlock *Block = Scope.getEHBlock();
@@ -263,12 +263,6 @@ public:
     (void) Obj;
   }

-  /// Push a cleanup on the stack.
-  void pushCleanup(llvm::BasicBlock *NormalEntry,
-                   llvm::BasicBlock *NormalExit,
-                   llvm::BasicBlock *EHEntry,
-                   llvm::BasicBlock *EHExit);
-
   /// Pops a cleanup scope off the stack. This should only be called
   /// by CodeGenFunction::PopCleanupBlock.
   void popCleanup();
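For context on the pattern the diff above consolidates on: once the eager pushCleanup path is gone, every cleanup goes through the lazy interface, where pushLazyCleanup hands back raw storage on the scope stack, the caller placement-news a cleanup object into it, and popCleanup later tears it down. The sketch below is a minimal, self-contained toy model of that push / placement-new / pop idea; it is illustrative only, not code from this commit, and all names in it (ToyLazyCleanup, ToyCleanupStack, CallFree, emit) are hypothetical.

// Illustrative toy model of a "lazy cleanup" stack; names are hypothetical.
#include <cassert>
#include <cstddef>
#include <cstdio>
#include <cstdlib>
#include <new>
#include <vector>

// A lazy cleanup only records what should happen; nothing is emitted
// until the scope is popped.
struct ToyLazyCleanup {
  virtual void emit(bool forEH) = 0;
  virtual ~ToyLazyCleanup() {}
};

class ToyCleanupStack {
  std::vector<char *> Buffers; // one raw allocation per pushed cleanup
public:
  // Loosely analogous to pushLazyCleanup(Kind, Size): return raw storage
  // that the caller fills in with placement new.
  void *pushLazyCleanup(std::size_t Size) {
    char *Buf = static_cast<char *>(::operator new(Size));
    Buffers.push_back(Buf);
    return Buf;
  }

  // Loosely analogous to popCleanup(): run the innermost cleanup, then
  // destroy it and release its storage.
  void popCleanup(bool forEH) {
    assert(!Buffers.empty() && "popping an empty cleanup stack");
    ToyLazyCleanup *C = reinterpret_cast<ToyLazyCleanup *>(Buffers.back());
    C->emit(forEH);
    C->~ToyLazyCleanup();
    ::operator delete(Buffers.back());
    Buffers.pop_back();
  }
};

// Example cleanup: release a heap allocation when the scope is popped.
struct CallFree : ToyLazyCleanup {
  void *Ptr;
  explicit CallFree(void *P) : Ptr(P) {}
  void emit(bool forEH) override {
    std::printf("freeing %p (forEH=%d)\n", Ptr, int(forEH));
    std::free(Ptr);
  }
};

int main() {
  ToyCleanupStack Stack;
  void *P = std::malloc(16);
  new (Stack.pushLazyCleanup(sizeof(CallFree))) CallFree(P);
  Stack.popCleanup(/*forEH=*/false); // the cleanup runs only here
  return 0;
}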