path: root/lib/CodeGen/CGException.cpp
author     John McCall <rjmccall@apple.com>  2010-07-13 20:32:21 +0000
committer  John McCall <rjmccall@apple.com>  2010-07-13 20:32:21 +0000
commit     da65ea86482bc116906edfb9ba1d7124f76cc867 (patch)
tree       fc92862d7909d8eeaa367fa4fe686a875a934bf8 /lib/CodeGen/CGException.cpp
parent     77a3c9ede5cdb26b5eb3001e82bb0a36ca997e1f (diff)
Teach IR generation how to lazily emit cleanups. This has a lot of advantages,
mostly in avoiding unnecessary work at compile time but also in producing more
sensible block orderings.

Move the destructor cleanups for local variables over to use lazy cleanups.
Eventually all cleanups will do this; for now we have some awkward code
duplication.

Tell IR generation just to never produce landing pads in -fno-exceptions.
This is a much more comprehensive solution to a problem which previously was
half-solved by checks in most cleanup-generation spots.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@108270 91177308-0d34-0410-b5e6-96231b3b80d8
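
The mechanism behind the first paragraph is visible in the EHScopeStack::pushLazyCleanup hunk below: the stack hands out a raw, suitably sized buffer (Scope->getCleanupBuffer()), the caller placement-news a cleanup object into it, and that object's code is only emitted later, if and when the scope is popped and a normal or EH edge actually needs it. The standalone sketch that follows models this push/pop pattern outside of Clang; the LazyCleanup interface, its Emit(bool) hook, the ScopeStack and CallDtor classes, and the templated push helper are illustrative assumptions for the sketch, not the exact API this commit adds.

// Toy model (not Clang code): cleanups are stored as objects on a scope
// stack and "emitted" only when their scope is popped.
#include <cstddef>
#include <cstdio>
#include <new>
#include <vector>

// Assumed interface: a cleanup describes what to emit; Emit runs at pop time.
struct LazyCleanup {
  virtual ~LazyCleanup() {}
  virtual void Emit(bool ForEH) = 0;
};

class ScopeStack {
  struct Entry { LazyCleanup *Obj; void *Buffer; };
  std::vector<Entry> Cleanups;

public:
  // Mirrors the shape of pushLazyCleanup: return raw storage and let the
  // caller placement-new its cleanup object into it.
  void *pushLazyCleanup(std::size_t Size) {
    void *Buffer = ::operator new(Size);
    Cleanups.push_back({nullptr, Buffer});
    return Buffer;
  }

  // Assumed convenience wrapper: construct a cleanup of type T in place.
  template <class T, class A0> void push(const A0 &a0) {
    T *Obj = new (pushLazyCleanup(sizeof(T))) T(a0);
    Cleanups.back().Obj = Obj;
  }

  // Nothing is emitted until the scope is popped; this is the "lazy" part.
  void popCleanup(bool ForEH) {
    Entry E = Cleanups.back();
    Cleanups.pop_back();
    E.Obj->Emit(ForEH);
    E.Obj->~LazyCleanup();
    ::operator delete(E.Buffer);
  }
};

// Example cleanup: run a local variable's destructor, which is what this
// commit moves over to lazy cleanups.
struct CallDtor : LazyCleanup {
  const char *Name;
  explicit CallDtor(const char *N) : Name(N) {}
  void Emit(bool ForEH) {
    std::printf("emit dtor for %s on the %s path\n", Name,
                ForEH ? "EH" : "normal");
  }
};

int main() {
  ScopeStack Stack;
  Stack.push<CallDtor>("local");      // push time: no code emitted
  Stack.popCleanup(/*ForEH=*/false);  // pop time: cleanup emitted on demand
  return 0;
}

In the real patch the buffer comes from the EHScopeStack's own storage rather than the heap (hence the alignment assert and EHLazyCleanupScope::getSizeForCleanupSize in the hunk below), and pushLazyCleanup also records, via CleanupKind, whether the cleanup fires on the normal exit path, the EH path, or both.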
Diffstat (limited to 'lib/CodeGen/CGException.cpp')
-rw-r--r--  lib/CodeGen/CGException.cpp  83
1 file changed, 69 insertions(+), 14 deletions(-)
diff --git a/lib/CodeGen/CGException.cpp b/lib/CodeGen/CGException.cpp
index 085dddd95d..b10e5ae6f6 100644
--- a/lib/CodeGen/CGException.cpp
+++ b/lib/CodeGen/CGException.cpp
@@ -62,12 +62,37 @@ EHScopeStack::getEnclosingEHCleanup(iterator it) const {
         return stabilize(it);
       return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
     }
+    if (isa<EHLazyCleanupScope>(*it)) {
+      if (cast<EHLazyCleanupScope>(*it).isEHCleanup())
+        return stabilize(it);
+      return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup();
+    }
     ++it;
   } while (it != end());
   return stable_end();
 }
+void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) {
+  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
+  char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size));
+  bool IsNormalCleanup = Kind != EHCleanup;
+  bool IsEHCleanup = Kind != NormalCleanup;
+  EHLazyCleanupScope *Scope =
+    new (Buffer) EHLazyCleanupScope(IsNormalCleanup,
+                                    IsEHCleanup,
+                                    Size,
+                                    BranchFixups.size(),
+                                    InnermostNormalCleanup,
+                                    InnermostEHCleanup);
+  if (IsNormalCleanup)
+    InnermostNormalCleanup = stable_begin();
+  if (IsEHCleanup)
+    InnermostEHCleanup = stable_begin();
+
+  return Scope->getCleanupBuffer();
+}
+
 void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
                                llvm::BasicBlock *NormalExit,
                                llvm::BasicBlock *EHEntry,
@@ -86,11 +111,18 @@ void EHScopeStack::pushCleanup(llvm::BasicBlock *NormalEntry,
 void EHScopeStack::popCleanup() {
   assert(!empty() && "popping exception stack when not empty");
-  assert(isa<EHCleanupScope>(*begin()));
-  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
-  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
-  InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
-  StartOfData += EHCleanupScope::getSize();
+  if (isa<EHLazyCleanupScope>(*begin())) {
+    EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin());
+    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
+    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+    StartOfData += Cleanup.getAllocatedSize();
+  } else {
+    assert(isa<EHCleanupScope>(*begin()));
+    EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
+    InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
+    InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
+    StartOfData += EHCleanupScope::getSize();
+  }
   // Check whether we can shrink the branch-fixups stack.
   if (!BranchFixups.empty()) {
@@ -144,7 +176,11 @@ void EHScopeStack::popNullFixups() {
   assert(hasNormalCleanups());
   EHScopeStack::iterator it = find(InnermostNormalCleanup);
-  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
+  unsigned MinSize;
+  if (isa<EHCleanupScope>(*it))
+    MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
+  else
+    MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth();
   assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
   while (BranchFixups.size() > MinSize &&
@@ -391,7 +427,7 @@ static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E,
   // FIXME: StmtExprs probably force this to include a non-EH
   // handler.
   {
-    CodeGenFunction::CleanupBlock Cleanup(CGF, CodeGenFunction::EHCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(CGF, EHCleanup);
     llvm::BasicBlock *FreeBB = CGF.createBasicBlock("free-exnobj");
     llvm::BasicBlock *DoneBB = CGF.createBasicBlock("free-exnobj.done");
@@ -598,13 +634,28 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) {
 /// affect exception handling. Currently, the only non-EH scopes are
 /// normal-only cleanup scopes.
 static bool isNonEHScope(const EHScope &S) {
-  return isa<EHCleanupScope>(S) && !cast<EHCleanupScope>(S).isEHCleanup();
+  switch (S.getKind()) {
+  case EHScope::Cleanup:
+    return !cast<EHCleanupScope>(S).isEHCleanup();
+  case EHScope::LazyCleanup:
+    return !cast<EHLazyCleanupScope>(S).isEHCleanup();
+  case EHScope::Filter:
+  case EHScope::Catch:
+  case EHScope::Terminate:
+    return false;
+  }
+
+  // Suppress warning.
+  return false;
 }
 llvm::BasicBlock *CodeGenFunction::getInvokeDestImpl() {
   assert(EHStack.requiresLandingPad());
   assert(!EHStack.empty());
+  if (!Exceptions)
+    return 0;
+
   // Check the innermost scope for a cached landing pad. If this is
   // a non-EH cleanup, we'll check enclosing scopes in EmitLandingPad.
   llvm::BasicBlock *LP = EHStack.begin()->getCachedLandingPad();
@@ -713,6 +764,12 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() {
        I != E; ++I) {
     switch (I->getKind()) {
+    case EHScope::LazyCleanup:
+      if (!HasEHCleanup)
+        HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup();
+      // We otherwise don't care about cleanups.
+      continue;
+
     case EHScope::Cleanup:
       if (!HasEHCleanup)
         HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup();
@@ -954,8 +1011,7 @@ static llvm::Value *CallBeginCatch(CodeGenFunction &CGF, llvm::Value *Exn) {
   Call->setDoesNotThrow();
   {
-    CodeGenFunction::CleanupBlock EndCatchCleanup(CGF,
-                                    CodeGenFunction::NormalAndEHCleanup);
+    CodeGenFunction::CleanupBlock EndCatchCleanup(CGF, NormalAndEHCleanup);
     // __cxa_end_catch never throws, so this can just be a call.
     CGF.Builder.CreateCall(getEndCatchFn(CGF))->setDoesNotThrow();
@@ -1213,13 +1269,11 @@ CodeGenFunction::EnterFinallyBlock(const Stmt *Body,
   // Enter a normal cleanup which will perform the @finally block.
   {
-    CodeGenFunction::CleanupBlock
-      NormalCleanup(*this, CodeGenFunction::NormalCleanup);
+    CodeGenFunction::CleanupBlock Cleanup(*this, NormalCleanup);
     // Enter a cleanup to call the end-catch function if one was provided.
     if (EndCatchFn) {
-      CodeGenFunction::CleanupBlock
-        FinallyExitCleanup(CGF, CodeGenFunction::NormalAndEHCleanup);
+      CodeGenFunction::CleanupBlock FinallyExitCleanup(CGF, NormalAndEHCleanup);
       llvm::BasicBlock *EndCatchBB = createBasicBlock("finally.endcatch");
       llvm::BasicBlock *CleanupContBB = createBasicBlock("finally.cleanup.cont");
@@ -1435,3 +1489,4 @@ CodeGenFunction::CleanupBlock::~CleanupBlock() {
   CGF.Builder.restoreIP(SavedIP);
 }
+void EHScopeStack::LazyCleanup::_anchor() {}