author     John McCall <rjmccall@apple.com>    2010-07-06 01:34:17 +0000
committer  John McCall <rjmccall@apple.com>    2010-07-06 01:34:17 +0000
commit     f1549f66a8216a78112286e3978cea2c29d6334c
tree       abcabedb8b72594ef7ea106fc08684927d3a386b  /lib/CodeGen/CGDecl.cpp
parent     6c47a9b9779216ef24526c064d5b6ab0db0b5009
Validated by nightly-test runs on x86 and x86-64 darwin, including after
self-host. Hopefully these results hold up on different platforms.
I tried to keep the GNU ObjC runtime happy, but it's hard for me to test.
Reimplement how clang generates IR for exceptions. Instead of creating new
invoke destinations which sequentially chain to the previous destination,
push a more semantic representation of *why* we need the cleanup/catch/filter
behavior, then collect that information into a single landing pad upon request.
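To make the contrast concrete, here is a small, purely illustrative C++ program (not part of this patch; the comments simply restate the description above) with the EH scopes clang has to track:

  #include <stdexcept>

  struct Lock { ~Lock() {} };                        // non-trivial dtor => cleanup
  void work() { throw std::runtime_error("boom"); }

  void f() {
    Lock l;          // records a cleanup: run l's destructor when unwinding
    try {
      work();        // invoke: its landing pad must dispatch to the catch (int)
                     // handler and, failing that, fall through to l's cleanup
    } catch (int) {  // records a catch scope around the try body
      work();        // invoke: only l's cleanup is active at this point
    }
  }

  int main() { try { f(); } catch (...) {} }

Under the old scheme each of these constructs created a fresh invoke destination chained to the enclosing one; under the new scheme the reasons (a catch clause, a destructor cleanup) are pushed, and a landing pad combining whichever scopes are active is only built when an invoke asks for one.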
Also reorganize how normal cleanups (i.e. cleanups triggered by non-exceptional
control flow) are generated, since that is actually fairly closely tied in with
the former. Remove the need to track which cleanup scope a block is associated
with.
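The reorganized call sites all follow the same shape, which the diff below shows in CGDecl.cpp: a single CleanupBlock now carries both halves of a cleanup, where a DelayedCleanupBlock plus a separate EHCleanupBlock used to be needed. A condensed sketch of the pattern (the emitter function and EmitExampleRelease are hypothetical; CleanupBlock, NormalCleanup, beginEHCleanup and the Exceptions flag are names used in the patch):

  // Hypothetical CodeGenFunction member, shown only to illustrate the pattern;
  // the real call sites are in the diff below.
  void CodeGenFunction::EmitExampleCleanup(llvm::Value *Addr) {
    CleanupBlock Scope(*this, NormalCleanup);

    // Emitted here: the cleanup that runs on ordinary (non-exceptional) exits.
    EmitExampleRelease(Addr);

    if (Exceptions) {
      // Flip the same scope over to its exception-only half; no separate
      // EHCleanupBlock object is required any more.
      Scope.beginEHCleanup();
      EmitExampleRelease(Addr);
    }
  }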
Document a lot of previously poorly-understood (by me, at least) behavior.
The new framework implements the Horrible Hack (tm), which requires every
landing pad to have a catch-all so that inlining will work. Clang no longer
requires the Horrible Hack just to make exceptions flow correctly within
a function, however. The HH is an unfortunate requirement of LLVM's EH IR.
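For illustration only (this fragment is not from the patch): even a function whose sole EH obligation is a destructor cleanup gets the catch-all treatment described above.

  struct Guard { ~Guard() {} };  // non-trivial destructor => cleanup on unwind
  void mayThrow();               // defined elsewhere; may throw

  void g() {
    Guard guard;  // must be destroyed if mayThrow() unwinds
    mayThrow();   // emitted as an invoke; its landing pad is conceptually
                  // "cleanup only", yet under the Horrible Hack clang still
                  // attaches a catch-all to it, purely because LLVM's EH IR
                  // (as of this commit) needs that for inlining to work.
  }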
git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@107631 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/CodeGen/CGDecl.cpp')
-rw-r--r--  lib/CodeGen/CGDecl.cpp  112
1 file changed, 58 insertions, 54 deletions
diff --git a/lib/CodeGen/CGDecl.cpp b/lib/CodeGen/CGDecl.cpp
index 5c3055f5b6..959a9ae483 100644
--- a/lib/CodeGen/CGDecl.cpp
+++ b/lib/CodeGen/CGDecl.cpp
@@ -391,7 +391,8 @@ const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) {
 /// EmitLocalBlockVarDecl - Emit code and set up an entry in LocalDeclMap for a
 /// variable declaration with auto, register, or no storage class specifier.
 /// These turn into simple stack objects, or GlobalValues depending on target.
-void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
+void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D,
+                                            SpecialInitFn *SpecialInit) {
   QualType Ty = D.getType();
   bool isByRef = D.hasAttr<BlocksAttr>();
   bool needsDispose = false;
@@ -489,7 +490,7 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
 
       {
         // Push a cleanup block and restore the stack there.
-        DelayedCleanupBlock scope(*this);
+        CleanupBlock scope(*this, NormalCleanup);
 
         V = Builder.CreateLoad(Stack, "tmp");
         llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::stackrestore);
@@ -597,7 +598,9 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
     }
   }
 
-  if (Init) {
+  if (SpecialInit) {
+    SpecialInit(*this, D, DeclPtr);
+  } else if (Init) {
     llvm::Value *Loc = DeclPtr;
     if (isByRef)
       Loc = Builder.CreateStructGEP(DeclPtr, getByRefValueLLVMField(&D),
@@ -663,7 +666,7 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
       EmitAggExpr(Init, Loc, isVolatile);
     }
   }
-  
+
   // Handle CXX destruction of variables.
   QualType DtorTy(Ty);
   while (const ArrayType *Array = getContext().getAsArrayType(DtorTy))
@@ -683,20 +686,18 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
 
       if (const ConstantArrayType *Array =
             getContext().getAsConstantArrayType(Ty)) {
-        {
-          DelayedCleanupBlock Scope(*this);
-          QualType BaseElementTy = getContext().getBaseElementType(Array);
-          const llvm::Type *BasePtr = ConvertType(BaseElementTy);
-          BasePtr = llvm::PointerType::getUnqual(BasePtr);
-          llvm::Value *BaseAddrPtr =
-            Builder.CreateBitCast(Loc, BasePtr);
-          EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
-
-          // Make sure to jump to the exit block.
-          EmitBranch(Scope.getCleanupExitBlock());
-        }
+        CleanupBlock Scope(*this, NormalCleanup);
+
+        QualType BaseElementTy = getContext().getBaseElementType(Array);
+        const llvm::Type *BasePtr = ConvertType(BaseElementTy);
+        BasePtr = llvm::PointerType::getUnqual(BasePtr);
+        llvm::Value *BaseAddrPtr =
+          Builder.CreateBitCast(Loc, BasePtr);
+        EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
+
         if (Exceptions) {
-          EHCleanupBlock Cleanup(*this);
+          Scope.beginEHCleanup();
+
           QualType BaseElementTy = getContext().getBaseElementType(Array);
           const llvm::Type *BasePtr = ConvertType(BaseElementTy);
           BasePtr = llvm::PointerType::getUnqual(BasePtr);
@@ -705,30 +706,30 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
           EmitCXXAggrDestructorCall(D, Array, BaseAddrPtr);
         }
       } else {
-        {
-          // Normal destruction.
-          DelayedCleanupBlock Scope(*this);
-
-          if (NRVO) {
-            // If we exited via NRVO, we skip the destructor call.
-            llvm::BasicBlock *NoNRVO = createBasicBlock("nrvo.unused");
-            Builder.CreateCondBr(Builder.CreateLoad(NRVOFlag, "nrvo.val"),
-                                 Scope.getCleanupExitBlock(),
-                                 NoNRVO);
-            EmitBlock(NoNRVO);
-          }
-
-          // We don't call the destructor along the normal edge if we're
-          // applying the NRVO.
-          EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
-                                Loc);
-
-          // Make sure to jump to the exit block.
-          EmitBranch(Scope.getCleanupExitBlock());
+        // Normal destruction.
+        CleanupBlock Scope(*this, NormalCleanup);
+
+        llvm::BasicBlock *SkipDtor = 0;
+        if (NRVO) {
+          // If we exited via NRVO, we skip the destructor call.
+          llvm::BasicBlock *NoNRVO = createBasicBlock("nrvo.unused");
+          SkipDtor = createBasicBlock("nrvo.skipdtor");
+          Builder.CreateCondBr(Builder.CreateLoad(NRVOFlag, "nrvo.val"),
+                               SkipDtor,
+                               NoNRVO);
+          EmitBlock(NoNRVO);
         }
 
+        // We don't call the destructor along the normal edge if we're
+        // applying the NRVO.
+        EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
+                              Loc);
+
+        if (NRVO) EmitBlock(SkipDtor);
+
+        // Along the exceptions path we always execute the dtor.
         if (Exceptions) {
-          EHCleanupBlock Cleanup(*this);
+          Scope.beginEHCleanup();
           EmitCXXDestructorCall(D, Dtor_Complete, /*ForVirtualBase=*/false,
                                 Loc);
         }
@@ -752,17 +753,19 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
     //
     // To fix this we insert a bitcast here.
     QualType ArgTy = Info.arg_begin()->type;
 
-    {
-      DelayedCleanupBlock scope(*this);
-      CallArgList Args;
-      Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,
-                                                         ConvertType(ArgTy))),
-                            getContext().getPointerType(D.getType())));
-      EmitCall(Info, F, ReturnValueSlot(), Args);
-    }
+    CleanupBlock CleanupScope(*this, NormalCleanup);
+
+    // Normal cleanup.
+    CallArgList Args;
+    Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,
+                                                       ConvertType(ArgTy))),
+                          getContext().getPointerType(D.getType())));
+    EmitCall(Info, F, ReturnValueSlot(), Args);
+
+    // EH cleanup.
 
     if (Exceptions) {
-      EHCleanupBlock Cleanup(*this);
+      CleanupScope.beginEHCleanup();
       CallArgList Args;
       Args.push_back(std::make_pair(RValue::get(Builder.CreateBitCast(DeclPtr,
@@ -773,15 +776,16 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D) {
   }
 
   if (needsDispose && CGM.getLangOptions().getGCMode() != LangOptions::GCOnly) {
-    {
-      DelayedCleanupBlock scope(*this);
-      llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
-      V = Builder.CreateLoad(V);
-      BuildBlockRelease(V);
-    }
+    CleanupBlock CleanupScope(*this, NormalCleanup);
+
+    llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
+    V = Builder.CreateLoad(V);
+    BuildBlockRelease(V);
+
     // FIXME: Turn this on and audit the codegen
     if (0 && Exceptions) {
-      EHCleanupBlock Cleanup(*this);
+      CleanupScope.beginEHCleanup();
+
       llvm::Value *V = Builder.CreateStructGEP(DeclPtr, 1, "forwarding");
       V = Builder.CreateLoad(V);
       BuildBlockRelease(V);
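The other interface change in this file is the new SpecialInit parameter of EmitLocalBlockVarDecl. From the call SpecialInit(*this, D, DeclPtr) above, a special initializer receives the CodeGenFunction, the declaration, and the address of the freshly set-up slot. A hypothetical caller, for illustration only (InitFromElsewhere and EmitSpeciallyInitializedLocal are invented; only EmitLocalBlockVarDecl and SpecialInitFn come from the patch):

  // Invented callback: take over initialization of the variable's storage
  // instead of letting EmitLocalBlockVarDecl emit D's own initializer.
  static void InitFromElsewhere(CodeGenFunction &CGF, const VarDecl &D,
                                llvm::Value *DeclPtr) {
    // e.g. store a value produced by the caller (such as a caught exception
    // object) into DeclPtr.
  }

  void EmitSpeciallyInitializedLocal(CodeGenFunction &CGF, const VarDecl &D) {
    // The usual alloca/byref/cleanup bookkeeping still happens inside
    // EmitLocalBlockVarDecl; only the initialization is delegated.
    CGF.EmitLocalBlockVarDecl(D, InitFromElsewhere);
  }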