-rw-r--r--  lib/CodeGen/CGClass.cpp          | 40
-rw-r--r--  lib/CodeGen/CGDecl.cpp           | 26
-rw-r--r--  lib/CodeGen/CGDeclCXX.cpp        |  4
-rw-r--r--  lib/CodeGen/CGException.cpp      | 72
-rw-r--r--  lib/CodeGen/CGException.h        | 30
-rw-r--r--  lib/CodeGen/CGObjCGNU.cpp        |  5
-rw-r--r--  lib/CodeGen/CGObjCMac.cpp        | 28
-rw-r--r--  lib/CodeGen/CGTemporaries.cpp    |  6
-rw-r--r--  lib/CodeGen/CodeGenFunction.cpp  | 89
-rw-r--r--  lib/CodeGen/CodeGenFunction.h    | 51
10 files changed, 173 insertions(+), 178 deletions(-)
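The change is a mechanical rename in Clang's CodeGen cleanup machinery: EHScopeStack::LazyCleanup becomes EHScopeStack::Cleanup, EHLazyCleanupScope becomes EHCleanupScope, and pushLazyCleanup becomes pushCleanup, with all callers updated. As orientation before the hunks, the sketch below shows the usage pattern after the rename, modeled on the cleanups touched in this diff. The ReleaseLock struct and pushLockRelease helper are hypothetical names for illustration; the snippet assumes it is compiled inside clang/lib/CodeGen at this revision, where CodeGenFunction.h declares EHScopeStack and the CleanupKind constants.

// Sketch only: struct and helper names here are hypothetical; the base
// class, the Emit() hook, and pushCleanup<> are the renamed APIs used in
// the hunks below.
#include "CodeGenFunction.h"   // declares EHScopeStack, CleanupKind, CGBuilderTy

using namespace clang;
using namespace CodeGen;

namespace {
  // A cleanup is a small struct derived from EHScopeStack::Cleanup (formerly
  // LazyCleanup).  It is constructed in place on the EH scope stack, and its
  // Emit() runs lazily when the enclosing scope is popped, on the normal
  // and/or exceptional edge depending on the CleanupKind it was pushed with.
  struct ReleaseLock : EHScopeStack::Cleanup {
    llvm::Value *UnlockFn;
    llvm::Value *Lock;
    ReleaseLock(llvm::Value *UnlockFn, llvm::Value *Lock)
      : UnlockFn(UnlockFn), Lock(Lock) {}

    void Emit(CodeGenFunction &CGF, bool IsForEH) {
      // IsForEH tells the cleanup whether it is being emitted along the
      // exception path rather than on normal control flow.
      CGF.Builder.CreateCall(UnlockFn, Lock)->setDoesNotThrow();
    }
  };
}

static void pushLockRelease(CodeGenFunction &CGF,
                            llvm::Value *UnlockFn, llvm::Value *Lock) {
  // The trailing arguments are forwarded to the ReleaseLock constructor.
  // There is no heap allocation, and the destructor never runs, which is
  // why ~Cleanup() below is marked unreachable.
  CGF.EHStack.pushCleanup<ReleaseLock>(NormalAndEHCleanup, UnlockFn, Lock);
}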
diff --git a/lib/CodeGen/CGClass.cpp b/lib/CodeGen/CGClass.cpp index f6357ec1d9..5bac172b0e 100644 --- a/lib/CodeGen/CGClass.cpp +++ b/lib/CodeGen/CGClass.cpp @@ -312,7 +312,7 @@ static llvm::Value *GetVTTParameter(CodeGenFunction &CGF, GlobalDecl GD, namespace { /// Call the destructor for a direct base class. - struct CallBaseDtor : EHScopeStack::LazyCleanup { + struct CallBaseDtor : EHScopeStack::Cleanup { const CXXRecordDecl *BaseClass; bool BaseIsVirtual; CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual) @@ -361,8 +361,8 @@ static void EmitBaseInitializer(CodeGenFunction &CGF, CGF.EmitAggExpr(BaseInit->getInit(), V, false, false, true); if (CGF.Exceptions && !BaseClassDecl->hasTrivialDestructor()) - CGF.EHStack.pushLazyCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl, - isBaseVirtual); + CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl, + isBaseVirtual); } static void EmitAggMemberInitializer(CodeGenFunction &CGF, @@ -452,7 +452,7 @@ static void EmitAggMemberInitializer(CodeGenFunction &CGF, } namespace { - struct CallMemberDtor : EHScopeStack::LazyCleanup { + struct CallMemberDtor : EHScopeStack::Cleanup { FieldDecl *Field; CXXDestructorDecl *Dtor; @@ -570,8 +570,8 @@ static void EmitMemberInitializer(CodeGenFunction &CGF, CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl()); if (!RD->hasTrivialDestructor()) - CGF.EHStack.pushLazyCleanup<CallMemberDtor>(EHCleanup, Field, - RD->getDestructor()); + CGF.EHStack.pushCleanup<CallMemberDtor>(EHCleanup, Field, + RD->getDestructor()); } } @@ -761,7 +761,7 @@ void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) { namespace { /// Call the operator delete associated with the current destructor. - struct CallDtorDelete : EHScopeStack::LazyCleanup { + struct CallDtorDelete : EHScopeStack::Cleanup { CallDtorDelete() {} void Emit(CodeGenFunction &CGF, bool IsForEH) { @@ -772,7 +772,7 @@ namespace { } }; - struct CallArrayFieldDtor : EHScopeStack::LazyCleanup { + struct CallArrayFieldDtor : EHScopeStack::Cleanup { const FieldDecl *Field; CallArrayFieldDtor(const FieldDecl *Field) : Field(Field) {} @@ -798,7 +798,7 @@ namespace { } }; - struct CallFieldDtor : EHScopeStack::LazyCleanup { + struct CallFieldDtor : EHScopeStack::Cleanup { const FieldDecl *Field; CallFieldDtor(const FieldDecl *Field) : Field(Field) {} @@ -831,7 +831,7 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, if (DtorType == Dtor_Deleting) { assert(DD->getOperatorDelete() && "operator delete missing - EmitDtorEpilogue"); - EHStack.pushLazyCleanup<CallDtorDelete>(NormalAndEHCleanup); + EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup); return; } @@ -853,9 +853,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, if (BaseClassDecl->hasTrivialDestructor()) continue; - EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup, - BaseClassDecl, - /*BaseIsVirtual*/ true); + EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, + BaseClassDecl, + /*BaseIsVirtual*/ true); } return; @@ -878,9 +878,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, if (BaseClassDecl->hasTrivialDestructor()) continue; - EHStack.pushLazyCleanup<CallBaseDtor>(NormalAndEHCleanup, - BaseClassDecl, - /*BaseIsVirtual*/ false); + EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup, + BaseClassDecl, + /*BaseIsVirtual*/ false); } // Destroy direct fields. 
@@ -904,9 +904,9 @@ void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD, continue; if (Array) - EHStack.pushLazyCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field); + EHStack.pushCleanup<CallArrayFieldDtor>(NormalAndEHCleanup, Field); else - EHStack.pushLazyCleanup<CallFieldDtor>(NormalAndEHCleanup, Field); + EHStack.pushCleanup<CallFieldDtor>(NormalAndEHCleanup, Field); } } @@ -1164,7 +1164,7 @@ void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD, } namespace { - struct CallLocalDtor : EHScopeStack::LazyCleanup { + struct CallLocalDtor : EHScopeStack::Cleanup { const CXXDestructorDecl *Dtor; llvm::Value *Addr; @@ -1180,7 +1180,7 @@ namespace { void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D, llvm::Value *Addr) { - EHStack.pushLazyCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr); + EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr); } void CodeGenFunction::PushDestructorCleanup(QualType T, llvm::Value *Addr) { diff --git a/lib/CodeGen/CGDecl.cpp b/lib/CodeGen/CGDecl.cpp index da2c03bc99..7dcd465de7 100644 --- a/lib/CodeGen/CGDecl.cpp +++ b/lib/CodeGen/CGDecl.cpp @@ -389,7 +389,7 @@ const llvm::Type *CodeGenFunction::BuildByRefType(const ValueDecl *D) { } namespace { - struct CallArrayDtor : EHScopeStack::LazyCleanup { + struct CallArrayDtor : EHScopeStack::Cleanup { CallArrayDtor(const CXXDestructorDecl *Dtor, const ConstantArrayType *Type, llvm::Value *Loc) @@ -408,7 +408,7 @@ namespace { } }; - struct CallVarDtor : EHScopeStack::LazyCleanup { + struct CallVarDtor : EHScopeStack::Cleanup { CallVarDtor(const CXXDestructorDecl *Dtor, llvm::Value *NRVOFlag, llvm::Value *Loc) @@ -441,7 +441,7 @@ namespace { } namespace { - struct CallStackRestore : EHScopeStack::LazyCleanup { + struct CallStackRestore : EHScopeStack::Cleanup { llvm::Value *Stack; CallStackRestore(llvm::Value *Stack) : Stack(Stack) {} void Emit(CodeGenFunction &CGF, bool IsForEH) { @@ -451,7 +451,7 @@ namespace { } }; - struct CallCleanupFunction : EHScopeStack::LazyCleanup { + struct CallCleanupFunction : EHScopeStack::Cleanup { llvm::Constant *CleanupFn; const CGFunctionInfo &FnInfo; llvm::Value *Addr; @@ -479,7 +479,7 @@ namespace { } }; - struct CallBlockRelease : EHScopeStack::LazyCleanup { + struct CallBlockRelease : EHScopeStack::Cleanup { llvm::Value *Addr; CallBlockRelease(llvm::Value *Addr) : Addr(Addr) {} @@ -592,7 +592,7 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D, DidCallStackSave = true; // Push a cleanup block and restore the stack there. - EHStack.pushLazyCleanup<CallStackRestore>(NormalCleanup, Stack); + EHStack.pushCleanup<CallStackRestore>(NormalCleanup, Stack); } // Get the element type. 
@@ -783,11 +783,11 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D, if (const ConstantArrayType *Array = getContext().getAsConstantArrayType(Ty)) { - EHStack.pushLazyCleanup<CallArrayDtor>(NormalAndEHCleanup, - D, Array, Loc); + EHStack.pushCleanup<CallArrayDtor>(NormalAndEHCleanup, + D, Array, Loc); } else { - EHStack.pushLazyCleanup<CallVarDtor>(NormalAndEHCleanup, - D, NRVOFlag, Loc); + EHStack.pushCleanup<CallVarDtor>(NormalAndEHCleanup, + D, NRVOFlag, Loc); } } } @@ -800,12 +800,12 @@ void CodeGenFunction::EmitLocalBlockVarDecl(const VarDecl &D, assert(F && "Could not find function!"); const CGFunctionInfo &Info = CGM.getTypes().getFunctionInfo(FD); - EHStack.pushLazyCleanup<CallCleanupFunction>(NormalAndEHCleanup, - F, &Info, DeclPtr, &D); + EHStack.pushCleanup<CallCleanupFunction>(NormalAndEHCleanup, + F, &Info, DeclPtr, &D); } if (needsDispose && CGM.getLangOptions().getGCMode() != LangOptions::GCOnly) - EHStack.pushLazyCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr); + EHStack.pushCleanup<CallBlockRelease>(NormalAndEHCleanup, DeclPtr); } /// Emit an alloca (or GlobalValue depending on target) diff --git a/lib/CodeGen/CGDeclCXX.cpp b/lib/CodeGen/CGDeclCXX.cpp index f16a0b3fbd..0293c5c125 100644 --- a/lib/CodeGen/CGDeclCXX.cpp +++ b/lib/CodeGen/CGDeclCXX.cpp @@ -330,7 +330,7 @@ static llvm::Constant *getGuardAbortFn(CodeGenFunction &CGF) { } namespace { - struct CallGuardAbort : EHScopeStack::LazyCleanup { + struct CallGuardAbort : EHScopeStack::Cleanup { llvm::GlobalVariable *Guard; CallGuardAbort(llvm::GlobalVariable *Guard) : Guard(Guard) {} @@ -388,7 +388,7 @@ CodeGenFunction::EmitStaticCXXBlockVarDeclInit(const VarDecl &D, // Call __cxa_guard_abort along the exceptional edge. if (Exceptions) - EHStack.pushLazyCleanup<CallGuardAbort>(EHCleanup, GuardVariable); + EHStack.pushCleanup<CallGuardAbort>(EHCleanup, GuardVariable); EmitBlock(InitBlock); } diff --git a/lib/CodeGen/CGException.cpp b/lib/CodeGen/CGException.cpp index 71a078f929..1f00914ea4 100644 --- a/lib/CodeGen/CGException.cpp +++ b/lib/CodeGen/CGException.cpp @@ -58,10 +58,10 @@ EHScopeStack::stable_iterator EHScopeStack::getEnclosingEHCleanup(iterator it) const { assert(it != end()); do { - if (isa<EHLazyCleanupScope>(*it)) { - if (cast<EHLazyCleanupScope>(*it).isEHCleanup()) + if (isa<EHCleanupScope>(*it)) { + if (cast<EHCleanupScope>(*it).isEHCleanup()) return stabilize(it); - return cast<EHLazyCleanupScope>(*it).getEnclosingEHCleanup(); + return cast<EHCleanupScope>(*it).getEnclosingEHCleanup(); } ++it; } while (it != end()); @@ -69,18 +69,18 @@ EHScopeStack::getEnclosingEHCleanup(iterator it) const { } -void *EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) { +void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) { assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned"); - char *Buffer = allocate(EHLazyCleanupScope::getSizeForCleanupSize(Size)); + char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size)); bool IsNormalCleanup = Kind != EHCleanup; bool IsEHCleanup = Kind != NormalCleanup; - EHLazyCleanupScope *Scope = - new (Buffer) EHLazyCleanupScope(IsNormalCleanup, - IsEHCleanup, - Size, - BranchFixups.size(), - InnermostNormalCleanup, - InnermostEHCleanup); + EHCleanupScope *Scope = + new (Buffer) EHCleanupScope(IsNormalCleanup, + IsEHCleanup, + Size, + BranchFixups.size(), + InnermostNormalCleanup, + InnermostEHCleanup); if (IsNormalCleanup) InnermostNormalCleanup = stable_begin(); if (IsEHCleanup) @@ -92,8 +92,8 @@ void 
*EHScopeStack::pushLazyCleanup(CleanupKind Kind, size_t Size) { void EHScopeStack::popCleanup() { assert(!empty() && "popping exception stack when not empty"); - assert(isa<EHLazyCleanupScope>(*begin())); - EHLazyCleanupScope &Cleanup = cast<EHLazyCleanupScope>(*begin()); + assert(isa<EHCleanupScope>(*begin())); + EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin()); InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup(); InnermostEHCleanup = Cleanup.getEnclosingEHCleanup(); StartOfData += Cleanup.getAllocatedSize(); @@ -150,7 +150,7 @@ void EHScopeStack::popNullFixups() { assert(hasNormalCleanups()); EHScopeStack::iterator it = find(InnermostNormalCleanup); - unsigned MinSize = cast<EHLazyCleanupScope>(*it).getFixupDepth(); + unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth(); assert(BranchFixups.size() >= MinSize && "fixup stack out of order"); while (BranchFixups.size() > MinSize && @@ -387,7 +387,7 @@ static llvm::Constant *getCleanupValue(CodeGenFunction &CGF) { namespace { /// A cleanup to free the exception object if its initialization /// throws. - struct FreeExceptionCleanup : EHScopeStack::LazyCleanup { + struct FreeExceptionCleanup : EHScopeStack::Cleanup { FreeExceptionCleanup(llvm::Value *ShouldFreeVar, llvm::Value *ExnLocVar) : ShouldFreeVar(ShouldFreeVar), ExnLocVar(ExnLocVar) {} @@ -437,9 +437,9 @@ static void EmitAnyExprToExn(CodeGenFunction &CGF, const Expr *E, // exception during initialization. // FIXME: stmt expressions might require this to be a normal // cleanup, too. - CGF.EHStack.pushLazyCleanup<FreeExceptionCleanup>(EHCleanup, - ShouldFreeVar, - ExnLocVar); + CGF.EHStack.pushCleanup<FreeExceptionCleanup>(EHCleanup, + ShouldFreeVar, + ExnLocVar); EHScopeStack::stable_iterator Cleanup = CGF.EHStack.stable_begin(); CGF.Builder.CreateStore(ExnLoc, ExnLocVar); @@ -635,8 +635,8 @@ void CodeGenFunction::EnterCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) { /// normal-only cleanup scopes. static bool isNonEHScope(const EHScope &S) { switch (S.getKind()) { - case EHScope::LazyCleanup: - return !cast<EHLazyCleanupScope>(S).isEHCleanup(); + case EHScope::Cleanup: + return !cast<EHCleanupScope>(S).isEHCleanup(); case EHScope::Filter: case EHScope::Catch: case EHScope::Terminate: @@ -765,9 +765,9 @@ llvm::BasicBlock *CodeGenFunction::EmitLandingPad() { I != E; ++I) { switch (I->getKind()) { - case EHScope::LazyCleanup: + case EHScope::Cleanup: if (!HasEHCleanup) - HasEHCleanup = cast<EHLazyCleanupScope>(*I).isEHCleanup(); + HasEHCleanup = cast<EHCleanupScope>(*I).isEHCleanup(); // We otherwise don't care about cleanups. continue; @@ -1016,7 +1016,7 @@ namespace { /// of the caught type, so we have to assume the actual thrown /// exception type might have a throwing destructor, even if the /// caught type's destructor is trivial or nothrow. 
- struct CallEndCatch : EHScopeStack::LazyCleanup { + struct CallEndCatch : EHScopeStack::Cleanup { CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {} bool MightThrow; @@ -1041,7 +1041,7 @@ static llvm::Value *CallBeginCatch(CodeGenFunction &CGF, llvm::CallInst *Call = CGF.Builder.CreateCall(getBeginCatchFn(CGF), Exn); Call->setDoesNotThrow(); - CGF.EHStack.pushLazyCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow); + CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow); return Call; } @@ -1232,7 +1232,7 @@ static void BeginCatch(CodeGenFunction &CGF, } namespace { - struct CallRethrow : EHScopeStack::LazyCleanup { + struct CallRethrow : EHScopeStack::Cleanup { void Emit(CodeGenFunction &CGF, bool IsForEH) { CGF.EmitCallOrInvoke(getReThrowFn(CGF), 0, 0); } @@ -1282,7 +1282,7 @@ void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) { // _cxa_rethrow. This needs to happen before __cxa_end_catch is // called, and so it is pushed after BeginCatch. if (ImplicitRethrow) - EHStack.pushLazyCleanup<CallRethrow>(NormalCleanup); + EHStack.pushCleanup<CallRethrow>(NormalCleanup); // Perform the body of the catch. EmitStmt(C->getHandlerBlock()); @@ -1299,7 +1299,7 @@ void CodeGenFunction::ExitCXXTryStmt(const CXXTryStmt &S, bool IsFnTryBlock) { } namespace { - struct CallEndCatchForFinally : EHScopeStack::LazyCleanup { + struct CallEndCatchForFinally : EHScopeStack::Cleanup { llvm::Value *ForEHVar; llvm::Value *EndCatchFn; CallEndCatchForFinally(llvm::Value *ForEHVar, llvm::Value *EndCatchFn) @@ -1319,7 +1319,7 @@ namespace { } }; - struct PerformFinally : EHScopeStack::LazyCleanup { + struct PerformFinally : EHScopeStack::Cleanup { const Stmt *Body; llvm::Value *ForEHVar; llvm::Value *EndCatchFn; @@ -1335,8 +1335,8 @@ namespace { void Emit(CodeGenFunction &CGF, bool IsForEH) { // Enter a cleanup to call the end-catch function if one was provided. if (EndCatchFn) - CGF.EHStack.pushLazyCleanup<CallEndCatchForFinally>(NormalAndEHCleanup, - ForEHVar, EndCatchFn); + CGF.EHStack.pushCleanup<CallEndCatchForFinally>(NormalAndEHCleanup, + ForEHVar, EndCatchFn); // Emit the finally block. CGF.EmitStmt(Body); @@ -1430,9 +1430,9 @@ CodeGenFunction::EnterFinallyBlock(const Stmt *Body, InitTempAlloca(ForEHVar, llvm::ConstantInt::getFalse(getLLVMContext())); // Enter a normal cleanup which will perform the @finally block. - EHStack.pushLazyCleanup<PerformFinally>(NormalCleanup, Body, - ForEHVar, EndCatchFn, - RethrowFn, SavedExnVar); + EHStack.pushCleanup<PerformFinally>(NormalCleanup, Body, + ForEHVar, EndCatchFn, + RethrowFn, SavedExnVar); // Enter a catch-all scope. llvm::BasicBlock *CatchAllBB = createBasicBlock("finally.catchall"); @@ -1537,6 +1537,6 @@ llvm::BasicBlock *CodeGenFunction::getTerminateHandler() { return TerminateHandler; } -EHScopeStack::LazyCleanup::~LazyCleanup() { - llvm_unreachable("LazyCleanup is indestructable"); +EHScopeStack::Cleanup::~Cleanup() { + llvm_unreachable("Cleanup is indestructable"); } diff --git a/lib/CodeGen/CGException.h b/lib/CodeGen/CGException.h index e1c2fa9028..3c6e1a5528 100644 --- a/lib/CodeGen/CGException.h +++ b/lib/CodeGen/CGException.h @@ -63,7 +63,7 @@ protected: enum { BitsRemaining = 30 }; public: - enum Kind { LazyCleanup, Catch, Terminate, Filter }; + enum Kind { Cleanup, Catch, Terminate, Filter }; EHScope(Kind K) : CachedLandingPad(0), K(K) {} @@ -154,14 +154,14 @@ public: }; /// A cleanup scope which generates the cleanup blocks lazily. 
-class EHLazyCleanupScope : public EHScope { +class EHCleanupScope : public EHScope { /// Whether this cleanup needs to be run along normal edges. bool IsNormalCleanup : 1; /// Whether this cleanup needs to be run along exception edges. bool IsEHCleanup : 1; - /// The amount of extra storage needed by the LazyCleanup. + /// The amount of extra storage needed by the Cleanup. /// Always a multiple of the scope-stack alignment. unsigned CleanupSize : 12; @@ -188,18 +188,18 @@ public: /// Gets the size required for a lazy cleanup scope with the given /// cleanup-data requirements. static size_t getSizeForCleanupSize(size_t Size) { - return sizeof(EHLazyCleanupScope) + Size; + return sizeof(EHCleanupScope) + Size; } size_t getAllocatedSize() const { - return sizeof(EHLazyCleanupScope) + CleanupSize; + return sizeof(EHCleanupScope) + CleanupSize; } - EHLazyCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize, - unsigned FixupDepth, - EHScopeStack::stable_iterator EnclosingNormal, - EHScopeStack::stable_iterator EnclosingEH) - : EHScope(EHScope::LazyCleanup), + EHCleanupScope(bool IsNormal, bool IsEH, unsigned CleanupSize, + unsigned FixupDepth, + EHScopeStack::stable_iterator EnclosingNormal, + EHScopeStack::stable_iterator EnclosingEH) + : EHScope(EHScope::Cleanup), IsNormalCleanup(IsNormal), IsEHCleanup(IsEH), CleanupSize(CleanupSize), FixupDepth(FixupDepth), EnclosingNormal(EnclosingNormal), EnclosingEH(EnclosingEH), @@ -225,12 +225,12 @@ public: size_t getCleanupSize() const { return CleanupSize; } void *getCleanupBuffer() { return this + 1; } - EHScopeStack::LazyCleanup *getCleanup() { - return reinterpret_cast<EHScopeStack::LazyCleanup*>(getCleanupBuffer()); + EHScopeStack::Cleanup *getCleanup() { + return reinterpret_cast<EHScopeStack::Cleanup*>(getCleanupBuffer()); } static bool classof(const EHScope *Scope) { - return (Scope->getKind() == LazyCleanup); + return (Scope->getKind() == Cleanup); } }; @@ -319,8 +319,8 @@ public: static_cast<const EHFilterScope*>(get())->getNumFilters()); break; - case EHScope::LazyCleanup: - Ptr += static_cast<const EHLazyCleanupScope*>(get()) + case EHScope::Cleanup: + Ptr += static_cast<const EHCleanupScope*>(get()) ->getAllocatedSize(); break; diff --git a/lib/CodeGen/CGObjCGNU.cpp b/lib/CodeGen/CGObjCGNU.cpp index 567b8c3d7d..faec7eeb3b 100644 --- a/lib/CodeGen/CGObjCGNU.cpp +++ b/lib/CodeGen/CGObjCGNU.cpp @@ -1856,7 +1856,7 @@ llvm::Constant *CGObjCGNU::EnumerationMutationFunction() { } namespace { - struct CallSyncExit : EHScopeStack::LazyCleanup { + struct CallSyncExit : EHScopeStack::Cleanup { llvm::Value *SyncExitFn; llvm::Value *SyncArg; CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg) @@ -1885,8 +1885,7 @@ void CGObjCGNU::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF, // Register an all-paths cleanup to release the lock. llvm::Value *SyncExit = CGM.CreateRuntimeFunction(FTy, "objc_sync_exit"); - CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup, - SyncExit, SyncArg); + CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup, SyncExit, SyncArg); // Emit the body of the statement. 
CGF.EmitStmt(S.getSynchBody()); diff --git a/lib/CodeGen/CGObjCMac.cpp b/lib/CodeGen/CGObjCMac.cpp index 8b4361c034..dc9593f6ea 100644 --- a/lib/CodeGen/CGObjCMac.cpp +++ b/lib/CodeGen/CGObjCMac.cpp @@ -2542,7 +2542,7 @@ void CGObjCMac::EmitSynchronizedStmt(CodeGenFunction &CGF, } namespace { - struct PerformFragileFinally : EHScopeStack::LazyCleanup { + struct PerformFragileFinally : EHScopeStack::Cleanup { const Stmt &S; llvm::Value *SyncArg; llvm::Value *CallTryExitVar; @@ -2745,11 +2745,11 @@ void CGObjCMac::EmitTryOrSynchronizedStmt(CodeGen::CodeGenFunction &CGF, CallTryExitVar); // Push a normal cleanup to leave the try scope. - CGF.EHStack.pushLazyCleanup<PerformFragileFinally>(NormalCleanup, &S, - SyncArg, - CallTryExitVar, - ExceptionData, - &ObjCTypes); + CGF.EHStack.pushCleanup<PerformFragileFinally>(NormalCleanup, &S, + SyncArg, + CallTryExitVar, + ExceptionData, + &ObjCTypes); // Enter a try block: // - Call objc_exception_try_enter to push ExceptionData on top of @@ -5717,7 +5717,7 @@ void CGObjCNonFragileABIMac::EmitObjCGlobalAssign(CodeGen::CodeGenFunction &CGF, } namespace { - struct CallSyncExit : EHScopeStack::LazyCleanup { + struct CallSyncExit : EHScopeStack::Cleanup { llvm::Value *SyncExitFn; llvm::Value *SyncArg; CallSyncExit(llvm::Value *SyncExitFn, llvm::Value *SyncArg) @@ -5741,9 +5741,9 @@ CGObjCNonFragileABIMac::EmitSynchronizedStmt(CodeGen::CodeGenFunction &CGF, ->setDoesNotThrow(); // Register an all-paths cleanup to release the lock. - CGF.EHStack.pushLazyCleanup<CallSyncExit>(NormalAndEHCleanup, - ObjCTypes.getSyncExitFn(), - SyncArg); + CGF.EHStack.pushCleanup<CallSyncExit>(NormalAndEHCleanup, + ObjCTypes.getSyncExitFn(), + SyncArg); // Emit the body of the statement. CGF.EmitStmt(S.getSynchBody()); @@ -5760,7 +5760,7 @@ namespace { llvm::Value *TypeInfo; }; - struct CallObjCEndCatch : EHScopeStack::LazyCleanup { + struct CallObjCEndCatch : EHScopeStack::Cleanup { CallObjCEndCatch(bool MightThrow, llvm::Value *Fn) : MightThrow(MightThrow), Fn(Fn) {} bool MightThrow; @@ -5865,9 +5865,9 @@ void CGObjCNonFragileABIMac::EmitTryStmt(CodeGen::CodeGenFunction &CGF, // Add a cleanup to leave the catch. bool EndCatchMightThrow = (Handler.Variable == 0); - CGF.EHStack.pushLazyCleanup<CallObjCEndCatch>(NormalAndEHCleanup, - EndCatchMightThrow, - ObjCTypes.getObjCEndCatchFn()); + CGF.EHStack.pushCleanup<CallObjCEndCatch>(NormalAndEHCleanup, + EndCatchMightThrow, + ObjCTypes.getObjCEndCatchFn()); // Bind the catch parameter if it exists. 
if (const VarDecl *CatchParam = Handler.Variable) { diff --git a/lib/CodeGen/CGTemporaries.cpp b/lib/CodeGen/CGTemporaries.cpp index 018eb5f5c6..dfb8dc63c5 100644 --- a/lib/CodeGen/CGTemporaries.cpp +++ b/lib/CodeGen/CGTemporaries.cpp @@ -16,7 +16,7 @@ using namespace clang; using namespace CodeGen; namespace { - struct DestroyTemporary : EHScopeStack::LazyCleanup { + struct DestroyTemporary : EHScopeStack::Cleanup { const CXXTemporary *Temporary; llvm::Value *Addr; llvm::Value *CondPtr; @@ -71,8 +71,8 @@ void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary, Builder.CreateStore(Builder.getTrue(), CondPtr); } - EHStack.pushLazyCleanup<DestroyTemporary>(NormalAndEHCleanup, - Temporary, Ptr, CondPtr); + EHStack.pushCleanup<DestroyTemporary>(NormalAndEHCleanup, + Temporary, Ptr, CondPtr); } RValue diff --git a/lib/CodeGen/CodeGenFunction.cpp b/lib/CodeGen/CodeGenFunction.cpp index af53f6d114..fe374ef9f6 100644 --- a/lib/CodeGen/CodeGenFunction.cpp +++ b/lib/CodeGen/CodeGenFunction.cpp @@ -796,19 +796,19 @@ static void SimplifyCleanupEdges(CodeGenFunction &CGF, SimplifyCleanupEntry(CGF, Entry); } -static void EmitLazyCleanup(CodeGenFunction &CGF, - EHScopeStack::LazyCleanup *Fn, - bool ForEH) { +static void EmitCleanup(CodeGenFunction &CGF, + EHScopeStack::Cleanup *Fn, + bool ForEH) { if (ForEH) CGF.EHStack.pushTerminate(); Fn->Emit(CGF, ForEH); if (ForEH) CGF.EHStack.popTerminate(); assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?"); } -static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF, - EHScopeStack::LazyCleanup *Fn, - bool ForEH, - llvm::BasicBlock *Entry) { +static void SplitAndEmitCleanup(CodeGenFunction &CGF, + EHScopeStack::Cleanup *Fn, + bool ForEH, + llvm::BasicBlock *Entry) { assert(Entry && "no entry block for cleanup"); // Remove the switch and load from the end of the entry block. @@ -824,7 +824,7 @@ static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF, // Emit the actual cleanup at the end of the entry block. CGF.Builder.SetInsertPoint(Entry); - EmitLazyCleanup(CGF, Fn, ForEH); + EmitCleanup(CGF, Fn, ForEH); // Put the load and switch at the end of the exit block. llvm::BasicBlock *Exit = CGF.Builder.GetInsertBlock(); @@ -837,10 +837,14 @@ static void SplitAndEmitLazyCleanup(CodeGenFunction &CGF, CGF.Builder.ClearInsertionPoint(); } -static void PopLazyCleanupBlock(CodeGenFunction &CGF) { - assert(isa<EHLazyCleanupScope>(*CGF.EHStack.begin()) && "top not a cleanup!"); - EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*CGF.EHStack.begin()); - assert(Scope.getFixupDepth() <= CGF.EHStack.getNumBranchFixups()); +/// Pops a cleanup block. If the block includes a normal cleanup, the +/// current insertion point is threaded through the cleanup, as are +/// any branch fixups on the cleanup. +void CodeGenFunction::PopCleanupBlock() { + assert(!EHStack.empty() && "cleanup stack is empty!"); + assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!"); + EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin()); + assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups()); // Check whether we need an EH cleanup. This is only true if we've // generated a lazy EH cleanup block. 
@@ -851,14 +855,14 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) { // - whether there are branch fix-ups through this cleanup unsigned FixupDepth = Scope.getFixupDepth(); - bool HasFixups = CGF.EHStack.getNumBranchFixups() != FixupDepth; + bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth; // - whether control has already been threaded through this cleanup llvm::BasicBlock *NormalEntry = Scope.getNormalBlock(); bool HasExistingBranches = (NormalEntry != 0); // - whether there's a fallthrough - llvm::BasicBlock *FallthroughSource = CGF.Builder.GetInsertBlock(); + llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock(); bool HasFallthrough = (FallthroughSource != 0); bool RequiresNormalCleanup = false; @@ -869,9 +873,9 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) { // If we don't need the cleanup at all, we're done. if (!RequiresNormalCleanup && !RequiresEHCleanup) { - CGF.EHStack.popCleanup(); - assert(CGF.EHStack.getNumBranchFixups() == 0 || - CGF.EHStack.hasNormalCleanups()); + EHStack.popCleanup(); + assert(EHStack.getNumBranchFixups() == 0 || + EHStack.hasNormalCleanups()); return; } @@ -883,35 +887,35 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) { memcpy(CleanupBuffer.data(), Scope.getCleanupBuffer(), Scope.getCleanupSize()); CleanupBuffer.set_size(Scope.getCleanupSize()); - EHScopeStack::LazyCleanup *Fn = - reinterpret_cast<EHScopeStack::LazyCleanup*>(CleanupBuffer.data()); + EHScopeStack::Cleanup *Fn = + reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data()); // We're done with the scope; pop it off so we can emit the cleanups. - CGF.EHStack.popCleanup(); + EHStack.popCleanup(); if (RequiresNormalCleanup) { // If we have a fallthrough and no other need for the cleanup, // emit it directly. if (HasFallthrough && !HasFixups && !HasExistingBranches) { - EmitLazyCleanup(CGF, Fn, /*ForEH*/ false); + EmitCleanup(*this, Fn, /*ForEH*/ false); // Otherwise, the best approach is to thread everything through // the cleanup block and then try to clean up after ourselves. } else { // Force the entry block to exist. if (!HasExistingBranches) { - NormalEntry = CGF.createBasicBlock("cleanup"); - CreateCleanupSwitch(CGF, NormalEntry); + NormalEntry = createBasicBlock("cleanup"); + CreateCleanupSwitch(*this, NormalEntry); } - CGF.EmitBlock(NormalEntry); + EmitBlock(NormalEntry); // Thread the fallthrough edge through the (momentarily trivial) // cleanup. llvm::BasicBlock *FallthroughDestination = 0; if (HasFallthrough) { assert(isa<llvm::BranchInst>(FallthroughSource->getTerminator())); - FallthroughDestination = CGF.createBasicBlock("cleanup.cont"); + FallthroughDestination = createBasicBlock("cleanup.cont"); BranchFixup Fix; Fix.Destination = FallthroughDestination; @@ -924,41 +928,32 @@ static void PopLazyCleanupBlock(CodeGenFunction &CGF) { cast<llvm::BranchInst>(Fix.LatestBranch) ->setSuccessor(0, Fix.Destination); - ThreadFixupThroughCleanup(CGF, Fix, NormalEntry, NormalEntry); + ThreadFixupThroughCleanup(*this, Fix, NormalEntry, NormalEntry); } // Thread any "real" fixups we need to thread. 
- for (unsigned I = FixupDepth, E = CGF.EHStack.getNumBranchFixups(); + for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups(); I != E; ++I) if (CGF.EHStack.getBranchFixup(I).Destination) - ThreadFixupThroughCleanup(CGF, CGF.EHStack.getBranchFixup(I), + ThreadFixupThroughCleanup(*this, EHStack.getBranchFixup(I), NormalEntry, NormalEntry); - SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ false, NormalEntry); + SplitAndEmitCleanup(*this, Fn, /*ForEH*/ false, NormalEntry); if (HasFallthrough) - CGF.EmitBlock(FallthroughDestination); + EmitBlock(FallthroughDestination); } } // Emit the EH cleanup if required. if (RequiresEHCleanup) { - CGBuilderTy::InsertPoint SavedIP = CGF.Builder.saveAndClearIP(); - CGF.EmitBlock(EHEntry); - SplitAndEmitLazyCleanup(CGF, Fn, /*ForEH*/ true, EHEntry); - CGF.Builder.restoreIP(SavedIP); + CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP(); + EmitBlock(EHEntry); + SplitAndEmitCleanup(*this, Fn, /*ForEH*/ true, EHEntry); + Builder.restoreIP(SavedIP); } } -/// Pops a cleanup block. If the block includes a normal cleanup, the -/// current insertion point is threaded through the cleanup, as are -/// any branch fixups on the cleanup. -void CodeGenFunction::PopCleanupBlock() { - assert(!EHStack.empty() && "cleanup stack is empty!"); - assert(isa<EHLazyCleanupScope>(*EHStack.begin())); - return PopLazyCleanupBlock(*this); -} - void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) { if (!HaveInsertPoint()) return; @@ -990,8 +985,8 @@ void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) { for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.find(Dest.ScopeDepth); I != E; ++I) { - if (isa<EHLazyCleanupScope>(*I)) { - EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I); + if (isa<EHCleanupScope>(*I)) { + EHCleanupScope &Scope = cast<EHCleanupScope>(*I); if (Scope.isNormalCleanup()) { llvm::BasicBlock *Block = Scope.getNormalBlock(); if (!Block) { @@ -1034,8 +1029,8 @@ void CodeGenFunction::EmitBranchThroughEHCleanup(JumpDest Dest) { for (EHScopeStack::iterator I = EHStack.begin(), E = EHStack.find(Dest.ScopeDepth); I != E; ++I) { - if (isa<EHLazyCleanupScope>(*I)) { - EHLazyCleanupScope &Scope = cast<EHLazyCleanupScope>(*I); + if (isa<EHCleanupScope>(*I)) { + EHCleanupScope &Scope = cast<EHCleanupScope>(*I); if (Scope.isEHCleanup()) { llvm::BasicBlock *Block = Scope.getEHBlock(); if (!Block) { diff --git a/lib/CodeGen/CodeGenFunction.h b/lib/C |