author     John McCall <rjmccall@apple.com>  2011-01-28 11:13:47 +0000
committer  John McCall <rjmccall@apple.com>  2011-01-28 11:13:47 +0000
commit     36f893c1efe367f929d92c8b125f964c22ba189e (patch)
tree       9abff91cc088561957a0cbc346fb40bfafbdf19e /lib/CodeGen/CGException.cpp
parent     804b807ea918184d6de63bd745e1ff75a9bfc679 (diff)
Move all the cleanups framework code into a single file. Pure motion.

git-svn-id: https://llvm.org/svn/llvm-project/cfe/trunk@124484 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/CodeGen/CGException.cpp')
-rw-r--r--  lib/CodeGen/CGException.cpp  173
1 file changed, 1 insertion, 172 deletions
diff --git a/lib/CodeGen/CGException.cpp b/lib/CodeGen/CGException.cpp
index edfee95efc..fe51ce67e4 100644
--- a/lib/CodeGen/CGException.cpp
+++ b/lib/CodeGen/CGException.cpp
@@ -20,180 +20,12 @@
#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "CGException.h"
+#include "CGCleanup.h"
#include "TargetInfo.h"
using namespace clang;
using namespace CodeGen;
-/// Push an entry of the given size onto this protected-scope stack.
-char *EHScopeStack::allocate(size_t Size) {
- if (!StartOfBuffer) {
- unsigned Capacity = 1024;
- while (Capacity < Size) Capacity *= 2;
- StartOfBuffer = new char[Capacity];
- StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
- } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
- unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
- unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);
-
- unsigned NewCapacity = CurrentCapacity;
- do {
- NewCapacity *= 2;
- } while (NewCapacity < UsedCapacity + Size);
-
- char *NewStartOfBuffer = new char[NewCapacity];
- char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
- char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
- memcpy(NewStartOfData, StartOfData, UsedCapacity);
- delete [] StartOfBuffer;
- StartOfBuffer = NewStartOfBuffer;
- EndOfBuffer = NewEndOfBuffer;
- StartOfData = NewStartOfData;
- }
-
- assert(StartOfBuffer + Size <= StartOfData);
- StartOfData -= Size;
- return StartOfData;
-}
-
-EHScopeStack::stable_iterator
-EHScopeStack::getEnclosingEHCleanup(iterator it) const {
- assert(it != end());
- do {
- if (isa<EHCleanupScope>(*it)) {
- if (cast<EHCleanupScope>(*it).isEHCleanup())
- return stabilize(it);
- return cast<EHCleanupScope>(*it).getEnclosingEHCleanup();
- }
- ++it;
- } while (it != end());
- return stable_end();
-}
-
-
-void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
- assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
- char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
- bool IsNormalCleanup = Kind & NormalCleanup;
- bool IsEHCleanup = Kind & EHCleanup;
- bool IsActive = !(Kind & InactiveCleanup);
- EHCleanupScope *Scope =
- new (Buffer) EHCleanupScope(IsNormalCleanup,
- IsEHCleanup,
- IsActive,
- Size,
- BranchFixups.size(),
- InnermostNormalCleanup,
- InnermostEHCleanup);
- if (IsNormalCleanup)
- InnermostNormalCleanup = stable_begin();
- if (IsEHCleanup)
- InnermostEHCleanup = stable_begin();
-
- return Scope->getCleanupBuffer();
-}
-
-void EHScopeStack::popCleanup() {
- assert(!empty() && "popping exception stack when not empty");
-
- assert(isa<EHCleanupScope>(*begin()));
- EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
- InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
- InnermostEHCleanup = Cleanup.getEnclosingEHCleanup();
- StartOfData += Cleanup.getAllocatedSize();
-
- if (empty()) NextEHDestIndex = FirstEHDestIndex;
-
- // Destroy the cleanup.
- Cleanup.~EHCleanupScope();
-
- // Check whether we can shrink the branch-fixups stack.
- if (!BranchFixups.empty()) {
- // If we no longer have any normal cleanups, all the fixups are
- // complete.
- if (!hasNormalCleanups())
- BranchFixups.clear();
-
- // Otherwise we can still trim out unnecessary nulls.
- else
- popNullFixups();
- }
-}
-
-EHFilterScope *EHScopeStack::pushFilter(unsigned NumFilters) {
- char *Buffer = allocate(EHFilterScope::getSizeForNumFilters(NumFilters));
- CatchDepth++;
- return new (Buffer) EHFilterScope(NumFilters);
-}
-
-void EHScopeStack::popFilter() {
- assert(!empty() && "popping exception stack when not empty");
-
- EHFilterScope &Filter = cast<EHFilterScope>(*begin());
- StartOfData += EHFilterScope::getSizeForNumFilters(Filter.getNumFilters());
-
- if (empty()) NextEHDestIndex = FirstEHDestIndex;
-
- assert(CatchDepth > 0 && "mismatched filter push/pop");
- CatchDepth--;
-}
-
-EHCatchScope *EHScopeStack::pushCatch(unsigned NumHandlers) {
- char *Buffer = allocate(EHCatchScope::getSizeForNumHandlers(NumHandlers));
- CatchDepth++;
- EHCatchScope *Scope = new (Buffer) EHCatchScope(NumHandlers);
- for (unsigned I = 0; I != NumHandlers; ++I)
- Scope->getHandlers()[I].Index = getNextEHDestIndex();
- return Scope;
-}
-
-void EHScopeStack::pushTerminate() {
- char *Buffer = allocate(EHTerminateScope::getSize());
- CatchDepth++;
- new (Buffer) EHTerminateScope(getNextEHDestIndex());
-}
-
-/// Remove any 'null' fixups on the stack. However, we can't pop more
-/// fixups than the fixup depth on the innermost normal cleanup, or
-/// else fixups that we try to add to that cleanup will end up in the
-/// wrong place. We *could* try to shrink fixup depths, but that's
-/// actually a lot of work for little benefit.
-void EHScopeStack::popNullFixups() {
- // We expect this to only be called when there's still an innermost
- // normal cleanup; otherwise there really shouldn't be any fixups.
- assert(hasNormalCleanups());
-
- EHScopeStack::iterator it = find(InnermostNormalCleanup);
- unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
- assert(BranchFixups.size() >= MinSize && "fixup stack out of order");
-
- while (BranchFixups.size() > MinSize &&
- BranchFixups.back().Destination == 0)
- BranchFixups.pop_back();
-}
-
-void CodeGenFunction::initFullExprCleanup() {
- // Create a variable to decide whether the cleanup needs to be run.
- llvm::AllocaInst *active
- = CreateTempAlloca(Builder.getInt1Ty(), "cleanup.cond");
-
- // Initialize it to false at a site that's guaranteed to be run
- // before each evaluation.
- llvm::BasicBlock *block = OutermostConditional->getStartingBlock();
- new llvm::StoreInst(Builder.getFalse(), active, &block->back());
-
- // Initialize it to true at the current location.
- Builder.CreateStore(Builder.getTrue(), active);
-
- // Set that as the active flag in the cleanup.
- EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
- assert(cleanup.getActiveFlag() == 0 && "cleanup already has active flag?");
- cleanup.setActiveFlag(active);
-
- if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
- if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
-}
-
static llvm::Constant *getAllocateExceptionFn(CodeGenFunction &CGF) {
// void *__cxa_allocate_exception(size_t thrown_size);
const llvm::Type *SizeTy = CGF.ConvertType(CGF.getContext().getSizeType());
@@ -1624,6 +1456,3 @@ CodeGenFunction::UnwindDest CodeGenFunction::getRethrowDest() {
return RethrowBlock;
}
-EHScopeStack::Cleanup::~Cleanup() {
- llvm_unreachable("Cleanup is indestructable");
-}
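The bulk of this diff removes the EHScopeStack allocation and push/pop machinery from CGException.cpp; per the commit message it now lives in the new CGCleanup file, with only the CGCleanup.h include added here. For reference, the removed allocate() implements a downward-growing arena: entries are carved off the low end of a doubling buffer so the innermost scope always sits at StartOfData, and popping a scope is just adding its size back to StartOfData (as popCleanup() and popFilter() do). The sketch below is a minimal standalone restatement of that growth strategy; the class name ScopeArena and the deallocate() helper are illustrative only and are not part of Clang.

// Hedged sketch of the downward-growing buffer strategy used by
// EHScopeStack::allocate above. ScopeArena and deallocate() are
// hypothetical names for illustration; they do not exist in Clang.
#include <cassert>
#include <cstddef>
#include <cstring>

class ScopeArena {
  char *StartOfBuffer = nullptr; // lowest address of the allocation
  char *EndOfBuffer = nullptr;   // one past the highest address
  char *StartOfData = nullptr;   // entries occupy [StartOfData, EndOfBuffer)

public:
  // Carve Size bytes off the low end of the used region, doubling the
  // buffer whenever the free space in front of the data runs out.
  char *allocate(std::size_t Size) {
    if (!StartOfBuffer) {
      std::size_t Capacity = 1024;
      while (Capacity < Size) Capacity *= 2;
      StartOfBuffer = new char[Capacity];
      StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
    } else if (static_cast<std::size_t>(StartOfData - StartOfBuffer) < Size) {
      std::size_t CurrentCapacity = EndOfBuffer - StartOfBuffer;
      std::size_t UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

      std::size_t NewCapacity = CurrentCapacity;
      do NewCapacity *= 2; while (NewCapacity < UsedCapacity + Size);

      // Copy the existing entries so they stay flush against the new end;
      // this keeps the innermost entry at StartOfData after the move.
      char *NewStartOfBuffer = new char[NewCapacity];
      char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
      char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
      std::memcpy(NewStartOfData, StartOfData, UsedCapacity);
      delete[] StartOfBuffer;
      StartOfBuffer = NewStartOfBuffer;
      EndOfBuffer = NewEndOfBuffer;
      StartOfData = NewStartOfData;
    }

    assert(StartOfBuffer + Size <= StartOfData);
    StartOfData -= Size; // the newest (innermost) entry is at StartOfData
    return StartOfData;
  }

  // Popping an entry is just moving StartOfData back up by its size.
  void deallocate(std::size_t Size) { StartOfData += Size; }

  ~ScopeArena() { delete[] StartOfBuffer; }
};

The point of growing by copying the used region to the end of the new buffer, rather than to its start, is that it preserves the invariant the rest of the removed code relies on: walking forward from StartOfData visits scopes from innermost to outermost.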