author     Dan Gohman <gohman@apple.com>              2010-02-08 20:27:50 +0000
committer  Dan Gohman <gohman@apple.com>              2010-02-08 20:27:50 +0000
commit     1797ed50f488f2030f9f9a0ac7426262abf5220a
tree       8692733d172d88c97ca7b658d0986ecafcbc86b6 /lib
parent     7edd8e38c4ad710cd5158de2ffa8eb92b4527375
Rename the PerformTailCallOpt variable to GuaranteedTailCallOpt to reflect
its current purpose.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@95564 91177308-0d34-0410-b5e6-96231b3b80d8
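For context on the option machinery this rename touches: the renamed global is the external storage behind the `-tailcallopt` command-line flag registered in lib/Target/TargetMachine.cpp, and every backend check in this patch simply reads that global. Below is a minimal, self-contained sketch of that cl::opt external-storage pattern; it is simplified for illustration (the include and namespacing here are not a copy of the in-tree file).

```cpp
// Sketch of the cl::opt<bool, true> external-storage pattern used in
// TargetMachine.cpp. Simplified for illustration; the real declarations
// live in the LLVM tree.
#include "llvm/Support/CommandLine.h"

namespace llvm {
  // After this commit the global is named GuaranteedTailCallOpt; the PowerPC
  // and X86 backends test it directly (see the hunks below).
  bool GuaranteedTailCallOpt;
}

// cl::opt<bool, true> stores the parsed value through cl::location(), so
// passing -tailcallopt on the llc command line flips the global above.
static llvm::cl::opt<bool, true>
EnableGuaranteedTailCallOpt("tailcallopt",
  llvm::cl::desc("Turn fastcc calls into tail calls by (potentially) changing ABI."),
  llvm::cl::location(llvm::GuaranteedTailCallOpt),
  llvm::cl::init(false));
```

As the hunks show, the flag only changes behaviour for fastcc callers, so most sites guard on both the flag and `CallingConv::Fast`.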
Diffstat (limited to 'lib')
-rw-r--r--  lib/Target/PowerPC/PPCISelLowering.cpp | 14
-rw-r--r--  lib/Target/PowerPC/PPCRegisterInfo.cpp | 12
-rw-r--r--  lib/Target/TargetMachine.cpp           |  6
-rw-r--r--  lib/Target/X86/X86FastISel.cpp         |  2
-rw-r--r--  lib/Target/X86/X86ISelLowering.cpp     | 12
5 files changed, 23 insertions, 23 deletions
diff --git a/lib/Target/PowerPC/PPCISelLowering.cpp b/lib/Target/PowerPC/PPCISelLowering.cpp
index 2a4fb024bb..a11d6240f8 100644
--- a/lib/Target/PowerPC/PPCISelLowering.cpp
+++ b/lib/Target/PowerPC/PPCISelLowering.cpp
@@ -1572,7 +1572,7 @@ PPCTargetLowering::LowerFormalArguments_SVR4(

   EVT PtrVT = DAG.getTargetLoweringInfo().getPointerTy();
   // Potential tail calls could cause overwriting of argument stack slots.
-  bool isImmutable = !(PerformTailCallOpt && (CallConv==CallingConv::Fast));
+  bool isImmutable = !(GuaranteedTailCallOpt && (CallConv==CallingConv::Fast));
   unsigned PtrByteSize = 4;

   // Assign locations to all of the incoming arguments.
@@ -1773,7 +1773,7 @@ PPCTargetLowering::LowerFormalArguments_Darwin(
   EVT PtrVT = DAG.getTargetLoweringInfo().getPointerTy();
   bool isPPC64 = PtrVT == MVT::i64;
   // Potential tail calls could cause overwriting of argument stack slots.
-  bool isImmutable = !(PerformTailCallOpt && (CallConv==CallingConv::Fast));
+  bool isImmutable = !(GuaranteedTailCallOpt && (CallConv==CallingConv::Fast));
   unsigned PtrByteSize = isPPC64 ? 8 : 4;

   unsigned ArgOffset = PPCFrameInfo::getLinkageSize(isPPC64, true);
@@ -2164,7 +2164,7 @@ CalculateParameterAndLinkageAreaSize(SelectionDAG &DAG,
                       PPCFrameInfo::getMinCallFrameSize(isPPC64, true));

   // Tail call needs the stack to be aligned.
-  if (CC==CallingConv::Fast && PerformTailCallOpt) {
+  if (CC==CallingConv::Fast && GuaranteedTailCallOpt) {
     unsigned TargetAlign = DAG.getMachineFunction().getTarget().getFrameInfo()->
       getStackAlignment();
     unsigned AlignMask = TargetAlign-1;
@@ -2200,7 +2200,7 @@ PPCTargetLowering::IsEligibleForTailCallOptimization(SDValue Callee,
                                                      bool isVarArg,
                                       const SmallVectorImpl<ISD::InputArg> &Ins,
                                                      SelectionDAG& DAG) const {
-  if (!PerformTailCallOpt)
+  if (!GuaranteedTailCallOpt)
     return false;

   // Variable argument functions are not supported.
@@ -2604,7 +2604,7 @@ PPCTargetLowering::FinishCall(CallingConv::ID CallConv, DebugLoc dl,
   // the stack. Account for this here so these bytes can be pushed back on in
   // PPCRegisterInfo::eliminateCallFramePseudoInstr.
   int BytesCalleePops =
-    (CallConv==CallingConv::Fast && PerformTailCallOpt) ? NumBytes : 0;
+    (CallConv==CallingConv::Fast && GuaranteedTailCallOpt) ? NumBytes : 0;

   if (InFlag.getNode())
     Ops.push_back(InFlag);
@@ -2720,7 +2720,7 @@ PPCTargetLowering::LowerCall_SVR4(SDValue Chain, SDValue Callee,
   // and restoring the callers stack pointer in this functions epilog. This is
   // done because by tail calling the called function might overwrite the value
   // in this function's (MF) stack pointer stack slot 0(SP).
-  if (PerformTailCallOpt && CallConv==CallingConv::Fast)
+  if (GuaranteedTailCallOpt && CallConv==CallingConv::Fast)
     MF.getInfo<PPCFunctionInfo>()->setHasFastCall();

   // Count how many bytes are to be pushed on the stack, including the linkage
@@ -2923,7 +2923,7 @@ PPCTargetLowering::LowerCall_Darwin(SDValue Chain, SDValue Callee,
   // and restoring the callers stack pointer in this functions epilog. This is
   // done because by tail calling the called function might overwrite the value
   // in this function's (MF) stack pointer stack slot 0(SP).
-  if (PerformTailCallOpt && CallConv==CallingConv::Fast)
+  if (GuaranteedTailCallOpt && CallConv==CallingConv::Fast)
     MF.getInfo<PPCFunctionInfo>()->setHasFastCall();

   unsigned nAltivecParamsAtEnd = 0;
diff --git a/lib/Target/PowerPC/PPCRegisterInfo.cpp b/lib/Target/PowerPC/PPCRegisterInfo.cpp
index 0c3c8eb649..ad2cdcf396 100644
--- a/lib/Target/PowerPC/PPCRegisterInfo.cpp
+++ b/lib/Target/PowerPC/PPCRegisterInfo.cpp
@@ -406,7 +406,7 @@ PPCRegisterInfo::getCalleeSavedRegClasses(const MachineFunction *MF) const {
 static bool needsFP(const MachineFunction &MF) {
   const MachineFrameInfo *MFI = MF.getFrameInfo();
   return NoFramePointerElim || MFI->hasVarSizedObjects() ||
-    (PerformTailCallOpt && MF.getInfo<PPCFunctionInfo>()->hasFastCall());
+    (GuaranteedTailCallOpt && MF.getInfo<PPCFunctionInfo>()->hasFastCall());
 }

 static bool spillsCR(const MachineFunction &MF) {
@@ -486,7 +486,7 @@ static bool MustSaveLR(const MachineFunction &MF, unsigned LR) {
 void PPCRegisterInfo::
 eliminateCallFramePseudoInstr(MachineFunction &MF, MachineBasicBlock &MBB,
                               MachineBasicBlock::iterator I) const {
-  if (PerformTailCallOpt && I->getOpcode() == PPC::ADJCALLSTACKUP) {
+  if (GuaranteedTailCallOpt && I->getOpcode() == PPC::ADJCALLSTACKUP) {
     // Add (actually subtract) back the amount the callee popped on return.
     if (int CalleeAmt = I->getOperand(1).getImm()) {
       bool is64Bit = Subtarget.isPPC64();
@@ -1050,7 +1050,7 @@ PPCRegisterInfo::processFunctionBeforeCalleeSavedScan(MachineFunction &MF,

   // Reserve stack space to move the linkage area to in case of a tail call.
   int TCSPDelta = 0;
-  if (PerformTailCallOpt && (TCSPDelta = FI->getTailCallSPDelta()) < 0) {
+  if (GuaranteedTailCallOpt && (TCSPDelta = FI->getTailCallSPDelta()) < 0) {
     MF.getFrameInfo()->CreateFixedObject(-1 * TCSPDelta, TCSPDelta,
                                          true, false);
   }
@@ -1160,7 +1160,7 @@ PPCRegisterInfo::processFunctionBeforeFrameFinalized(MachineFunction &MF)

   // Take into account stack space reserved for tail calls.
   int TCSPDelta = 0;
-  if (PerformTailCallOpt && (TCSPDelta = PFI->getTailCallSPDelta()) < 0) {
+  if (GuaranteedTailCallOpt && (TCSPDelta = PFI->getTailCallSPDelta()) < 0) {
     LowerBound = TCSPDelta;
   }

@@ -1575,7 +1575,7 @@ void PPCRegisterInfo::emitEpilogue(MachineFunction &MF,
   // The loaded (or persistent) stack pointer value is offset by the 'stwu'
   // on entry to the function. Add this offset back now.
   if (!isPPC64) {
-    // If this function contained a fastcc call and PerformTailCallOpt is
+    // If this function contained a fastcc call and GuaranteedTailCallOpt is
     // enabled (=> hasFastCall()==true) the fastcc call might contain a tail
     // call which invalidates the stack pointer value in SP(0). So we use the
     // value of R31 in this case.
@@ -1654,7 +1654,7 @@ void PPCRegisterInfo::emitEpilogue(MachineFunction &MF,

   // Callee pop calling convention. Pop parameter/linkage area. Used for tail
   // call optimization
-  if (PerformTailCallOpt && RetOpcode == PPC::BLR &&
+  if (GuaranteedTailCallOpt && RetOpcode == PPC::BLR &&
       MF.getFunction()->getCallingConv() == CallingConv::Fast) {
     PPCFunctionInfo *FI = MF.getInfo<PPCFunctionInfo>();
     unsigned CallerAllocatedAmt = FI->getMinReservedArea();
diff --git a/lib/Target/TargetMachine.cpp b/lib/Target/TargetMachine.cpp
index 8d990443be..88871e3580 100644
--- a/lib/Target/TargetMachine.cpp
+++ b/lib/Target/TargetMachine.cpp
@@ -40,7 +40,7 @@ namespace llvm {
   bool UnwindTablesMandatory;
   Reloc::Model RelocationModel;
   CodeModel::Model CMModel;
-  bool PerformTailCallOpt;
+  bool GuaranteedTailCallOpt;
   unsigned StackAlignment;
   bool RealignStack;
   bool DisableJumpTables;
@@ -173,9 +173,9 @@ DefCodeModel("code-model",
                "Large code model"),
     clEnumValEnd));
 static cl::opt<bool, true>
-EnablePerformTailCallOpt("tailcallopt",
+EnableGuaranteedTailCallOpt("tailcallopt",
   cl::desc("Turn fastcc calls into tail calls by (potentially) changing ABI."),
-  cl::location(PerformTailCallOpt),
+  cl::location(GuaranteedTailCallOpt),
   cl::init(false));
 static cl::opt<unsigned, true>
 OverrideStackAlignment("stack-alignment",
diff --git a/lib/Target/X86/X86FastISel.cpp b/lib/Target/X86/X86FastISel.cpp
index d466ce0d4f..392b96dacb 100644
--- a/lib/Target/X86/X86FastISel.cpp
+++ b/lib/Target/X86/X86FastISel.cpp
@@ -1247,7 +1247,7 @@ bool X86FastISel::X86SelectCall(Instruction *I) {

   // fastcc with -tailcallopt is intended to provide a guaranteed
   // tail call optimization. Fastisel doesn't know how to do that.
-  if (CC == CallingConv::Fast && PerformTailCallOpt)
+  if (CC == CallingConv::Fast && GuaranteedTailCallOpt)
     return false;

   // Let SDISel handle vararg functions.
diff --git a/lib/Target/X86/X86ISelLowering.cpp b/lib/Target/X86/X86ISelLowering.cpp
index b268e4b9ae..331bac695d 100644
--- a/lib/Target/X86/X86ISelLowering.cpp
+++ b/lib/Target/X86/X86ISelLowering.cpp
@@ -1391,7 +1391,7 @@ bool X86TargetLowering::IsCalleePop(bool IsVarArg, CallingConv::ID CallingConv){
   case CallingConv::X86_FastCall:
     return !Subtarget->is64Bit();
   case CallingConv::Fast:
-    return PerformTailCallOpt;
+    return GuaranteedTailCallOpt;
   }
 }

@@ -1441,7 +1441,7 @@ CreateCopyOfByValArgument(SDValue Src, SDValue Dst, SDValue Chain,
 /// FuncIsMadeTailCallSafe - Return true if the function is being made into
 /// a tailcall target by changing its ABI.
 static bool FuncIsMadeTailCallSafe(CallingConv::ID CC) {
-  return PerformTailCallOpt && CC == CallingConv::Fast;
+  return GuaranteedTailCallOpt && CC == CallingConv::Fast;
 }

 SDValue
@@ -1797,7 +1797,7 @@ X86TargetLowering::LowerCall(SDValue Chain, SDValue Callee,

   // Sibcalls are automatically detected tailcalls which do not require
   // ABI changes.
-  if (!PerformTailCallOpt && isTailCall)
+  if (!GuaranteedTailCallOpt && isTailCall)
     IsSibcall = true;

   if (isTailCall)
@@ -1819,7 +1819,7 @@ X86TargetLowering::LowerCall(SDValue Chain, SDValue Callee,
     // This is a sibcall. The memory operands are available in caller's
     // own caller's stack.
     NumBytes = 0;
-  else if (PerformTailCallOpt && CallConv == CallingConv::Fast)
+  else if (GuaranteedTailCallOpt && CallConv == CallingConv::Fast)
     NumBytes = GetAlignedArgumentStackSize(NumBytes, DAG);

   int FPDiff = 0;
@@ -1986,7 +1986,7 @@ X86TargetLowering::LowerCall(SDValue Chain, SDValue Callee,
     int FI = 0;
     // Do not flag preceeding copytoreg stuff together with the following stuff.
     InFlag = SDValue();
-    if (PerformTailCallOpt) {
+    if (GuaranteedTailCallOpt) {
       for (unsigned i = 0, e = ArgLocs.size(); i != e; ++i) {
         CCValAssign &VA = ArgLocs[i];
         if (VA.isRegLoc())
@@ -2311,7 +2311,7 @@ X86TargetLowering::IsEligibleForTailCallOptimization(SDValue Callee,

   // If -tailcallopt is specified, make fastcc functions tail-callable.
   const Function *CallerF = DAG.getMachineFunction().getFunction();
-  if (PerformTailCallOpt) {
+  if (GuaranteedTailCallOpt) {
     if (CalleeCC == CallingConv::Fast && CallerF->getCallingConv() == CalleeCC)
       return true;