Diffstat (limited to 'lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp')
 lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp | 16 ++++++++++++++++
 1 file changed, 16 insertions(+), 0 deletions(-)
diff --git a/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp b/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
index def4f9a018..e159c111de 100644
--- a/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
+++ b/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
@@ -3467,6 +3467,17 @@ void AsmOperandInfo::ComputeConstraintToUse(const TargetLowering &TLI) {
}
+/// GetRegistersForValue - Assign registers (virtual or physical) for the
+/// specified operand. We prefer to assign virtual registers, to allow the
+/// register allocator to handle the assignment process. However, if the asm uses
+/// features that we can't model on machineinstrs, we have SDISel do the
+/// allocation. This produces generally horrible, but correct, code.
+///
+/// OpInfo describes the operand.
+/// HasEarlyClobber is true if there are any early clobber constraints (=&r)
+/// or any explicitly clobbered registers.
+/// InputRegs and OutputRegs are the sets of already-allocated physical registers.
+///
void SelectionDAGLowering::
GetRegistersForValue(AsmOperandInfo &OpInfo, bool HasEarlyClobber,
std::set<unsigned> &OutputRegs,
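
For readers unfamiliar with the constraints the new comment refers to, here is a minimal, illustrative sketch (not part of this commit; the helper name is hypothetical and an x86 target with GNU extended asm syntax is assumed) of an early-clobber output constraint ("=&r"). The early-clobber marker tells the compiler the output is written before all inputs have been read, so it must not share a register with them; asm features like this are what push GetRegistersForValue toward the more conservative allocation described above.

// Illustrative example only; assumes x86 and GNU extended asm syntax.
static int add_early_clobber(int a, int b) {
  int result;
  asm("mov %1, %0\n\t"   // %0 is written here, before %2 is read...
      "add %2, %0"       // ...so "=&r" keeps %0 out of %1's and %2's registers.
      : "=&r"(result)    // early-clobber output constraint
      : "r"(a), "r"(b)
      : "cc");
  return result;
}
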
@@ -3723,6 +3734,11 @@ void SelectionDAGLowering::visitInlineAsm(CallSite CS) {
// Keep track of whether we see an earlyclobber.
SawEarlyClobber |= OpInfo.isEarlyClobber;
+ // If we see a clobber of a register, it is an early clobber.
+ if (OpInfo.Type == InlineAsm::isClobber &&
+ OpInfo.ConstraintType == TargetLowering::C_Register)
+ SawEarlyClobber = true;
+
// If this is a memory input, and if the operand is not indirect, do what we
need to in order to provide an address for the memory input.
if (OpInfo.ConstraintType == TargetLowering::C_Memory &&
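
As an illustration of the case the new check covers (a sketch, not taken from the patch; the helper name is hypothetical and an x86 target with GNU extended asm syntax is assumed): an asm statement that names a specific register in its clobber list. With this change such an explicit register clobber is treated like an early clobber, so operand assignment keeps the other operands out of that register.

// Illustrative example only; assumes x86 and GNU extended asm syntax.
static int times_four(int x) {
  asm("mov $4, %%ecx\n\t"   // %ecx is overwritten unconditionally...
      "imul %%ecx, %0"      // ...and used as a scratch multiplier.
      : "+r"(x)
      :
      : "ecx", "cc");       // explicit register clobber -> early-clobber handling
  return x;
}
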