-rw-r--r--  lib/Target/ARM/ARMRegisterInfo.td  65
1 file changed, 53 insertions(+), 12 deletions(-)
diff --git a/lib/Target/ARM/ARMRegisterInfo.td b/lib/Target/ARM/ARMRegisterInfo.td
index 2f51144..625e3d3 100644
--- a/lib/Target/ARM/ARMRegisterInfo.td
+++ b/lib/Target/ARM/ARMRegisterInfo.td
@@ -123,6 +123,32 @@ def GPR : RegisterClass<"ARM", [i32], 32, [R0, R1, R2, R3, R4, R5, R6,
ARM::R4, ARM::R5, ARM::R6, ARM::R8,
ARM::R10,ARM::R11,
ARM::LR, ARM::R7 };
+
+ // FP is R11, R9 is available, R12 is available.
+ static const unsigned ARM_GPR_AO_5[] = {
+ ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+ ARM::R4, ARM::R5, ARM::R6, ARM::R7,
+ ARM::R8, ARM::R9, ARM::R10,ARM::R12,
+ ARM::LR, ARM::R11 };
+ // FP is R11, R9 is not available, R12 is available.
+ static const unsigned ARM_GPR_AO_6[] = {
+ ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+ ARM::R4, ARM::R5, ARM::R6, ARM::R7,
+ ARM::R8, ARM::R10,ARM::R12,
+ ARM::LR, ARM::R11 };
+ // FP is R7, R9 is available, R12 is available.
+ static const unsigned ARM_GPR_AO_7[] = {
+ ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+ ARM::R4, ARM::R5, ARM::R6, ARM::R8,
+ ARM::R9, ARM::R10,ARM::R11,ARM::R12,
+ ARM::LR, ARM::R7 };
+ // FP is R7, R9 is not available, R12 is available.
+ static const unsigned ARM_GPR_AO_8[] = {
+ ARM::R3, ARM::R2, ARM::R1, ARM::R0,
+ ARM::R4, ARM::R5, ARM::R6, ARM::R8,
+ ARM::R10,ARM::R11,ARM::R12,
+ ARM::LR, ARM::R7 };
+
// FP is R7, only low registers available.
static const unsigned THUMB_GPR_AO[] = {
ARM::R2, ARM::R1, ARM::R0,
@@ -131,19 +157,20 @@ def GPR : RegisterClass<"ARM", [i32], 32, [R0, R1, R2, R3, R4, R5, R6,
GPRClass::iterator
GPRClass::allocation_order_begin(const MachineFunction &MF) const {
const TargetMachine &TM = MF.getTarget();
+ const MRegisterInfo *RI = TM.getRegisterInfo();
const ARMSubtarget &Subtarget = TM.getSubtarget<ARMSubtarget>();
if (Subtarget.isThumb())
return THUMB_GPR_AO;
if (Subtarget.useThumbBacktraces()) {
if (Subtarget.isR9Reserved())
- return ARM_GPR_AO_4;
+ return RI->requiresRegisterScavenging() ? ARM_GPR_AO_8 : ARM_GPR_AO_4;
else
- return ARM_GPR_AO_3;
+ return RI->requiresRegisterScavenging() ? ARM_GPR_AO_7 : ARM_GPR_AO_3;
} else {
if (Subtarget.isR9Reserved())
- return ARM_GPR_AO_2;
+ return RI->requiresRegisterScavenging() ? ARM_GPR_AO_6 : ARM_GPR_AO_2;
else
- return ARM_GPR_AO_1;
+ return RI->requiresRegisterScavenging() ? ARM_GPR_AO_5 : ARM_GPR_AO_1;
}
}
@@ -156,15 +183,29 @@ def GPR : RegisterClass<"ARM", [i32], 32, [R0, R1, R2, R3, R4, R5, R6,
if (Subtarget.isThumb())
I = THUMB_GPR_AO + (sizeof(THUMB_GPR_AO)/sizeof(unsigned));
else if (Subtarget.useThumbBacktraces()) {
- if (Subtarget.isR9Reserved())
- I = ARM_GPR_AO_4 + (sizeof(ARM_GPR_AO_4)/sizeof(unsigned));
- else
- I = ARM_GPR_AO_3 + (sizeof(ARM_GPR_AO_3)/sizeof(unsigned));
+ if (Subtarget.isR9Reserved()) {
+ if (RI->requiresRegisterScavenging())
+ I = ARM_GPR_AO_8 + (sizeof(ARM_GPR_AO_8)/sizeof(unsigned));
+ else
+ I = ARM_GPR_AO_4 + (sizeof(ARM_GPR_AO_4)/sizeof(unsigned));
+ } else {
+ if (RI->requiresRegisterScavenging())
+ I = ARM_GPR_AO_7 + (sizeof(ARM_GPR_AO_7)/sizeof(unsigned));
+ else
+ I = ARM_GPR_AO_3 + (sizeof(ARM_GPR_AO_3)/sizeof(unsigned));
+ }
} else {
- if (Subtarget.isR9Reserved())
- I = ARM_GPR_AO_2 + (sizeof(ARM_GPR_AO_2)/sizeof(unsigned));
- else
- I = ARM_GPR_AO_1 + (sizeof(ARM_GPR_AO_1)/sizeof(unsigned));
+ if (Subtarget.isR9Reserved()) {
+ if (RI->requiresRegisterScavenging())
+ I = ARM_GPR_AO_6 + (sizeof(ARM_GPR_AO_6)/sizeof(unsigned));
+ else
+ I = ARM_GPR_AO_2 + (sizeof(ARM_GPR_AO_2)/sizeof(unsigned));
+ } else {
+ if (RI->requiresRegisterScavenging())
+ I = ARM_GPR_AO_5 + (sizeof(ARM_GPR_AO_5)/sizeof(unsigned));
+ else
+ I = ARM_GPR_AO_1 + (sizeof(ARM_GPR_AO_1)/sizeof(unsigned));
+ }
}
// Mac OS X requires FP not to be clobbered for backtracing purpose.
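
For readers skimming the hunks: the patch adds four scavenging-aware allocation orders (ARM_GPR_AO_5 through ARM_GPR_AO_8) that keep the frame pointer and R12 allocatable, and selects them whenever the register scavenger is in use. The decision reduces to a four-way choice keyed on the frame-pointer convention and R9 reservation, gated by the scavenging check. The standalone sketch below mirrors that decision tree; the FramePointerIsR7, R9Reserved, and UseScavenger flags are illustrative stand-ins for useThumbBacktraces(), isR9Reserved(), and requiresRegisterScavenging(), not LLVM API.

// Minimal sketch (not part of the patch): selecting an ARM GPR allocation
// order from three configuration bits, mirroring allocation_order_begin().
#include <cstdio>

enum AllocationOrder {
  AO_1, // FP = R11, R9 available, scavenger off
  AO_2, // FP = R11, R9 reserved,  scavenger off
  AO_3, // FP = R7,  R9 available, scavenger off
  AO_4, // FP = R7,  R9 reserved,  scavenger off
  AO_5, // FP = R11, R9 available, scavenger on (R12 stays allocatable)
  AO_6, // FP = R11, R9 reserved,  scavenger on (R12 stays allocatable)
  AO_7, // FP = R7,  R9 available, scavenger on (R12 stays allocatable)
  AO_8  // FP = R7,  R9 reserved,  scavenger on (R12 stays allocatable)
};

// Hypothetical flags standing in for useThumbBacktraces(), isR9Reserved(),
// and requiresRegisterScavenging().
AllocationOrder pickOrder(bool FramePointerIsR7, bool R9Reserved,
                          bool UseScavenger) {
  if (FramePointerIsR7)
    return R9Reserved ? (UseScavenger ? AO_8 : AO_4)
                      : (UseScavenger ? AO_7 : AO_3);
  return R9Reserved ? (UseScavenger ? AO_6 : AO_2)
                    : (UseScavenger ? AO_5 : AO_1);
}

int main() {
  // Example: Darwin-style backtraces (FP = R7), R9 reserved, scavenger on
  // selects the order corresponding to ARM_GPR_AO_8 in the patch.
  std::printf("selected order = %d\n", pickOrder(true, true, true));
  return 0;
}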