author    | Jan Beulich <jbeulich@novell.com>     | 2005-09-12 18:49:24 +0200
committer | Linus Torvalds <torvalds@g5.osdl.org> | 2005-09-12 10:50:56 -0700
commit    | 7effaa882af523085f7acadc5871b75a7e506baf (patch)
tree      | 420890a24a9904bdaaec20dd9909d2f6f1e0d2f6 /include/asm-x86_64/calling.h
parent    | b3ab8382245541ea030faaa1645f66258fde452d (diff)
[PATCH] x86-64: Fix CFI information
Being the foundation for reliable stack unwinding, this fixes the CFI unwind
annotations in many low-level x86_64 routines, and adds a config option
(available to all architectures, and also present in the previously sent
patch adding such annotations to i386 code) to enable them separately
rather than only along with full debug information.
Signed-off-by: Jan Beulich <jbeulich@novell.com>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
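As background for the diff below: the CFI_* names are assembler-side macros rather than raw .cfi_* directives, so the annotations cost nothing unless the corresponding config option is turned on. A minimal sketch of how such a dwarf2.h-style wrapper is typically gated (the header layout and the option names CONFIG_UNWIND_INFO / CONFIG_DEBUG_INFO are assumptions for illustration, not taken from this commit):

```
/* Sketch of a dwarf2.h-style wrapper (illustrative, not the patched file).
 * When unwind annotations are enabled, the CFI_* macros expand to real
 * .cfi_* directives and the assembler emits unwind tables; otherwise they
 * expand to the assembler line-comment character '#', which swallows the
 * macro's arguments, so the annotated code assembles exactly as before.
 */
#if defined(CONFIG_UNWIND_INFO) || defined(CONFIG_DEBUG_INFO)	/* assumed option names */
#define CFI_STARTPROC		.cfi_startproc
#define CFI_ENDPROC		.cfi_endproc
#define CFI_ADJUST_CFA_OFFSET	.cfi_adjust_cfa_offset
#define CFI_REL_OFFSET		.cfi_rel_offset
#define CFI_RESTORE		.cfi_restore
#else
#define CFI_STARTPROC		#
#define CFI_ENDPROC		#
#define CFI_ADJUST_CFA_OFFSET	#
#define CFI_REL_OFFSET		#
#define CFI_RESTORE		#
#endif
```

Because the macros collapse to comments when the option is off, sprinkling them through calling.h has no effect on the generated code for existing configurations.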
Diffstat (limited to 'include/asm-x86_64/calling.h')
-rw-r--r-- | include/asm-x86_64/calling.h | 23
1 file changed, 15 insertions(+), 8 deletions(-)
diff --git a/include/asm-x86_64/calling.h b/include/asm-x86_64/calling.h
index 0bc1265..fc2c5a6 100644
--- a/include/asm-x86_64/calling.h
+++ b/include/asm-x86_64/calling.h
@@ -65,27 +65,36 @@
 	.if \skipr11
 	.else
 	movq (%rsp),%r11
+	CFI_RESTORE r11
 	.endif
 	.if \skipr8910
 	.else
 	movq 1*8(%rsp),%r10
+	CFI_RESTORE r10
 	movq 2*8(%rsp),%r9
+	CFI_RESTORE r9
 	movq 3*8(%rsp),%r8
+	CFI_RESTORE r8
 	.endif
 	.if \skiprax
 	.else
 	movq 4*8(%rsp),%rax
+	CFI_RESTORE rax
 	.endif
 	.if \skiprcx
 	.else
 	movq 5*8(%rsp),%rcx
+	CFI_RESTORE rcx
 	.endif
 	.if \skiprdx
 	.else
 	movq 6*8(%rsp),%rdx
+	CFI_RESTORE rdx
 	.endif
 	movq 7*8(%rsp),%rsi
+	CFI_RESTORE rsi
 	movq 8*8(%rsp),%rdi
+	CFI_RESTORE rdi
 	.if ARG_SKIP+\addskip > 0
 	addq $ARG_SKIP+\addskip,%rsp
 	CFI_ADJUST_CFA_OFFSET -(ARG_SKIP+\addskip)
@@ -124,11 +133,17 @@
 
 	.macro RESTORE_REST
 	movq (%rsp),%r15
+	CFI_RESTORE r15
 	movq 1*8(%rsp),%r14
+	CFI_RESTORE r14
 	movq 2*8(%rsp),%r13
+	CFI_RESTORE r13
 	movq 3*8(%rsp),%r12
+	CFI_RESTORE r12
 	movq 4*8(%rsp),%rbp
+	CFI_RESTORE rbp
 	movq 5*8(%rsp),%rbx
+	CFI_RESTORE rbx
 	addq $REST_SKIP,%rsp
 	CFI_ADJUST_CFA_OFFSET -(REST_SKIP)
 	.endm
@@ -146,11 +161,3 @@
 	.macro icebp
 	.byte 0xf1
 	.endm
-
-#ifdef CONFIG_FRAME_POINTER
-#define ENTER enter
-#define LEAVE leave
-#else
-#define ENTER
-#define LEAVE
-#endif
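What the added annotations buy: CFI_REL_OFFSET (emitted on the save side, in the SAVE_* macros) tells the unwinder where a caller register was spilled, and the CFI_RESTORE lines added here tell it that from that instruction on the register is live in its register again, so an unwind started after the reload no longer reads a stale stack slot. A minimal, hypothetical routine showing that pairing (the symbol example_helper and the 8-byte frame are invented for illustration, not part of calling.h):

```
	.globl example_helper
example_helper:
	CFI_STARTPROC
	subq $8,%rsp
	CFI_ADJUST_CFA_OFFSET 8		/* CFA is now 8 bytes further above %rsp */
	movq %rbx,(%rsp)
	CFI_REL_OFFSET rbx,0		/* caller's %rbx spilled 0 bytes above %rsp */

	/* ... body that clobbers %rbx ... */

	movq (%rsp),%rbx
	CFI_RESTORE rbx			/* caller's %rbx is back in the register itself */
	addq $8,%rsp
	CFI_ADJUST_CFA_OFFSET -8	/* CFA offset back to its entry value */
	ret
	CFI_ENDPROC
```

Without the restore-side annotation, the save-side rule would stay in force even after the stack slot has been popped and possibly reused, which is exactly the kind of stale unwind information this patch removes from the low-level restore macros.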