-rw-r--r--  ChangeLog                                          |  8
-rw-r--r--  sysdeps/unix/sysv/linux/x86_64/____longjmp_chk.S   | 14
-rw-r--r--  sysdeps/x86_64/__longjmp.S                         | 14
-rw-r--r--  sysdeps/x86_64/setjmp.S                            | 15
4 files changed, 41 insertions(+), 10 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index 721c791513..0c25bc33ee 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,11 @@
+2012-05-21 Roland McGrath <roland@hack.frob.com>
+
+ * sysdeps/x86_64/setjmp.S [PTR_MANGLE] [__ILP32__]: Preserve high bits
+ of %rbp unmolested in the jmp_buf while mangling the low bits.
+ * sysdeps/x86_64/__longjmp.S [PTR_DEMANGLE] [__ILP32__]: Restore the
+ unmolested high bits of %rbp while demangling the low bits.
+ * sysdeps/unix/sysv/linux/x86_64/____longjmp_chk.S: Likewise.
+
2012-05-21 Andreas Jaeger <aj@suse.de>
* include/shlib-compat.h (libc_sunrpc_symbol): New macro.
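
The x32 subtlety behind this change: under __ILP32__ the LP register macros name 32-bit registers, so PTR_MANGLE (%R9_LP) touches only the low 32 bits of %r9, and a 32-bit register operation zero-extends into the high half. The following minimal C model (illustrative only, not glibc code; mangle32 is a plain XOR stand-in for PTR_MANGLE and the guard value is invented) shows how the high half of %rbp would be lost unless it is carried separately, which is what the hunks below arrange.

/* Minimal, illustrative C model: a 32-bit operation on a 64-bit register
   zero-extends, so mangling %rbp through its 32-bit view discards the
   high half.  mangle32 stands in for PTR_MANGLE; the guard is made up.  */
#include <assert.h>
#include <stdint.h>

static uint32_t mangle32(uint32_t v, uint32_t guard) { return v ^ guard; }

int main(void)
{
    uint64_t rbp = 0xdeadbeef00001000ull;  /* %rbp holding a full 64-bit value */
    uint32_t guard = 0x5a5a5a5au;

    /* Models "PTR_MANGLE (%r9d)": the 32-bit result is zero-extended to
       64 bits, exactly like a movl into a 64-bit register.  */
    uint64_t mangled_reg = (uint64_t) mangle32((uint32_t) rbp, guard);

    assert((mangled_reg >> 32) == 0);      /* high 32 bits of %rbp are gone */
    return 0;
}
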
diff --git a/sysdeps/unix/sysv/linux/x86_64/____longjmp_chk.S b/sysdeps/unix/sysv/linux/x86_64/____longjmp_chk.S
index a0d1732bdc..c392820cd0 100644
--- a/sysdeps/unix/sysv/linux/x86_64/____longjmp_chk.S
+++ b/sysdeps/unix/sysv/linux/x86_64/____longjmp_chk.S
@@ -1,4 +1,4 @@
-/* Copyright (C) 2001,2004,2005,2006,2009,2010,2011 Free Software Foundation, Inc.
+/* Copyright (C) 2001-2012 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
@@ -55,12 +55,20 @@ longjmp_msg:
ENTRY(____longjmp_chk)
/* Restore registers. */
mov (JB_RSP*8)(%rdi), %R8_LP
- movq (JB_RBP*8)(%rdi), %r9
+ mov (JB_RBP*8)(%rdi),%R9_LP
mov (JB_PC*8)(%rdi), %RDX_LP
#ifdef PTR_DEMANGLE
PTR_DEMANGLE (%R8_LP)
- PTR_DEMANGLE (%r9)
+ PTR_DEMANGLE (%R9_LP)
PTR_DEMANGLE (%RDX_LP)
+# ifdef __ILP32__
+ /* We ignored the high bits of the %rbp value because only the low
+ bits are mangled. But we cannot presume that %rbp is being used
+ as a pointer and truncate it, so recover the high bits. */
+ movl (JB_RBP*8 + 4)(%rdi), %eax
+ shlq $32, %rax
+ orq %rax, %r9
+# endif
#endif
cmp %R8_LP, %RSP_LP
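
As a reading aid, here is a hedged C counterpart of the recovery sequence just added (the identical sequence appears in __longjmp.S below): demangle the low 32 bits, then shift the untouched high 32 bits from the jmp_buf slot back into place and OR them in. demangle32 and the flat uint64_t slot are illustrative stand-ins, not glibc's definitions.

/* Illustrative C model of the movl/shlq/orq recovery above;
   demangle32 stands in for PTR_DEMANGLE.  */
#include <stdint.h>

static uint32_t demangle32(uint32_t v, uint32_t guard) { return v ^ guard; }

static uint64_t restore_rbp(uint64_t jb_rbp_slot, uint32_t guard)
{
    /* PTR_DEMANGLE (%R9_LP): a 32-bit operation, so the high half of the
       destination register is zero afterwards.  */
    uint64_t rbp = demangle32((uint32_t) jb_rbp_slot, guard);

    /* movl (JB_RBP*8 + 4)(%rdi), %eax -- fetch the saved, unmangled high
       half (offset +4 on little-endian).  */
    uint64_t high = jb_rbp_slot >> 32;

    /* shlq $32, %rax ; orq %rax, %r9 -- put the high half back on top.  */
    return rbp | (high << 32);
}
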
diff --git a/sysdeps/x86_64/__longjmp.S b/sysdeps/x86_64/__longjmp.S
index 22beb888a1..c0c15dc810 100644
--- a/sysdeps/x86_64/__longjmp.S
+++ b/sysdeps/x86_64/__longjmp.S
@@ -1,4 +1,4 @@
-/* Copyright (C) 2001,2004,2005,2006,2009 Free Software Foundation, Inc.
+/* Copyright (C) 2001-2012 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
@@ -26,12 +26,20 @@
ENTRY(__longjmp)
/* Restore registers. */
mov (JB_RSP*8)(%rdi),%R8_LP
- movq (JB_RBP*8)(%rdi),%r9
+ mov (JB_RBP*8)(%rdi),%R9_LP
mov (JB_PC*8)(%rdi),%RDX_LP
#ifdef PTR_DEMANGLE
PTR_DEMANGLE (%R8_LP)
- PTR_DEMANGLE (%r9)
+ PTR_DEMANGLE (%R9_LP)
PTR_DEMANGLE (%RDX_LP)
+# ifdef __ILP32__
+ /* We ignored the high bits of the %rbp value because only the low
+ bits are mangled. But we cannot presume that %rbp is being used
+ as a pointer and truncate it, so recover the high bits. */
+ movl (JB_RBP*8 + 4)(%rdi), %eax
+ shlq $32, %rax
+ orq %rax, %r9
+# endif
#endif
/* We add unwind information for the target here. */
cfi_def_cfa(%rdi, 0)
diff --git a/sysdeps/x86_64/setjmp.S b/sysdeps/x86_64/setjmp.S
index 87c095238c..5639292da4 100644
--- a/sysdeps/x86_64/setjmp.S
+++ b/sysdeps/x86_64/setjmp.S
@@ -1,5 +1,5 @@
/* setjmp for x86-64.
- Copyright (C) 2001, 2003, 2005, 2006 Free Software Foundation, Inc.
+ Copyright (C) 2001-2012 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
@@ -24,9 +24,16 @@ ENTRY (__sigsetjmp)
/* Save registers. */
movq %rbx, (JB_RBX*8)(%rdi)
#ifdef PTR_MANGLE
- movq %rbp, %rax
- PTR_MANGLE (%rax)
- movq %rax, (JB_RBP*8)(%rdi)
+# ifdef __ILP32__
+ /* Save the high bits of %rbp first, since PTR_MANGLE will
+ only handle the low bits but we cannot presume %rbp is
+ being used as a pointer and truncate it. Here we write all
+ of %rbp, but the low bits will be overwritten below. */
+ movq %rbp, (JB_RBP*8)(%rdi)
+# endif
+ mov %RBP_LP, %RAX_LP
+ PTR_MANGLE (%RAX_LP)
+ mov %RAX_LP, (JB_RBP*8)(%rdi)
#else
movq %rbp, (JB_RBP*8)(%rdi)
#endif
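
For the setjmp side, a hedged C model of the store order above: the whole 64-bit %rbp goes into the jmp_buf slot first, and the 32-bit mangled store then overwrites only the low half of the slot, leaving the high bits in place for __longjmp to recover. mangle32 and the 4-byte memcpy are illustrative stand-ins for PTR_MANGLE and the 32-bit x32 store.

/* Illustrative C model of the __ILP32__ path in __sigsetjmp above.  */
#include <stdint.h>
#include <string.h>

static uint32_t mangle32(uint32_t v, uint32_t guard) { return v ^ guard; }

static void save_rbp(uint64_t *jb_rbp_slot, uint64_t rbp, uint32_t guard)
{
    /* movq %rbp, (JB_RBP*8)(%rdi) -- the high bits land in the slot
       unmolested; the low bits are overwritten just below.  */
    *jb_rbp_slot = rbp;

    /* mov %RBP_LP, %RAX_LP ; PTR_MANGLE (%RAX_LP) ;
       mov %RAX_LP, (JB_RBP*8)(%rdi) -- on x32 this is a 4-byte store, so
       only the low half of the 8-byte slot changes (little-endian).  */
    uint32_t low = mangle32((uint32_t) rbp, guard);
    memcpy(jb_rbp_slot, &low, sizeof low);
}
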