| author | Carlos O'Donell <carlos@parisc-linux.org> | 2006-04-22 14:47:21 -0600 |
|---|---|---|
| committer | Kyle McMartin <kyle@hera.kernel.org> | 2006-06-27 23:28:33 +0000 |
| commit | 3fd3a74f45c935f7d6d5c2fb48f06324b18826b7 (patch) | |
| tree | 552033cbdc4e19b667a25c55e642a172a1ec3b2e /include/asm-parisc/uaccess.h | |
| parent | c8224e0074f1dce12e95e53ca469f6fe49cc9101 (diff) | |
[PARISC] Use FIXUP_BRANCH_CLOBBER to asm clobber list
Joel Soete correctly noticed that the fixup's clobbers must be listed
as the asm clobbers. unaligned.c gains a new macro, FIXUP_BRANCH_CLOBBER,
which lists all the registers clobbered by FIXUP_BRANCH's fixup; we use
it throughout the file to simplify listing clobbers in the future.
A missing "r1" clobber is added to our uaccess.h for the 64-bit
__put_kernel_asm. Interestingly, this is a pretty serious bug: gcc makes
good use of r1 as a temporary, and the uses of __put_kernel_asm are
varied and dangerous if r1 is scratched during an invalid write.
Signed-off-by: Joel Soete <soete.joel@tiscali.be>
Signed-off-by: Carlos O'Donell <carlos@parisc-linux.org>
Signed-off-by: Kyle McMartin <kyle@parisc-linux.org>
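For context, the unaligned.c pattern the message refers to looks roughly like the sketch below. This is a paraphrase under the assumption that FIXUP_BRANCH reaches its fixup label by loading the address into r1 and branching through it, not the verbatim kernel source; FIXUP_BRANCH_CLOBBER simply keeps that clobber requirement in one place.

```c
/* Rough sketch of the arch/parisc/kernel/unaligned.c pattern (paraphrased,
 * not the verbatim kernel macros): the fixup branch goes through r1, so
 * "r1" must appear in the clobber list of every asm statement that emits
 * FIXUP_BRANCH.
 */
#define FIXUP_BRANCH(lbl)			\
	"\tldil L%%" #lbl ", %%r1\n"		\
	"\tldo R%%" #lbl "(%%r1), %%r1\n"	\
	"\tbv,n %%r0(%%r1)\n"
/* If you use FIXUP_BRANCH, you must list this clobber. */
#define FIXUP_BRANCH_CLOBBER "r1"
```

An asm statement that emits the branch then ends its clobber list with FIXUP_BRANCH_CLOBBER rather than hard-coding "r1", which is what lets the file list clobbers consistently.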
Diffstat (limited to 'include/asm-parisc/uaccess.h')
-rw-r--r-- | include/asm-parisc/uaccess.h | 9
1 file changed, 7 insertions(+), 2 deletions(-)
diff --git a/include/asm-parisc/uaccess.h b/include/asm-parisc/uaccess.h
index f6c417c..d973e8b 100644
--- a/include/asm-parisc/uaccess.h
+++ b/include/asm-parisc/uaccess.h
@@ -172,7 +172,11 @@ struct exception_data {
 /*
  * The "__put_user/kernel_asm()" macros tell gcc they read from memory
  * instead of writing. This is because they do not write to any memory
- * gcc knows about, so there are no aliasing issues.
+ * gcc knows about, so there are no aliasing issues. These macros must
+ * also be aware that "fixup_put_user_skip_[12]" are executed in the
+ * context of the fault, and any registers used there must be listed
+ * as clobbers. In this case only "r1" is used by the current routines.
+ * r8/r9 are already listed as err/val.
  */
 
 #ifdef __LP64__
@@ -183,7 +187,8 @@ struct exception_data {
 		"\t.dword\t1b,fixup_put_user_skip_1\n"	\
 		"\t.previous"				\
 		: "=r"(__pu_err)			\
-		: "r"(ptr), "r"(x), "0"(__pu_err))
+		: "r"(ptr), "r"(x), "0"(__pu_err)	\
+		: "r1")
 
 #define __put_user_asm(stx,x,ptr)	\
 	__asm__ __volatile__ (			\
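Put back together, the corrected 64-bit put macro takes roughly the shape below. This is a simplified standalone sketch, not the verbatim header: the name __put_kernel_asm_sketch and the store line are illustrative, and __pu_err is assumed to be the error local declared by the enclosing put_user-style macro, as in the real uaccess.h. The point of the patch is the last line of the clobber list, "r1": fixup_put_user_skip_1 executes in the context of the fault and uses r1, so gcc must not keep anything live there across the asm.

```c
/* Simplified sketch of the corrected 64-bit put macro (illustrative only;
 * the macro name and the store line are hypothetical, and __pu_err comes
 * from the enclosing macro as in the real header).
 */
#define __put_kernel_asm_sketch(stx, x, ptr)			\
	__asm__ __volatile__ (					\
		"\n1:\t" stx "\t%2,0(%1)\n"			\
		"\t.section __ex_table,\"aw\"\n"		\
		"\t.dword\t1b,fixup_put_user_skip_1\n"		\
		"\t.previous"					\
		: "=r"(__pu_err)				\
		: "r"(ptr), "r"(x), "0"(__pu_err)		\
		: "r1")	/* the fixup runs in the fault context and uses r1 */
```

The "0"(__pu_err) matching constraint keeps the error value in the same register as output operand 0, which the comment added by the hunk notes is r8 (with r9 carrying val), so the fixup can update it in place.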