| author | Linus Torvalds <torvalds@ppc970.osdl.org> | 2005-06-24 10:39:17 -0700 |
|---|---|---|
| committer | Linus Torvalds <torvalds@ppc970.osdl.org> | 2005-06-24 10:39:17 -0700 |
| commit | 793ae77469121227cd910c4b99f24be1de34bcca (patch) | |
| tree | ca8b6bbafab35c5c1099c54696c36dc0b8c17cf7 /include/asm-i386 | |
| parent | 59a49e38711a146dc0bef4837c825b5422335460 (diff) | |
| download | op-kernel-dev-793ae77469121227cd910c4b99f24be1de34bcca.zip op-kernel-dev-793ae77469121227cd910c4b99f24be1de34bcca.tar.gz | |
Add "memory" clobbers to the x86 inline asm of strncmp and friends
These routines don't actually clobber memory, but gcc doesn't even know they
_read_ memory, so it can apparently re-order memory accesses around them.
That obviously does the wrong thing if the re-ordered access happens to
change the memory that the compare function is reading.
Verified by Jens Axboe to fix a strange boot problem.
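The failure mode is easiest to see from the caller's side. The sketch below is illustrative only and is not part of the patch: asm_bytes_equal() and demo() are made-up names, and the compare is written in the same tied-register constraint style as the strncmp() family changed below. gcc only sees the registers in the operand list, so without the "memory" clobber it is free to move the store in demo() past the asm and let the compare read stale data; the clobber is what forces pending stores to complete first.

```c
/* Hypothetical illustration, not kernel code: a byte compare done in
 * inline asm.  The asm reads *a and *b, but nothing in the operand
 * list tells gcc that, hence the explicit "memory" clobber. */
static inline int asm_bytes_equal(const void *a, const void *b, unsigned long n)
{
	const void *d0, *d1;
	unsigned long d2;
	char eq;

	__asm__ __volatile__(
		"repe; cmpsb\n\t"	/* compare n bytes at (%esi) and (%edi) */
		"sete %0"		/* eq = 1 if all bytes matched (n > 0)  */
		: "=q" (eq), "=&S" (d0), "=&D" (d1), "=&c" (d2)
		: "1" (a), "2" (b), "3" (n)
		: "cc", "memory");	/* "memory": the asm reads both buffers */
	return eq;
}

int demo(char *buf)
{
	/* Without the "memory" clobber gcc may legally sink this store
	 * below the asm, so the compare would see the old byte. */
	buf[0] = 'x';
	return asm_bytes_equal(buf, "xyz", 3);
}
```

Listing the buffers as explicit memory operands would also work in principle, but the patch takes the simpler route of a blanket "memory" clobber on each affected routine.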
Diffstat (limited to 'include/asm-i386')
-rw-r--r-- | include/asm-i386/string.h | 32 |
1 file changed, 22 insertions, 10 deletions
```diff
diff --git a/include/asm-i386/string.h b/include/asm-i386/string.h
index 6a78ac5..02c8f5d 100644
--- a/include/asm-i386/string.h
+++ b/include/asm-i386/string.h
@@ -116,7 +116,8 @@ __asm__ __volatile__(
 	"orb $1,%%al\n"
 	"3:"
 	:"=a" (__res), "=&S" (d0), "=&D" (d1)
-	:"1" (cs),"2" (ct));
+	:"1" (cs),"2" (ct)
+	:"memory");
 return __res;
 }
 
@@ -138,8 +139,9 @@ __asm__ __volatile__(
 	"3:\tsbbl %%eax,%%eax\n\t"
 	"orb $1,%%al\n"
 	"4:"
-	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
-	:"1" (cs),"2" (ct),"3" (count));
+	:"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
+	:"1" (cs),"2" (ct),"3" (count)
+	:"memory");
 return __res;
 }
 
@@ -158,7 +160,9 @@ __asm__ __volatile__(
 	"movl $1,%1\n"
 	"2:\tmovl %1,%0\n\t"
 	"decl %0"
-	:"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
+	:"=a" (__res), "=&S" (d0)
+	:"1" (s),"0" (c)
+	:"memory");
 return __res;
 }
 
@@ -175,7 +179,9 @@ __asm__ __volatile__(
 	"leal -1(%%esi),%0\n"
 	"2:\ttestb %%al,%%al\n\t"
 	"jne 1b"
-	:"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
+	:"=g" (__res), "=&S" (d0), "=&a" (d1)
+	:"0" (0),"1" (s),"2" (c)
+	:"memory");
 return __res;
 }
 
@@ -189,7 +195,9 @@ __asm__ __volatile__(
 	"scasb\n\t"
 	"notl %0\n\t"
 	"decl %0"
-	:"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffffu));
+	:"=c" (__res), "=&D" (d0)
+	:"1" (s),"a" (0), "0" (0xffffffffu)
+	:"memory");
 return __res;
 }
 
@@ -333,7 +341,9 @@ __asm__ __volatile__(
 	"je 1f\n\t"
 	"movl $1,%0\n"
 	"1:\tdecl %0"
-	:"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
+	:"=D" (__res), "=&c" (d0)
+	:"a" (c),"0" (cs),"1" (count)
+	:"memory");
 return __res;
 }
 
@@ -369,7 +379,7 @@ __asm__ __volatile__(
 	"je 2f\n\t"
 	"stosb\n"
 	"2:"
-	: "=&c" (d0), "=&D" (d1)
+	:"=&c" (d0), "=&D" (d1)
 	:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
 	:"memory");
 return (s);
@@ -392,7 +402,8 @@ __asm__ __volatile__(
 	"jne 1b\n"
 	"3:\tsubl %2,%0"
 	:"=a" (__res), "=&d" (d0)
-	:"c" (s),"1" (count));
+	:"c" (s),"1" (count)
+	:"memory");
 return __res;
 }
 /* end of additional stuff */
@@ -473,7 +484,8 @@ static inline void * memscan(void * addr, int c, size_t size)
 		"dec %%edi\n"
 		"1:"
 		: "=D" (addr), "=c" (size)
-		: "0" (addr), "1" (size), "a" (c));
+		: "0" (addr), "1" (size), "a" (c)
+		: "memory");
 	return addr;
 }
 
```
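For reference, this is roughly what memscan() reads like once the last hunk is applied (32-bit x86 only). The constraint and clobber lines come straight from the diff; the size check and the first two asm instructions are not visible in the hunk and are filled in here as assumptions, so treat the sketch as illustrative rather than a verbatim copy of the header.

```c
#include <stddef.h>

/* Sketch of include/asm-i386/string.h memscan() after this patch.
 * Only the lines shown in the last hunk are verbatim; the early
 * return and the first two asm instructions are assumed. */
static inline void * memscan(void * addr, int c, size_t size)
{
	if (!size)			/* assumed guard, not shown in the hunk  */
		return addr;
	__asm__("repnz; scasb\n\t"	/* assumed: scan for %al through (%edi)  */
		"jnz 1f\n\t"		/* not found: %edi is already addr+size  */
		"dec %%edi\n"		/* found: step back onto the match       */
		"1:"
		: "=D" (addr), "=c" (size)
		: "0" (addr), "1" (size), "a" (c)
		: "memory");		/* the asm reads the scanned buffer      */
	return addr;
}
```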