author	Linus Torvalds <torvalds@ppc970.osdl.org>	2005-06-24 16:01:24 -0700
committer	Chris Wright <chrisw@osdl.org>	2005-06-29 16:00:38 -0700
commit	2e8e121926ee6d15151c8e57ddce97275213c5b2
tree	7a89595c5fe151cf3b431028e044b847de7e9d3a
parent	26b1062c23e46c111f5e86bc8ce4d5007805b51a
[PATCH] Add "memory" clobbers to the x86 inline asm of strncmp and friends
Add "memory" clobbers to the x86 inline asm of strncmp and friends They don't actually clobber memory, but gcc doesn't even know they _read_ memory, so can apparently re-order memory accesses around them. Which obviously does the wrong thing if the memory access happens to change the memory that the compare function is accessing.. Verified to fix a strange boot problem by Jens Axboe. Signed-off-by: Chris Wright <chrisw@osdl.org>
-rw-r--r--	include/asm-i386/string.h	| 32
1 file changed, 22 insertions(+), 10 deletions(-)
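
To make the re-ordering hazard concrete, here is a minimal sketch of the same idiom. It is not part of the patch; the helper name bytes_equal and its operand constraints are illustrative assumptions only. The asm reads memory through its pointer operands, so it must list a "memory" clobber, otherwise gcc is free to move surrounding loads and stores across it:

/* Hypothetical helper, not from the kernel: compare one byte via
 * inline asm.  gcc only sees the two pointer registers as inputs;
 * without the "memory" clobber it may sink a preceding store such
 * as a[0] = 'x' past the asm, so the asm would compare stale data.
 */
static inline int bytes_equal(const char *a, const char *b)
{
	unsigned char __res;
	__asm__ __volatile__(
		"movb (%1),%0\n\t"	/* load *a */
		"cmpb (%2),%0\n\t"	/* compare with *b */
		"sete %0"		/* __res = 1 if equal */
		:"=&q" (__res)
		:"r" (a), "r" (b)
		:"cc", "memory");	/* the fix: asm reads *a and *b */
	return __res;
}

With the clobber listed, gcc completes pending stores before the asm and re-reads any cached memory values afterwards; the hunks below add exactly that third clobber list to each of the affected string routines.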
diff --git a/include/asm-i386/string.h b/include/asm-i386/string.h
index 6a78ac58c194..02c8f5d22065 100644
--- a/include/asm-i386/string.h
+++ b/include/asm-i386/string.h
@@ -116,7 +116,8 @@ __asm__ __volatile__(
"orb $1,%%al\n"
"3:"
:"=a" (__res), "=&S" (d0), "=&D" (d1)
- :"1" (cs),"2" (ct));
+ :"1" (cs),"2" (ct)
+ :"memory");
return __res;
}
@@ -138,8 +139,9 @@ __asm__ __volatile__(
"3:\tsbbl %%eax,%%eax\n\t"
"orb $1,%%al\n"
"4:"
- :"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
- :"1" (cs),"2" (ct),"3" (count));
+ :"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
+ :"1" (cs),"2" (ct),"3" (count)
+ :"memory");
return __res;
}
@@ -158,7 +160,9 @@ __asm__ __volatile__(
"movl $1,%1\n"
"2:\tmovl %1,%0\n\t"
"decl %0"
- :"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
+ :"=a" (__res), "=&S" (d0)
+ :"1" (s),"0" (c)
+ :"memory");
return __res;
}
@@ -175,7 +179,9 @@ __asm__ __volatile__(
"leal -1(%%esi),%0\n"
"2:\ttestb %%al,%%al\n\t"
"jne 1b"
- :"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
+ :"=g" (__res), "=&S" (d0), "=&a" (d1)
+ :"0" (0),"1" (s),"2" (c)
+ :"memory");
return __res;
}
@@ -189,7 +195,9 @@ __asm__ __volatile__(
"scasb\n\t"
"notl %0\n\t"
"decl %0"
- :"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffffu));
+ :"=c" (__res), "=&D" (d0)
+ :"1" (s),"a" (0), "0" (0xffffffffu)
+ :"memory");
return __res;
}
@@ -333,7 +341,9 @@ __asm__ __volatile__(
"je 1f\n\t"
"movl $1,%0\n"
"1:\tdecl %0"
- :"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
+ :"=D" (__res), "=&c" (d0)
+ :"a" (c),"0" (cs),"1" (count)
+ :"memory");
return __res;
}
@@ -369,7 +379,7 @@ __asm__ __volatile__(
"je 2f\n\t"
"stosb\n"
"2:"
- : "=&c" (d0), "=&D" (d1)
+ :"=&c" (d0), "=&D" (d1)
:"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
:"memory");
return (s);
@@ -392,7 +402,8 @@ __asm__ __volatile__(
"jne 1b\n"
"3:\tsubl %2,%0"
:"=a" (__res), "=&d" (d0)
- :"c" (s),"1" (count));
+ :"c" (s),"1" (count)
+ :"memory");
return __res;
}
/* end of additional stuff */
@@ -473,7 +484,8 @@ static inline void * memscan(void * addr, int c, size_t size)
"dec %%edi\n"
"1:"
: "=D" (addr), "=c" (size)
- : "0" (addr), "1" (size), "a" (c));
+ : "0" (addr), "1" (size), "a" (c)
+ : "memory");
return addr;
}