author    kaf24@labyrinth.cl.cam.ac.uk <kaf24@labyrinth.cl.cam.ac.uk>  2004-08-25 15:40:15 +0000
committer kaf24@labyrinth.cl.cam.ac.uk <kaf24@labyrinth.cl.cam.ac.uk>  2004-08-25 15:40:15 +0000
commit    cc113283a5f5c82656872f3f4417d258d138df4b (patch)
tree      c5b2d6450b2a2b35e0ae51460697f4c306ab332a /xen/include/asm-x86/system.h
parent    2e93467bf20423c7449f2bb25dadd5894c4525b3 (diff)
bitkeeper revision 1.1159.51.2 (412cb2dfaIDYjySJYYMTByGbcM77UA)
More grant-table code, and some related sundry improvements.
Diffstat (limited to 'xen/include/asm-x86/system.h')
-rw-r--r--  xen/include/asm-x86/system.h  20
1 file changed, 10 insertions, 10 deletions
diff --git a/xen/include/asm-x86/system.h b/xen/include/asm-x86/system.h
index 4b25eec921..4835b6e236 100644
--- a/xen/include/asm-x86/system.h
+++ b/xen/include/asm-x86/system.h
@@ -30,33 +30,33 @@ static always_inline unsigned long __xchg(unsigned long x, volatile void * ptr,
case 1:
__asm__ __volatile__("xchgb %b0,%1"
:"=q" (x)
- :"m" (*__xg(ptr)), "0" (x)
+ :"m" (*__xg((volatile void *)ptr)), "0" (x)
:"memory");
break;
case 2:
__asm__ __volatile__("xchgw %w0,%1"
:"=r" (x)
- :"m" (*__xg(ptr)), "0" (x)
+ :"m" (*__xg((volatile void *)ptr)), "0" (x)
:"memory");
break;
#if defined(__i386__)
case 4:
__asm__ __volatile__("xchgl %0,%1"
:"=r" (x)
- :"m" (*__xg(ptr)), "0" (x)
+ :"m" (*__xg((volatile void *)ptr)), "0" (x)
:"memory");
break;
#elif defined(__x86_64__)
case 4:
__asm__ __volatile__("xchgl %k0,%1"
:"=r" (x)
- :"m" (*__xg(ptr)), "0" (x)
+ :"m" (*__xg((volatile void *)ptr)), "0" (x)
:"memory");
break;
case 8:
__asm__ __volatile__("xchgq %0,%1"
:"=r" (x)
- :"m" (*__xg(ptr)), "0" (x)
+ :"m" (*__xg((volatile void *)ptr)), "0" (x)
:"memory");
break;
#endif
@@ -78,33 +78,33 @@ static always_inline unsigned long __cmpxchg(volatile void *ptr, unsigned long o
case 1:
__asm__ __volatile__(LOCK_PREFIX "cmpxchgb %b1,%2"
: "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ : "q"(new), "m"(*__xg((volatile void *)ptr)), "0"(old)
: "memory");
return prev;
case 2:
__asm__ __volatile__(LOCK_PREFIX "cmpxchgw %w1,%2"
: "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ : "r"(new), "m"(*__xg((volatile void *)ptr)), "0"(old)
: "memory");
return prev;
#if defined(__i386__)
case 4:
__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %1,%2"
: "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ : "r"(new), "m"(*__xg((volatile void *)ptr)), "0"(old)
: "memory");
return prev;
#elif defined(__x86_64__)
case 4:
__asm__ __volatile__(LOCK_PREFIX "cmpxchgl %k1,%2"
: "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ : "r"(new), "m"(*__xg((volatile void *)ptr)), "0"(old)
: "memory");
return prev;
case 8:
__asm__ __volatile__(LOCK_PREFIX "cmpxchgq %1,%2"
: "=a"(prev)
- : "q"(new), "m"(*__xg(ptr)), "0"(old)
+ : "r"(new), "m"(*__xg((volatile void *)ptr)), "0"(old)
: "memory");
return prev;
#endif
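
For readers skimming the diff, here is a minimal standalone sketch (not code from the Xen tree) of what the two changes amount to: the pointer handed to the "m" constraint is now explicitly cast to volatile void * before going through the __xg() dummy-struct trick, and the 16/32/64-bit cmpxchg variants use the looser "r" register constraint instead of "q", since only the 8-bit case needs a byte-addressable register. The cmpxchg16() helper name, the __xg() definition shown here, and the spelled-out "lock;" prefix (standing in for LOCK_PREFIX) are assumptions for illustration only.

/* Standalone illustration of the post-patch constraint style; assumes the
 * usual dummy-struct trick so GCC treats the whole object as clobbered. */
#include <stdio.h>

struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))

/* 16-bit compare-and-exchange: "r" lets GCC pick any general-purpose
 * register for 'new'; only the 8-bit cmpxchgb case genuinely needs the
 * byte-register "q" constraint. The explicit (volatile void *) cast
 * mirrors the change made throughout this hunk. */
static inline unsigned short cmpxchg16(volatile unsigned short *ptr,
                                       unsigned short old, unsigned short new)
{
    unsigned short prev;
    __asm__ __volatile__("lock; cmpxchgw %w1,%2"
                         : "=a"(prev)
                         : "r"(new), "m"(*__xg((volatile void *)ptr)), "0"(old)
                         : "memory");
    return prev;
}

int main(void)
{
    volatile unsigned short v = 5;
    unsigned short was = cmpxchg16(&v, 5, 7);  /* succeeds: v becomes 7 */
    printf("prev=%u now=%u\n", was, v);
    return 0;
}

Built with gcc on x86/x86-64, this prints "prev=5 now=7"; the returned value equals the expected old value exactly when the swap took effect, which is how callers of __cmpxchg() in the header detect success.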