about summary refs log tree commit diff stats
path: root/xen/include/asm-x86/system.h
diff options
context:
space:
mode:
author: Keir Fraser <keir@xen.org> 2010-12-16 18:46:55 +0000
committer: Keir Fraser <keir@xen.org> 2010-12-16 18:46:55 +0000
commit 165e3aba7c1754d159099a6adf89e54f33090ea5 (patch)
tree 097f5d1eca29b55b95f4555fe4ec9f3536ba3a7b /xen/include/asm-x86/system.h
parent ab134ffcc5ca8edc3678aa1429e281e47c42462d (diff)
downloadxen-165e3aba7c1754d159099a6adf89e54f33090ea5.tar.gz
xen-165e3aba7c1754d159099a6adf89e54f33090ea5.tar.bz2
xen-165e3aba7c1754d159099a6adf89e54f33090ea5.zip
x86: Remove unnecessary LOCK/LOCK_PREFIX macros.
We don't support !CONFIG_SMP. Signed-off-by: Keir Fraser <keir@xen.org>
Diffstat (limited to 'xen/include/asm-x86/system.h')
-rw-r--r-- xen/include/asm-x86/system.h | 10
1 file changed, 5 insertions, 5 deletions
diff --git a/xen/include/asm-x86/system.h b/xen/include/asm-x86/system.h
index 52816da1c9..a57d35de81 100644
--- a/xen/include/asm-x86/system.h
+++ b/xen/include/asm-x86/system.h
@@ -91,14 +91,14 @@ static always_inline unsigned long __cmpxchg(
switch ( size )
{
case 1:
- asm volatile ( LOCK_PREFIX "cmpxchgb %b1,%2"
+ asm volatile ( "lock; cmpxchgb %b1,%2"
: "=a" (prev)
: "q" (new), "m" (*__xg((volatile void *)ptr)),
"0" (old)
: "memory" );
return prev;
case 2:
- asm volatile ( LOCK_PREFIX "cmpxchgw %w1,%2"
+ asm volatile ( "lock; cmpxchgw %w1,%2"
: "=a" (prev)
: "r" (new), "m" (*__xg((volatile void *)ptr)),
"0" (old)
@@ -106,7 +106,7 @@ static always_inline unsigned long __cmpxchg(
return prev;
#if defined(__i386__)
case 4:
- asm volatile ( LOCK_PREFIX "cmpxchgl %1,%2"
+ asm volatile ( "lock; cmpxchgl %1,%2"
: "=a" (prev)
: "r" (new), "m" (*__xg((volatile void *)ptr)),
"0" (old)
@@ -114,14 +114,14 @@ static always_inline unsigned long __cmpxchg(
return prev;
#elif defined(__x86_64__)
case 4:
- asm volatile ( LOCK_PREFIX "cmpxchgl %k1,%2"
+ asm volatile ( "lock; cmpxchgl %k1,%2"
: "=a" (prev)
: "r" (new), "m" (*__xg((volatile void *)ptr)),
"0" (old)
: "memory" );
return prev;
case 8:
- asm volatile ( LOCK_PREFIX "cmpxchgq %1,%2"
+ asm volatile ( "lock; cmpxchgq %1,%2"
: "=a" (prev)
: "r" (new), "m" (*__xg((volatile void *)ptr)),
"0" (old)