author    Ian Campbell <ian.campbell@citrix.com>    2013-02-22 08:57:54 +0000
committer Ian Campbell <ian.campbell@citrix.com>    2013-02-22 12:14:52 +0000
commit    8b93c06fd24ed341a0edb17c21aa2d350b68d24e (patch)
tree      6f4b262e745aafcd7b4b73186e957e053e8e5d45 /xen/include/asm-arm/arm32
parent    058324a8b3a5e607e0712651ca817ea44dd37125 (diff)
xen: arm64: xchg and cmpxchg
Signed-off-by: Ian Campbell <ian.campbell@citrix.com>
Acked-by: Tim Deegan <tim@xen.org>
Diffstat (limited to 'xen/include/asm-arm/arm32')
-rw-r--r--  xen/include/asm-arm/arm32/system.h  115
1 file changed, 115 insertions, 0 deletions
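
The hunk below adds ldrex/strex-based implementations of __xchg() and
__cmpxchg() to the arm32 system header, with full barriers around the
exchange. As a hedged illustration of the cmpxchg() contract the patch
documents (success is indicated by comparing the returned value with the
expected old value), here is a minimal caller sketch; the counter variable
and atomic_inc_sketch() helper are hypothetical names, not part of the patch:

    static unsigned int counter;

    static void atomic_inc_sketch(void)
    {
        unsigned int old, seen;

        do {
            old = counter;                          /* expected value */
            /* cmpxchg() returns the value found in memory; the store
             * happened only if that value equals the expected one. */
            seen = cmpxchg(&counter, old, old + 1);
        } while ( seen != old );                    /* another CPU raced us; retry */
    }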
diff --git a/xen/include/asm-arm/arm32/system.h b/xen/include/asm-arm/arm32/system.h
index 13800844b2..276e36343d 100644
--- a/xen/include/asm-arm/arm32/system.h
+++ b/xen/include/asm-arm/arm32/system.h
@@ -18,6 +18,121 @@
#define smp_rmb() rmb()
#define smp_wmb() wmb()
+extern void __bad_xchg(volatile void *, int);
+
+static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
+{
+ unsigned long ret;
+ unsigned int tmp;
+
+ smp_mb();
+
+ switch (size) {
+ case 1:
+ asm volatile("@ __xchg1\n"
+ "1: ldrexb %0, [%3]\n"
+ " strexb %1, %2, [%3]\n"
+ " teq %1, #0\n"
+ " bne 1b"
+ : "=&r" (ret), "=&r" (tmp)
+ : "r" (x), "r" (ptr)
+ : "memory", "cc");
+ break;
+ case 4:
+ asm volatile("@ __xchg4\n"
+ "1: ldrex %0, [%3]\n"
+ " strex %1, %2, [%3]\n"
+ " teq %1, #0\n"
+ " bne 1b"
+ : "=&r" (ret), "=&r" (tmp)
+ : "r" (x), "r" (ptr)
+ : "memory", "cc");
+ break;
+ default:
+ __bad_xchg(ptr, size), ret = 0;
+ break;
+ }
+ smp_mb();
+
+ return ret;
+}
+
+/*
+ * Atomic compare and exchange. Compare OLD with MEM, if identical,
+ * store NEW in MEM. Return the initial value in MEM. Success is
+ * indicated by comparing RETURN with OLD.
+ */
+
+extern void __bad_cmpxchg(volatile void *ptr, int size);
+
+static always_inline unsigned long __cmpxchg(
+ volatile void *ptr, unsigned long old, unsigned long new, int size)
+{
+ unsigned long /*long*/ oldval, res;
+
+ switch (size) {
+ case 1:
+ do {
+ asm volatile("@ __cmpxchg1\n"
+ " ldrexb %1, [%2]\n"
+ " mov %0, #0\n"
+ " teq %1, %3\n"
+ " strexbeq %0, %4, [%2]\n"
+ : "=&r" (res), "=&r" (oldval)
+ : "r" (ptr), "Ir" (old), "r" (new)
+ : "memory", "cc");
+ } while (res);
+ break;
+ case 2:
+ do {
+ asm volatile("@ __cmpxchg2\n"
+ " ldrexh %1, [%2]\n"
+ " mov %0, #0\n"
+ " teq %1, %3\n"
+ " strexheq %0, %4, [%2]\n"
+ : "=&r" (res), "=&r" (oldval)
+ : "r" (ptr), "Ir" (old), "r" (new)
+ : "memory", "cc");
+ } while (res);
+ break;
+ case 4:
+ do {
+ asm volatile("@ __cmpxchg4\n"
+ " ldrex %1, [%2]\n"
+ " mov %0, #0\n"
+ " teq %1, %3\n"
+ " strexeq %0, %4, [%2]\n"
+ : "=&r" (res), "=&r" (oldval)
+ : "r" (ptr), "Ir" (old), "r" (new)
+ : "memory", "cc");
+ } while (res);
+ break;
+#if 0
+ case 8:
+ do {
+ asm volatile("@ __cmpxchg8\n"
+ " ldrexd %1, [%2]\n"
+ " mov %0, #0\n"
+ " teq %1, %3\n"
+ " strexdeq %0, %4, [%2]\n"
+ : "=&r" (res), "=&r" (oldval)
+ : "r" (ptr), "Ir" (old), "r" (new)
+ : "memory", "cc");
+ } while (res);
+ break;
+#endif
+ default:
+ __bad_cmpxchg(ptr, size);
+ oldval = 0;
+ }
+
+ return oldval;
+}
+
+#define cmpxchg(ptr,o,n) \
+ ((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o), \
+ (unsigned long)(n),sizeof(*(ptr))))
+
#endif
/*
* Local variables:
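
The patch also makes __xchg() available for direct use (a typed xchg()
wrapper exists elsewhere in this header but is not part of this hunk). A
minimal test-and-set sketch built on it follows; lock_word and
trylock_sketch() are hypothetical names, illustrative only:

    static unsigned int lock_word;

    /* Atomically store 1 and return non-zero iff the previous value
     * was 0, i.e. the lock was free and this CPU now holds it. */
    static int trylock_sketch(void)
    {
        return __xchg(1, &lock_word, sizeof(lock_word)) == 0;
    }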