author    Keir Fraser <keir.fraser@citrix.com>  2007-11-16 17:59:34 +0000
committer Keir Fraser <keir.fraser@citrix.com>  2007-11-16 17:59:34 +0000
commit    503ddff7209dc73c7f0a1a99d59bb22a4f2d0e2f
tree      0d955101a8d47a238cb61ab24498ac26ffa0f3e6 /xen/arch/x86/usercopy.c
parent    22e2d08efba0e2bd745dc7375df34fa8a23bfbac
x86: Clean up the inline asm in some files, and fix
__copy_{to,from}_user_ll() for gcc 3.4 (asm output constraints all need
to be '=&').

Based on a patch by Jan Beulich.

Signed-off-by: Keir Fraser <keir.fraser@citrix.com>
Diffstat (limited to 'xen/arch/x86/usercopy.c')
 -rw-r--r--  xen/arch/x86/usercopy.c | 174
 1 file changed, 94 insertions(+), 80 deletions(-)
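The constraint change named in the commit message is the substantive fix:
the scratch output operand (__d2 in the diff below) is written before the
input operands are dead, so it must be marked earlyclobber ("=&r"). A
minimal standalone sketch of the hazard (not Xen code; the function and
operand names here are invented for illustration):

    /* 'tmp' is written before the input 'n' is read for the last time,
     * so it needs the earlyclobber "=&r". With a plain "=r", the
     * compiler is free to place 'tmp' in the same register as 'n',
     * and the first mov would corrupt the input. */
    #include <stdio.h>

    static unsigned long double_it(unsigned long n)
    {
        unsigned long out, tmp;

        asm ("mov %[in], %[tmp]\n\t"   /* tmp written here...          */
             "mov %[in], %[out]\n\t"   /* ...but 'in' is read again    */
             "add %[tmp], %[out]"
             : [out] "=&r" (out), [tmp] "=&r" (tmp)
             : [in] "r" (n)
             : "cc");

        return out;
    }

    int main(void)
    {
        printf("%lu\n", double_it(21)); /* reliably prints 42 */
        return 0;
    }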
diff --git a/xen/arch/x86/usercopy.c b/xen/arch/x86/usercopy.c
index f16c4da102..76e3abfde2 100644
--- a/xen/arch/x86/usercopy.c
+++ b/xen/arch/x86/usercopy.c
@@ -12,83 +12,87 @@
unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned n)
{
- unsigned long __d0, __d1, __d2, __n = n;
- __asm__ __volatile__(
- " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
- " jbe 1f\n"
- " mov %1,%0\n"
- " neg %0\n"
- " and $"STR(BYTES_PER_LONG-1)",%0\n"
- " sub %0,%3\n"
- "4: rep; movsb\n" /* make 'to' address aligned */
- " mov %3,%0\n"
- " shr $"STR(LONG_BYTEORDER)",%0\n"
- " and $"STR(BYTES_PER_LONG-1)",%3\n"
- " .align 2,0x90\n"
- "0: rep; movs"__OS"\n" /* as many words as possible... */
- " mov %3,%0\n"
- "1: rep; movsb\n" /* ...remainder copied as bytes */
- "2:\n"
- ".section .fixup,\"ax\"\n"
- "5: add %3,%0\n"
- " jmp 2b\n"
- "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
- " jmp 2b\n"
- ".previous\n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN"\n"
- " "__FIXUP_WORD" 4b,5b\n"
- " "__FIXUP_WORD" 0b,3b\n"
- " "__FIXUP_WORD" 1b,2b\n"
- ".previous"
- : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
- : "3"(__n), "0"(__n), "1"(to), "2"(from)
- : "memory");
- return (unsigned)__n;
+ unsigned long __d0, __d1, __d2, __n = n;
+
+ asm volatile (
+ " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
+ " jbe 1f\n"
+ " mov %1,%0\n"
+ " neg %0\n"
+ " and $"STR(BYTES_PER_LONG-1)",%0\n"
+ " sub %0,%3\n"
+ "4: rep movsb\n" /* make 'to' address aligned */
+ " mov %3,%0\n"
+ " shr $"STR(LONG_BYTEORDER)",%0\n"
+ " and $"STR(BYTES_PER_LONG-1)",%3\n"
+ " .align 2,0x90\n"
+ "0: rep movs"__OS"\n" /* as many words as possible... */
+ " mov %3,%0\n"
+ "1: rep movsb\n" /* ...remainder copied as bytes */
+ "2:\n"
+ ".section .fixup,\"ax\"\n"
+ "5: add %3,%0\n"
+ " jmp 2b\n"
+ "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+ " jmp 2b\n"
+ ".previous\n"
+ ".section __ex_table,\"a\"\n"
+ " "__FIXUP_ALIGN"\n"
+ " "__FIXUP_WORD" 4b,5b\n"
+ " "__FIXUP_WORD" 0b,3b\n"
+ " "__FIXUP_WORD" 1b,2b\n"
+ ".previous"
+ : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+ : "0" (__n), "1" (to), "2" (from), "3" (__n)
+ : "memory" );
+
+ return __n;
}
unsigned long
__copy_from_user_ll(void *to, const void __user *from, unsigned n)
{
- unsigned long __d0, __d1, __d2, __n = n;
- __asm__ __volatile__(
- " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
- " jbe 1f\n"
- " mov %1,%0\n"
- " neg %0\n"
- " and $"STR(BYTES_PER_LONG-1)",%0\n"
- " sub %0,%3\n"
- "4: rep; movsb\n" /* make 'to' address aligned */
- " mov %3,%0\n"
- " shr $"STR(LONG_BYTEORDER)",%0\n"
- " and $"STR(BYTES_PER_LONG-1)",%3\n"
- " .align 2,0x90\n"
- "0: rep; movs"__OS"\n" /* as many words as possible... */
- " mov %3,%0\n"
- "1: rep; movsb\n" /* ...remainder copied as bytes */
- "2:\n"
- ".section .fixup,\"ax\"\n"
- "5: add %3,%0\n"
- " jmp 6f\n"
- "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
- "6: push %0\n"
- " push %%"__OP"ax\n"
- " xor %%eax,%%eax\n"
- " rep; stosb\n"
- " pop %%"__OP"ax\n"
- " pop %0\n"
- " jmp 2b\n"
- ".previous\n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN"\n"
- " "__FIXUP_WORD" 4b,5b\n"
- " "__FIXUP_WORD" 0b,3b\n"
- " "__FIXUP_WORD" 1b,6b\n"
- ".previous"
- : "=&c"(__n), "=&D" (__d0), "=&S" (__d1), "=r"(__d2)
- : "3"(__n), "0"(__n), "1"(to), "2"(from)
- : "memory");
- return (unsigned)__n;
+ unsigned long __d0, __d1, __d2, __n = n;
+
+ asm volatile (
+ " cmp $"STR(2*BYTES_PER_LONG-1)",%0\n"
+ " jbe 1f\n"
+ " mov %1,%0\n"
+ " neg %0\n"
+ " and $"STR(BYTES_PER_LONG-1)",%0\n"
+ " sub %0,%3\n"
+ "4: rep; movsb\n" /* make 'to' address aligned */
+ " mov %3,%0\n"
+ " shr $"STR(LONG_BYTEORDER)",%0\n"
+ " and $"STR(BYTES_PER_LONG-1)",%3\n"
+ " .align 2,0x90\n"
+ "0: rep; movs"__OS"\n" /* as many words as possible... */
+ " mov %3,%0\n"
+ "1: rep; movsb\n" /* ...remainder copied as bytes */
+ "2:\n"
+ ".section .fixup,\"ax\"\n"
+ "5: add %3,%0\n"
+ " jmp 6f\n"
+ "3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
+ "6: push %0\n"
+ " push %%"__OP"ax\n"
+ " xor %%eax,%%eax\n"
+ " rep; stosb\n"
+ " pop %%"__OP"ax\n"
+ " pop %0\n"
+ " jmp 2b\n"
+ ".previous\n"
+ ".section __ex_table,\"a\"\n"
+ " "__FIXUP_ALIGN"\n"
+ " "__FIXUP_WORD" 4b,5b\n"
+ " "__FIXUP_WORD" 0b,3b\n"
+ " "__FIXUP_WORD" 1b,6b\n"
+ ".previous"
+ : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
+ : "0" (__n), "1" (to), "2" (from), "3" (__n)
+ : "memory" );
+
+ return __n;
}
/**
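Both routines implement the same three-phase strategy: byte-copy until the
destination is long-aligned, move whole longs with "rep movs", then
byte-copy the remainder; the __ex_table entries route faults into fixup
code that computes how many bytes were left. A plain-C model of the copy
phases (a sketch under x86 assumptions, not the actual Xen code; fault
handling is omitted):

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    static size_t model_copy(void *to, const void *from, size_t n)
    {
        unsigned char *d = to;
        const unsigned char *s = from;

        if (n > 2 * sizeof(long) - 1) {
            /* Byte-copy up to the next long-aligned destination. */
            size_t head = (-(uintptr_t)d) & (sizeof(long) - 1);

            n -= head;
            while (head--)
                *d++ = *s++;

            /* Bulk copy in long-sized units ("rep movs"). */
            for (size_t words = n / sizeof(long); words--; ) {
                memcpy(d, s, sizeof(long));
                d += sizeof(long);
                s += sizeof(long);
            }
            n &= sizeof(long) - 1;
        }

        /* Trailing bytes (and the whole copy when n is small). */
        while (n--)
            *d++ = *s++;

        return 0; /* the real routines return the bytes NOT copied */
    }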
@@ -107,9 +111,9 @@ __copy_from_user_ll(void *to, const void __user *from, unsigned n)
unsigned long
copy_to_user(void __user *to, const void *from, unsigned n)
{
- if (access_ok(to, n))
- n = __copy_to_user(to, from, n);
- return n;
+ if ( access_ok(to, n) )
+ n = __copy_to_user(to, from, n);
+ return n;
}
/**
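The wrapper's contract is that the return value is the number of bytes
left uncopied, with zero meaning complete success. A hedged usage sketch
(fill_guest_buffer and uaddr are invented names; this assumes Xen's usual
uaccess and errno declarations rather than being standalone code):

    /* Hypothetical caller: copy_to_user() returns the number of bytes
     * NOT copied, so any nonzero result is normally mapped to -EFAULT. */
    static long fill_guest_buffer(void __user *uaddr)
    {
        unsigned long val = 42;

        if ( copy_to_user(uaddr, &val, sizeof(val)) )
            return -EFAULT;

        return 0;
    }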
@@ -131,9 +135,19 @@ copy_to_user(void __user *to, const void *from, unsigned n)
unsigned long
copy_from_user(void *to, const void __user *from, unsigned n)
{
- if (access_ok(from, n))
- n = __copy_from_user(to, from, n);
- else
- memset(to, 0, n);
- return n;
+ if ( access_ok(from, n) )
+ n = __copy_from_user(to, from, n);
+ else
+ memset(to, 0, n);
+ return n;
}
+
+/*
+ * Local variables:
+ * mode: C
+ * c-set-style: "BSD"
+ * c-basic-offset: 4
+ * tab-width: 4
+ * indent-tabs-mode: nil
+ * End:
+ */