about summary refs log tree commit diff stats
diff options
context:
space:
mode:
-rw-r--r--  .rootkeys                             4
-rw-r--r--  xen/include/asm-x86/page.h           12
-rw-r--r--  xen/include/asm-x86/string.h        446
-rw-r--r--  xen/include/asm-x86/types.h          18
-rw-r--r--  xen/include/asm-x86/x86_32/string.h 489
-rw-r--r--  xen/include/asm-x86/x86_64/string.h  16
6 files changed, 456 insertions(+), 529 deletions(-)
diff --git a/.rootkeys b/.rootkeys
index 52f533e1f1..25ea25e441 100644
--- a/.rootkeys
+++ b/.rootkeys
@@ -1387,7 +1387,7 @@
3ddb79c3Hgbb2g8CyWLMCK-6_ZVQSQ xen/include/asm-x86/smp.h
3ddb79c3jn8ALV_S9W5aeTYUQRKBpg xen/include/asm-x86/smpboot.h
3ddb79c3NiyQE2vQnyGiaBnNjBO1rA xen/include/asm-x86/spinlock.h
-40e1966akOHWvvunCED7x3HPv35QvQ xen/include/asm-x86/string.h
+3e7f358aG11EvMI9VJ4_9hD4LUO7rQ xen/include/asm-x86/string.h
3ddb79c3ezddh34MdelJpa5tNR00Dw xen/include/asm-x86/system.h
42033fc1Bb8ffTshBYFGouGkiAMoUQ xen/include/asm-x86/time.h
3ddb79c4HugMq7IYGxcQKFBpKwKhzA xen/include/asm-x86/types.h
@@ -1404,13 +1404,11 @@
429c852fskvSOgcD5EC25_m9um9t4g xen/include/asm-x86/x86_32/page-3level.h
4208e2a3ZNFroNXbX9OYaOB-xtUyDQ xen/include/asm-x86/x86_32/page.h
3ddb79c3mbqEM7QQr3zVq7NiBNhouA xen/include/asm-x86/x86_32/regs.h
-3e7f358aG11EvMI9VJ4_9hD4LUO7rQ xen/include/asm-x86/x86_32/string.h
3ddb79c3M2n1ROZH6xk3HbyN4CPDqg xen/include/asm-x86/x86_32/uaccess.h
41bf1717bML6GxpclTWJabiaO5W5vg xen/include/asm-x86/x86_64/asm_defns.h
41febc4b1aCGLsm0Y0b_82h7lFtrEA xen/include/asm-x86/x86_64/domain_page.h
4208e2a3Fktw4ZttKdDxbhvTQ6brfQ xen/include/asm-x86/x86_64/page.h
404f1bb86rAXB3aLS1vYdcqpJiEcyg xen/include/asm-x86/x86_64/regs.h
-40e1966azOJZfNI6Ilthe6Q-T3Hewg xen/include/asm-x86/x86_64/string.h
404f1bc4tWkB9Qr8RkKtZGW5eMQzhw xen/include/asm-x86/x86_64/uaccess.h
422f27c8RHFkePhD34VIEpMMqofZcA xen/include/asm-x86/x86_emulate.h
400304fcmRQmDdFYEzDh0wcBba9alg xen/include/public/COPYING
diff --git a/xen/include/asm-x86/page.h b/xen/include/asm-x86/page.h
index 536cb63275..2f134031c2 100644
--- a/xen/include/asm-x86/page.h
+++ b/xen/include/asm-x86/page.h
@@ -185,22 +185,26 @@ typedef struct { u64 pfn; } pagetable_t;
#define pfn_valid(_pfn) ((_pfn) < max_page)
/* High table entries are reserved by the hypervisor. */
-/* FIXME: this breaks with PAE -- kraxel */
+#if defined(CONFIG_X86_32) && !defined(CONFIG_PAE)
#define DOMAIN_ENTRIES_PER_L2_PAGETABLE \
(HYPERVISOR_VIRT_START >> L2_PAGETABLE_SHIFT)
#define HYPERVISOR_ENTRIES_PER_L2_PAGETABLE \
(L2_PAGETABLE_ENTRIES - DOMAIN_ENTRIES_PER_L2_PAGETABLE)
+#else
+#define DOMAIN_ENTRIES_PER_L2_PAGETABLE 0
+#define HYPERVISOR_ENTRIES_PER_L2_PAGETABLE 0
+#endif
#define linear_l1_table \
((l1_pgentry_t *)(LINEAR_PT_VIRT_START))
-#define __linear_l2_table \
+#define __linear_l2_table \
((l2_pgentry_t *)(LINEAR_PT_VIRT_START + \
(LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0))))
-#define __linear_l3_table \
+#define __linear_l3_table \
((l3_pgentry_t *)(LINEAR_PT_VIRT_START + \
(LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0)) + \
(LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<1))))
-#define __linear_l4_table \
+#define __linear_l4_table \
((l4_pgentry_t *)(LINEAR_PT_VIRT_START + \
(LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<0)) + \
(LINEAR_PT_VIRT_START >> (PAGETABLE_ORDER<<1)) + \
diff --git a/xen/include/asm-x86/string.h b/xen/include/asm-x86/string.h
index fd7ae02a85..6ab83cf53d 100644
--- a/xen/include/asm-x86/string.h
+++ b/xen/include/asm-x86/string.h
@@ -1,5 +1,445 @@
-#ifdef __x86_64__
-#include <asm/x86_64/string.h>
+#ifndef __X86_STRING_H__
+#define __X86_STRING_H__
+
+#include <xen/config.h>
+
+#define __HAVE_ARCH_STRCPY
+static inline char *strcpy(char *dest, const char *src)
+{
+ long d0, d1, d2;
+ __asm__ __volatile__ (
+ "1: lodsb \n"
+ " stosb \n"
+ " test %%al,%%al \n"
+ " jne 1b \n"
+ : "=&S" (d0), "=&D" (d1), "=&a" (d2)
+ : "0" (src), "1" (dest) : "memory" );
+ return dest;
+}
+
+#define __HAVE_ARCH_STRNCPY
+static inline char *strncpy(char *dest, const char *src, size_t count)
+{
+ long d0, d1, d2, d3;
+ __asm__ __volatile__ (
+ "1: dec %2 \n"
+ " js 2f \n"
+ " lodsb \n"
+ " stosb \n"
+ " test %%al,%%al \n"
+ " jne 1b \n"
+ " rep ; stosb \n"
+ "2: \n"
+ : "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
+ : "0" (src), "1" (dest), "2" (count) : "memory" );
+ return dest;
+}
+
+#define __HAVE_ARCH_STRCAT
+static inline char *strcat(char *dest, const char *src)
+{
+ long d0, d1, d2, d3;
+ __asm__ __volatile__ (
+ " repne ; scasb \n"
+ " dec %1 \n"
+ "1: lodsb \n"
+ " stosb \n"
+ " test %%al,%%al \n"
+ " jne 1b \n"
+ : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
+ : "0" (src), "1" (dest), "2" (0UL), "3" (0xffffffffUL) : "memory" );
+ return dest;
+}
+
+#define __HAVE_ARCH_STRNCAT
+static inline char *strncat(char *dest, const char *src, size_t count)
+{
+ long d0, d1, d2, d3;
+ __asm__ __volatile__ (
+ " repne ; scasb \n"
+ " dec %1 \n"
+ " mov %8,%3 \n"
+ "1: dec %3 \n"
+ " js 2f \n"
+ " lodsb \n"
+ " stosb \n"
+ " test %%al,%%al \n"
+ " jne 1b \n"
+ "2: xor %%eax,%%eax\n"
+ " stosb"
+ : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
+ : "0" (src), "1" (dest), "2" (0UL), "3" (0xffffffffUL), "g" (count)
+ : "memory" );
+ return dest;
+}
+
+#define __HAVE_ARCH_STRCMP
+static inline int strcmp(const char *cs, const char *ct)
+{
+ long d0, d1;
+ register int __res;
+ __asm__ __volatile__ (
+ "1: lodsb \n"
+ " scasb \n"
+ " jne 2f \n"
+ " test %%al,%%al \n"
+ " jne 1b \n"
+ " xor %%eax,%%eax\n"
+ " jmp 3f \n"
+ "2: sbb %%eax,%%eax\n"
+ " or $1,%%al \n"
+ "3: \n"
+ : "=a" (__res), "=&S" (d0), "=&D" (d1)
+ : "1" (cs), "2" (ct) );
+ return __res;
+}
+
+#define __HAVE_ARCH_STRNCMP
+static inline int strncmp(const char *cs, const char *ct, size_t count)
+{
+ long d0, d1, d2;
+ register int __res;
+ __asm__ __volatile__ (
+ "1: dec %3 \n"
+ " js 2f \n"
+ " lodsb \n"
+ " scasb \n"
+ " jne 3f \n"
+ " test %%al,%%al \n"
+ " jne 1b \n"
+ "2: xor %%eax,%%eax\n"
+ " jmp 4f \n"
+ "3: sbb %%eax,%%eax\n"
+ " or $1,%%al \n"
+ "4: \n"
+ : "=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
+ : "1" (cs), "2" (ct), "3" (count) );
+ return __res;
+}
+
+#define __HAVE_ARCH_STRCHR
+static inline char *strchr(const char *s, int c)
+{
+ long d0;
+ register char *__res;
+ __asm__ __volatile__ (
+ " mov %%al,%%ah \n"
+ "1: lodsb \n"
+ " cmp %%ah,%%al \n"
+ " je 2f \n"
+ " test %%al,%%al \n"
+ " jne 1b \n"
+ " mov $1,%1 \n"
+ "2: mov %1,%0 \n"
+ " dec %0 \n"
+ : "=a" (__res), "=&S" (d0) : "1" (s), "0" (c) );
+ return __res;
+}
+
+#define __HAVE_ARCH_STRRCHR
+static inline char *strrchr(const char *s, int c)
+{
+ long d0, d1;
+ register char *__res;
+ __asm__ __volatile__ (
+ " mov %%al,%%ah \n"
+ "1: lodsb \n"
+ " cmp %%ah,%%al \n"
+ " jne 2f \n"
+ " lea -1(%1),%0 \n"
+ "2: test %%al,%%al \n"
+ " jne 1b \n"
+ : "=g" (__res), "=&S" (d0), "=&a" (d1) : "0" (0), "1" (s), "2" (c) );
+ return __res;
+}
+
+#define __HAVE_ARCH_STRLEN
+static inline size_t strlen(const char *s)
+{
+ long d0;
+ register int __res;
+ __asm__ __volatile__ (
+ " repne ; scasb \n"
+ " notl %0 \n"
+ " decl %0 \n"
+ : "=c" (__res), "=&D" (d0) : "1" (s), "a" (0), "0" (0xffffffffUL) );
+ return __res;
+}
+
+static inline void *__variable_memcpy(void *to, const void *from, size_t n)
+{
+ long d0, d1, d2;
+ __asm__ __volatile__ (
+ " rep ; movs"__OS"\n"
+ " mov %4,%3 \n"
+ " rep ; movsb \n"
+ : "=&c" (d0), "=&D" (d1), "=&S" (d2)
+ : "0" (n/BYTES_PER_LONG), "r" (n%BYTES_PER_LONG), "1" (to), "2" (from)
+ : "memory" );
+ return to;
+}
+
+/*
+ * This looks horribly ugly, but the compiler can optimize it totally,
+ * as the count is constant.
+ */
+static always_inline void * __constant_memcpy(
+ void * to, const void * from, size_t n)
+{
+ switch ( n )
+ {
+ case 0:
+ return to;
+ case 1:
+ *(u8 *)to = *(const u8 *)from;
+ return to;
+ case 2:
+ *(u16 *)to = *(const u16 *)from;
+ return to;
+ case 3:
+ *(u16 *)to = *(const u16 *)from;
+ *(2+(u8 *)to) = *(2+(const u8 *)from);
+ return to;
+ case 4:
+ *(u32 *)to = *(const u32 *)from;
+ return to;
+ case 5:
+ *(u32 *)to = *(const u32 *)from;
+ *(4+(u8 *)to) = *(4+(const u8 *)from);
+ return to;
+ case 6:
+ *(u32 *)to = *(const u32 *)from;
+ *(2+(u16 *)to) = *(2+(const u16 *)from);
+ return to;
+ case 7:
+ *(u32 *)to = *(const u32 *)from;
+ *(2+(u16 *)to) = *(2+(const u16 *)from);
+ *(6+(u8 *)to) = *(6+(const u8 *)from);
+ return to;
+ case 8:
+ *(u64 *)to = *(const u64 *)from;
+ return to;
+ case 12:
+ *(u64 *)to = *(const u64 *)from;
+ *(2+(u32 *)to) = *(2+(const u32 *)from);
+ return to;
+ case 16:
+ *(u64 *)to = *(const u64 *)from;
+ *(1+(u64 *)to) = *(1+(const u64 *)from);
+ return to;
+ case 20:
+ *(u64 *)to = *(const u64 *)from;
+ *(1+(u64 *)to) = *(1+(const u64 *)from);
+ *(4+(u32 *)to) = *(4+(const u32 *)from);
+ return to;
+ }
+#define COMMON(x) \
+ __asm__ __volatile__ ( \
+ "rep ; movs"__OS \
+ x \
+ : "=&c" (d0), "=&D" (d1), "=&S" (d2) \
+ : "0" (n/BYTES_PER_LONG), "1" (to), "2" (from) \
+ : "memory" );
+ {
+ long d0, d1, d2;
+ switch ( n % BYTES_PER_LONG )
+ {
+ case 0: COMMON(""); return to;
+ case 1: COMMON("\n\tmovsb"); return to;
+ case 2: COMMON("\n\tmovsw"); return to;
+ case 3: COMMON("\n\tmovsw\n\tmovsb"); return to;
+ case 4: COMMON("\n\tmovsl"); return to;
+ case 5: COMMON("\n\tmovsl\n\tmovsb"); return to;
+ case 6: COMMON("\n\tmovsl\n\tmovsw"); return to;
+ case 7: COMMON("\n\tmovsl\n\tmovsw\n\tmovsb"); return to;
+ }
+ }
+#undef COMMON
+}
+
+#define __HAVE_ARCH_MEMCPY
+#define memcpy(t,f,n) (__memcpy((t),(f),(n)))
+static always_inline
+void *__memcpy(void *t, const void *f, size_t n)
+{
+ return (__builtin_constant_p(n) ?
+ __constant_memcpy((t),(f),(n)) :
+ __variable_memcpy((t),(f),(n)));
+}
+
+/* Some version of gcc don't have this builtin. It's non-critical anyway. */
+#define __HAVE_ARCH_MEMMOVE
+extern void *memmove(void *dest, const void *src, size_t n);
+
+#define __HAVE_ARCH_MEMCMP
+#define memcmp __builtin_memcmp
+
+#define __HAVE_ARCH_MEMCHR
+static inline void *memchr(const void *cs, int c, size_t count)
+{
+ long d0;
+ register void *__res;
+ if ( count == 0 )
+ return NULL;
+ __asm__ __volatile__ (
+ " repne ; scasb\n"
+ " je 1f \n"
+ " mov $1,%0 \n"
+ "1: dec %0 \n"
+ : "=D" (__res), "=&c" (d0) : "a" (c), "0" (cs), "1" (count) );
+ return __res;
+}
+
+static inline void *__memset_generic(void *s, char c, size_t count)
+{
+ long d0, d1;
+ __asm__ __volatile__ (
+ "rep ; stosb"
+ : "=&c" (d0), "=&D" (d1) : "a" (c), "1" (s), "0" (count) : "memory" );
+ return s;
+}
+
+/* we might want to write optimized versions of these later */
+#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
+
+/*
+ * memset(x,0,y) is a reasonably common thing to do, so we want to fill
+ * things 32 bits at a time even when we don't know the size of the
+ * area at compile-time..
+ */
+static inline void *__constant_c_memset(void *s, unsigned long c, size_t count)
+{
+ long d0, d1;
+ __asm__ __volatile__(
+ " rep ; stos"__OS"\n"
+ " mov %3,%4 \n"
+ " rep ; stosb \n"
+ : "=&c" (d0), "=&D" (d1)
+ : "a" (c), "r" (count%BYTES_PER_LONG),
+ "0" (count/BYTES_PER_LONG), "1" (s)
+ : "memory" );
+ return s;
+}
+
+#define __HAVE_ARCH_STRNLEN
+static inline size_t strnlen(const char *s, size_t count)
+{
+ long d0;
+ register int __res;
+ __asm__ __volatile__ (
+ " jmp 2f \n"
+ "1: cmpb $0,(%3) \n"
+ " je 3f \n"
+ " inc %3 \n"
+ "2: dec %1 \n"
+ " jns 1b \n"
+ "3: subl %2,%0 \n"
+ : "=a" (__res), "=&d" (d0)
+ : "c" ((int)(long)s), "0" (s), "1" (count) );
+ return __res;
+}
+
+/*
+ * This looks horribly ugly, but the compiler can optimize it totally,
+ * as we by now know that both pattern and count is constant..
+ */
+static always_inline void *__constant_c_and_count_memset(
+ void *s, unsigned long pattern, size_t count)
+{
+ switch ( count )
+ {
+ case 0:
+ return s;
+ case 1:
+ *(u8 *)s = pattern;
+ return s;
+ case 2:
+ *(u16 *)s = pattern;
+ return s;
+ case 3:
+ *(u16 *)s = pattern;
+ *(2+(u8 *)s) = pattern;
+ return s;
+ case 4:
+ *(u32 *)s = pattern;
+ return s;
+ case 5:
+ *(u32 *)s = pattern;
+ *(4+(u8 *)s) = pattern;
+ return s;
+ case 6:
+ *(u32 *)s = pattern;
+ *(2+(u16 *)s) = pattern;
+ return s;
+ case 7:
+ *(u32 *)s = pattern;
+ *(2+(u16 *)s) = pattern;
+ *(6+(u8 *)s) = pattern;
+ return s;
+ case 8:
+ *(u64 *)s = pattern;
+ return s;
+ }
+#define COMMON(x) \
+ __asm__ __volatile__ ( \
+ "rep ; stos"__OS \
+ x \
+ : "=&c" (d0), "=&D" (d1) \
+ : "a" (pattern), "0" (count/BYTES_PER_LONG), "1" (s) \
+ : "memory" )
+ {
+ long d0, d1;
+ switch ( count % BYTES_PER_LONG )
+ {
+ case 0: COMMON(""); return s;
+ case 1: COMMON("\n\tstosb"); return s;
+ case 2: COMMON("\n\tstosw"); return s;
+ case 3: COMMON("\n\tstosw\n\tstosb"); return s;
+ case 4: COMMON("\n\tstosl"); return s;
+ case 5: COMMON("\n\tstosl\n\tstosb"); return s;
+ case 6: COMMON("\n\tstosl\n\tstosw"); return s;
+ case 7: COMMON("\n\tstosl\n\tstosw\n\tstosb"); return s;
+ }
+ }
+#undef COMMON
+}
+
+#define __constant_c_x_memset(s, c, count) \
+(__builtin_constant_p(count) ? \
+ __constant_c_and_count_memset((s),(c),(count)) : \
+ __constant_c_memset((s),(c),(count)))
+
+#define __var_x_memset(s, c, count) \
+(__builtin_constant_p(count) ? \
+ __constant_count_memset((s),(c),(count)) : \
+ __memset_generic((s),(c),(count)))
+
+#ifdef CONFIG_X86_64
+#define MEMSET_PATTERN_MUL 0x0101010101010101UL
#else
-#include <asm/x86_32/string.h>
+#define MEMSET_PATTERN_MUL 0x01010101UL
#endif
+
+#define __HAVE_ARCH_MEMSET
+#define memset(s, c, count) (__memset((s),(c),(count)))
+#define __memset(s, c, count) \
+(__builtin_constant_p(c) ? \
+ __constant_c_x_memset((s),(MEMSET_PATTERN_MUL*(unsigned char)(c)),(count)) : \
+ __var_x_memset((s),(c),(count)))
+
+#define __HAVE_ARCH_MEMSCAN
+static inline void *memscan(void *addr, int c, size_t size)
+{
+ if ( size == 0 )
+ return addr;
+ __asm__ (
+ " repnz; scasb \n"
+ " jnz 1f \n"
+ " dec %0 \n"
+ "1: \n"
+ : "=D" (addr), "=c" (size)
+ : "0" (addr), "1" (size), "a" (c) );
+ return addr;
+}
+
+#endif /* __X86_STRING_H__ */
diff --git a/xen/include/asm-x86/types.h b/xen/include/asm-x86/types.h
index 55a3e0e5c9..9cec42e7a7 100644
--- a/xen/include/asm-x86/types.h
+++ b/xen/include/asm-x86/types.h
@@ -1,8 +1,5 @@
-#ifndef _X86_TYPES_H
-#define _X86_TYPES_H
-
-typedef unsigned short umode_t;
-
+#ifndef __X86_TYPES_H__
+#define __X86_TYPES_H__
/*
* __xx is ok: it doesn't pollute the POSIX namespace. Use these in the
@@ -45,7 +42,6 @@ typedef unsigned long long u64;
#define BITS_PER_LONG 32
#define BYTES_PER_LONG 4
#define LONG_BYTEORDER 2
-typedef unsigned int size_t;
#if defined(CONFIG_X86_PAE)
typedef u64 physaddr_t;
#else
@@ -57,15 +53,9 @@ typedef unsigned long u64;
#define BITS_PER_LONG 64
#define BYTES_PER_LONG 8
#define LONG_BYTEORDER 3
-typedef unsigned long size_t;
typedef u64 physaddr_t;
#endif
-/* DMA addresses come in generic and 64-bit flavours. */
-
-typedef unsigned long dma_addr_t;
-typedef u64 dma64_addr_t;
-
-typedef unsigned short xmem_bufctl_t;
+typedef unsigned long size_t;
-#endif
+#endif /* __X86_TYPES_H__ */
diff --git a/xen/include/asm-x86/x86_32/string.h b/xen/include/asm-x86/x86_32/string.h
deleted file mode 100644
index b25ec8df61..0000000000
--- a/xen/include/asm-x86/x86_32/string.h
+++ /dev/null
@@ -1,489 +0,0 @@
-#ifndef _I386_STRING_H_
-#define _I386_STRING_H_
-
-#include <xen/config.h>
-
-/*
- * This string-include defines all string functions as inline
- * functions. Use gcc. It also assumes ds=es=data space, this should be
- * normal. Most of the string-functions are rather heavily hand-optimized,
- * see especially strtok,strstr,str[c]spn. They should work, but are not
- * very easy to understand. Everything is done entirely within the register
- * set, making the functions fast and clean. String instructions have been
- * used through-out, making for "slightly" unclear code :-)
- *
- * NO Copyright (C) 1991, 1992 Linus Torvalds,
- * consider these trivial functions to be PD.
- */
-
-
-#define __HAVE_ARCH_STRCPY
-static inline char * strcpy(char * dest,const char *src)
-{
-int d0, d1, d2;
-__asm__ __volatile__(
- "1:\tlodsb\n\t"
- "stosb\n\t"
- "testb %%al,%%al\n\t"
- "jne 1b"
- : "=&S" (d0), "=&D" (d1), "=&a" (d2)
- :"0" (src),"1" (dest) : "memory");
-return dest;
-}
-
-#define __HAVE_ARCH_STRNCPY
-static inline char * strncpy(char * dest,const char *src,size_t count)
-{
-int d0, d1, d2, d3;
-__asm__ __volatile__(
- "1:\tdecl %2\n\t"
- "js 2f\n\t"
- "lodsb\n\t"
- "stosb\n\t"
- "testb %%al,%%al\n\t"
- "jne 1b\n\t"
- "rep\n\t"
- "stosb\n"
- "2:"
- : "=&S" (d0), "=&D" (d1), "=&c" (d2), "=&a" (d3)
- :"0" (src),"1" (dest),"2" (count) : "memory");
-return dest;
-}
-
-#define __HAVE_ARCH_STRCAT
-static inline char * strcat(char * dest,const char * src)
-{
-int d0, d1, d2, d3;
-__asm__ __volatile__(
- "repne\n\t"
- "scasb\n\t"
- "decl %1\n"
- "1:\tlodsb\n\t"
- "stosb\n\t"
- "testb %%al,%%al\n\t"
- "jne 1b"
- : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
- : "0" (src), "1" (dest), "2" (0), "3" (0xffffffff):"memory");
-return dest;
-}
-
-#define __HAVE_ARCH_STRNCAT
-static inline char * strncat(char * dest,const char * src,size_t count)
-{
-int d0, d1, d2, d3;
-__asm__ __volatile__(
- "repne\n\t"
- "scasb\n\t"
- "decl %1\n\t"
- "movl %8,%3\n"
- "1:\tdecl %3\n\t"
- "js 2f\n\t"
- "lodsb\n\t"
- "stosb\n\t"
- "testb %%al,%%al\n\t"
- "jne 1b\n"
- "2:\txorl %2,%2\n\t"
- "stosb"
- : "=&S" (d0), "=&D" (d1), "=&a" (d2), "=&c" (d3)
- : "0" (src),"1" (dest),"2" (0),"3" (0xffffffff), "g" (count)
- : "memory");
-return dest;
-}
-
-#define __HAVE_ARCH_STRCMP
-static inline int strcmp(const char * cs,const char * ct)
-{
-int d0, d1;
-register int __res;
-__asm__ __volatile__(
- "1:\tlodsb\n\t"
- "scasb\n\t"
- "jne 2f\n\t"
- "testb %%al,%%al\n\t"
- "jne 1b\n\t"
- "xorl %%eax,%%eax\n\t"
- "jmp 3f\n"
- "2:\tsbbl %%eax,%%eax\n\t"
- "orb $1,%%al\n"
- "3:"
- :"=a" (__res), "=&S" (d0), "=&D" (d1)
- :"1" (cs),"2" (ct));
-return __res;
-}
-
-#define __HAVE_ARCH_STRNCMP
-static inline int strncmp(const char * cs,const char * ct,size_t count)
-{
-register int __res;
-int d0, d1, d2;
-__asm__ __volatile__(
- "1:\tdecl %3\n\t"
- "js 2f\n\t"
- "lodsb\n\t"
- "scasb\n\t"
- "jne 3f\n\t"
- "testb %%al,%%al\n\t"
- "jne 1b\n"
- "2:\txorl %%eax,%%eax\n\t"
- "jmp 4f\n"
- "3:\tsbbl %%eax,%%eax\n\t"
- "orb $1,%%al\n"
- "4:"
- :"=a" (__res), "=&S" (d0), "=&D" (d1), "=&c" (d2)
- :"1" (cs),"2" (ct),"3" (count));
-return __res;
-}
-
-#define __HAVE_ARCH_STRCHR
-static inline char * strchr(const char * s, int c)
-{
-int d0;
-register char * __res;
-__asm__ __volatile__(
- "movb %%al,%%ah\n"
- "1:\tlodsb\n\t"
- "cmpb %%ah,%%al\n\t"
- "je 2f\n\t"
- "testb %%al,%%al\n\t"
- "jne 1b\n\t"
- "movl $1,%1\n"
- "2:\tmovl %1,%0\n\t"
- "decl %0"
- :"=a" (__res), "=&S" (d0) : "1" (s),"0" (c));
-return __res;
-}
-
-#define __HAVE_ARCH_STRRCHR
-static inline char * strrchr(const char * s, int c)
-{
-int d0, d1;
-register char * __res;
-__asm__ __volatile__(
- "movb %%al,%%ah\n"
- "1:\tlodsb\n\t"
- "cmpb %%ah,%%al\n\t"
- "jne 2f\n\t"
- "leal -1(%%esi),%0\n"
- "2:\ttestb %%al,%%al\n\t"
- "jne 1b"
- :"=g" (__res), "=&S" (d0), "=&a" (d1) :"0" (0),"1" (s),"2" (c));
-return __res;
-}
-
-#define __HAVE_ARCH_STRLEN
-static inline size_t strlen(const char * s)
-{
-int d0;
-register int __res;
-__asm__ __volatile__(
- "repne\n\t"
- "scasb\n\t"
- "notl %0\n\t"
- "decl %0"
- :"=c" (__res), "=&D" (d0) :"1" (s),"a" (0), "0" (0xffffffff));
-return __res;
-}
-
-static inline void * __variable_memcpy(void * to, const void * from, size_t n)
-{
-int d0, d1, d2;
-__asm__ __volatile__(
- "rep ; movsl\n\t"
- "testb $2,%b4\n\t"
- "je 1f\n\t"
- "movsw\n"
- "1:\ttestb $1,%b4\n\t"
- "je 2f\n\t"
- "movsb\n"
- "2:"
- : "=&c" (d0), "=&D" (d1), "=&S" (d2)
- :"0" (n/4), "q" (n),"1" ((long) to),"2" ((long) from)
- : "memory");
-return (to);
-}
-
-/*
- * This looks horribly ugly, but the compiler can optimize it totally,
- * as the count is constant.
- */
-static always_inline void * __constant_memcpy(void * to, const void * from, size_t n)
-{
- switch (n) {
- case 0:
- return to;
- case 1:
- *(unsigned char *)to = *(const unsigned char *)from;
- return to;
- case 2:
- *(unsigned short *)to = *(const unsigned short *)from;
- return to;
- case 3:
- *(unsigned short *)to = *(const unsigned short *)from;
- *(2+(unsigned char *)to) = *(2+(const unsigned char *)from);
- return to;
- case 4:
- *(unsigned long *)to = *(const unsigned long *)from;
- return to;
- case 6: /* for Ethernet addresses */
- *(unsigned long *)to = *(const unsigned long *)from;
- *(2+(unsigned short *)to) = *(2+(const unsigned short *)from);
- return to;
- case 8:
- *(unsigned long *)to = *(const unsigned long *)from;
- *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
- return to;
- case 12:
- *(unsigned long *)to = *(const unsigned long *)from;
- *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
- *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
- return to;
- case 16:
- *(unsigned long *)to = *(const unsigned long *)from;
- *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
- *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
- *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
- return to;
- case 20:
- *(unsigned long *)to = *(const unsigned long *)from;
- *(1+(unsigned long *)to) = *(1+(const unsigned long *)from);
- *(2+(unsigned long *)to) = *(2+(const unsigned long *)from);
- *(3+(unsigned long *)to) = *(3+(const unsigned long *)from);
- *(4+(unsigned long *)to) = *(4+(const unsigned long *)from);
- return to;
- }
-#define COMMON(x) \
-__asm__ __volatile__( \
- "rep ; movsl" \
- x \
- : "=&c" (d0), "=&D" (d1), "=&S" (d2) \
- : "0" (n/4),"1" ((long) to),"2" ((long) from) \
- : "memory");
-{
- int d0, d1, d2;
- switch (n % 4) {
- case 0: COMMON(""); return to;
- case 1: COMMON("\n\tmovsb"); return to;
- case 2: COMMON("\n\tmovsw"); return to;
- default: COMMON("\n\tmovsw\n\tmovsb"); return to;
- }
-}
-
-#undef COMMON
-}
-
-#define __HAVE_ARCH_MEMCPY
-#define memcpy(t,f,n) (__memcpy((t),(f),(n)))
-static always_inline
-void *__memcpy(void *t, const void *f, size_t n)
-{
- return (__builtin_constant_p(n) ?
- __constant_memcpy((t),(f),(n)) :
- __variable_memcpy((t),(f),(n)));
-}
-
-/*
- * struct_cpy(x,y), copy structure *x into (matching structure) *y.
- *
- * We get link-time errors if the structure sizes do not match.
- * There is no runtime overhead, it's all optimized away at
- * compile time.
- */
-//extern void __struct_cpy_bug (void);
-
-/*
-#define struct_cpy(x,y) \
-({ \
- if (sizeof(*(x)) != sizeof(*(y))) \
- __struct_cpy_bug; \
- memcpy(x, y, sizeof(*(x))); \
-})
-*/
-
-#define __HAVE_ARCH_MEMMOVE
-#define memmove(dest,src,n) (__memmove((dest),(src),(n)))
-static inline void *__memmove(void * dest,const void * src, size_t n)
-{
-int d0, d1, d2;
-if (dest<src)
-__asm__ __volatile__(
- "rep\n\t"
- "movsb"
- : "=&c" (d0), "=&S" (d1), "=&D" (d2)
- :"0" (n),"1" (src),"2" (dest)
- : "memory");
-else
-__asm__ __volatile__(
- "std\n\t"
- "rep\n\t"
- "movsb\n\t"
- "cld"
- : "=&c" (d0), "=&S" (d1), "=&D" (d2)
- :"0" (n),
- "1" (n-1+(const char *)src),
- "2" (n-1+(char *)dest)
- :"memory");
-return dest;
-}
-
-#define __HAVE_ARCH_MEMCMP
-#define memcmp __builtin_memcmp
-
-#define __HAVE_ARCH_MEMCHR
-static inline void * memchr(const void * cs,int c,size_t count)
-{
-int d0;
-register void * __res;
-if (!count)
- return NULL;
-__asm__ __volatile__(
- "repne\n\t"
- "scasb\n\t"
- "je 1f\n\t"
- "movl $1,%0\n"
- "1:\tdecl %0"
- :"=D" (__res), "=&c" (d0) : "a" (c),"0" (cs),"1" (count));
-return __res;
-}
-
-static inline void * __memset_generic(void * s, char c,size_t count)
-{
-int d0, d1;
-__asm__ __volatile__(
- "rep\n\t"
- "stosb"
- : "=&c" (d0), "=&D" (d1)
- :"a" (c),"1" (s),"0" (count)
- :"memory");
-return s;
-}
-
-/* we might want to write optimized versions of these later */
-#define __constant_count_memset(s,c,count) __memset_generic((s),(c),(count))
-
-/*
- * memset(x,0,y) is a reasonably common thing to do, so we want to fill
- * things 32 bits at a time even when we don't know the size of the
- * area at compile-time..
- */
-static inline void * __constant_c_memset(void * s, unsigned long c, size_t count)
-{
-int d0, d1;
-__asm__ __volatile__(
- "rep ; stosl\n\t"
- "testb $2,%b3\n\t"
- "je 1f\n\t"
- "stosw\n"
- "1:\ttestb $1,%b3\n\t"
- "je 2f\n\t"
- "stosb\n"
- "2:"
- : "=&c" (d0), "=&D" (d1)
- :"a" (c), "q" (count), "0" (count/4), "1" ((long) s)
- :"memory");
-return (s);
-}
-
-/* Added by Gertjan van Wingerde to make minix and sysv module work */
-#define __HAVE_ARCH_STRNLEN
-static inline size_t strnlen(const char * s, size_t count)
-{
-int d0;
-register int __res;
-__asm__ __volatile__(
- "movl %2,%0\n\t"
- "jmp 2f\n"
- "1:\tcmpb $0,(%0)\n\t"
- "je 3f\n\t"
- "incl %0\n"
- "2:\tdecl %1\n\t"
- "cmpl $-1,%1\n\t"
- "jne 1b\n"
- "3:\tsubl %2,%0"
- :"=a" (__res), "=&d" (d0)
- :"c" (s),"1" (count));
-return __res;
-}
-/* end of additional stuff */
-
-//#define __HAVE_ARCH_STRSTR
-
-//extern char *strstr(const char *cs, const char *ct);
-
-/*
- * This looks horribly ugly, but the compiler can optimize it totally,
- * as we by now know that both pattern and count is constant..
- */
-static always_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
-{
- switch (count) {
- case 0:
- return s;
- case 1:
- *(unsigned char *)s = pattern;
- return s;
- case 2:
- *(unsigned short *)s = pattern;
- return s;
- case 3:
- *(unsigned short *)s = pattern;
- *(2+(unsigned char *)s) = pattern;
- return s;
- case 4:
- *(unsigned long *)s = pattern;
- return s;
- }
-#define COMMON(x) \
-__asm__ __volatile__( \
- "rep ; stosl" \
- x \
- : "=&c" (d0), "=&D" (d1) \
- : "a" (pattern),"0" (count/4),"1" ((long) s) \
- : "memory")
-{
- int d0, d1;
- switch (count % 4) {
- case 0: COMMON(""); return s;
- case 1: COMMON("\n\tstosb"); return s;
- case 2: COMMON("\n\tstosw"); return s;
- default: COMMON("\n\tstosw\n\tstosb"); return s;
- }
-}
-
-#undef COMMON
-}
-
-#define __constant_c_x_memset(s, c, count) \
-(__builtin_constant_p(count) ? \
- __constant_c_and_count_memset((s),(c),(count)) : \
- __constant_c_memset((s),(c),(count)))
-
-#define __var_x_memset(s, c, count) \
-(__builtin_constant_p(count) ? \
- __constant_count_memset((s),(c),(count)) : \
- __memset_generic((s),(c),(count)))
-
-#define __HAVE_ARCH_MEMSET
-#define memset(s, c, count) (__memset((s),(c),(count)))
-#define __memset(s, c, count) \
-(__builtin_constant_p(c) ? \
- __constant_c_x_memset((s),(0x01010101UL*(unsigned char)(c)),(count)) : \
- __var_x_memset((s),(c),(count)))
-
-/*
- * find the first occurrence of byte 'c', or 1 past the area if none
- */
-#define __HAVE_ARCH_MEMSCAN
-static inline void * memscan(void * addr, int c, size_t size)
-{
- if (!size)
- return addr;
- __asm__("repnz; scasb\n\t"
- "jnz 1f\n\t"
- "dec %%edi\n"
- "1:"
- : "=D" (addr), "=c" (size)
- : "0" (addr), "1" (size), "a" (c));
- return addr;
-}
-
-#endif
diff --git a/xen/include/asm-x86/x86_64/string.h b/xen/include/asm-x86/x86_64/string.h
deleted file mode 100644
index 613c982718..0000000000
--- a/xen/include/asm-x86/x86_64/string.h
+++ /dev/null
@@ -1,16 +0,0 @@
-#ifndef _X86_64_STRING_H_
-#define _X86_64_STRING_H_
-
-#define __HAVE_ARCH_MEMCPY
-#define memcpy(t,f,n) (__memcpy((t),(f),(n)))
-#define __memcpy(t,f,n) (__builtin_memcpy((t),(f),(n)))
-
-#define __HAVE_ARCH_MEMSET
-#define memset(s, c, count) (__memset((s),(c),(count)))
-#define __memset(s, c, count) (__builtin_memset((s),(c),(count)))
-
-/* Some versions of 64-bit gcc don't have this built in. */
-#define __HAVE_ARCH_MEMMOVE
-extern void *memmove(void *dest, const void *src, size_t n);
-
-#endif