author    Keir Fraser <keir@xen.org>    2010-12-24 08:47:23 +0000
committer Keir Fraser <keir@xen.org>    2010-12-24 08:47:23 +0000
commit    0c4ff0fb92533aff947945f3562221a83bbfb941
tree      6c87342b77718bc05e02e23841051c8aa01d6abe
parent    abf60d361232f53ad3c51fab74ff3b6f084d7f8e
x86-64: use PC-relative exception table entries
... thus allowing the entries to be made half their current size.

Rather than adjusting all instances to the new layout, abstract the
construction of the table entries via a macro (paralleling a similar one
in recent Linux). Also change the name of the section (to allow easier
detection of missed cases) and merge the final resulting output sections
into .data.read_mostly.

Signed-off-by: Jan Beulich <jbeulich@novell.com>
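For orientation before the diff: the patch stores each exception-table field
as a signed 32-bit distance from the field itself to the address it names.
A minimal C sketch of that encoding (not part of the patch; it mirrors the
new struct and the 64-bit EX_FIELD()/ex_addr() helpers below):

    #include <stdint.h>

    /* New x86-64 layout: two self-relative 32-bit offsets instead of two
     * absolute unsigned longs, halving each entry from 16 to 8 bytes. */
    struct exception_table_entry {
        int32_t addr;   /* faulting address, stored as target - &addr */
        int32_t cont;   /* continuation (fixup), stored as target - &cont */
    };

    /* Decoding adds the stored offset back to the field's own address;
     * this is what the patch's 64-bit EX_FIELD() expands to. */
    static inline unsigned long ex_addr(const struct exception_table_entry *x)
    {
        return (unsigned long)&x->addr + x->addr;
    }

    static inline unsigned long ex_cont(const struct exception_table_entry *x)
    {
        return (unsigned long)&x->cont + x->cont;
    }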
-rw-r--r--  xen/arch/x86/cpu/amd.c                    |  10
-rw-r--r--  xen/arch/x86/domain.c                     |   5
-rw-r--r--  xen/arch/x86/extable.c                    |  68
-rw-r--r--  xen/arch/x86/i387.c                       |   5
-rw-r--r--  xen/arch/x86/usercopy.c                   |  18
-rw-r--r--  xen/arch/x86/x86_32/asm-offsets.c         |   1
-rw-r--r--  xen/arch/x86/x86_32/entry.S               |  48
-rw-r--r--  xen/arch/x86/x86_64/asm-offsets.c         |   1
-rw-r--r--  xen/arch/x86/x86_64/compat/entry.S        |  34
-rw-r--r--  xen/arch/x86/x86_64/entry.S               |  28
-rw-r--r--  xen/arch/x86/x86_64/mm.c                  |   5
-rw-r--r--  xen/arch/x86/xen.lds.S                    |  20
-rw-r--r--  xen/include/asm-x86/asm_defns.h           |  20
-rw-r--r--  xen/include/asm-x86/config.h              |   4
-rw-r--r--  xen/include/asm-x86/hvm/vmx/vmx.h         |  11
-rw-r--r--  xen/include/asm-x86/msr.h                 |  11
-rw-r--r--  xen/include/asm-x86/uaccess.h             |  13
-rw-r--r--  xen/include/asm-x86/x86_32/asm_defns.h    |   6
-rw-r--r--  xen/include/asm-x86/x86_32/system.h       |  10
-rw-r--r--  xen/include/asm-x86/x86_32/uaccess.h      |  14
-rw-r--r--  xen/include/asm-x86/x86_64/asm_defns.h    |   6
-rw-r--r--  xen/include/asm-x86/x86_64/system.h       |   5
22 files changed, 175 insertions(+), 168 deletions(-)
diff --git a/xen/arch/x86/cpu/amd.c b/xen/arch/x86/cpu/amd.c
index 2fb25c0e70..780fa69f88 100644
--- a/xen/arch/x86/cpu/amd.c
+++ b/xen/arch/x86/cpu/amd.c
@@ -53,10 +53,7 @@ static inline int rdmsr_amd_safe(unsigned int msr, unsigned int *lo,
"3: movl %6,%2\n"
" jmp 2b\n"
".previous\n"
- ".section __ex_table,\"a\"\n"
- __FIXUP_ALIGN "\n"
- __FIXUP_WORD " 1b,3b\n"
- ".previous\n"
+ _ASM_EXTABLE(1b, 3b)
: "=a" (*lo), "=d" (*hi), "=r" (err)
: "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT));
@@ -73,10 +70,7 @@ static inline int wrmsr_amd_safe(unsigned int msr, unsigned int lo,
"3: movl %6,%0\n"
" jmp 2b\n"
".previous\n"
- ".section __ex_table,\"a\"\n"
- __FIXUP_ALIGN "\n"
- __FIXUP_WORD " 1b,3b\n"
- ".previous\n"
+ _ASM_EXTABLE(1b, 3b)
: "=r" (err)
: "c" (msr), "a" (lo), "d" (hi), "D" (0x9c5a203a),
"0" (0), "i" (-EFAULT));
diff --git a/xen/arch/x86/domain.c b/xen/arch/x86/domain.c
index 313be27714..f051df0ee3 100644
--- a/xen/arch/x86/domain.c
+++ b/xen/arch/x86/domain.c
@@ -1070,10 +1070,7 @@ arch_do_vcpu_op(
" movl %k0,%%" #seg "\n" \
" jmp 2b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " .align 8\n" \
- " .quad 1b,3b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 3b) \
: "=r" (__r) : "r" (value), "0" (__r) );\
__r; })
diff --git a/xen/arch/x86/extable.c b/xen/arch/x86/extable.c
index da822d34af..8c64585483 100644
--- a/xen/arch/x86/extable.c
+++ b/xen/arch/x86/extable.c
@@ -2,6 +2,7 @@
#include <xen/config.h>
#include <xen/init.h>
#include <xen/perfc.h>
+#include <xen/sort.h>
#include <xen/spinlock.h>
#include <asm/uaccess.h>
@@ -10,29 +11,58 @@ extern struct exception_table_entry __stop___ex_table[];
extern struct exception_table_entry __start___pre_ex_table[];
extern struct exception_table_entry __stop___pre_ex_table[];
-static void __init sort_exception_table(struct exception_table_entry *start,
- struct exception_table_entry *end)
+#ifdef __i386__
+#define EX_FIELD(ptr, field) (ptr)->field
+#define swap_ex NULL
+#else
+#define EX_FIELD(ptr, field) ((unsigned long)&(ptr)->field + (ptr)->field)
+#endif
+
+static inline unsigned long ex_addr(const struct exception_table_entry *x)
{
- struct exception_table_entry *p, *q, tmp;
+ return EX_FIELD(x, addr);
+}
- for ( p = start; p < end; p++ )
- {
- for ( q = p-1; q > start; q-- )
- if ( p->insn > q->insn )
- break;
- if ( ++q != p )
- {
- tmp = *p;
- memmove(q+1, q, (p-q)*sizeof(*p));
- *q = tmp;
- }
- }
+static inline unsigned long ex_cont(const struct exception_table_entry *x)
+{
+ return EX_FIELD(x, cont);
+}
+
+static int __init cmp_ex(const void *a, const void *b)
+{
+ const struct exception_table_entry *l = a, *r = b;
+ unsigned long lip = ex_addr(l);
+ unsigned long rip = ex_addr(r);
+
+ /* avoid overflow */
+ if (lip > rip)
+ return 1;
+ if (lip < rip)
+ return -1;
+ return 0;
+}
+
+#ifndef swap_ex
+static void __init swap_ex(void *a, void *b, int size)
+{
+ struct exception_table_entry *l = a, *r = b, tmp;
+ long delta = b - a;
+
+ tmp = *l;
+ l->addr = r->addr + delta;
+ l->cont = r->cont + delta;
+ r->addr = tmp.addr - delta;
+ r->cont = tmp.cont - delta;
}
+#endif
void __init sort_exception_tables(void)
{
- sort_exception_table(__start___ex_table, __stop___ex_table);
- sort_exception_table(__start___pre_ex_table, __stop___pre_ex_table);
+ sort(__start___ex_table, __stop___ex_table - __start___ex_table,
+ sizeof(struct exception_table_entry), cmp_ex, swap_ex);
+ sort(__start___pre_ex_table,
+ __stop___pre_ex_table - __start___pre_ex_table,
+ sizeof(struct exception_table_entry), cmp_ex, swap_ex);
}
static inline unsigned long
@@ -46,9 +76,9 @@ search_one_table(const struct exception_table_entry *first,
while ( first <= last )
{
mid = (last - first) / 2 + first;
- diff = mid->insn - value;
+ diff = ex_addr(mid) - value;
if (diff == 0)
- return mid->fixup;
+ return ex_cont(mid);
else if (diff < 0)
first = mid+1;
else
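A note on the custom swap_ex() above: once entries are self-relative, a plain
byte-wise swap during sorting would leave every offset relative to the slot
the entry used to occupy. Moving a record by delta bytes requires subtracting
delta from each stored offset, which is the adjustment swap_ex() performs (on
x86-32 the fields stay absolute, so swap_ex is NULL and sort()'s generic swap
suffices). A toy, self-contained demonstration of the rebasing rule, with
hypothetical names, not Xen code:

    #include <assert.h>
    #include <stdint.h>

    struct rec { int32_t off; };            /* stores target - &off */

    static void *deref(const struct rec *r)
    {
        return (char *)&r->off + r->off;
    }

    int main(void)
    {
        static char target;
        struct rec table[2];

        table[0].off = (int32_t)((char *)&target - (char *)&table[0].off);

        /* Copy the record one slot to the right: rebase the offset by the
         * distance moved, as swap_ex() does, or it would dangle. */
        long delta = (char *)&table[1] - (char *)&table[0];
        table[1].off = table[0].off - (int32_t)delta;

        assert(deref(&table[1]) == &target);
        return 0;
    }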
diff --git a/xen/arch/x86/i387.c b/xen/arch/x86/i387.c
index 5f3f041613..477efec973 100644
--- a/xen/arch/x86/i387.c
+++ b/xen/arch/x86/i387.c
@@ -122,10 +122,7 @@ void restore_fpu(struct vcpu *v)
" pop %%"__OP"ax \n"
" jmp 1b \n"
".previous \n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN" \n"
- " "__FIXUP_WORD" 1b,2b \n"
- ".previous \n"
+ _ASM_EXTABLE(1b, 2b)
:
: "m" (*fpu_ctxt),
"i" (sizeof(v->arch.guest_context.fpu_ctxt)/4)
diff --git a/xen/arch/x86/usercopy.c b/xen/arch/x86/usercopy.c
index 76e3abfde2..d88e635bb0 100644
--- a/xen/arch/x86/usercopy.c
+++ b/xen/arch/x86/usercopy.c
@@ -36,12 +36,9 @@ unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned n)
"3: lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
" jmp 2b\n"
".previous\n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN"\n"
- " "__FIXUP_WORD" 4b,5b\n"
- " "__FIXUP_WORD" 0b,3b\n"
- " "__FIXUP_WORD" 1b,2b\n"
- ".previous"
+ _ASM_EXTABLE(4b, 5b)
+ _ASM_EXTABLE(0b, 3b)
+ _ASM_EXTABLE(1b, 2b)
: "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
: "0" (__n), "1" (to), "2" (from), "3" (__n)
: "memory" );
@@ -82,12 +79,9 @@ __copy_from_user_ll(void *to, const void __user *from, unsigned n)
" pop %0\n"
" jmp 2b\n"
".previous\n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN"\n"
- " "__FIXUP_WORD" 4b,5b\n"
- " "__FIXUP_WORD" 0b,3b\n"
- " "__FIXUP_WORD" 1b,6b\n"
- ".previous"
+ _ASM_EXTABLE(4b, 5b)
+ _ASM_EXTABLE(0b, 3b)
+ _ASM_EXTABLE(1b, 6b)
: "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
: "0" (__n), "1" (to), "2" (from), "3" (__n)
: "memory" );
diff --git a/xen/arch/x86/x86_32/asm-offsets.c b/xen/arch/x86/x86_32/asm-offsets.c
index 4124ce7edc..c3bba7f4be 100644
--- a/xen/arch/x86/x86_32/asm-offsets.c
+++ b/xen/arch/x86/x86_32/asm-offsets.c
@@ -3,6 +3,7 @@
* This code generates raw asm output which is post-processed
* to extract and format the required data.
*/
+#define COMPILE_OFFSETS
#include <xen/config.h>
#include <xen/perfc.h>
diff --git a/xen/arch/x86/x86_32/entry.S b/xen/arch/x86/x86_32/entry.S
index 4d34b90dd5..d332b8ab0b 100644
--- a/xen/arch/x86/x86_32/entry.S
+++ b/xen/arch/x86/x86_32/entry.S
@@ -119,16 +119,12 @@ failsafe_callback:
movl %eax,UREGS_gs(%esp)
jmp test_all_events
.previous
-.section __pre_ex_table,"a"
- .long .Lft1,.Lfx1
- .long .Lft2,.Lfx1
- .long .Lft3,.Lfx1
- .long .Lft4,.Lfx1
- .long .Lft5,.Lfx1
-.previous
-.section __ex_table,"a"
- .long .Ldf1,failsafe_callback
-.previous
+ _ASM_PRE_EXTABLE(.Lft1, .Lfx1)
+ _ASM_PRE_EXTABLE(.Lft2, .Lfx1)
+ _ASM_PRE_EXTABLE(.Lft3, .Lfx1)
+ _ASM_PRE_EXTABLE(.Lft4, .Lfx1)
+ _ASM_PRE_EXTABLE(.Lft5, .Lfx1)
+ _ASM_EXTABLE(.Ldf1, failsafe_callback)
ALIGN
restore_all_xen:
@@ -392,18 +388,26 @@ UNLIKELY_END(bounce_vm86_3)
movl TRAPBOUNCE_eip(%edx),%eax
movl %eax,UREGS_eip+4(%esp)
ret
-.section __ex_table,"a"
- .long .Lft6,domain_crash_synchronous , .Lft7,domain_crash_synchronous
- .long .Lft8,domain_crash_synchronous , .Lft9,domain_crash_synchronous
- .long .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
- .long .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
- .long .Lft14,domain_crash_synchronous , .Lft15,domain_crash_synchronous
- .long .Lft16,domain_crash_synchronous , .Lft17,domain_crash_synchronous
- .long .Lft18,domain_crash_synchronous , .Lft19,domain_crash_synchronous
- .long .Lft20,domain_crash_synchronous , .Lft21,domain_crash_synchronous
- .long .Lft22,domain_crash_synchronous , .Lft23,domain_crash_synchronous
- .long .Lft24,domain_crash_synchronous , .Lft25,domain_crash_synchronous
-.previous
+ _ASM_EXTABLE(.Lft6, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft7, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft8, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft9, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft14, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft15, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft16, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft17, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft18, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft19, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft20, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft21, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft22, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft23, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft24, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft25, domain_crash_synchronous)
domain_crash_synchronous_string:
.asciz "domain_crash_sync called from entry.S (%lx)\n"
diff --git a/xen/arch/x86/x86_64/asm-offsets.c b/xen/arch/x86/x86_64/asm-offsets.c
index 424137ce43..21b4358f93 100644
--- a/xen/arch/x86/x86_64/asm-offsets.c
+++ b/xen/arch/x86/x86_64/asm-offsets.c
@@ -3,6 +3,7 @@
* This code generates raw asm output which is post-processed
* to extract and format the required data.
*/
+#define COMPILE_OFFSETS
#include <xen/config.h>
#include <xen/perfc.h>
diff --git a/xen/arch/x86/x86_64/compat/entry.S b/xen/arch/x86/x86_64/compat/entry.S
index c2b7cf05c4..13e04ad370 100644
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -197,12 +197,8 @@ compat_failsafe_callback:
1: call compat_create_bounce_frame
jmp compat_test_all_events
.previous
-.section __pre_ex_table,"a"
- .quad .Lft0,.Lfx0
-.previous
-.section __ex_table,"a"
- .quad .Ldf0,compat_failsafe_callback
-.previous
+ _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
+ _ASM_EXTABLE(.Ldf0, compat_failsafe_callback)
/* %rdx: trap_bounce, %rbx: struct vcpu */
ENTRY(compat_post_handle_exception)
@@ -330,15 +326,19 @@ UNLIKELY_END(compat_bounce_failsafe)
xorl %edi,%edi
jmp .Lft13
.previous
-.section __ex_table,"a"
- .quad .Lft1,domain_crash_synchronous , .Lft2,compat_crash_page_fault
- .quad .Lft3,compat_crash_page_fault_4 , .Lft4,domain_crash_synchronous
- .quad .Lft5,compat_crash_page_fault_4 , .Lft6,compat_crash_page_fault_8
- .quad .Lft7,compat_crash_page_fault , .Lft8,compat_crash_page_fault
- .quad .Lft9,compat_crash_page_fault_12, .Lft10,compat_crash_page_fault_8
- .quad .Lft11,compat_crash_page_fault_4 , .Lft12,compat_crash_page_fault
- .quad .Lft13,.Lfx13
-.previous
+ _ASM_EXTABLE(.Lft1, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft2, compat_crash_page_fault)
+ _ASM_EXTABLE(.Lft3, compat_crash_page_fault_4)
+ _ASM_EXTABLE(.Lft4, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft5, compat_crash_page_fault_4)
+ _ASM_EXTABLE(.Lft6, compat_crash_page_fault_8)
+ _ASM_EXTABLE(.Lft7, compat_crash_page_fault)
+ _ASM_EXTABLE(.Lft8, compat_crash_page_fault)
+ _ASM_EXTABLE(.Lft9, compat_crash_page_fault_12)
+ _ASM_EXTABLE(.Lft10, compat_crash_page_fault_8)
+ _ASM_EXTABLE(.Lft11, compat_crash_page_fault_4)
+ _ASM_EXTABLE(.Lft12, compat_crash_page_fault)
+ _ASM_EXTABLE(.Lft13, .Lfx13)
compat_crash_page_fault_12:
addl $4,%esi
@@ -356,9 +356,7 @@ compat_crash_page_fault:
xorl %edi,%edi
jmp .Lft14
.previous
-.section __ex_table,"a"
- .quad .Lft14,.Lfx14
-.previous
+ _ASM_EXTABLE(.Lft14, .Lfx14)
.section .rodata, "a", @progbits
diff --git a/xen/arch/x86/x86_64/entry.S b/xen/arch/x86/x86_64/entry.S
index 7c5fe92786..b30f2d0c13 100644
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -84,12 +84,8 @@ failsafe_callback:
1: call create_bounce_frame
jmp test_all_events
.previous
-.section __pre_ex_table,"a"
- .quad .Lft0,.Lfx0
-.previous
-.section __ex_table,"a"
- .quad .Ldf0,failsafe_callback
-.previous
+ _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
+ _ASM_EXTABLE(.Ldf0, failsafe_callback)
ALIGN
/* No special register assumptions. */
@@ -412,14 +408,18 @@ UNLIKELY_END(bounce_failsafe)
jz domain_crash_synchronous
movq %rax,UREGS_rip+8(%rsp)
ret
-.section __ex_table,"a"
- .quad .Lft2,domain_crash_synchronous , .Lft3,domain_crash_synchronous
- .quad .Lft4,domain_crash_synchronous , .Lft5,domain_crash_synchronous
- .quad .Lft6,domain_crash_synchronous , .Lft7,domain_crash_synchronous
- .quad .Lft8,domain_crash_synchronous , .Lft9,domain_crash_synchronous
- .quad .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
- .quad .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
-.previous
+ _ASM_EXTABLE(.Lft2, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft3, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft4, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft5, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft6, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft7, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft8, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft9, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
+ _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
domain_crash_synchronous_string:
.asciz "domain_crash_sync called from entry.S\n"
diff --git a/xen/arch/x86/x86_64/mm.c b/xen/arch/x86/x86_64/mm.c
index 1de4e8b03d..e973c0edcb 100644
--- a/xen/arch/x86/x86_64/mm.c
+++ b/xen/arch/x86/x86_64/mm.c
@@ -1119,10 +1119,7 @@ long do_set_segment_base(unsigned int which, unsigned long base)
"2: xorl %k0,%k0 \n"
" jmp 1b \n"
".previous \n"
- ".section __ex_table,\"a\"\n"
- " .align 8 \n"
- " .quad 1b,2b \n"
- ".previous "
+ _ASM_EXTABLE(1b, 2b)
: : "r" (base&0xffff) );
break;
diff --git a/xen/arch/x86/xen.lds.S b/xen/arch/x86/xen.lds.S
index 1ffd4e98de..49691d35c5 100644
--- a/xen/arch/x86/xen.lds.S
+++ b/xen/arch/x86/xen.lds.S
@@ -38,18 +38,19 @@ SECTIONS
*(.rodata.*)
} :text
- . = ALIGN(32); /* Exception table */
- __ex_table : {
+ . = ALIGN(SMP_CACHE_BYTES);
+ .data.read_mostly : {
+ /* Exception table */
__start___ex_table = .;
- *(__ex_table)
+ *(.ex_table)
__stop___ex_table = .;
- } :text
- . = ALIGN(32); /* Pre-exception table */
- __pre_ex_table : {
+ /* Pre-exception table */
__start___pre_ex_table = .;
- *(__pre_ex_table)
+ *(.ex_table.pre)
__stop___pre_ex_table = .;
+
+ *(.data.read_mostly)
} :text
.data : { /* Data */
@@ -59,11 +60,6 @@ SECTIONS
CONSTRUCTORS
} :text
- . = ALIGN(SMP_CACHE_BYTES);
- .data.read_mostly : {
- *(.data.read_mostly)
- } :text
-
#ifdef LOCK_PROFILE
. = ALIGN(32);
__lock_profile_start = .;
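The section rename is what makes the "easier detection of missed cases" from
the commit message concrete: code still emitting the old name no longer
matches the *(.ex_table) input pattern above, so the linker places it in an
orphan __ex_table output section instead of silently folding it into the
sorted table. A hypothetical leftover would look like:

    /* Stale, unconverted fixup -- its entry now lands in an orphan
     * "__ex_table" output section, visible in the section listing: */
    .section __ex_table, "a"
        .long 1b, 3b
    .previous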
diff --git a/xen/include/asm-x86/asm_defns.h b/xen/include/asm-x86/asm_defns.h
index 46402c1c82..ca6152e64a 100644
--- a/xen/include/asm-x86/asm_defns.h
+++ b/xen/include/asm-x86/asm_defns.h
@@ -2,8 +2,10 @@
#ifndef __X86_ASM_DEFNS_H__
#define __X86_ASM_DEFNS_H__
+#ifndef COMPILE_OFFSETS
/* NB. Auto-generated from arch/.../asm-offsets.c */
#include <asm/asm-offsets.h>
+#endif
#include <asm/processor.h>
#ifdef __x86_64__
@@ -12,6 +14,24 @@
#include <asm/x86_32/asm_defns.h>
#endif
+/* Exception table entry */
+#ifdef __ASSEMBLY__
+# define _ASM__EXTABLE(sfx, from, to) \
+ .section .ex_table##sfx, "a" ; \
+ .balign 4 ; \
+ .long _ASM_EX(from), _ASM_EX(to) ; \
+ .previous
+#else
+# define _ASM__EXTABLE(sfx, from, to) \
+ " .section .ex_table" #sfx ",\"a\"\n" \
+ " .balign 4\n" \
+ " .long " _ASM_EX(from) ", " _ASM_EX(to) "\n" \
+ " .previous\n"
+#endif
+
+#define _ASM_EXTABLE(from, to) _ASM__EXTABLE(, from, to)
+#define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
+
#ifdef __ASSEMBLY__
#define UNLIKELY_START(cond, tag) \
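Taken together with the per-subarch _ASM_EX() definitions further down (plain
p on x86-32, p-. on x86-64), a C-level _ASM_EXTABLE(1b, 3b) on x86-64 should
assemble to roughly the following, where each word stores the target minus
the word's own address:

    .section .ex_table, "a"
        .balign 4
        .long 1b - ., 3b - .    /* self-relative: decoded by ex_addr()/ex_cont() */
    .previous

On x86-32 the same macro emits absolute .long entries, so that subarch keeps
its old table semantics while sharing the one construction site.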
diff --git a/xen/include/asm-x86/config.h b/xen/include/asm-x86/config.h
index 58cbe490e6..27c766d2db 100644
--- a/xen/include/asm-x86/config.h
+++ b/xen/include/asm-x86/config.h
@@ -274,8 +274,6 @@ extern unsigned int video_mode, video_flags;
/* For generic assembly code: use macros to define operation/operand sizes. */
#define __OS "q" /* Operation Suffix */
#define __OP "r" /* Operand Prefix */
-#define __FIXUP_ALIGN ".align 8"
-#define __FIXUP_WORD ".quad"
#elif defined(__i386__)
@@ -351,8 +349,6 @@ extern unsigned int video_mode, video_flags;
/* For generic assembly code: use macros to define operation/operand sizes. */
#define __OS "l" /* Operation Suffix */
#define __OP "e" /* Operand Prefix */
-#define __FIXUP_ALIGN ".align 4"
-#define __FIXUP_WORD ".long"
#endif /* __i386__ */
diff --git a/xen/include/asm-x86/hvm/vmx/vmx.h b/xen/include/asm-x86/hvm/vmx/vmx.h
index 60e64061a9..6feeb81f9d 100644
--- a/xen/include/asm-x86/hvm/vmx/vmx.h
+++ b/xen/include/asm-x86/hvm/vmx/vmx.h
@@ -22,6 +22,7 @@
#include <xen/sched.h>
#include <asm/types.h>
#include <asm/regs.h>
+#include <asm/asm_defns.h>
#include <asm/processor.h>
#include <asm/i387.h>
#include <asm/hvm/support.h>
@@ -341,10 +342,7 @@ static inline void __invvpid(int type, u16 vpid, u64 gva)
asm volatile ( "1: " INVVPID_OPCODE MODRM_EAX_08
/* CF==1 or ZF==1 --> crash (ud2) */
"ja 2f ; ud2 ; 2:\n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN"\n"
- " "__FIXUP_WORD" 1b,2b\n"
- ".previous"
+ _ASM_EXTABLE(1b, 2b)
:
: "a" (&operand), "c" (type)
: "memory" );
@@ -404,10 +402,7 @@ static inline int __vmxon(u64 addr)
".section .fixup,\"ax\"\n"
"3: sub $2,%0 ; jmp 2b\n" /* #UD or #GP --> rc = -2 */
".previous\n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN"\n"
- " "__FIXUP_WORD" 1b,3b\n"
- ".previous\n"
+ _ASM_EXTABLE(1b, 3b)
: "=q" (rc)
: "0" (0), "a" (&addr)
: "memory");
diff --git a/xen/include/asm-x86/msr.h b/xen/include/asm-x86/msr.h
index ab3360c683..09fa288081 100644
--- a/xen/include/asm-x86/msr.h
+++ b/xen/include/asm-x86/msr.h
@@ -8,6 +8,7 @@
#include <xen/types.h>
#include <xen/percpu.h>
#include <xen/errno.h>
+#include <asm/asm_defns.h>
#define rdmsr(msr,val1,val2) \
__asm__ __volatile__("rdmsr" \
@@ -44,10 +45,7 @@ static inline void wrmsrl(unsigned int msr, __u64 val)
"3: xorl %0,%0\n; xorl %1,%1\n" \
" movl %5,%2\n; jmp 2b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " "__FIXUP_ALIGN"\n" \
- " "__FIXUP_WORD" 1b,3b\n" \
- ".previous\n" \
+ _ASM_EXTABLE(1b, 3b) \
: "=a" (lo), "=d" (hi), "=&r" (_rc) \
: "c" (msr), "2" (0), "i" (-EFAULT)); \
val = lo | ((uint64_t)hi << 32); \
@@ -66,10 +64,7 @@ static inline int wrmsr_safe(unsigned int msr, uint64_t val)
".section .fixup,\"ax\"\n"
"3: movl %5,%0\n; jmp 2b\n"
".previous\n"
- ".section __ex_table,\"a\"\n"
- " "__FIXUP_ALIGN"\n"
- " "__FIXUP_WORD" 1b,3b\n"
- ".previous\n"
+ _ASM_EXTABLE(1b, 3b)
: "=&r" (_rc)
: "c" (msr), "a" (lo), "d" (hi), "0" (0), "i" (-EFAULT));
return _rc;
diff --git a/xen/include/asm-x86/uaccess.h b/xen/include/asm-x86/uaccess.h
index cd43529cf8..af624dfab6 100644
--- a/xen/include/asm-x86/uaccess.h
+++ b/xen/include/asm-x86/uaccess.h
@@ -6,6 +6,7 @@
#include <xen/compiler.h>
#include <xen/errno.h>
#include <xen/prefetch.h>
+#include <asm/asm_defns.h>
#include <asm/page.h>
#ifdef __x86_64__
@@ -155,10 +156,7 @@ struct __large_struct { unsigned long buf[100]; };
"3: mov %3,%0\n" \
" jmp 2b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " "__FIXUP_ALIGN"\n" \
- " "__FIXUP_WORD" 1b,3b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 3b) \
: "=r"(err) \
: ltype (x), "m"(__m(addr)), "i"(errret), "0"(err))
@@ -171,10 +169,7 @@ struct __large_struct { unsigned long buf[100]; };
" xor"itype" %"rtype"1,%"rtype"1\n" \
" jmp 2b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " "__FIXUP_ALIGN"\n" \
- " "__FIXUP_WORD" 1b,3b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 3b) \
: "=r"(err), ltype (x) \
: "m"(__m(addr)), "i"(errret), "0"(err))
@@ -272,7 +267,7 @@ __copy_from_user(void *to, const void __user *from, unsigned long n)
struct exception_table_entry
{
- unsigned long insn, fixup;
+ s32 addr, cont;
};
extern unsigned long search_exception_table(unsigned long);
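The struct change above is where the "half their current size" claim lands:
on x86-64 the old insn/fixup pair occupied two 8-byte longs, the new
addr/cont pair two 4-byte s32 fields; on x86-32 both layouts are 8 bytes and
the fields keep holding absolute addresses. The s32 reach assumes table and
targets stay within +/-2GB of each other, which the hypervisor's linked image
satisfies. A compile-time sketch of the arithmetic (hypothetical names, not
Xen code):

    #include <stdint.h>

    struct old_entry { unsigned long insn, fixup; }; /* 16 bytes on x86-64 */
    struct new_entry { int32_t addr, cont; };        /*  8 bytes           */

    /* Refuses to compile unless the new entry is exactly half the old one
     * (array of negative size otherwise). */
    typedef char entry_halved[sizeof(struct new_entry) * 2 ==
                              sizeof(struct old_entry) ? 1 : -1];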
diff --git a/xen/include/asm-x86/x86_32/asm_defns.h b/xen/include/asm-x86/x86_32/asm_defns.h
index fc6916126c..e81ebdfc54 100644
--- a/xen/include/asm-x86/x86_32/asm_defns.h
+++ b/xen/include/asm-x86/x86_32/asm_defns.h
@@ -153,4 +153,10 @@ STR(IRQ) #nr "_interrupt:\n\t" \
GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg) \
movl (reg),reg;
+#ifdef __ASSEMBLY__
+# define _ASM_EX(p) p
+#else
+# define _ASM_EX(p) #p
+#endif
+
#endif /* __X86_32_ASM_DEFNS_H__ */
diff --git a/xen/include/asm-x86/x86_32/system.h b/xen/include/asm-x86/x86_32/system.h
index 40aa63c1c7..134b6d0581 100644
--- a/xen/include/asm-x86/x86_32/system.h
+++ b/xen/include/asm-x86/x86_32/system.h
@@ -49,10 +49,7 @@ static always_inline unsigned long long __cmpxchg8b(
"3: movl $1,%1\n" \
" jmp 2b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " .align 4\n" \
- " .long 1b,3b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 3b) \
: "=a" (_o), "=r" (_rc) \
: _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
: "memory");
@@ -78,10 +75,7 @@ static always_inline unsigned long long __cmpxchg8b(
"3: movl $1,%1\n" \
" jmp 2b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " .align 4\n" \
- " .long 1b,3b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 3b) \
: "=A" (_o), "=r" (_rc) \
: "c" ((u32)((u64)(_n)>>32)), "b" ((u32)(_n)), \
"m" (*__xg((volatile void *)(_p))), "0" (_o), "1" (0) \
diff --git a/xen/include/asm-x86/x86_32/uaccess.h b/xen/include/asm-x86/x86_32/uaccess.h
index b2a92d2632..d6a52301af 100644
--- a/xen/include/asm-x86/x86_32/uaccess.h
+++ b/xen/include/asm-x86/x86_32/uaccess.h
@@ -33,11 +33,8 @@ extern void __uaccess_var_not_u64(void);
"4: movl %3,%0\n" \
" jmp 3b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " .align 4\n" \
- " .long 1b,4b\n" \
- " .long 2b,4b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 4b) \
+ _ASM_EXTABLE(2b, 4b) \
: "=r"(retval) \
: "A" (x), "r" (addr), "i"(errret), "0"(retval))
@@ -65,11 +62,8 @@ do { \
" xorl %%edx,%%edx\n" \
" jmp 3b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " .align 4\n" \
- " .long 1b,4b\n" \
- " .long 2b,4b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 4b) \
+ _ASM_EXTABLE(2b, 4b) \
: "=r" (retval), "=&A" (x) \
: "r" (addr), "i"(errret), "0"(retval))
diff --git a/xen/include/asm-x86/x86_64/asm_defns.h b/xen/include/asm-x86/x86_64/asm_defns.h
index a56ac63211..24620bae4f 100644
--- a/xen/include/asm-x86/x86_64/asm_defns.h
+++ b/xen/include/asm-x86/x86_64/asm_defns.h
@@ -130,4 +130,10 @@ STR(IRQ) #nr "_interrupt:\n\t" \
GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg) \
movq (reg),reg;
+#ifdef __ASSEMBLY__
+# define _ASM_EX(p) p-.
+#else
+# define _ASM_EX(p) #p "-."
+#endif
+
#endif /* __X86_64_ASM_DEFNS_H__ */
diff --git a/xen/include/asm-x86/x86_64/system.h b/xen/include/asm-x86/x86_64/system.h
index 4f183c0535..20f038bf94 100644
--- a/xen/include/asm-x86/x86_64/system.h
+++ b/xen/include/asm-x86/x86_64/system.h
@@ -19,10 +19,7 @@
"3: movl $1,%1\n" \
" jmp 2b\n" \
".previous\n" \
- ".section __ex_table,\"a\"\n" \
- " .align 8\n" \
- " .quad 1b,3b\n" \
- ".previous" \
+ _ASM_EXTABLE(1b, 3b) \
: "=a" (_o), "=r" (_rc) \
: _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
: "memory");