about summary refs log tree commit diff stats
path: root/xen/arch/x86/x86_64/entry.S
diff options
context:
space:
mode:
author Jan Beulich <jbeulich@suse.com> 2012-10-30 16:46:07 +0100
committer Jan Beulich <jbeulich@suse.com> 2012-10-30 16:46:07 +0100
commit ecf9846a6a200e8c8cfb26d409d18fd9615df9b9 (patch)
tree 9f3cf4c722a6991a8dc032a050984f9ba8014923 /xen/arch/x86/x86_64/entry.S
parent 4d246723a85a03406e4969a260291e11b8e05960 (diff)
download xen-ecf9846a6a200e8c8cfb26d409d18fd9615df9b9.tar.gz
download xen-ecf9846a6a200e8c8cfb26d409d18fd9615df9b9.tar.bz2
download xen-ecf9846a6a200e8c8cfb26d409d18fd9615df9b9.zip
x86: save/restore only partial register state where possible
... and make restore conditional not only upon having saved the state, but also upon whether saved state was actually modified (and register values are known to have been preserved). Note that RBP is unconditionally considered a volatile register (i.e. irrespective of CONFIG_FRAME_POINTER), since the RBP handling would become overly complicated due to the need to save/restore it on the compat mode hypercall path [6th argument]. Note further that for compat mode code paths, saving/restoring R8...R15 is entirely unnecessary - we don't allow those guests to enter 64-bit mode, and hence they have no way of seeing these registers' contents (and there consequently also is no information leak, except if the context saving domctl would be considered such). Finally, note that this may not properly deal with gdbstub's needs, yet (but if so, I can't really suggest adjustments, as I don't know that code). Signed-off-by: Jan Beulich <jbeulich@suse.com> Acked-by: Keir Fraser <keir@xen.org>
Diffstat (limited to 'xen/arch/x86/x86_64/entry.S')
-rw-r--r-- xen/arch/x86/x86_64/entry.S | 21
1 file changed, 11 insertions(+), 10 deletions(-)
diff --git a/xen/arch/x86/x86_64/entry.S b/xen/arch/x86/x86_64/entry.S
index 9076f63a86..29b26579e1 100644
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -123,9 +123,8 @@ ENTRY(syscall_enter)
movl $FLAT_KERNEL_SS,24(%rsp)
pushq %rcx
pushq $0
- movl $TRAP_syscall,4(%rsp)
- movq 24(%rsp),%r11 /* Re-load user RFLAGS into %r11 before SAVE_ALL */
- SAVE_ALL
+ movq 24(%rsp),%r11 /* Re-load user RFLAGS into %r11 before saving */
+ SAVE_VOLATILE TRAP_syscall
GET_CURRENT(%rbx)
movq VCPU_domain(%rbx),%rcx
testb $1,DOMAIN_is_32bit_pv(%rcx)
@@ -222,6 +221,7 @@ test_guest_events:
/* %rbx: struct vcpu */
process_softirqs:
sti
+ SAVE_PRESERVED
call do_softirq
jmp test_all_events
@@ -275,8 +275,7 @@ sysenter_eflags_saved:
pushq $3 /* ring 3 null cs */
pushq $0 /* null rip */
pushq $0
- movl $TRAP_syscall,4(%rsp)
- SAVE_ALL
+ SAVE_VOLATILE TRAP_syscall
GET_CURRENT(%rbx)
cmpb $0,VCPU_sysenter_disables_events(%rbx)
movq VCPU_sysenter_addr(%rbx),%rax
@@ -286,6 +285,7 @@ sysenter_eflags_saved:
leal (,%rcx,TBF_INTERRUPT),%ecx
UNLIKELY_START(z, sysenter_gpf)
movq VCPU_trap_ctxt(%rbx),%rsi
+ SAVE_PRESERVED
movl $TRAP_gp_fault,UREGS_entry_vector(%rsp)
movl %eax,TRAPBOUNCE_error_code(%rdx)
movq TRAP_gp_fault * TRAPINFO_sizeof + TRAPINFO_eip(%rsi),%rax
@@ -302,7 +302,7 @@ UNLIKELY_END(sysenter_gpf)
ENTRY(int80_direct_trap)
pushq $0
- SAVE_ALL
+ SAVE_VOLATILE 0x80
cmpb $0,untrusted_msi(%rip)
UNLIKELY_START(ne, msi_check)
@@ -331,6 +331,7 @@ int80_slow_path:
* IDT entry with DPL==0.
*/
movl $((0x80 << 3) | 0x2),UREGS_error_code(%rsp)
+ SAVE_PRESERVED
movl $TRAP_gp_fault,UREGS_entry_vector(%rsp)
/* A GPF wouldn't have incremented the instruction pointer. */
subq $2,UREGS_rip(%rsp)
@@ -412,7 +413,7 @@ UNLIKELY_END(bounce_failsafe)
/* Rewrite our stack frame and return to guest-OS mode. */
/* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
/* Also clear AC: alignment checks shouldn't trigger in kernel mode. */
- movl $TRAP_syscall,UREGS_entry_vector+8(%rsp)
+ orl $TRAP_syscall,UREGS_entry_vector+8(%rsp)
andl $~(X86_EFLAGS_AC|X86_EFLAGS_VM|X86_EFLAGS_RF|\
X86_EFLAGS_NT|X86_EFLAGS_TF),UREGS_eflags+8(%rsp)
movq $FLAT_KERNEL_SS,UREGS_ss+8(%rsp)
@@ -477,7 +478,7 @@ handle_exception_saved:
jz exception_with_ints_disabled
sti
1: movq %rsp,%rdi
- movl UREGS_entry_vector(%rsp),%eax
+ movzbl UREGS_entry_vector(%rsp),%eax
leaq exception_table(%rip),%rdx
GET_CURRENT(%rbx)
PERFC_INCR(exceptions, %rax, %rbx)
@@ -518,7 +519,7 @@ exception_with_ints_disabled:
/* No special register assumptions. */
FATAL_exception_with_ints_disabled:
- movl UREGS_entry_vector(%rsp),%edi
+ movzbl UREGS_entry_vector(%rsp),%edi
movq %rsp,%rsi
call fatal_trap
ud2
@@ -624,7 +625,7 @@ handle_ist_exception:
movq %rdi,%rsp
rep movsq
1: movq %rsp,%rdi
- movl UREGS_entry_vector(%rsp),%eax
+ movzbl UREGS_entry_vector(%rsp),%eax
leaq exception_table(%rip),%rdx
callq *(%rdx,%rax,8)
jmp ret_from_intr