author    Keir Fraser <keir.fraser@citrix.com>  2007-11-23 16:23:28 +0000
committer Keir Fraser <keir.fraser@citrix.com>  2007-11-23 16:23:28 +0000
commit    fd2f80c255acc5edf0cea58053c81f392042bf51 (patch)
tree      c15b47372749e83f1e36d0cbc3674d66c60777d8 /extras/mini-os/arch
parent    a222af2ef1f49e5c232466ab162fd606732a9d05 (diff)
[Mini-OS] Fix x86 arch_switch_thread
Fix x86 arch_switch_thread by making it pure assembly. The inline-assembly version was missing general-register clobbers for x86_64, and BP should theoretically be clobbered as well, but gcc does not accept BP as a clobber, so the only simple, safe solution is to use pure assembly.

Signed-off-by: Samuel Thibault <samuel.thibault@citrix.com>
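For context, here is a minimal C-side sketch of how such a pure-assembly switch is typically declared and called. The struct layout and names below are assumptions inferred from the offsets the assembly reads (stack pointer at offset 0, instruction pointer at offset 4 on x86_32 and 8 on x86_64); they are illustrative, not code from this patch:

    /* Hypothetical declarations -- layout inferred from the assembly below. */
    struct thread_regs {
        unsigned long sp;   /* saved stack pointer, read at offset 0 */
        unsigned long ip;   /* saved instruction pointer, offset 4 (or 8 on x86_64) */
    };

    /* Implemented in x86_32.S / x86_64.S; deliberately not inline asm,
     * because gcc cannot be told that EBP/RBP is clobbered. */
    void __arch_switch_threads(struct thread_regs *prev,
                               struct thread_regs *next);

A C-level wrapper would pass pointers to the scheduler's per-thread sp/ip slots; on x86_32 the two arguments arrive on the stack, on x86_64 in %rdi and %rsi.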
Diffstat (limited to 'extras/mini-os/arch')
-rw-r--r--  extras/mini-os/arch/x86/x86_32.S | 18
-rw-r--r--  extras/mini-os/arch/x86/x86_64.S | 20
2 files changed, 38 insertions(+), 0 deletions(-)
diff --git a/extras/mini-os/arch/x86/x86_32.S b/extras/mini-os/arch/x86/x86_32.S
index 2d359ab44a..09ffeda9ba 100644
--- a/extras/mini-os/arch/x86/x86_32.S
+++ b/extras/mini-os/arch/x86/x86_32.S
@@ -288,3 +288,21 @@ ENTRY(thread_starter)
  call *%ebx
  call exit_thread
+ENTRY(__arch_switch_threads)
+ movl 4(%esp), %ecx /* prev */
+ movl 8(%esp), %edx /* next */
+ pushl %ebp /* save callee-saved registers on prev's stack */
+ pushl %ebx
+ pushl %esi
+ pushl %edi
+ movl %esp, (%ecx) /* save ESP */
+ movl (%edx), %esp /* restore ESP */
+ movl $1f, 4(%ecx) /* save EIP */
+ pushl 4(%edx) /* restore EIP */
+ ret
+1:
+ popl %edi /* resumed here: restore callee-saved registers */
+ popl %esi
+ popl %ebx
+ popl %ebp
+ ret
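A note on the control-flow trick above: the routine stores the address of local label 1 as prev's saved EIP, then pushes next's saved EIP and uses ret as an indirect jump to it. Whenever a later switch targets prev again, execution resumes at 1:, popping the callee-saved registers that were pushed onto prev's own stack on entry.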
diff --git a/extras/mini-os/arch/x86/x86_64.S b/extras/mini-os/arch/x86/x86_64.S
index 0f85577716..55b17da5f7 100644
--- a/extras/mini-os/arch/x86/x86_64.S
+++ b/extras/mini-os/arch/x86/x86_64.S
@@ -386,3 +386,23 @@ ENTRY(thread_starter)
  call exit_thread
+ENTRY(__arch_switch_threads)
+ pushq %rbp /* save callee-saved registers on prev's stack */
+ pushq %rbx
+ pushq %r12
+ pushq %r13
+ pushq %r14
+ pushq %r15
+ movq %rsp, (%rdi) /* save RSP */
+ movq (%rsi), %rsp /* restore RSP */
+ movq $1f, 8(%rdi) /* save RIP */
+ pushq 8(%rsi) /* restore RIP */
+ ret
+1:
+ popq %r15 /* resumed here: restore callee-saved registers */
+ popq %r14
+ popq %r13
+ popq %r12
+ popq %rbx
+ popq %rbp
+ ret
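The x86_64 version follows the same protocol but needs no stack loads for its arguments: under the System V AMD64 calling convention, prev and next already arrive in %rdi and %rsi. The callee-saved set is larger (%rbp, %rbx, %r12-%r15), and the instruction-pointer slot sits at offset 8 rather than 4 because the saved words are 64 bits wide.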