path: root/xen/include/asm-x86/x86_64/asm_defns.h
#ifndef __X86_64_ASM_DEFNS_H__
#define __X86_64_ASM_DEFNS_H__

#include <asm/percpu.h>

#ifdef CONFIG_FRAME_POINTER
/* Indicate special exception stack frame by inverting the frame pointer. */
#define SETUP_EXCEPTION_FRAME_POINTER(offs)     \
        leaq  offs(%rsp),%rbp;                  \
        notq  %rbp
#else
#define SETUP_EXCEPTION_FRAME_POINTER(offs)
#endif

#ifndef NDEBUG
#define ASSERT_INTERRUPT_STATUS(x)              \
        pushf;                                  \
        testb $X86_EFLAGS_IF>>8,1(%rsp);        \
        j##x  1f;                               \
        ud2a;                                   \
1:      addq  $8,%rsp;
#else
#define ASSERT_INTERRUPT_STATUS(x)
#endif

#define ASSERT_INTERRUPTS_ENABLED  ASSERT_INTERRUPT_STATUS(nz)
#define ASSERT_INTERRUPTS_DISABLED ASSERT_INTERRUPT_STATUS(z)
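
/*
 * ASSERT_INTERRUPT_STATUS works as follows: pushf stores RFLAGS at (%rsp),
 * and since X86_EFLAGS_IF is bit 9, testing X86_EFLAGS_IF>>8 against the
 * byte at 1(%rsp) isolates the IF flag.  ud2a raises #UD if the expected
 * state (nz: enabled, z: disabled) does not hold; otherwise the pushed
 * flags are dropped again.
 */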

/*
 * This flag is set in an exception frame when registers R12-R15 were not
 * saved.
 */
#define _TRAP_regs_partial 16
#define TRAP_regs_partial  (1 << _TRAP_regs_partial)
/*
 * This flag gets set in an exception frame when registers R12-R15 possibly
 * get modified from their originally saved values and hence need to be
 * restored even if the normal call flow would restore register values.
 *
 * This flag being set implies that _TRAP_regs_partial is clear. Restoring
 * R12-R15 is thus
 * - required when this flag is set,
 * - safe when _TRAP_regs_partial is clear.
 */
#define _TRAP_regs_dirty   17
#define TRAP_regs_dirty    (1 << _TRAP_regs_dirty)

#define mark_regs_dirty(r) ({ \
        struct cpu_user_regs *r__ = (r); \
        ASSERT(!((r__)->entry_vector & TRAP_regs_partial)); \
        r__->entry_vector |= TRAP_regs_dirty; \
})
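
/*
 * Sketch of a (hypothetical) caller: code that rewrites the preserved
 * registers in a frame must do so on a completed frame and then flag it as
 * dirty, so that RESTORE_ALL reloads R12-R15 from the modified frame:
 *
 *     void example_write_r12(struct cpu_user_regs *regs)
 *     {
 *         mark_regs_dirty(regs);   // asserts the frame is not partial
 *         regs->r12 = 0;           // modified value is written back on exit
 *     }
 */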

#define SAVE_ALL                                \
        addq  $-(UREGS_error_code-UREGS_r15), %rsp; \
        cld;                                    \
        movq  %rdi,UREGS_rdi(%rsp);             \
        movq  %rsi,UREGS_rsi(%rsp);             \
        movq  %rdx,UREGS_rdx(%rsp);             \
        movq  %rcx,UREGS_rcx(%rsp);             \
        movq  %rax,UREGS_rax(%rsp);             \
        movq  %r8,UREGS_r8(%rsp);               \
        movq  %r9,UREGS_r9(%rsp);               \
        movq  %r10,UREGS_r10(%rsp);             \
        movq  %r11,UREGS_r11(%rsp);             \
        movq  %rbx,UREGS_rbx(%rsp);             \
        movq  %rbp,UREGS_rbp(%rsp);             \
        SETUP_EXCEPTION_FRAME_POINTER(UREGS_rbp); \
        movq  %r12,UREGS_r12(%rsp);             \
        movq  %r13,UREGS_r13(%rsp);             \
        movq  %r14,UREGS_r14(%rsp);             \
        movq  %r15,UREGS_r15(%rsp);             \

#ifdef __ASSEMBLY__

/*
 * Save all registers not preserved by C code or used in entry/exit code. Mark
 * the frame as partial.
 *
 * @type: exception type
 * @compat: R8-R15 don't need saving, and the frame is nevertheless complete
 */
.macro SAVE_VOLATILE type compat=0
.if \compat
        movl  $\type,UREGS_entry_vector-UREGS_error_code(%rsp)
.else
        movl  $\type|TRAP_regs_partial,\
              UREGS_entry_vector-UREGS_error_code(%rsp)
.endif
        addq  $-(UREGS_error_code-UREGS_r15),%rsp
        cld
        movq  %rdi,UREGS_rdi(%rsp)
        movq  %rsi,UREGS_rsi(%rsp)
        movq  %rdx,UREGS_rdx(%rsp)
        movq  %rcx,UREGS_rcx(%rsp)
        movq  %rax,UREGS_rax(%rsp)
.if !\compat
        movq  %r8,UREGS_r8(%rsp)
        movq  %r9,UREGS_r9(%rsp)
        movq  %r10,UREGS_r10(%rsp)
        movq  %r11,UREGS_r11(%rsp)
.endif
        movq  %rbx,UREGS_rbx(%rsp)
        movq  %rbp,UREGS_rbp(%rsp)
        SETUP_EXCEPTION_FRAME_POINTER(UREGS_rbp)
.endm

/*
 * Complete a frame potentially only partially saved.
 */
.macro SAVE_PRESERVED
        btrl  $_TRAP_regs_partial,UREGS_entry_vector(%rsp)
        jnc   987f
        movq  %r12,UREGS_r12(%rsp)
        movq  %r13,UREGS_r13(%rsp)
        movq  %r14,UREGS_r14(%rsp)
        movq  %r15,UREGS_r15(%rsp)
987:
.endm

/*
 * Reload registers not preserved by C code from frame.
 *
 * @compat: R8-R11 don't need reloading
 *
 * For the way it is used in RESTORE_ALL, this macro must preserve EFLAGS.ZF.
 */
.macro LOAD_C_CLOBBERED compat=0
.if !\compat
        movq  UREGS_r11(%rsp),%r11
        movq  UREGS_r10(%rsp),%r10
        movq  UREGS_r9(%rsp),%r9
        movq  UREGS_r8(%rsp),%r8
.endif
        movq  UREGS_rax(%rsp),%rax
        movq  UREGS_rcx(%rsp),%rcx
        movq  UREGS_rdx(%rsp),%rdx
        movq  UREGS_rsi(%rsp),%rsi
        movq  UREGS_rdi(%rsp),%rdi
.endm

/*
 * Restore all previously saved registers.
 *
 * @adj: extra stack pointer adjustment to be folded into the adjustment done
 *       anyway at the end of the macro
 * @compat: R8-R15 don't need reloading
 */
.macro RESTORE_ALL adj=0 compat=0
.if !\compat
        testl $TRAP_regs_dirty,UREGS_entry_vector(%rsp)
.endif
        LOAD_C_CLOBBERED \compat
.if !\compat
        jz    987f
        movq  UREGS_r15(%rsp),%r15
        movq  UREGS_r14(%rsp),%r14
        movq  UREGS_r13(%rsp),%r13
        movq  UREGS_r12(%rsp),%r12
#ifndef NDEBUG
        .subsection 1
987:    testl $TRAP_regs_partial,UREGS_entry_vector(%rsp)
        jnz   987f
        cmpq  UREGS_r15(%rsp),%r15
        jne   789f
        cmpq  UREGS_r14(%rsp),%r14
        jne   789f
        cmpq  UREGS_r13(%rsp),%r13
        jne   789f
        cmpq  UREGS_r12(%rsp),%r12
        je    987f
789:    ud2
        .subsection 0
#endif
.endif
987:    movq  UREGS_rbp(%rsp),%rbp
        movq  UREGS_rbx(%rsp),%rbx
        subq  $-(UREGS_error_code-UREGS_r15+\adj), %rsp
.endm
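
/*
 * Sketch of a typical pairing (handler name purely illustrative): an
 * exception or interrupt path saves the frame, hands it to C, and unwinds
 * it again.  adj=8 on exit discards the (possibly zero) error code slot
 * sitting below the hardware frame before iretq:
 *
 *         SAVE_ALL
 *         movq  %rsp,%rdi
 *         callq example_c_handler
 *         RESTORE_ALL adj=8
 *         iretq
 */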

#endif

#ifdef PERF_COUNTERS
#define PERFC_INCR(_name,_idx,_cur)             \
        pushq _cur;                             \
        movslq VCPU_processor(_cur),_cur;       \
        pushq %rdx;                             \
        leaq __per_cpu_offset(%rip),%rdx;       \
        movq (%rdx,_cur,8),_cur;                \
        leaq per_cpu__perfcounters(%rip),%rdx;  \
        addq %rdx,_cur;                         \
        popq %rdx;                              \
        incl ASM_PERFC_##_name*4(_cur,_idx,4);  \
        popq _cur
#else
#define PERFC_INCR(_name,_idx,_cur)
#endif
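
/*
 * PERFC_INCR expects _cur to hold the current vcpu pointer (it is saved and
 * restored around the computation) and _idx to hold the counter index; the
 * counter itself lives in the per-CPU perfcounters area of the CPU the vcpu
 * runs on.  A plausible invocation from assembly, assuming a counter named
 * "hypercalls" and a vcpu pointer in %rbx, would be:
 *
 *         PERFC_INCR(hypercalls, %rax, %rbx)
 */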

/* Work around AMD erratum #88 */
#define safe_swapgs                             \
        "mfence; swapgs;"

#ifdef __sun__
#define REX64_PREFIX "rex64\\"
#elif defined(__clang__)
#define REX64_PREFIX ".byte 0x48; "
#else
#define REX64_PREFIX "rex64/"
#endif
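
/*
 * REX64_PREFIX supplies whatever spelling the assembler in use accepts for
 * an explicit REX.W prefix: Sun's assembler wants "rex64\", clang's
 * integrated assembler doesn't accept the mnemonic (so the 0x48 prefix byte
 * is emitted directly), and GNU as takes "rex64/".  A sketch of intended
 * use (instruction and operand purely illustrative):
 *
 *     asm volatile ( REX64_PREFIX "fxsave (%0)" :: "r" (ptr) : "memory" );
 */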

#define BUILD_COMMON_IRQ()                      \
__asm__(                                        \
    "\n" __ALIGN_STR"\n"                        \
    "common_interrupt:\n\t"                     \
    STR(SAVE_ALL) "\n\t"                        \
    "movq %rsp,%rdi\n\t"                        \
    "callq " STR(do_IRQ) "\n\t"                 \
    "jmp ret_from_intr\n");

#define BUILD_IRQ(nr)                           \
    "pushq $0\n\t"                              \
    "movl $"#nr",4(%rsp)\n\t"                   \
    "jmp common_interrupt"

#ifdef __ASSEMBLY__
# define _ASM_EX(p) p-.
#else
# define _ASM_EX(p) #p "-."
#endif
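
/*
 * _ASM_EX(p) yields the offset of the fixup location p relative to the
 * current position, i.e. the self-relative displacement stored in exception
 * table entries; the two variants differ only in whether p still needs to
 * be turned into a string (C inline assembly) or is already assembler text.
 */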

#endif /* __X86_64_ASM_DEFNS_H__ */