about summary refs log tree commit diff stats
path: root/xen
diff options
context:
space:
mode:
author	Ian Campbell <ian.campbell@citrix.com>	2013-07-29 13:21:06 +0100
committer	Ian Campbell <ian.campbell@citrix.com>	2013-07-29 16:54:51 +0100
commit	7d413e38c7ad250a5163ce64358917a84aa8bfbf (patch)
tree	7336ec6b5b9fa5de6ce6819053b637b3e2c994f1 /xen
parent	c6fd2ed3f69247c53c86fd8966361000fec27d12 (diff)
downloadxen-7d413e38c7ad250a5163ce64358917a84aa8bfbf.tar.gz
xen-7d413e38c7ad250a5163ce64358917a84aa8bfbf.tar.bz2
xen-7d413e38c7ad250a5163ce64358917a84aa8bfbf.zip
xen: arm: Handle SMC from 64-bit guests
Similarly to arm32 guests handle it by injecting an undefined instruction trap.

Signed-off-by: Ian Campbell <ian.campbell@citrix.com>
Acked-by: Tim Deegan <tim@xen.org>
Diffstat (limited to 'xen')
-rw-r--r--	xen/arch/arm/traps.c	40
-rw-r--r--	xen/include/asm-arm/processor.h	14
-rw-r--r--	xen/include/public/arch-arm.h	3
3 files changed, 48 insertions, 9 deletions
diff --git a/xen/arch/arm/traps.c b/xen/arch/arm/traps.c
index b4828f307c..1b9209d30d 100644
--- a/xen/arch/arm/traps.c
+++ b/xen/arch/arm/traps.c
@@ -284,25 +284,49 @@ static vaddr_t exception_handler(vaddr_t offset)
* pipeline adjustments). See TakeUndefInstrException pseudocode in
* ARM.
*/
-static void inject_undef_exception(struct cpu_user_regs *regs,
- register_t preferred_return)
+static void inject_undef32_exception(struct cpu_user_regs *regs)
{
uint32_t spsr = regs->cpsr;
int is_thumb = (regs->cpsr & PSR_THUMB);
/* Saved PC points to the instruction past the faulting instruction. */
uint32_t return_offset = is_thumb ? 2 : 4;
+ BUG_ON( !is_pv32_domain(current->domain) );
+
/* Update processor mode */
cpsr_switch_mode(regs, PSR_MODE_UND);
/* Update banked registers */
regs->spsr_und = spsr;
- regs->lr_und = preferred_return + return_offset;
+ regs->lr_und = regs->pc32 + return_offset;
/* Branch to exception vector */
regs->pc32 = exception_handler(VECTOR32_UND);
}
+#ifdef CONFIG_ARM_64
+/* Inject an undefined exception into a 64 bit guest */
+static void inject_undef64_exception(struct cpu_user_regs *regs, int instr_len)
+{
+ union hsr esr = {
+ .iss = 0,
+ .len = instr_len,
+ .ec = HSR_EC_UNKNOWN,
+ };
+
+ BUG_ON( is_pv32_domain(current->domain) );
+
+ regs->spsr_el1 = regs->cpsr;
+ regs->elr_el1 = regs->pc;
+
+ regs->cpsr = PSR_MODE_EL1h | PSR_ABT_MASK | PSR_FIQ_MASK | \
+ PSR_IRQ_MASK | PSR_DBG_MASK;
+ regs->pc = READ_SYSREG(VBAR_EL1) + VECTOR64_CURRENT_SPx_SYNC;
+
+ WRITE_SYSREG32(esr.bits, ESR_EL1);
+}
+#endif
+
struct reg_ctxt {
/* Guest-side state */
uint32_t sctlr_el1, tcr_el1;
@@ -1266,11 +1290,8 @@ asmlinkage void do_trap_hypervisor(struct cpu_user_regs *regs)
goto bad_trap;
do_cp15_64(regs, hsr);
break;
- case HSR_EC_SMC:
- /* PC32 already contains the preferred exception return
- * address, so no need to adjust here.
- */
- inject_undef_exception(regs, regs->pc32);
+ case HSR_EC_SMC32:
+ inject_undef32_exception(regs);
break;
case HSR_EC_HVC32:
#ifndef NDEBUG
@@ -1291,6 +1312,9 @@ asmlinkage void do_trap_hypervisor(struct cpu_user_regs *regs)
return do_trap_psci(regs);
do_trap_hypercall(regs, &regs->x16, hsr.iss);
break;
+ case HSR_EC_SMC64:
+ inject_undef64_exception(regs, hsr.len);
+ break;
case HSR_EC_SYSREG:
if ( is_pv32_domain(current->domain) )
goto bad_trap;
diff --git a/xen/include/asm-arm/processor.h b/xen/include/asm-arm/processor.h
index 960b83e8ad..948bf2de9e 100644
--- a/xen/include/asm-arm/processor.h
+++ b/xen/include/asm-arm/processor.h
@@ -83,6 +83,7 @@
#define HCR_SWIO (1<<1) /* Set/Way Invalidation Override */
#define HCR_VM (1<<0) /* Virtual MMU Enable */
+#define HSR_EC_UNKNOWN 0x00
#define HSR_EC_WFI_WFE 0x01
#define HSR_EC_CP15_32 0x03
#define HSR_EC_CP15_64 0x04
@@ -95,9 +96,10 @@
#define HSR_EC_CP14_64 0x0c
#define HSR_EC_SVC32 0x11
#define HSR_EC_HVC32 0x12
-#define HSR_EC_SMC 0x13
+#define HSR_EC_SMC32 0x13
#ifdef CONFIG_ARM_64
#define HSR_EC_HVC64 0x16
+#define HSR_EC_SMC64 0x17
#define HSR_EC_SYSREG 0x18
#endif
#define HSR_EC_INSTR_ABORT_GUEST 0x20
@@ -388,11 +390,21 @@ union hsr {
#define CNTx_CTL_PENDING (1u<<2) /* IRQ pending */
/* Exception Vector offsets */
+/* ... ARM32 */
#define VECTOR32_RST 0
#define VECTOR32_UND 4
#define VECTOR32_SVC 8
#define VECTOR32_PABT 12
#define VECTOR32_DABT 16
+/* ... ARM64 */
+#define VECTOR64_CURRENT_SP0_SYNC 0x000
+#define VECTOR64_CURRENT_SP0_IRQ 0x080
+#define VECTOR64_CURRENT_SP0_FIQ 0x100
+#define VECTOR64_CURRENT_SP0_ERROR 0x180
+#define VECTOR64_CURRENT_SPx_SYNC 0x200
+#define VECTOR64_CURRENT_SPx_IRQ 0x280
+#define VECTOR64_CURRENT_SPx_FIQ 0x300
+#define VECTOR64_CURRENT_SPx_ERROR 0x380
#if defined(CONFIG_ARM_32)
# include <asm/arm32/processor.h>
diff --git a/xen/include/public/arch-arm.h b/xen/include/public/arch-arm.h
index cea12b2ea8..cbd53a9eda 100644
--- a/xen/include/public/arch-arm.h
+++ b/xen/include/public/arch-arm.h
@@ -234,6 +234,9 @@ typedef uint64_t xen_callback_t;
#define PSR_IRQ_MASK (1<<7) /* Interrupt mask */
#define PSR_ABT_MASK (1<<8) /* Asynchronous Abort mask */
#define PSR_BIG_ENDIAN (1<<9) /* Big Endian Mode */
+#ifdef __aarch64__ /* For Aarch64 bit 9 is repurposed. */
+#define PSR_DBG_MASK (1<<9)
+#endif
#define PSR_IT_MASK (0x0600fc00) /* Thumb If-Then Mask */
#define PSR_JAZELLE (1<<24) /* Jazelle Mode */