about summary refs log tree commit diff stats
path: root/xen/include/asm-x86
diff options
context:
space:
mode:
author: Keir Fraser <keir@xen.org> 2010-10-29 18:04:46 +0100
committer: Keir Fraser <keir@xen.org> 2010-10-29 18:04:46 +0100
commit8d2793f2c2f2a403a0a9db264c26956b7461e088 (patch)
treeac2854d67847898d708644e1c9513fc4330b975f /xen/include/asm-x86
parentfd1291a826e17108d7c7f20c887847daba451ef4 (diff)
downloadxen-8d2793f2c2f2a403a0a9db264c26956b7461e088.tar.gz
xen-8d2793f2c2f2a403a0a9db264c26956b7461e088.tar.bz2
xen-8d2793f2c2f2a403a0a9db264c26956b7461e088.zip
x86: Clean up existing XSAVE support
Signed-off-by: Han Weidong <weidong.han@intel.com>
Signed-off-by: Shan Haitao <haitao.shan@intel.com>
Diffstat (limited to 'xen/include/asm-x86')
-rw-r--r--xen/include/asm-x86/hvm/vcpu.h2
-rw-r--r--xen/include/asm-x86/i387.h24
2 files changed, 11 insertions, 15 deletions
diff --git a/xen/include/asm-x86/hvm/vcpu.h b/xen/include/asm-x86/hvm/vcpu.h
index 52c6c09e55..53ef98320f 100644
--- a/xen/include/asm-x86/hvm/vcpu.h
+++ b/xen/include/asm-x86/hvm/vcpu.h
@@ -56,7 +56,7 @@ struct hvm_vcpu {
* #NM handler, we XRSTOR the states we XSAVE-ed;
*/
void *xsave_area;
- uint64_t xfeature_mask;
+ uint64_t xcr0;
struct vlapic vlapic;
s64 cache_tsc_offset;
diff --git a/xen/include/asm-x86/i387.h b/xen/include/asm-x86/i387.h
index 39e0e7df56..ba365c0b3c 100644
--- a/xen/include/asm-x86/i387.h
+++ b/xen/include/asm-x86/i387.h
@@ -15,7 +15,7 @@
#include <asm/processor.h>
extern unsigned int xsave_cntxt_size;
-extern u32 xfeature_low, xfeature_high;
+extern u64 xfeature_mask;
extern void xsave_init(void);
extern void xsave_init_save_area(void *save_area);
@@ -49,45 +49,41 @@ struct xsave_struct
#define REX_PREFIX
#endif
-static inline void xsetbv(u32 index, u64 xfeature_mask)
+static inline void xsetbv(u32 index, u64 xfeatures)
{
- u32 hi = xfeature_mask >> 32;
- u32 lo = (u32)xfeature_mask;
+ u32 hi = xfeatures >> 32;
+ u32 lo = (u32)xfeatures;
asm volatile (".byte 0x0f,0x01,0xd1" :: "c" (index),
"a" (lo), "d" (hi));
}
-static inline void set_xcr0(u64 xfeature_mask)
+static inline void set_xcr0(u64 xfeatures)
{
- xsetbv(XCR_XFEATURE_ENABLED_MASK, xfeature_mask);
+ xsetbv(XCR_XFEATURE_ENABLED_MASK, xfeatures);
}
static inline void xsave(struct vcpu *v)
{
- u64 mask = v->arch.hvm_vcpu.xfeature_mask | XSTATE_FP_SSE;
- u32 lo = mask, hi = mask >> 32;
struct xsave_struct *ptr;
ptr =(struct xsave_struct *)v->arch.hvm_vcpu.xsave_area;
asm volatile (".byte " REX_PREFIX "0x0f,0xae,0x27"
:
- : "a" (lo), "d" (hi), "D"(ptr)
+ : "a" (-1), "d" (-1), "D"(ptr)
: "memory");
}
static inline void xrstor(struct vcpu *v)
{
- u64 mask = v->arch.hvm_vcpu.xfeature_mask | XSTATE_FP_SSE;
- u32 lo = mask, hi = mask >> 32;
struct xsave_struct *ptr;
ptr =(struct xsave_struct *)v->arch.hvm_vcpu.xsave_area;
asm volatile (".byte " REX_PREFIX "0x0f,0xae,0x2f"
:
- : "m" (*ptr), "a" (lo), "d" (hi), "D"(ptr));
+ : "m" (*ptr), "a" (-1), "d" (-1), "D"(ptr));
}
extern void init_fpu(void);
@@ -117,9 +113,9 @@ static inline void setup_fpu(struct vcpu *v)
if ( !v->fpu_initialised )
v->fpu_initialised = 1;
- set_xcr0(v->arch.hvm_vcpu.xfeature_mask | XSTATE_FP_SSE);
+ set_xcr0(v->arch.hvm_vcpu.xcr0 | XSTATE_FP_SSE);
xrstor(v);
- set_xcr0(v->arch.hvm_vcpu.xfeature_mask);
+ set_xcr0(v->arch.hvm_vcpu.xcr0);
}
else
{