author      Suresh Siddha <suresh.b.siddha@intel.com>    2008-07-29 19:29:23 +0200
committer   Ingo Molnar <mingo@elte.hu>                  2008-07-30 19:49:26 +0200
commit      9dc89c0f96a6ce6a1b7f9a47dd8bf6f17e2002c9 (patch)
tree        c5f2c57c241d040585d307acd13375fbd3f74dfe /include/asm-x86
parent      x86, xsave: reorganization of signal save/restore fpstate code layout (diff)
download    linux-9dc89c0f96a6ce6a1b7f9a47dd8bf6f17e2002c9.tar.xz
            linux-9dc89c0f96a6ce6a1b7f9a47dd8bf6f17e2002c9.zip
x86, xsave: xsave/xrstor specific routines
Signed-off-by: Suresh Siddha <suresh.b.siddha@intel.com>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Diffstat (limited to 'include/asm-x86')
-rw-r--r--   include/asm-x86/xsave.h   52
1 files changed, 52 insertions, 0 deletions
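
The routines added by this patch pass the requested xstate component mask to xsave/xrstor in edx:eax (the "a"/"d" operands, lmask/hmask) and point "D" (rdi) at a 64-byte-aligned save area, hand-encoding the instructions with .byte sequences since assemblers of the time lacked the mnemonics. As a rough illustration of that calling convention only, not part of the patch, here is a minimal user-space sketch using the GCC _xsave/_xrstor intrinsics; it assumes a CPU and OS with XSAVE enabled and a compiler invoked with -mxsave, and the buffer size and mask value are illustrative:

/* Hypothetical user-space sketch, not from this commit: shows the same
 * edx:eax component-mask split and 64-byte alignment rule that the
 * kernel routines below encode by hand ("a" = lmask, "d" = hmask,
 * "D" = save area).  Build with: gcc -mxsave xsave_demo.c */
#include <stdint.h>
#include <immintrin.h>

int main(void)
{
        /* The XSAVE area must be 64-byte aligned; the static (zeroed)
         * buffer also keeps the header's reserved fields zero, as
         * XRSTOR expects. */
        static uint8_t area[4096] __attribute__((aligned(64)));
        uint64_t mask = 0x3;        /* bit 0 = x87 state, bit 1 = SSE state */

        _xsave(area, mask);         /* intrinsic splits mask into edx:eax */
        _xrstor(area, mask);        /* restores the components saved above */
        return 0;
}
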
diff --git a/include/asm-x86/xsave.h b/include/asm-x86/xsave.h
index e835a917ee19..b716511aede2 100644
--- a/include/asm-x86/xsave.h
+++ b/include/asm-x86/xsave.h
@@ -48,6 +48,58 @@ static inline int xrstor_checking(struct xsave_struct *fx)
        return err;
}

+static inline int xsave_check(struct xsave_struct __user *buf)
+{
+        int err;
+        __asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x27\n"
+                             "2:\n"
+                             ".section .fixup,\"ax\"\n"
+                             "3: movl $-1,%[err]\n"
+                             "   jmp 2b\n"
+                             ".previous\n"
+                             ".section __ex_table,\"a\"\n"
+                             _ASM_ALIGN "\n"
+                             _ASM_PTR "1b,3b\n"
+                             ".previous"
+                             : [err] "=r" (err)
+                             : "D" (buf), "a" (-1), "d" (-1), "0" (0)
+                             : "memory");
+        if (unlikely(err) && __clear_user(buf, xstate_size))
+                err = -EFAULT;
+        /* No need to clear here because the caller clears USED_MATH */
+        return err;
+}
+
+static inline int xrestore_user(struct xsave_struct __user *buf,
+                                unsigned int lmask,
+                                unsigned int hmask)
+{
+        int err;
+        struct xsave_struct *xstate = ((__force struct xsave_struct *)buf);
+
+        __asm__ __volatile__("1: .byte " REX_PREFIX "0x0f,0xae,0x2f\n"
+                             "2:\n"
+                             ".section .fixup,\"ax\"\n"
+                             "3: movl $-1,%[err]\n"
+                             "   jmp 2b\n"
+                             ".previous\n"
+                             ".section __ex_table,\"a\"\n"
+                             _ASM_ALIGN "\n"
+                             _ASM_PTR "1b,3b\n"
+                             ".previous"
+                             : [err] "=r" (err)
+                             : "D" (xstate), "a" (lmask), "d" (hmask), "0" (0)
+                             : "memory");        /* memory required? */
+        return err;
+}
+
+static inline void xrstor_state(struct xsave_struct *fx, int lmask, int hmask)
+{
+        asm volatile(".byte " REX_PREFIX "0x0f,0xae,0x2f\n\t"
+                     : : "D" (fx), "m" (*fx), "a" (lmask), "d" (hmask)
+                     : "memory");
+}
+
static inline void xsave(struct task_struct *tsk)
{
        /* This, however, we can work around by forcing the compiler to select