ptr->fpu_sse.x[FPU_WORD_SIZE_OFFSET] = word_size;
}
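+/*
+ * Fixup for a faulting XRSTOR: zero the whole save area with REP STOSB,
+ * restore the area pointer and the low half of the restore mask (both
+ * clobbered by the fixup itself), then retry the instruction on the now
+ * all-zero, i.e. init-state, image.
+ */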
+#define XRSTOR_FIXUP ".section .fixup,\"ax\" \n" \
+ "2: mov %[size],%%ecx \n" \
+ " xor %[lmask_out],%[lmask_out] \n" \
+ " rep stosb \n" \
+ " lea %[mem],%[ptr] \n" \
+ " mov %[lmask_in],%[lmask_out] \n" \
+ " jmp 1b \n" \
+ ".previous \n" \
+ _ASM_EXTABLE(1b, 2b)
+
void xrstor(struct vcpu *v, uint64_t mask)
{
uint32_t hmask = mask >> 32;
{
default:
asm volatile ( "1: .byte 0x48,0x0f,0xae,0x2f\n"
- ".section .fixup,\"ax\" \n"
- "2: mov %5,%%ecx \n"
- " xor %1,%1 \n"
- " rep stosb \n"
- " lea %2,%0 \n"
- " mov %3,%1 \n"
- " jmp 1b \n"
- ".previous \n"
- _ASM_EXTABLE(1b, 2b)
- : "+&D" (ptr), "+&a" (lmask)
- : "m" (*ptr), "g" (lmask), "d" (hmask),
- "m" (xsave_cntxt_size)
+ XRSTOR_FIXUP
+ : [ptr] "+&D" (ptr), [lmask_out] "+&a" (lmask)
+ : [mem] "m" (*ptr), [lmask_in] "g" (lmask),
+ [hmask] "d" (hmask), [size] "m" (xsave_cntxt_size)
: "ecx" );
break;
case 4: case 2:
asm volatile ( "1: .byte 0x0f,0xae,0x2f\n"
- ".section .fixup,\"ax\" \n"
- "2: mov %5,%%ecx \n"
- " xor %1,%1 \n"
- " rep stosb \n"
- " lea %2,%0 \n"
- " mov %3,%1 \n"
- " jmp 1b \n"
- ".previous \n"
- _ASM_EXTABLE(1b, 2b)
- : "+&D" (ptr), "+&a" (lmask)
- : "m" (*ptr), "g" (lmask), "d" (hmask),
- "m" (xsave_cntxt_size)
+ XRSTOR_FIXUP
+ : [ptr] "+&D" (ptr), [lmask_out] "+&a" (lmask)
+ : [mem] "m" (*ptr), [lmask_in] "g" (lmask),
+ [hmask] "d" (hmask), [size] "m" (xsave_cntxt_size)
: "ecx" );
break;
}
}
+#undef XRSTOR_FIXUP
bool_t xsave_enabled(const struct vcpu *v)
{