void asm_domain_crash_synchronous(unsigned long addr)
{
/*
- * We need clear AC bit here because in entry.S AC is set
- * by ASM_STAC to temporarily allow accesses to user pages
- * which is prevented by SMAP by default.
+ * We need to clear the AC bit here because the exception fixup logic
+ * may leave user accesses enabled.
*
 * For some code paths, where this function is called, clac()
 * is not needed, but adding clac() here instead of each place
 * is more maintainable.
 */
clac();
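
Both the C clac()/stac() wrappers and the ALTERNATIVE uses below rely on boot-time instruction patching. The following is a minimal, self-contained sketch of that mechanism, assuming an illustrative table layout (Xen's real struct alt_instr and feature plumbing differ):

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

/* Illustrative entry layout; not Xen's actual struct alt_instr. */
struct alt_entry {
    uint8_t *site;         /* patch site, assembled as NOPs           */
    const uint8_t *repl;   /* replacement bytes, e.g. CLAC = 0f 01 ca */
    uint8_t len;           /* length of the replacement               */
    unsigned int feature;  /* patch only if this feature is present   */
};

/* Stand-in for a real CPUID-derived feature check. */
static bool cpu_has(unsigned int feature)
{
    (void)feature;
    return true;
}

/*
 * Walk the table once at boot.  Sites whose guarding feature is
 * absent keep their NOPs, so CLAC/STAC cost nothing on hardware
 * where Xen does not use SMAP.
 */
static void apply_alternatives(struct alt_entry *start, struct alt_entry *end)
{
    for ( struct alt_entry *a = start; a < end; a++ )
        if ( cpu_has(a->feature) )
            memcpy(a->site, a->repl, a->len);
}
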
#ifdef CONFIG_PV32
ENTRY(entry_int82)
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
pushq $0
movl $HYPERCALL_VECTOR, 4(%rsp)
SAVE_ALL compat=1 /* DPL1 gate, restricted to 32bit PV guests only. */
compat_create_bounce_frame:
ASSERT_INTERRUPTS_ENABLED
mov %fs,%edi
- ASM_STAC
+ ALTERNATIVE "", stac, X86_FEATURE_XEN_SMAP
testb $2,UREGS_cs+8(%rsp)
jz 1f
/* Push new frame at registered guest-OS stack base. */
movl TRAPBOUNCE_error_code(%rdx),%eax
.Lft8: movl %eax,%fs:(%rsi) # ERROR CODE
1:
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
/* Rewrite our stack frame and return to guest-OS mode. */
/* IA32 Ref. Vol. 3: TF, VM, RF and NT flags are cleared on trap. */
andl $~(X86_EFLAGS_VM|X86_EFLAGS_RF|\
X86_EFLAGS_NT|X86_EFLAGS_TF),%eax
addl $4,%esi
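
The STAC/CLAC bracketing above follows one rule: EFLAGS.AC is raised only around the guest-memory accesses themselves. A C analogue of the pattern (hypothetical helper using the raw instructions; Xen itself goes through the patched wrappers):

#include <stdint.h>

static inline void stac(void) { asm volatile ( "stac" ::: "memory" ); }
static inline void clac(void) { asm volatile ( "clac" ::: "memory" ); }

/* Hypothetical helper: write one word to a guest-mapped address. */
static inline void put_guest_u32(uint32_t *guest_va, uint32_t val)
{
    stac();           /* open the user-access window (EFLAGS.AC = 1) */
    *guest_va = val;  /* e.g. the ERROR CODE store at .Lft8 above    */
    clac();           /* close the window again as soon as possible  */
}
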
compat_crash_page_fault:
.Lft14: mov %edi,%fs
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
movl %esi,%edi
call show_page_walk
jmp dom_crash_sync_extable
pushq $0
pushfq
GLOBAL(sysenter_eflags_saved)
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
pushq $3 /* ring 3 null cs */
pushq $0 /* null rip */
pushq $0
jmp .Lbounce_exception
ENTRY(int80_direct_trap)
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
pushq $0
movl $0x80, 4(%rsp)
SAVE_ALL
subq $7*8,%rsi
movq UREGS_ss+8(%rsp),%rax
- ASM_STAC
+ ALTERNATIVE "", stac, X86_FEATURE_XEN_SMAP
movq VCPU_domain(%rbx),%rdi
STORE_GUEST_STACK(rax,6) # SS
movq UREGS_rsp+8(%rsp),%rax
STORE_GUEST_STACK(rax,5) # RSP
movq UREGS_r11+8(%rsp),%rax
STORE_GUEST_STACK(rax,1) # R11
movq UREGS_rcx+8(%rsp),%rax
STORE_GUEST_STACK(rax,0) # RCX
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
#undef STORE_GUEST_STACK
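
STORE_GUEST_STACK itself is outside this hunk; it pairs each store to the guest stack with an exception-table entry routing faults to the matching domain_crash_page_fault_<n>x8 label below. A sketch of that shape (the exact definition in entry.S may differ):

#define STORE_GUEST_STACK(reg, n) \
0:      movq  %reg,(n)*8(%rsi);   \
        _ASM_EXTABLE(0b, domain_crash_page_fault_ ## n ## x8)
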
domain_crash_page_fault_1x8:
addq $8,%rsi
domain_crash_page_fault_0x8:
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
movq %rsi,%rdi
call show_page_walk
ENTRY(dom_crash_sync_extable)
- ASM_CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
# Get out of the guest-save area of the stack.
GET_STACK_END(ax)
leaq STACK_CPUINFO_FIELD(guest_cpu_user_regs)(%rax),%rsp
iretq
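
GET_STACK_END, used at GET_STACK_END(ax) above, exploits the alignment of Xen's per-CPU stacks: each stack is STACK_SIZE-aligned, so OR-ing %rsp with STACK_SIZE-1 yields the stack's last byte, from which the cpu_info fields sit at fixed STACK_CPUINFO_FIELD offsets. A simplified sketch for a named register (numbered operands such as GET_STACK_END(14) need the %r<N>d spelling, which the real macro handles):

#define GET_STACK_END(reg)            \
        movl  $STACK_SIZE-1, %e##reg; \
        orq   %rsp, %r##reg
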
ENTRY(common_interrupt)
- SAVE_ALL CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
+ SAVE_ALL
GET_STACK_END(14)
movl $TRAP_page_fault,4(%rsp)
/* No special register assumptions. */
GLOBAL(handle_exception)
- SAVE_ALL CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
+ SAVE_ALL
GET_STACK_END(14)
ENTRY(double_fault)
movl $TRAP_double_fault,4(%rsp)
/* Set AC to reduce chance of further SMAP faults */
- SAVE_ALL STAC
+ ALTERNATIVE "", stac, X86_FEATURE_XEN_SMAP
+ SAVE_ALL
GET_STACK_END(14)
pushq $0
movl $TRAP_nmi,4(%rsp)
handle_ist_exception:
- SAVE_ALL CLAC
+ ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
+ SAVE_ALL
GET_STACK_END(14)
UNLIKELY_END_SECTION "\n" \
".Llikely." #tag ".%=:"
-#endif
-
-#ifdef __ASSEMBLY__
-.macro ASM_STAC
- ALTERNATIVE "", stac, X86_FEATURE_XEN_SMAP
-.endm
-.macro ASM_CLAC
- ALTERNATIVE "", clac, X86_FEATURE_XEN_SMAP
-.endm
-#else
static always_inline void clac(void)
{
    /* Note: a barrier is implicit in alternative() */
    alternative("", "clac", X86_FEATURE_XEN_SMAP);
}
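
For completeness, the stac() companion sits alongside clac() and mirrors it exactly:

static always_inline void stac(void)
{
    /* Note: a barrier is implicit in alternative() */
    alternative("", "stac", X86_FEATURE_XEN_SMAP);
}
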
#endif
#ifdef __ASSEMBLY__
-.macro SAVE_ALL op, compat=0
-.ifeqs "\op", "CLAC"
- ASM_CLAC
-.else
-.ifeqs "\op", "STAC"
- ASM_STAC
-.else
-.ifnb \op
- .err
-.endif
-.endif
-.endif
+.macro SAVE_ALL compat=0
addq $-(UREGS_error_code-UREGS_r15), %rsp
cld
movq %rdi,UREGS_rdi(%rsp)