#include <xen/sort.h>
#include <xen/spinlock.h>
#include <asm/uaccess.h>
+#include <xen/domain_page.h>
#include <xen/virtual_region.h>
#include <xen/livepatch.h>
sort_exception_table(__start___pre_ex_table, __stop___pre_ex_table);
}
-unsigned long
+static unsigned long
search_one_extable(const struct exception_table_entry *first,
const struct exception_table_entry *last,
unsigned long value)
}
unsigned long
-search_exception_table(unsigned long addr)
+search_exception_table(const struct cpu_user_regs *regs)
{
- const struct virtual_region *region = find_text_region(addr);
+ const struct virtual_region *region = find_text_region(regs->rip);
+ unsigned long stub = this_cpu(stubs.addr);
if ( region && region->ex )
- return search_one_extable(region->ex, region->ex_end - 1, addr);
+ return search_one_extable(region->ex, region->ex_end - 1, regs->rip);
+
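+    /*
+     * The runtime-generated emulation stubs have no extable entries of
+     * their own.  If the fault hit the stub half of this CPU's stub
+     * buffer while running on the primary stack (%rsp between this
+     * exception frame and the stack top, so dereferencing it below is
+     * safe), recover via the return address which the CALL into the
+     * stub pushed there.
+     */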
+ if ( regs->rip >= stub + STUB_BUF_SIZE / 2 &&
+ regs->rip < stub + STUB_BUF_SIZE &&
+ regs->rsp > (unsigned long)regs &&
+ regs->rsp < (unsigned long)get_cpu_info() )
+ {
+ unsigned long retptr = *(unsigned long *)regs->rsp;
+
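+        /* Use the extable fixup attached to the stub call site, if any. */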
+ region = find_text_region(retptr);
+ retptr = region && region->ex
+ ? search_one_extable(region->ex, region->ex_end - 1, retptr)
+ : 0;
+ if ( retptr )
+ {
+ /*
+ * Put trap number and error code on the stack (in place of the
+ * original return address) for recovery code to pick up.
+ */
+ union stub_exception_token token = {
+ .fields.ec = regs->error_code,
+ .fields.trapnr = regs->entry_vector,
+ };
+
+ *(unsigned long *)regs->rsp = token.raw;
+ return retptr;
+ }
+ }
+
+ return 0;
+}
+
+#ifndef NDEBUG
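+/*
+ * Self test of the recovery logic above: plant deliberately faulting
+ * instruction sequences in the recoverable half of this CPU's stub
+ * buffer and check that each fault produces the expected token.
+ */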
+static int __init stub_selftest(void)
+{
+ static const struct {
+ uint8_t opc[4];
+ uint64_t rax;
+ union stub_exception_token res;
+ } tests[] __initconst = {
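+        /*
+         * ud1 raises #UD directly.  The non-canonical addresses raise
+         * #GP for the plain memory operand, but #SS when %rsp is the
+         * base register of the access.
+         */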
+ { .opc = { 0x0f, 0xb9, 0xc3, 0xc3 }, /* ud1 */
+ .res.fields.trapnr = TRAP_invalid_op },
+ { .opc = { 0x90, 0x02, 0x00, 0xc3 }, /* nop; add (%rax),%al */
+ .rax = 0x0123456789abcdef,
+ .res.fields.trapnr = TRAP_gp_fault },
+ { .opc = { 0x02, 0x04, 0x04, 0xc3 }, /* add (%rsp,%rax),%al */
+ .rax = 0xfedcba9876543210,
+ .res.fields.trapnr = TRAP_stack_error },
+ };
+ unsigned long addr = this_cpu(stubs.addr) + STUB_BUF_SIZE / 2;
+ unsigned int i;
+
+ for ( i = 0; i < ARRAY_SIZE(tests); ++i )
+ {
+ uint8_t *ptr = map_domain_page(_mfn(this_cpu(stubs.mfn))) +
+ (addr & ~PAGE_MASK);
+ unsigned long res = ~0;
+
+ memset(ptr, 0xcc, STUB_BUF_SIZE / 2);
+ memcpy(ptr, tests[i].opc, ARRAY_SIZE(tests[i].opc));
+ unmap_domain_page(ptr);
+
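+        /*
+         * The extable entry ties the stub call's return address (.Lret)
+         * to the fixup code at .Lfix.  On a fault inside the stub,
+         * search_exception_table() replaces the on-stack return address
+         * with the exception token and resumes at .Lfix, which pops the
+         * token into 'res' and rejoins the normal path at .Lret.
+         */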
+ asm volatile ( "call *%[stb]\n"
+ ".Lret%=:\n\t"
+ ".pushsection .fixup,\"ax\"\n"
+ ".Lfix%=:\n\t"
+ "pop %[exn]\n\t"
+ "jmp .Lret%=\n\t"
+ ".popsection\n\t"
+ _ASM_EXTABLE(.Lret%=, .Lfix%=)
+ : [exn] "+m" (res)
+                       : [stb] "rm" (addr), "a" (tests[i].rax) );
+ ASSERT(res == tests[i].res.raw);
+ }
return 0;
}
+__initcall(stub_selftest);
+#endif
unsigned long
search_pre_exception_table(struct cpu_user_regs *regs)
return;
}
- if ( likely((fixup = search_exception_table(regs->rip)) != 0) )
- {
- dprintk(XENLOG_ERR, "Trap %d: %p -> %p\n",
- trapnr, _p(regs->rip), _p(fixup));
- this_cpu(last_extable_addr) = regs->rip;
- regs->rip = fixup;
- return;
- }
-
if ( ((trapnr == TRAP_copro_error) || (trapnr == TRAP_simd_error)) &&
system_state >= SYS_STATE_active && has_hvm_container_vcpu(curr) &&
curr->arch.hvm_vcpu.fpu_exception_callback )
return;
}
+ if ( likely((fixup = search_exception_table(regs)) != 0) )
+ {
+ dprintk(XENLOG_ERR, "Trap %u: %p [%ps] -> %p\n",
+ trapnr, _p(regs->rip), _p(regs->rip), _p(fixup));
+ this_cpu(last_extable_addr) = regs->rip;
+ regs->rip = fixup;
+ return;
+ }
+
hardware_trap:
if ( debugger_trap_fatal(trapnr, regs) )
return;
}
die:
- if ( (fixup = search_exception_table(regs->rip)) != 0 )
+ if ( (fixup = search_exception_table(regs)) != 0 )
{
this_cpu(last_extable_addr) = regs->rip;
regs->rip = fixup;
if ( pf_type != real_fault )
return;
- if ( likely((fixup = search_exception_table(regs->rip)) != 0) )
+ if ( likely((fixup = search_exception_table(regs)) != 0) )
{
perfc_incr(copy_user_faults);
if ( unlikely(regs->error_code & PFEC_reserved_bit) )
gp_in_kernel:
- if ( likely((fixup = search_exception_table(regs->rip)) != 0) )
+ if ( likely((fixup = search_exception_table(regs)) != 0) )
{
- dprintk(XENLOG_INFO, "GPF (%04x): %p -> %p\n",
- regs->error_code, _p(regs->rip), _p(fixup));
+ dprintk(XENLOG_INFO, "GPF (%04x): %p [%ps] -> %p\n",
+ regs->error_code, _p(regs->rip), _p(regs->rip), _p(fixup));
this_cpu(last_extable_addr) = regs->rip;
regs->rip = fixup;
return;
* watchpoint set on it. No need to bump EIP; the only faulting
* trap is an instruction breakpoint, which can't happen to us.
*/
- WARN_ON(!search_exception_table(regs->rip));
+ WARN_ON(!search_exception_table(regs));
}
goto out;
}
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif /* __i386__ */
+#ifdef __XEN__
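+/*
+ * Wrap a stub invocation so that a fault inside the stub is converted
+ * into the token placed on the stack by search_exception_table().  res_
+ * starts out as ~0, which no genuine token can match (a token's high
+ * bits are always zero), so a nonzero ~res_.raw reliably indicates that
+ * the stub raised an exception.
+ */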
+# define invoke_stub(pre, post, constraints...) do { \
+ union stub_exception_token res_ = { .raw = ~0 }; \
+ asm volatile ( pre "\n\tcall *%[stub]\n\t" post "\n" \
+ ".Lret%=:\n\t" \
+ ".pushsection .fixup,\"ax\"\n" \
+ ".Lfix%=:\n\t" \
+ "pop %[exn]\n\t" \
+ "jmp .Lret%=\n\t" \
+ ".popsection\n\t" \
+ _ASM_EXTABLE(.Lret%=, .Lfix%=) \
+ : [exn] "+g" (res_), constraints, \
+ [stub] "rm" (stub.func) ); \
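+    /* res_ still being ~0 means the stub completed without faulting. */ \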
+ if ( unlikely(~res_.raw) ) \
+ { \
+ gprintk(XENLOG_WARNING, \
+ "exception %u (ec=%04x) in emulation stub (line %u)\n", \
+ res_.fields.trapnr, res_.fields.ec, __LINE__); \
+ gprintk(XENLOG_INFO, "stub: %"__stringify(MAX_INST_LEN)"ph\n", \
+ stub.func); \
+ generate_exception_if(res_.fields.trapnr == EXC_UD, EXC_UD); \
+ domain_crash(current->domain); \
+ goto cannot_emulate; \
+ } \
+} while (0)
+#else
+# define invoke_stub(pre, post, constraints...) \
+ asm volatile ( pre "\n\tcall *%[stub]\n\t" post \
+ : constraints, [stub] "rm" (stub.func) )
+#endif
+
#define emulate_stub(dst, src...) do { \
unsigned long tmp; \
- asm volatile ( _PRE_EFLAGS("[efl]", "[msk]", "[tmp]") \
- "call *%[stub];" \
- _POST_EFLAGS("[efl]", "[msk]", "[tmp]") \
- : dst, [tmp] "=&r" (tmp), [efl] "+g" (_regs._eflags) \
- : [stub] "r" (stub.func), \
- [msk] "i" (EFLAGS_MASK), ## src ); \
+ invoke_stub(_PRE_EFLAGS("[efl]", "[msk]", "[tmp]"), \
+ _POST_EFLAGS("[efl]", "[msk]", "[tmp]"), \
+ dst, [tmp] "=&r" (tmp), [efl] "+g" (_regs._eflags) \
+ : [msk] "i" (EFLAGS_MASK), ## src); \
} while (0)
/* Fetch next part of the instruction being emulated. */
unsigned int nr_ = sizeof((uint8_t[]){ bytes }); \
fic.insn_bytes = nr_; \
memcpy(get_stub(stub), ((uint8_t[]){ bytes, 0xc3 }), nr_ + 1); \
- asm volatile ( "call *%[stub]" : "+m" (fic) : \
- [stub] "rm" (stub.func) ); \
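+    /*                                                                 \
+     * fic is listed as both output and input: the explicit input copy \
+     * keeps the input section non-empty, so that the [stub] operand   \
+     * appended by invoke_stub() lands in the right place.             \
+     */                                                                \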
+ invoke_stub("", "", "=m" (fic) : "m" (fic)); \
put_stub(stub); \
} while (0)
unsigned long tmp_; \
fic.insn_bytes = nr_; \
memcpy(get_stub(stub), ((uint8_t[]){ bytes, 0xc3 }), nr_ + 1); \
- asm volatile ( _PRE_EFLAGS("[eflags]", "[mask]", "[tmp]") \
- "call *%[func];" \
- _POST_EFLAGS("[eflags]", "[mask]", "[tmp]") \
- : [eflags] "+g" (_regs._eflags), \
- [tmp] "=&r" (tmp_), "+m" (fic) \
- : [func] "rm" (stub.func), \
- [mask] "i" (X86_EFLAGS_ZF|X86_EFLAGS_PF| \
- X86_EFLAGS_CF) ); \
+ invoke_stub(_PRE_EFLAGS("[eflags]", "[mask]", "[tmp]"), \
+ _POST_EFLAGS("[eflags]", "[mask]", "[tmp]"), \
+ [eflags] "+g" (_regs._eflags), [tmp] "=&r" (tmp_), \
+ "+m" (fic) \
+ : [mask] "i" (X86_EFLAGS_ZF|X86_EFLAGS_PF|X86_EFLAGS_CF)); \
put_stub(stub); \
} while (0)
extern struct exception_table_entry __start___pre_ex_table[];
extern struct exception_table_entry __stop___pre_ex_table[];
-extern unsigned long search_exception_table(unsigned long);
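+/*
+ * Token handed back from the exception fixup path to the caller of a
+ * faulting stub: error code in the low 16 bits, vector number in bits
+ * 16-23, high bits zero.  E.g. #GP(0x10) is encoded as raw == 0x000d0010.
+ */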
+union stub_exception_token {
+ struct {
+ uint16_t ec;
+ uint8_t trapnr;
+ } fields;
+ unsigned long raw;
+};
+
+extern unsigned long search_exception_table(const struct cpu_user_regs *regs);
extern void sort_exception_tables(void);
extern void sort_exception_table(struct exception_table_entry *start,
const struct exception_table_entry *stop);