#define stack_words_per_line 4
#define ESP_BEFORE_EXCEPTION(regs) ((unsigned long *)regs->rsp)
-static void do_trap(struct cpu_user_regs *regs);
-static void do_reserved_trap(struct cpu_user_regs *regs);
-
-void (* const exception_table[TRAP_nr])(struct cpu_user_regs *regs) = {
- [TRAP_divide_error] = do_trap,
- [TRAP_debug] = do_debug,
- [TRAP_nmi] = (void *)do_nmi,
- [TRAP_int3] = do_int3,
- [TRAP_overflow] = do_trap,
- [TRAP_bounds] = do_trap,
- [TRAP_invalid_op] = do_invalid_op,
- [TRAP_no_device] = do_device_not_available,
- [TRAP_double_fault] = do_reserved_trap,
- [TRAP_copro_seg] = do_reserved_trap,
- [TRAP_invalid_tss] = do_trap,
- [TRAP_no_segment] = do_trap,
- [TRAP_stack_error] = do_trap,
- [TRAP_gp_fault] = do_general_protection,
- [TRAP_page_fault] = do_page_fault,
- [TRAP_spurious_int] = do_reserved_trap,
- [TRAP_copro_error] = do_trap,
- [TRAP_alignment_check] = do_trap,
- [TRAP_machine_check] = (void *)do_machine_check,
- [TRAP_simd_error] = do_trap,
- [TRAP_virtualisation] = do_reserved_trap,
- [X86_EXC_CP] = do_entry_CP,
- [X86_EXC_CP + 1 ...
- (ARRAY_SIZE(exception_table) - 1)] = do_reserved_trap,
-};
-
void show_code(const struct cpu_user_regs *regs)
{
unsigned char insns_before[8] = {}, insns_after[16] = {};
(regs->eflags & X86_EFLAGS_IF) ? "" : " IN INTERRUPT CONTEXT");
}
-static void do_reserved_trap(struct cpu_user_regs *regs)
+void do_unhandled_trap(struct cpu_user_regs *regs)
{
unsigned int trapnr = regs->entry_vector;
return true;
}
-static void do_trap(struct cpu_user_regs *regs)
+void do_trap(struct cpu_user_regs *regs)
{
unsigned int trapnr = regs->entry_vector;
sti
1: movq %rsp,%rdi
movzbl UREGS_entry_vector(%rsp),%eax
- leaq exception_table(%rip),%rdx
#ifdef CONFIG_PERF_COUNTERS
lea per_cpu__perfcounters(%rip), %rcx
add STACK_CPUINFO_FIELD(per_cpu_offset)(%r14), %rcx
incl ASM_PERFC_exceptions * 4(%rcx, %rax, 4)
#endif
- mov (%rdx, %rax, 8), %rdx
- INDIRECT_CALL %rdx
+
+ /*
+ * Dispatch to appropriate C handlers.
+ *
+ * The logic is implemented as an if/else chain. DISPATCH() calls
+ * need to be in frequency order for best performance.
+ */
+#define DISPATCH(vec, handler) \
+ cmp $vec, %al; \
+ jne .L_ ## vec ## _done; \
+ call handler; \
+ jmp .L_exn_dispatch_done; \
+.L_ ## vec ## _done:
+
+ DISPATCH(X86_EXC_PF, do_page_fault)
+ DISPATCH(X86_EXC_GP, do_general_protection)
+ DISPATCH(X86_EXC_UD, do_invalid_op)
+ DISPATCH(X86_EXC_NM, do_device_not_available)
+ DISPATCH(X86_EXC_BP, do_int3)
+
+ /* Logically "if ( (1 << vec) & MASK ) { do_trap(); }" */
+ mov $(1 << X86_EXC_DE) | (1 << X86_EXC_OF) | (1 << X86_EXC_BR) |\
+ (1 << X86_EXC_NP) | (1 << X86_EXC_SS) | (1 << X86_EXC_MF) |\
+ (1 << X86_EXC_AC) | (1 << X86_EXC_XM), %edx
+ bt %eax, %edx
+ jnc .L_do_trap_done
+ call do_trap
+ jmp .L_exn_dispatch_done
+.L_do_trap_done:
+
+ DISPATCH(X86_EXC_CP, do_entry_CP)
+#undef DISPATCH
+
+ call do_unhandled_trap
+ BUG /* do_unhandled_trap() shouldn't return. */
+
+.L_exn_dispatch_done:
mov %r15, STACK_CPUINFO_FIELD(xen_cr3)(%r14)
mov %r13b, STACK_CPUINFO_FIELD(use_pv_cr3)(%r14)
#ifdef CONFIG_PV
incl ASM_PERFC_exceptions * 4(%rcx, %rax, 4)
#endif
- leaq exception_table(%rip),%rdx
- mov (%rdx, %rax, 8), %rdx
- INDIRECT_CALL %rdx
+ /*
+ * Dispatch to appropriate C handlers.
+ *
+ * The logic is implemented as an if/else chain. DISPATCH() calls
+ * need to be in frequency order for best performance.
+ */
+#define DISPATCH(vec, handler) \
+ cmp $vec, %al; \
+ jne .L_ ## vec ## _done; \
+ call handler; \
+ jmp .L_ist_dispatch_done; \
+.L_ ## vec ## _done:
+
+ DISPATCH(X86_EXC_NMI, do_nmi)
+ DISPATCH(X86_EXC_DB, do_debug)
+ DISPATCH(X86_EXC_MC, do_machine_check)
+#undef DISPATCH
+
+ call do_unhandled_trap
+ BUG /* do_unhandled_trap() shouldn't return. */
+
+.L_ist_dispatch_done:
mov %r15, STACK_CPUINFO_FIELD(xen_cr3)(%r14)
mov %bl, STACK_CPUINFO_FIELD(use_pv_cr3)(%r14)
cmpb $TRAP_nmi,UREGS_entry_vector(%rsp)
entrypoint 1b
- /* Reserved exceptions, heading towards do_reserved_trap(). */
+ /* Reserved exceptions, heading towards do_unhandled_trap(). */
.elseif vec == X86_EXC_CSO || vec == X86_EXC_SPV || \
vec == X86_EXC_VE || (vec > X86_EXC_CP && vec < TRAP_nr)