x86-64: use PC-relative exception table entries
author     Keir Fraser <keir@xen.org>
           Fri, 24 Dec 2010 08:47:23 +0000 (08:47 +0000)
committer  Keir Fraser <keir@xen.org>
           Fri, 24 Dec 2010 08:47:23 +0000 (08:47 +0000)
... thus allowing the entries to be half their current size. Rather
than adjusting all instances to the new layout, abstract the
construction of the table entries via a macro (paralleling a similar
one in recent Linux).

Also change the name of the section (to allow easier detection of
missed cases) and merge the resulting output sections into
.data.read_mostly.

Signed-off-by: Jan Beulich <jbeulich@novell.com>
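
For reference, a minimal C sketch (not part of the patch) of how such a
32-bit PC-relative entry resolves back to an absolute address; it mirrors
the EX_FIELD()/ex_addr()/ex_cont() helpers added to extable.c and the
s32 addr/cont fields introduced in uaccess.h below:

    /* Sketch only: decoding a PC-relative exception table entry. */
    #include <stdint.h>

    struct exception_table_entry {
        int32_t addr;   /* faulting insn, stored relative to &addr */
        int32_t cont;   /* fixup/continuation, stored relative to &cont */
    };

    static inline unsigned long ex_addr(const struct exception_table_entry *x)
    {
        /* absolute address = address of the field + stored displacement */
        return (unsigned long)&x->addr + x->addr;
    }

    static inline unsigned long ex_cont(const struct exception_table_entry *x)
    {
        return (unsigned long)&x->cont + x->cont;
    }

Storing signed 32-bit displacements relative to the entry itself keeps
each x86-64 entry at 8 bytes instead of the 16 needed for two absolute
.quad values, and works because all code referenced from the table lies
within a 32-bit displacement of the table.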
22 files changed:
xen/arch/x86/cpu/amd.c
xen/arch/x86/domain.c
xen/arch/x86/extable.c
xen/arch/x86/i387.c
xen/arch/x86/usercopy.c
xen/arch/x86/x86_32/asm-offsets.c
xen/arch/x86/x86_32/entry.S
xen/arch/x86/x86_64/asm-offsets.c
xen/arch/x86/x86_64/compat/entry.S
xen/arch/x86/x86_64/entry.S
xen/arch/x86/x86_64/mm.c
xen/arch/x86/xen.lds.S
xen/include/asm-x86/asm_defns.h
xen/include/asm-x86/config.h
xen/include/asm-x86/hvm/vmx/vmx.h
xen/include/asm-x86/msr.h
xen/include/asm-x86/uaccess.h
xen/include/asm-x86/x86_32/asm_defns.h
xen/include/asm-x86/x86_32/system.h
xen/include/asm-x86/x86_32/uaccess.h
xen/include/asm-x86/x86_64/asm_defns.h
xen/include/asm-x86/x86_64/system.h

index 2fb25c0e70b7466af32feaafa92bc550afe64ebc..780fa69f88d973ca4b7d1aac8564d36a391856e9 100644 (file)
@@ -53,10 +53,7 @@ static inline int rdmsr_amd_safe(unsigned int msr, unsigned int *lo,
                     "3: movl %6,%2\n"
                     "   jmp 2b\n"
                     ".previous\n"
-                    ".section __ex_table,\"a\"\n"
-                    __FIXUP_ALIGN "\n"
-                    __FIXUP_WORD " 1b,3b\n"
-                    ".previous\n"
+                    _ASM_EXTABLE(1b, 3b)
                     : "=a" (*lo), "=d" (*hi), "=r" (err)
                     : "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT));
 
@@ -73,10 +70,7 @@ static inline int wrmsr_amd_safe(unsigned int msr, unsigned int lo,
                     "3: movl %6,%0\n"
                     "   jmp 2b\n"
                     ".previous\n"
-                    ".section __ex_table,\"a\"\n"
-                    __FIXUP_ALIGN "\n"
-                    __FIXUP_WORD " 1b,3b\n"
-                    ".previous\n"
+                    _ASM_EXTABLE(1b, 3b)
                     : "=r" (err)
                     : "c" (msr), "a" (lo), "d" (hi), "D" (0x9c5a203a),
                       "0" (0), "i" (-EFAULT));
index 313be2771427a3e274222a0fbfd1685702c8ed44..f051df0ee3def810dfc2bbaf35fbf53e9db0e519 100644 (file)
@@ -1070,10 +1070,7 @@ arch_do_vcpu_op(
         "   movl %k0,%%" #seg "\n"              \
         "   jmp 2b\n"                           \
         ".previous\n"                           \
-        ".section __ex_table,\"a\"\n"           \
-        "   .align 8\n"                         \
-        "   .quad 1b,3b\n"                      \
-        ".previous"                             \
+        _ASM_EXTABLE(1b, 3b)                    \
         : "=r" (__r) : "r" (value), "0" (__r) );\
     __r; })
 
index da822d34affa41403dc1173d5040bf5b2a335b93..8c645854834475b596304468c1d75f82eb326562 100644 (file)
@@ -2,6 +2,7 @@
 #include <xen/config.h>
 #include <xen/init.h>
 #include <xen/perfc.h>
+#include <xen/sort.h>
 #include <xen/spinlock.h>
 #include <asm/uaccess.h>
 
@@ -10,29 +11,58 @@ extern struct exception_table_entry __stop___ex_table[];
 extern struct exception_table_entry __start___pre_ex_table[];
 extern struct exception_table_entry __stop___pre_ex_table[];
 
-static void __init sort_exception_table(struct exception_table_entry *start,
-                                        struct exception_table_entry *end)
+#ifdef __i386__
+#define EX_FIELD(ptr, field) (ptr)->field
+#define swap_ex NULL
+#else
+#define EX_FIELD(ptr, field) ((unsigned long)&(ptr)->field + (ptr)->field)
+#endif
+
+static inline unsigned long ex_addr(const struct exception_table_entry *x)
 {
-    struct exception_table_entry *p, *q, tmp;
+       return EX_FIELD(x, addr);
+}
 
-    for ( p = start; p < end; p++ )
-    {
-        for ( q = p-1; q > start; q-- )
-            if ( p->insn > q->insn )
-                break;
-        if ( ++q != p )
-        {
-            tmp = *p;
-            memmove(q+1, q, (p-q)*sizeof(*p));
-            *q = tmp;
-        }
-    }
+static inline unsigned long ex_cont(const struct exception_table_entry *x)
+{
+       return EX_FIELD(x, cont);
+}
+
+static int __init cmp_ex(const void *a, const void *b)
+{
+       const struct exception_table_entry *l = a, *r = b;
+       unsigned long lip = ex_addr(l);
+       unsigned long rip = ex_addr(r);
+
+       /* avoid overflow */
+       if (lip > rip)
+               return 1;
+       if (lip < rip)
+               return -1;
+       return 0;
+}
+
+#ifndef swap_ex
+static void __init swap_ex(void *a, void *b, int size)
+{
+       struct exception_table_entry *l = a, *r = b, tmp;
+       long delta = b - a;
+
+       tmp = *l;
+       l->addr = r->addr + delta;
+       l->cont = r->cont + delta;
+       r->addr = tmp.addr - delta;
+       r->cont = tmp.cont - delta;
 }
+#endif
 
 void __init sort_exception_tables(void)
 {
-    sort_exception_table(__start___ex_table, __stop___ex_table);
-    sort_exception_table(__start___pre_ex_table, __stop___pre_ex_table);
+    sort(__start___ex_table, __stop___ex_table - __start___ex_table,
+         sizeof(struct exception_table_entry), cmp_ex, swap_ex);
+    sort(__start___pre_ex_table,
+         __stop___pre_ex_table - __start___pre_ex_table,
+         sizeof(struct exception_table_entry), cmp_ex, swap_ex);
 }
 
 static inline unsigned long
@@ -46,9 +76,9 @@ search_one_table(const struct exception_table_entry *first,
     while ( first <= last )
     {
         mid = (last - first) / 2 + first;
-        diff = mid->insn - value;
+        diff = ex_addr(mid) - value;
         if (diff == 0)
-            return mid->fixup;
+            return ex_cont(mid);
         else if (diff < 0)
             first = mid+1;
         else
index 5f3f041613c1d5e6023e1ad46dfd380f0e50e0fb..477efec973d7c43ce38e409b434c661418ee981a 100644 (file)
@@ -122,10 +122,7 @@ void restore_fpu(struct vcpu *v)
             "   pop  %%"__OP"ax       \n"
             "   jmp  1b               \n"
             ".previous                \n"
-            ".section __ex_table,\"a\"\n"
-            "   "__FIXUP_ALIGN"       \n"
-            "   "__FIXUP_WORD" 1b,2b  \n"
-            ".previous                \n"
+            _ASM_EXTABLE(1b, 2b)
             : 
             : "m" (*fpu_ctxt),
               "i" (sizeof(v->arch.guest_context.fpu_ctxt)/4)
index 76e3abfde2efdaf44686ce306aa4925d4be3b4b1..d88e635bb0a0966a3f1f237f7ab547b873d34bec 100644 (file)
@@ -36,12 +36,9 @@ unsigned long __copy_to_user_ll(void __user *to, const void *from, unsigned n)
         "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
         "    jmp 2b\n"
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "    "__FIXUP_ALIGN"\n"
-        "    "__FIXUP_WORD" 4b,5b\n"
-        "    "__FIXUP_WORD" 0b,3b\n"
-        "    "__FIXUP_WORD" 1b,2b\n"
-        ".previous"
+        _ASM_EXTABLE(4b, 5b)
+        _ASM_EXTABLE(0b, 3b)
+        _ASM_EXTABLE(1b, 2b)
         : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
         : "0" (__n), "1" (to), "2" (from), "3" (__n)
         : "memory" );
@@ -82,12 +79,9 @@ __copy_from_user_ll(void *to, const void __user *from, unsigned n)
         "    pop  %0\n"
         "    jmp 2b\n"
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "    "__FIXUP_ALIGN"\n"
-        "    "__FIXUP_WORD" 4b,5b\n"
-        "    "__FIXUP_WORD" 0b,3b\n"
-        "    "__FIXUP_WORD" 1b,6b\n"
-        ".previous"
+        _ASM_EXTABLE(4b, 5b)
+        _ASM_EXTABLE(0b, 3b)
+        _ASM_EXTABLE(1b, 6b)
         : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
         : "0" (__n), "1" (to), "2" (from), "3" (__n)
         : "memory" );
index 4124ce7edcd92b49ae562d83b549c2f03a78a681..c3bba7f4be13b711ca735d0eb3bed34e8770cd25 100644 (file)
@@ -3,6 +3,7 @@
  * This code generates raw asm output which is post-processed
  * to extract and format the required data.
  */
+#define COMPILE_OFFSETS
 
 #include <xen/config.h>
 #include <xen/perfc.h>
index 4d34b90dd5bb5ec500ff682a6790286b4382a847..d332b8ab0b58b8e105e85cd3da0e0f0c806d53ca 100644 (file)
@@ -119,16 +119,12 @@ failsafe_callback:
         movl  %eax,UREGS_gs(%esp)
         jmp   test_all_events
 .previous
-.section __pre_ex_table,"a"
-        .long .Lft1,.Lfx1
-        .long .Lft2,.Lfx1
-        .long .Lft3,.Lfx1
-        .long .Lft4,.Lfx1
-        .long .Lft5,.Lfx1
-.previous
-.section __ex_table,"a"
-        .long .Ldf1,failsafe_callback
-.previous
+        _ASM_PRE_EXTABLE(.Lft1, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft2, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft3, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft4, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft5, .Lfx1)
+        _ASM_EXTABLE(.Ldf1, failsafe_callback)
 
         ALIGN
 restore_all_xen:
@@ -392,18 +388,26 @@ UNLIKELY_END(bounce_vm86_3)
         movl TRAPBOUNCE_eip(%edx),%eax
         movl %eax,UREGS_eip+4(%esp)
         ret
-.section __ex_table,"a"
-        .long  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
-        .long  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
-        .long .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
-        .long .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
-        .long .Lft14,domain_crash_synchronous , .Lft15,domain_crash_synchronous
-        .long .Lft16,domain_crash_synchronous , .Lft17,domain_crash_synchronous
-        .long .Lft18,domain_crash_synchronous , .Lft19,domain_crash_synchronous
-        .long .Lft20,domain_crash_synchronous , .Lft21,domain_crash_synchronous
-        .long .Lft22,domain_crash_synchronous , .Lft23,domain_crash_synchronous
-        .long .Lft24,domain_crash_synchronous , .Lft25,domain_crash_synchronous
-.previous
+        _ASM_EXTABLE(.Lft6,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft7,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft8,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft9,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft14, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft15, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft16, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft17, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft18, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft19, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft20, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft21, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft22, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft23, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft24, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft25, domain_crash_synchronous)
 
 domain_crash_synchronous_string:
         .asciz "domain_crash_sync called from entry.S (%lx)\n"
index 424137ce4359e8425e0c6551f4d83ba2f38e746a..21b4358f93a74358f4a958d420956c6a60ba58d3 100644 (file)
@@ -3,6 +3,7 @@
  * This code generates raw asm output which is post-processed
  * to extract and format the required data.
  */
+#define COMPILE_OFFSETS
 
 #include <xen/config.h>
 #include <xen/perfc.h>
index c2b7cf05c4e35d8a53f3e17fa0647f2349624b1e..13e04ad370ef90656ee01a75882728658d0d2a40 100644 (file)
@@ -197,12 +197,8 @@ compat_failsafe_callback:
 1:      call  compat_create_bounce_frame
         jmp   compat_test_all_events
 .previous
-.section __pre_ex_table,"a"
-       .quad .Lft0,.Lfx0
-.previous
-.section __ex_table,"a"
-        .quad .Ldf0,compat_failsafe_callback
-.previous
+        _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
+        _ASM_EXTABLE(.Ldf0, compat_failsafe_callback)
 
 /* %rdx: trap_bounce, %rbx: struct vcpu */
 ENTRY(compat_post_handle_exception)
@@ -330,15 +326,19 @@ UNLIKELY_END(compat_bounce_failsafe)
         xorl  %edi,%edi
         jmp   .Lft13
 .previous
-.section __ex_table,"a"
-        .quad  .Lft1,domain_crash_synchronous  ,  .Lft2,compat_crash_page_fault
-        .quad  .Lft3,compat_crash_page_fault_4 ,  .Lft4,domain_crash_synchronous
-        .quad  .Lft5,compat_crash_page_fault_4 ,  .Lft6,compat_crash_page_fault_8
-        .quad  .Lft7,compat_crash_page_fault   ,  .Lft8,compat_crash_page_fault
-        .quad  .Lft9,compat_crash_page_fault_12, .Lft10,compat_crash_page_fault_8
-        .quad .Lft11,compat_crash_page_fault_4 , .Lft12,compat_crash_page_fault
-        .quad .Lft13,.Lfx13
-.previous
+        _ASM_EXTABLE(.Lft1,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft2,  compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft3,  compat_crash_page_fault_4)
+        _ASM_EXTABLE(.Lft4,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft5,  compat_crash_page_fault_4)
+        _ASM_EXTABLE(.Lft6,  compat_crash_page_fault_8)
+        _ASM_EXTABLE(.Lft7,  compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft8,  compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft9,  compat_crash_page_fault_12)
+        _ASM_EXTABLE(.Lft10, compat_crash_page_fault_8)
+        _ASM_EXTABLE(.Lft11, compat_crash_page_fault_4)
+        _ASM_EXTABLE(.Lft12, compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft13, .Lfx13)
 
 compat_crash_page_fault_12:
         addl  $4,%esi
@@ -356,9 +356,7 @@ compat_crash_page_fault:
         xorl  %edi,%edi
         jmp   .Lft14
 .previous
-.section __ex_table,"a"
-        .quad .Lft14,.Lfx14
-.previous
+        _ASM_EXTABLE(.Lft14, .Lfx14)
 
 .section .rodata, "a", @progbits
 
index 7c5fe92786352c873a5d5aa7b3345a0113ff669a..b30f2d0c13fa1f14ab63ef8808e104b8182e565f 100644 (file)
@@ -84,12 +84,8 @@ failsafe_callback:
 1:      call  create_bounce_frame
         jmp   test_all_events
 .previous
-.section __pre_ex_table,"a"
-        .quad .Lft0,.Lfx0
-.previous
-.section __ex_table,"a"
-        .quad .Ldf0,failsafe_callback
-.previous
+        _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
+        _ASM_EXTABLE(.Ldf0, failsafe_callback)
 
         ALIGN
 /* No special register assumptions. */
@@ -412,14 +408,18 @@ UNLIKELY_END(bounce_failsafe)
         jz    domain_crash_synchronous
         movq  %rax,UREGS_rip+8(%rsp)
         ret
-.section __ex_table,"a"
-        .quad  .Lft2,domain_crash_synchronous ,  .Lft3,domain_crash_synchronous
-        .quad  .Lft4,domain_crash_synchronous ,  .Lft5,domain_crash_synchronous
-        .quad  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
-        .quad  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
-        .quad .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
-        .quad .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
-.previous
+        _ASM_EXTABLE(.Lft2,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft3,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft4,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft5,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft6,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft7,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft8,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft9,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
 
 domain_crash_synchronous_string:
         .asciz "domain_crash_sync called from entry.S\n"
index 1de4e8b03d3305c28c73aa135ffb3003f23e6b6a..e973c0edcbd9011079735d32f80da34886ca450f 100644 (file)
@@ -1119,10 +1119,7 @@ long do_set_segment_base(unsigned int which, unsigned long base)
             "2:   xorl %k0,%k0        \n"
             "     jmp  1b             \n"
             ".previous                \n"
-            ".section __ex_table,\"a\"\n"
-            "    .align 8             \n"
-            "    .quad 1b,2b          \n"
-            ".previous                  "
+            _ASM_EXTABLE(1b, 2b)
             : : "r" (base&0xffff) );
         break;
 
index 1ffd4e98de0603a84e9bd18d08894060350d9a26..49691d35c5b94347a054fd1ab126c3c57305abb7 100644 (file)
@@ -38,18 +38,19 @@ SECTIONS
        *(.rodata.*)
   } :text
 
-  . = ALIGN(32);               /* Exception table */
-  __ex_table : {
+  . = ALIGN(SMP_CACHE_BYTES);
+  .data.read_mostly : {
+       /* Exception table */
        __start___ex_table = .;
-       *(__ex_table)
+       *(.ex_table)
        __stop___ex_table = .;
-  } :text
 
-  . = ALIGN(32);               /* Pre-exception table */
-  __pre_ex_table : {
+       /* Pre-exception table */
        __start___pre_ex_table = .;
-       *(__pre_ex_table)
+       *(.ex_table.pre)
        __stop___pre_ex_table = .;
+
+       *(.data.read_mostly)
   } :text
 
   .data : {                    /* Data */
@@ -59,11 +60,6 @@ SECTIONS
        CONSTRUCTORS
   } :text
 
-  . = ALIGN(SMP_CACHE_BYTES);
-  .data.read_mostly : {
-       *(.data.read_mostly)
-  } :text
-
 #ifdef LOCK_PROFILE
   . = ALIGN(32);
   __lock_profile_start = .;
index 46402c1c82e5cc44fc03645a9cf8c9f2e1cdbba2..ca6152e64ab28065d427a2724197219e90c3866d 100644 (file)
@@ -2,8 +2,10 @@
 #ifndef __X86_ASM_DEFNS_H__
 #define __X86_ASM_DEFNS_H__
 
+#ifndef COMPILE_OFFSETS
 /* NB. Auto-generated from arch/.../asm-offsets.c */
 #include <asm/asm-offsets.h>
+#endif
 #include <asm/processor.h>
 
 #ifdef __x86_64__
 #include <asm/x86_32/asm_defns.h>
 #endif
 
+/* Exception table entry */
+#ifdef __ASSEMBLY__
+# define _ASM__EXTABLE(sfx, from, to)             \
+    .section .ex_table##sfx, "a" ;                \
+    .balign 4 ;                                   \
+    .long _ASM_EX(from), _ASM_EX(to) ;            \
+    .previous
+#else
+# define _ASM__EXTABLE(sfx, from, to)             \
+    " .section .ex_table" #sfx ",\"a\"\n"         \
+    " .balign 4\n"                                \
+    " .long " _ASM_EX(from) ", " _ASM_EX(to) "\n" \
+    " .previous\n"
+#endif
+
+#define _ASM_EXTABLE(from, to)     _ASM__EXTABLE(, from, to)
+#define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
+
 #ifdef __ASSEMBLY__
 
 #define UNLIKELY_START(cond, tag) \
index 58cbe490e6b9be2d7bbc125183d729d0de4b3b8d..27c766d2db823dba4198669522824b5d060cbd36 100644 (file)
@@ -274,8 +274,6 @@ extern unsigned int video_mode, video_flags;
 /* For generic assembly code: use macros to define operation/operand sizes. */
 #define __OS          "q"  /* Operation Suffix */
 #define __OP          "r"  /* Operand Prefix */
-#define __FIXUP_ALIGN ".align 8"
-#define __FIXUP_WORD  ".quad"
 
 #elif defined(__i386__)
 
@@ -351,8 +349,6 @@ extern unsigned int video_mode, video_flags;
 /* For generic assembly code: use macros to define operation/operand sizes. */
 #define __OS          "l"  /* Operation Suffix */
 #define __OP          "e"  /* Operand Prefix */
-#define __FIXUP_ALIGN ".align 4"
-#define __FIXUP_WORD  ".long"
 
 #endif /* __i386__ */
 
index 60e64061a94d8fca70fdc6fa4e3f3cdfc346dee0..6feeb81f9d66fdace6ce2469b7e336444436c191 100644 (file)
@@ -22,6 +22,7 @@
 #include <xen/sched.h>
 #include <asm/types.h>
 #include <asm/regs.h>
+#include <asm/asm_defns.h>
 #include <asm/processor.h>
 #include <asm/i387.h>
 #include <asm/hvm/support.h>
@@ -341,10 +342,7 @@ static inline void __invvpid(int type, u16 vpid, u64 gva)
     asm volatile ( "1: " INVVPID_OPCODE MODRM_EAX_08
                    /* CF==1 or ZF==1 --> crash (ud2) */
                    "ja 2f ; ud2 ; 2:\n"
-                   ".section __ex_table,\"a\"\n"
-                   "    "__FIXUP_ALIGN"\n"
-                   "    "__FIXUP_WORD" 1b,2b\n"
-                   ".previous"
+                   _ASM_EXTABLE(1b, 2b)
                    :
                    : "a" (&operand), "c" (type)
                    : "memory" );
@@ -404,10 +402,7 @@ static inline int __vmxon(u64 addr)
         ".section .fixup,\"ax\"\n"
         "3: sub $2,%0 ; jmp 2b\n"    /* #UD or #GP --> rc = -2 */
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "   "__FIXUP_ALIGN"\n"
-        "   "__FIXUP_WORD" 1b,3b\n"
-        ".previous\n"
+        _ASM_EXTABLE(1b, 3b)
         : "=q" (rc)
         : "0" (0), "a" (&addr)
         : "memory");
index ab3360c6837a3ac75453b1b822d40b900966fac0..09fa28808102e39863bc7f3a5ece396f85102892 100644 (file)
@@ -8,6 +8,7 @@
 #include <xen/types.h>
 #include <xen/percpu.h>
 #include <xen/errno.h>
+#include <asm/asm_defns.h>
 
 #define rdmsr(msr,val1,val2) \
      __asm__ __volatile__("rdmsr" \
@@ -44,10 +45,7 @@ static inline void wrmsrl(unsigned int msr, __u64 val)
         "3: xorl %0,%0\n; xorl %1,%1\n" \
         "   movl %5,%2\n; jmp 2b\n" \
         ".previous\n" \
-        ".section __ex_table,\"a\"\n" \
-        "   "__FIXUP_ALIGN"\n" \
-        "   "__FIXUP_WORD" 1b,3b\n" \
-        ".previous\n" \
+        _ASM_EXTABLE(1b, 3b) \
         : "=a" (lo), "=d" (hi), "=&r" (_rc) \
         : "c" (msr), "2" (0), "i" (-EFAULT)); \
     val = lo | ((uint64_t)hi << 32); \
@@ -66,10 +64,7 @@ static inline int wrmsr_safe(unsigned int msr, uint64_t val)
         ".section .fixup,\"ax\"\n"
         "3: movl %5,%0\n; jmp 2b\n"
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "   "__FIXUP_ALIGN"\n"
-        "   "__FIXUP_WORD" 1b,3b\n"
-        ".previous\n"
+        _ASM_EXTABLE(1b, 3b)
         : "=&r" (_rc)
         : "c" (msr), "a" (lo), "d" (hi), "0" (0), "i" (-EFAULT));
     return _rc;
index cd43529cf8dcf46544ecf4dd101e683e44aa880d..af624dfab649b16b11861868220941ff072a14c7 100644 (file)
@@ -6,6 +6,7 @@
 #include <xen/compiler.h>
 #include <xen/errno.h>
 #include <xen/prefetch.h>
+#include <asm/asm_defns.h>
 #include <asm/page.h>
 
 #ifdef __x86_64__
@@ -155,10 +156,7 @@ struct __large_struct { unsigned long buf[100]; };
                "3:     mov %3,%0\n"                                    \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
-               ".section __ex_table,\"a\"\n"                           \
-               "       "__FIXUP_ALIGN"\n"                              \
-               "       "__FIXUP_WORD" 1b,3b\n"                         \
-               ".previous"                                             \
+               _ASM_EXTABLE(1b, 3b)                                    \
                : "=r"(err)                                             \
                : ltype (x), "m"(__m(addr)), "i"(errret), "0"(err))
 
@@ -171,10 +169,7 @@ struct __large_struct { unsigned long buf[100]; };
                "       xor"itype" %"rtype"1,%"rtype"1\n"               \
                "       jmp 2b\n"                                       \
                ".previous\n"                                           \
-               ".section __ex_table,\"a\"\n"                           \
-               "       "__FIXUP_ALIGN"\n"                              \
-               "       "__FIXUP_WORD" 1b,3b\n"                         \
-               ".previous"                                             \
+               _ASM_EXTABLE(1b, 3b)                                    \
                : "=r"(err), ltype (x)                                  \
                : "m"(__m(addr)), "i"(errret), "0"(err))
 
@@ -272,7 +267,7 @@ __copy_from_user(void *to, const void __user *from, unsigned long n)
 
 struct exception_table_entry
 {
-       unsigned long insn, fixup;
+       s32 addr, cont;
 };
 
 extern unsigned long search_exception_table(unsigned long);
index fc6916126cbdc4669e603bfd8cc39a6b5bc32a05..e81ebdfc54773b2488cff3d7841f9e5dd67c896c 100644 (file)
@@ -153,4 +153,10 @@ STR(IRQ) #nr "_interrupt:\n\t"                  \
         GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg)     \
         movl (reg),reg;
 
+#ifdef __ASSEMBLY__
+# define _ASM_EX(p) p
+#else
+# define _ASM_EX(p) #p
+#endif
+
 #endif /* __X86_32_ASM_DEFNS_H__ */
index 40aa63c1c771e29619ffbefd451a526ded556eb7..134b6d0581f167eace02988804cd41f361c4ad3c 100644 (file)
@@ -49,10 +49,7 @@ static always_inline unsigned long long __cmpxchg8b(
         "3:     movl $1,%1\n"                                           \
         "       jmp 2b\n"                                               \
         ".previous\n"                                                   \
-        ".section __ex_table,\"a\"\n"                                   \
-        "       .align 4\n"                                             \
-        "       .long 1b,3b\n"                                          \
-        ".previous"                                                     \
+        _ASM_EXTABLE(1b, 3b)                                            \
         : "=a" (_o), "=r" (_rc)                                         \
         : _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
         : "memory");
@@ -78,10 +75,7 @@ static always_inline unsigned long long __cmpxchg8b(
             "3:     movl $1,%1\n"                                       \
             "       jmp 2b\n"                                           \
             ".previous\n"                                               \
-            ".section __ex_table,\"a\"\n"                               \
-            "       .align 4\n"                                         \
-            "       .long 1b,3b\n"                                      \
-            ".previous"                                                 \
+            _ASM_EXTABLE(1b, 3b)                                        \
             : "=A" (_o), "=r" (_rc)                                     \
             : "c" ((u32)((u64)(_n)>>32)), "b" ((u32)(_n)),              \
               "m" (*__xg((volatile void *)(_p))), "0" (_o), "1" (0)     \
index b2a92d2632b73423c6c651d7d519d741f5844897..d6a52301af537eba83d1f69da577a30976796f13 100644 (file)
@@ -33,11 +33,8 @@ extern void __uaccess_var_not_u64(void);
                "4:     movl %3,%0\n"                           \
                "       jmp 3b\n"                               \
                ".previous\n"                                   \
-               ".section __ex_table,\"a\"\n"                   \
-               "       .align 4\n"                             \
-               "       .long 1b,4b\n"                          \
-               "       .long 2b,4b\n"                          \
-               ".previous"                                     \
+               _ASM_EXTABLE(1b, 4b)                            \
+               _ASM_EXTABLE(2b, 4b)                            \
                : "=r"(retval)                                  \
                : "A" (x), "r" (addr), "i"(errret), "0"(retval))
 
@@ -65,11 +62,8 @@ do {                                                                 \
                "       xorl %%edx,%%edx\n"                     \
                "       jmp 3b\n"                               \
                ".previous\n"                                   \
-               ".section __ex_table,\"a\"\n"                   \
-               "       .align 4\n"                             \
-               "       .long 1b,4b\n"                          \
-               "       .long 2b,4b\n"                          \
-               ".previous"                                     \
+               _ASM_EXTABLE(1b, 4b)                            \
+               _ASM_EXTABLE(2b, 4b)                            \
                : "=r" (retval), "=&A" (x)                      \
                : "r" (addr), "i"(errret), "0"(retval))
 
index a56ac63211af1074dcc7fa587e8ae7e00901ad84..24620bae4f4d33c31c556e5cc0b0c19e8af4ecde 100644 (file)
@@ -130,4 +130,10 @@ STR(IRQ) #nr "_interrupt:\n\t"                  \
         GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg)     \
         movq (reg),reg;
 
+#ifdef __ASSEMBLY__
+# define _ASM_EX(p) p-.
+#else
+# define _ASM_EX(p) #p "-."
+#endif
+
 #endif /* __X86_64_ASM_DEFNS_H__ */
index 4f183c053523d0a76307239df9289935db8da666..20f038bf9490d360d729873ef601f14a04c99ed5 100644 (file)
         "3:     movl $1,%1\n"                                           \
         "       jmp 2b\n"                                               \
         ".previous\n"                                                   \
-        ".section __ex_table,\"a\"\n"                                   \
-        "       .align 8\n"                                             \
-        "       .quad 1b,3b\n"                                          \
-        ".previous"                                                     \
+        _ASM_EXTABLE(1b, 3b)                                            \
         : "=a" (_o), "=r" (_rc)                                         \
         : _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
         : "memory");