[PATCH 2/3] x86-64: use PC-relative exception table entries
From: Jan Beulich @ 2010-12-23 12:57 UTC
To: xen-devel@lists.xensource.com

... thus allowing the entries to be made half their current size. Rather
than adjusting all instances to the new layout, abstract the
construction of the table entries via a macro (paralleling a similar
one in recent Linux).

Also change the name of the section (to allow easier detection of
missed cases) and merge the resulting output sections into
.data.read_mostly.

Signed-off-by: Jan Beulich <jbeulich@novell.com>
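
For reference, a minimal hosted-C sketch (illustrative only, not part
of the patch) of the x86-64 encoding being introduced: each field
becomes a 32-bit offset measured from the field's own address,
shrinking an entry from 16 bytes (two absolute 64-bit addresses) to 8.
The decode helper below mirrors the EX_FIELD() macro added to
extable.c; the scaffolding around it is hypothetical.

/* Illustrative sketch; field names follow the patch. */
#include <assert.h>
#include <stdint.h>

struct exception_table_entry {
    int32_t addr, cont;   /* offsets relative to the field itself */
};

/* Encode: store the target relative to the address of the field. */
static void ex_set_addr(struct exception_table_entry *x, unsigned long target)
{
    x->addr = (int32_t)(target - (unsigned long)&x->addr);
}

/* Decode: what the patch's x86-64 EX_FIELD() computes. */
static unsigned long ex_addr(const struct exception_table_entry *x)
{
    return (unsigned long)&x->addr + x->addr;
}

int main(void)
{
    /* Both static, so they stay within +/-2GB of each other. */
    static char fault_insn;
    static struct exception_table_entry e;

    ex_set_addr(&e, (unsigned long)&fault_insn);
    assert(ex_addr(&e) == (unsigned long)&fault_insn);
    return 0;
}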

--- 2010-12-23.orig/xen/arch/x86/cpu/amd.c
+++ 2010-12-23/xen/arch/x86/cpu/amd.c
@@ -53,10 +53,7 @@ static inline int rdmsr_amd_safe(unsigne
 		     "3: movl %6,%2\n"
 		     "   jmp 2b\n"
 		     ".previous\n"
-		     ".section __ex_table,\"a\"\n"
-		     __FIXUP_ALIGN "\n"
-		     __FIXUP_WORD " 1b,3b\n"
-		     ".previous\n"
+		     _ASM_EXTABLE(1b, 3b)
 		     : "=a" (*lo), "=d" (*hi), "=r" (err)
 		     : "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT));
 
@@ -73,10 +70,7 @@ static inline int wrmsr_amd_safe(unsigne
 		     "3: movl %6,%0\n"
 		     "   jmp 2b\n"
 		     ".previous\n"
-		     ".section __ex_table,\"a\"\n"
-		     __FIXUP_ALIGN "\n"
-		     __FIXUP_WORD " 1b,3b\n"
-		     ".previous\n"
+		     _ASM_EXTABLE(1b, 3b)
 		     : "=r" (err)
 		     : "c" (msr), "a" (lo), "d" (hi), "D" (0x9c5a203a),
 		       "0" (0), "i" (-EFAULT));
--- 2010-12-23.orig/xen/arch/x86/domain.c
+++ 2010-12-23/xen/arch/x86/domain.c
@@ -1070,10 +1070,7 @@ arch_do_vcpu_op(
         "   movl %k0,%%" #seg "\n"              \
         "   jmp 2b\n"                           \
         ".previous\n"                           \
-        ".section __ex_table,\"a\"\n"           \
-        "   .align 8\n"                         \
-        "   .quad 1b,3b\n"                      \
-        ".previous"                             \
+        _ASM_EXTABLE(1b, 3b)                    \
         : "=r" (__r) : "r" (value), "0" (__r) );\
     __r; })
 
--- 2010-12-23.orig/xen/arch/x86/extable.c
+++ 2010-12-23/xen/arch/x86/extable.c
@@ -2,6 +2,7 @@
 #include <xen/config.h>
 #include <xen/init.h>
 #include <xen/perfc.h>
+#include <xen/sort.h>
 #include <xen/spinlock.h>
 #include <asm/uaccess.h>
 
@@ -10,29 +11,58 @@ extern struct exception_table_entry __st
 extern struct exception_table_entry __start___pre_ex_table[];
 extern struct exception_table_entry __stop___pre_ex_table[];
 
-static void __init sort_exception_table(struct exception_table_entry *start,
-                                        struct exception_table_entry *end)
+#ifdef __i386__
+#define EX_FIELD(ptr, field) (ptr)->field
+#define swap_ex NULL
+#else
+#define EX_FIELD(ptr, field) ((unsigned long)&(ptr)->field + (ptr)->field)
+#endif
+
+static inline unsigned long ex_addr(const struct exception_table_entry *x)
 {
-    struct exception_table_entry *p, *q, tmp;
+	return EX_FIELD(x, addr);
+}
 
-    for ( p = start; p < end; p++ )
-    {
-        for ( q = p-1; q > start; q-- )
-            if ( p->insn > q->insn )
-                break;
-        if ( ++q != p )
-        {
-            tmp = *p;
-            memmove(q+1, q, (p-q)*sizeof(*p));
-            *q = tmp;
-        }
-    }
+static inline unsigned long ex_cont(const struct exception_table_entry *x)
+{
+	return EX_FIELD(x, cont);
+}
+
+static int __init cmp_ex(const void *a, const void *b)
+{
+	const struct exception_table_entry *l = a, *r = b;
+	unsigned long lip = ex_addr(l);
+	unsigned long rip = ex_addr(r);
+
+	/* avoid overflow */
+	if (lip > rip)
+		return 1;
+	if (lip < rip)
+		return -1;
+	return 0;
+}
+
+#ifndef swap_ex
+static void __init swap_ex(void *a, void *b, int size)
+{
+	struct exception_table_entry *l = a, *r = b, tmp;
+	long delta = b - a;
+
+	tmp = *l;
+	l->addr = r->addr + delta;
+	l->cont = r->cont + delta;
+	r->addr = tmp.addr - delta;
+	r->cont = tmp.cont - delta;
 }
+#endif
 
 void __init sort_exception_tables(void)
 {
-    sort_exception_table(__start___ex_table, __stop___ex_table);
-    sort_exception_table(__start___pre_ex_table, __stop___pre_ex_table);
+    sort(__start___ex_table, __stop___ex_table - __start___ex_table,
+         sizeof(struct exception_table_entry), cmp_ex, swap_ex);
+    sort(__start___pre_ex_table,
+         __stop___pre_ex_table - __start___pre_ex_table,
+         sizeof(struct exception_table_entry), cmp_ex, swap_ex);
 }
 
 static inline unsigned long
@@ -46,9 +76,9 @@ search_one_table(const struct exception_
     while ( first <= last )
     {
         mid = (last - first) / 2 + first;
-        diff = mid->insn - value;
+        diff = ex_addr(mid) - value;
         if (diff == 0)
-            return mid->fixup;
+            return ex_cont(mid);
         else if (diff < 0)
             first = mid+1;
         else
--- 2010-12-23.orig/xen/arch/x86/i387.c
+++ 2010-12-23/xen/arch/x86/i387.c
@@ -122,10 +122,7 @@ void restore_fpu(struct vcpu *v)
             "   pop  %%"__OP"ax       \n"
             "   jmp  1b               \n"
             ".previous                \n"
-            ".section __ex_table,\"a\"\n"
-            "   "__FIXUP_ALIGN"       \n"
-            "   "__FIXUP_WORD" 1b,2b  \n"
-            ".previous                \n"
+            _ASM_EXTABLE(1b, 2b)
             : 
             : "m" (*fpu_ctxt),
               "i" (sizeof(v->arch.guest_context.fpu_ctxt)/4)
--- 2010-12-23.orig/xen/arch/x86/usercopy.c
+++ 2010-12-23/xen/arch/x86/usercopy.c
@@ -36,12 +36,9 @@ unsigned long __copy_to_user_ll(void __u
         "3:  lea 0(%3,%0,"STR(BYTES_PER_LONG)"),%0\n"
         "    jmp 2b\n"
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "    "__FIXUP_ALIGN"\n"
-        "    "__FIXUP_WORD" 4b,5b\n"
-        "    "__FIXUP_WORD" 0b,3b\n"
-        "    "__FIXUP_WORD" 1b,2b\n"
-        ".previous"
+        _ASM_EXTABLE(4b, 5b)
+        _ASM_EXTABLE(0b, 3b)
+        _ASM_EXTABLE(1b, 2b)
         : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
         : "0" (__n), "1" (to), "2" (from), "3" (__n)
         : "memory" );
@@ -82,12 +79,9 @@ __copy_from_user_ll(void *to, const void
         "    pop  %0\n"
         "    jmp 2b\n"
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "    "__FIXUP_ALIGN"\n"
-        "    "__FIXUP_WORD" 4b,5b\n"
-        "    "__FIXUP_WORD" 0b,3b\n"
-        "    "__FIXUP_WORD" 1b,6b\n"
-        ".previous"
+        _ASM_EXTABLE(4b, 5b)
+        _ASM_EXTABLE(0b, 3b)
+        _ASM_EXTABLE(1b, 6b)
         : "=&c" (__n), "=&D" (__d0), "=&S" (__d1), "=&r" (__d2)
         : "0" (__n), "1" (to), "2" (from), "3" (__n)
         : "memory" );
--- 2010-12-23.orig/xen/arch/x86/x86_32/asm-offsets.c
+++ 2010-12-23/xen/arch/x86/x86_32/asm-offsets.c
@@ -3,6 +3,7 @@
  * This code generates raw asm output which is post-processed
  * to extract and format the required data.
  */
+#define COMPILE_OFFSETS
 
 #include <xen/config.h>
 #include <xen/perfc.h>
--- 2010-12-23.orig/xen/arch/x86/x86_32/entry.S
+++ 2010-12-23/xen/arch/x86/x86_32/entry.S
@@ -119,16 +119,12 @@ failsafe_callback:
         movl  %eax,UREGS_gs(%esp)
         jmp   test_all_events
 .previous
-.section __pre_ex_table,"a"
-        .long .Lft1,.Lfx1
-        .long .Lft2,.Lfx1
-        .long .Lft3,.Lfx1
-        .long .Lft4,.Lfx1
-        .long .Lft5,.Lfx1
-.previous
-.section __ex_table,"a"
-        .long .Ldf1,failsafe_callback
-.previous
+        _ASM_PRE_EXTABLE(.Lft1, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft2, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft3, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft4, .Lfx1)
+        _ASM_PRE_EXTABLE(.Lft5, .Lfx1)
+        _ASM_EXTABLE(.Ldf1, failsafe_callback)
 
         ALIGN
 restore_all_xen:
@@ -392,18 +388,26 @@ UNLIKELY_END(bounce_vm86_3)
         movl TRAPBOUNCE_eip(%edx),%eax
         movl %eax,UREGS_eip+4(%esp)
         ret
-.section __ex_table,"a"
-        .long  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
-        .long  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
-        .long .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
-        .long .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
-        .long .Lft14,domain_crash_synchronous , .Lft15,domain_crash_synchronous
-        .long .Lft16,domain_crash_synchronous , .Lft17,domain_crash_synchronous
-        .long .Lft18,domain_crash_synchronous , .Lft19,domain_crash_synchronous
-        .long .Lft20,domain_crash_synchronous , .Lft21,domain_crash_synchronous
-        .long .Lft22,domain_crash_synchronous , .Lft23,domain_crash_synchronous
-        .long .Lft24,domain_crash_synchronous , .Lft25,domain_crash_synchronous
-.previous
+        _ASM_EXTABLE(.Lft6,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft7,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft8,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft9,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft14, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft15, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft16, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft17, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft18, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft19, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft20, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft21, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft22, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft23, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft24, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft25, domain_crash_synchronous)
 
 domain_crash_synchronous_string:
         .asciz "domain_crash_sync called from entry.S (%lx)\n"
--- 2010-12-23.orig/xen/arch/x86/x86_64/asm-offsets.c
+++ 2010-12-23/xen/arch/x86/x86_64/asm-offsets.c
@@ -3,6 +3,7 @@
  * This code generates raw asm output which is post-processed
  * to extract and format the required data.
  */
+#define COMPILE_OFFSETS
 
 #include <xen/config.h>
 #include <xen/perfc.h>
--- 2010-12-23.orig/xen/arch/x86/x86_64/compat/entry.S
+++ 2010-12-23/xen/arch/x86/x86_64/compat/entry.S
@@ -197,12 +197,8 @@ compat_failsafe_callback:
 1:      call  compat_create_bounce_frame
         jmp   compat_test_all_events
 .previous
-.section __pre_ex_table,"a"
-	.quad .Lft0,.Lfx0
-.previous
-.section __ex_table,"a"
-        .quad .Ldf0,compat_failsafe_callback
-.previous
+        _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
+        _ASM_EXTABLE(.Ldf0, compat_failsafe_callback)
 
 /* %rdx: trap_bounce, %rbx: struct vcpu */
 ENTRY(compat_post_handle_exception)
@@ -330,15 +326,19 @@ UNLIKELY_END(compat_bounce_failsafe)
         xorl  %edi,%edi
         jmp   .Lft13
 .previous
-.section __ex_table,"a"
-        .quad  .Lft1,domain_crash_synchronous  ,  .Lft2,compat_crash_page_fault
-        .quad  .Lft3,compat_crash_page_fault_4 ,  .Lft4,domain_crash_synchronous
-        .quad  .Lft5,compat_crash_page_fault_4 ,  .Lft6,compat_crash_page_fault_8
-        .quad  .Lft7,compat_crash_page_fault   ,  .Lft8,compat_crash_page_fault
-        .quad  .Lft9,compat_crash_page_fault_12, .Lft10,compat_crash_page_fault_8
-        .quad .Lft11,compat_crash_page_fault_4 , .Lft12,compat_crash_page_fault
-        .quad .Lft13,.Lfx13
-.previous
+        _ASM_EXTABLE(.Lft1,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft2,  compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft3,  compat_crash_page_fault_4)
+        _ASM_EXTABLE(.Lft4,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft5,  compat_crash_page_fault_4)
+        _ASM_EXTABLE(.Lft6,  compat_crash_page_fault_8)
+        _ASM_EXTABLE(.Lft7,  compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft8,  compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft9,  compat_crash_page_fault_12)
+        _ASM_EXTABLE(.Lft10, compat_crash_page_fault_8)
+        _ASM_EXTABLE(.Lft11, compat_crash_page_fault_4)
+        _ASM_EXTABLE(.Lft12, compat_crash_page_fault)
+        _ASM_EXTABLE(.Lft13, .Lfx13)
 
 compat_crash_page_fault_12:
         addl  $4,%esi
@@ -356,9 +356,7 @@ compat_crash_page_fault:
         xorl  %edi,%edi
         jmp   .Lft14
 .previous
-.section __ex_table,"a"
-        .quad .Lft14,.Lfx14
-.previous
+        _ASM_EXTABLE(.Lft14, .Lfx14)
 
 .section .rodata, "a", @progbits
 
--- 2010-12-23.orig/xen/arch/x86/x86_64/entry.S
+++ 2010-12-23/xen/arch/x86/x86_64/entry.S
@@ -84,12 +84,8 @@ failsafe_callback:
 1:      call  create_bounce_frame
         jmp   test_all_events
 .previous
-.section __pre_ex_table,"a"
-        .quad .Lft0,.Lfx0
-.previous
-.section __ex_table,"a"
-        .quad .Ldf0,failsafe_callback
-.previous
+        _ASM_PRE_EXTABLE(.Lft0, .Lfx0)
+        _ASM_EXTABLE(.Ldf0, failsafe_callback)
 
         ALIGN
 /* No special register assumptions. */
@@ -412,14 +408,18 @@ UNLIKELY_END(bounce_failsafe)
         jz    domain_crash_synchronous
         movq  %rax,UREGS_rip+8(%rsp)
         ret
-.section __ex_table,"a"
-        .quad  .Lft2,domain_crash_synchronous ,  .Lft3,domain_crash_synchronous
-        .quad  .Lft4,domain_crash_synchronous ,  .Lft5,domain_crash_synchronous
-        .quad  .Lft6,domain_crash_synchronous ,  .Lft7,domain_crash_synchronous
-        .quad  .Lft8,domain_crash_synchronous ,  .Lft9,domain_crash_synchronous
-        .quad .Lft10,domain_crash_synchronous , .Lft11,domain_crash_synchronous
-        .quad .Lft12,domain_crash_synchronous , .Lft13,domain_crash_synchronous
-.previous
+        _ASM_EXTABLE(.Lft2,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft3,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft4,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft5,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft6,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft7,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft8,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft9,  domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft10, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft11, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft12, domain_crash_synchronous)
+        _ASM_EXTABLE(.Lft13, domain_crash_synchronous)
 
 domain_crash_synchronous_string:
         .asciz "domain_crash_sync called from entry.S\n"
--- 2010-12-23.orig/xen/arch/x86/x86_64/mm.c
+++ 2010-12-23/xen/arch/x86/x86_64/mm.c
@@ -1119,10 +1119,7 @@ long do_set_segment_base(unsigned int wh
             "2:   xorl %k0,%k0        \n"
             "     jmp  1b             \n"
             ".previous                \n"
-            ".section __ex_table,\"a\"\n"
-            "    .align 8             \n"
-            "    .quad 1b,2b          \n"
-            ".previous                  "
+            _ASM_EXTABLE(1b, 2b)
             : : "r" (base&0xffff) );
         break;
 
--- 2010-12-23.orig/xen/arch/x86/xen.lds.S
+++ 2010-12-23/xen/arch/x86/xen.lds.S
@@ -38,18 +38,19 @@ SECTIONS
        *(.rodata.*)
   } :text
 
-  . = ALIGN(32);               /* Exception table */
-  __ex_table : {
+  . = ALIGN(SMP_CACHE_BYTES);
+  .data.read_mostly : {
+       /* Exception table */
        __start___ex_table = .;
-       *(__ex_table)
+       *(.ex_table)
        __stop___ex_table = .;
-  } :text
 
-  . = ALIGN(32);               /* Pre-exception table */
-  __pre_ex_table : {
+       /* Pre-exception table */
        __start___pre_ex_table = .;
-       *(__pre_ex_table)
+       *(.ex_table.pre)
        __stop___pre_ex_table = .;
+
+       *(.data.read_mostly)
   } :text
 
   .data : {                    /* Data */
@@ -59,11 +60,6 @@ SECTIONS
        CONSTRUCTORS
   } :text
 
-  . = ALIGN(SMP_CACHE_BYTES);
-  .data.read_mostly : {
-       *(.data.read_mostly)
-  } :text
-
 #ifdef LOCK_PROFILE
   . = ALIGN(32);
   __lock_profile_start = .;
--- 2010-12-23.orig/xen/include/asm-x86/asm_defns.h
+++ 2010-12-23/xen/include/asm-x86/asm_defns.h
@@ -2,8 +2,10 @@
 #ifndef __X86_ASM_DEFNS_H__
 #define __X86_ASM_DEFNS_H__
 
+#ifndef COMPILE_OFFSETS
 /* NB. Auto-generated from arch/.../asm-offsets.c */
 #include <asm/asm-offsets.h>
+#endif
 #include <asm/processor.h>
 
 #ifdef __x86_64__
@@ -12,6 +14,24 @@
 #include <asm/x86_32/asm_defns.h>
 #endif
 
+/* Exception table entry */
+#ifdef __ASSEMBLY__
+# define _ASM__EXTABLE(sfx, from, to)             \
+    .section .ex_table##sfx, "a" ;                \
+    .balign 4 ;                                   \
+    .long _ASM_EX(from), _ASM_EX(to) ;            \
+    .previous
+#else
+# define _ASM__EXTABLE(sfx, from, to)             \
+    " .section .ex_table" #sfx ",\"a\"\n"         \
+    " .balign 4\n"                                \
+    " .long " _ASM_EX(from) ", " _ASM_EX(to) "\n" \
+    " .previous\n"
+#endif
+
+#define _ASM_EXTABLE(from, to)     _ASM__EXTABLE(, from, to)
+#define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
+
 #ifdef __ASSEMBLY__
 
 #define UNLIKELY_START(cond, tag) \
--- 2010-12-23.orig/xen/include/asm-x86/config.h
+++ 2010-12-23/xen/include/asm-x86/config.h
@@ -274,8 +274,6 @@ extern unsigned int video_mode, video_fl
 /* For generic assembly code: use macros to define operation/operand sizes. */
 #define __OS          "q"  /* Operation Suffix */
 #define __OP          "r"  /* Operand Prefix */
-#define __FIXUP_ALIGN ".align 8"
-#define __FIXUP_WORD  ".quad"
 
 #elif defined(__i386__)
 
@@ -351,8 +349,6 @@ extern unsigned int video_mode, video_fl
 /* For generic assembly code: use macros to define operation/operand sizes. */
 #define __OS          "l"  /* Operation Suffix */
 #define __OP          "e"  /* Operand Prefix */
-#define __FIXUP_ALIGN ".align 4"
-#define __FIXUP_WORD  ".long"
 
 #endif /* __i386__ */
 
--- 2010-12-23.orig/xen/include/asm-x86/hvm/vmx/vmx.h
+++ 2010-12-23/xen/include/asm-x86/hvm/vmx/vmx.h
@@ -22,6 +22,7 @@
 #include <xen/sched.h>
 #include <asm/types.h>
 #include <asm/regs.h>
+#include <asm/asm_defns.h>
 #include <asm/processor.h>
 #include <asm/i387.h>
 #include <asm/hvm/support.h>
@@ -341,10 +342,7 @@ static inline void __invvpid(int type, u
     asm volatile ( "1: " INVVPID_OPCODE MODRM_EAX_08
                    /* CF==1 or ZF==1 --> crash (ud2) */
                    "ja 2f ; ud2 ; 2:\n"
-                   ".section __ex_table,\"a\"\n"
-                   "    "__FIXUP_ALIGN"\n"
-                   "    "__FIXUP_WORD" 1b,2b\n"
-                   ".previous"
+                   _ASM_EXTABLE(1b, 2b)
                    :
                    : "a" (&operand), "c" (type)
                    : "memory" );
@@ -404,10 +402,7 @@ static inline int __vmxon(u64 addr)
         ".section .fixup,\"ax\"\n"
         "3: sub $2,%0 ; jmp 2b\n"    /* #UD or #GP --> rc = -2 */
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "   "__FIXUP_ALIGN"\n"
-        "   "__FIXUP_WORD" 1b,3b\n"
-        ".previous\n"
+        _ASM_EXTABLE(1b, 3b)
         : "=q" (rc)
         : "0" (0), "a" (&addr)
         : "memory");
--- 2010-12-23.orig/xen/include/asm-x86/msr.h
+++ 2010-12-23/xen/include/asm-x86/msr.h
@@ -8,6 +8,7 @@
 #include <xen/types.h>
 #include <xen/percpu.h>
 #include <xen/errno.h>
+#include <asm/asm_defns.h>
 
 #define rdmsr(msr,val1,val2) \
      __asm__ __volatile__("rdmsr" \
@@ -44,10 +45,7 @@ static inline void wrmsrl(unsigned int m
         "3: xorl %0,%0\n; xorl %1,%1\n" \
         "   movl %5,%2\n; jmp 2b\n" \
         ".previous\n" \
-        ".section __ex_table,\"a\"\n" \
-        "   "__FIXUP_ALIGN"\n" \
-        "   "__FIXUP_WORD" 1b,3b\n" \
-        ".previous\n" \
+        _ASM_EXTABLE(1b, 3b) \
         : "=a" (lo), "=d" (hi), "=&r" (_rc) \
         : "c" (msr), "2" (0), "i" (-EFAULT)); \
     val = lo | ((uint64_t)hi << 32); \
@@ -66,10 +64,7 @@ static inline int wrmsr_safe(unsigned in
         ".section .fixup,\"ax\"\n"
         "3: movl %5,%0\n; jmp 2b\n"
         ".previous\n"
-        ".section __ex_table,\"a\"\n"
-        "   "__FIXUP_ALIGN"\n"
-        "   "__FIXUP_WORD" 1b,3b\n"
-        ".previous\n"
+        _ASM_EXTABLE(1b, 3b)
         : "=&r" (_rc)
         : "c" (msr), "a" (lo), "d" (hi), "0" (0), "i" (-EFAULT));
     return _rc;
--- 2010-12-23.orig/xen/include/asm-x86/uaccess.h
+++ 2010-12-23/xen/include/asm-x86/uaccess.h
@@ -6,6 +6,7 @@
 #include <xen/compiler.h>
 #include <xen/errno.h>
 #include <xen/prefetch.h>
+#include <asm/asm_defns.h>
 #include <asm/page.h>
 
 #ifdef __x86_64__
@@ -155,10 +156,7 @@ struct __large_struct { unsigned long bu
 		"3:	mov %3,%0\n"					\
 		"	jmp 2b\n"					\
 		".previous\n"						\
-		".section __ex_table,\"a\"\n"				\
-		"	"__FIXUP_ALIGN"\n"				\
-		"	"__FIXUP_WORD" 1b,3b\n"				\
-		".previous"						\
+		_ASM_EXTABLE(1b, 3b)					\
 		: "=r"(err)						\
 		: ltype (x), "m"(__m(addr)), "i"(errret), "0"(err))
 
@@ -171,10 +169,7 @@ struct __large_struct { unsigned long bu
 		"	xor"itype" %"rtype"1,%"rtype"1\n"		\
 		"	jmp 2b\n"					\
 		".previous\n"						\
-		".section __ex_table,\"a\"\n"				\
-		"	"__FIXUP_ALIGN"\n"				\
-		"	"__FIXUP_WORD" 1b,3b\n"				\
-		".previous"						\
+		_ASM_EXTABLE(1b, 3b)					\
 		: "=r"(err), ltype (x)					\
 		: "m"(__m(addr)), "i"(errret), "0"(err))
 
@@ -272,7 +267,7 @@ __copy_from_user(void *to, const void __
 
 struct exception_table_entry
 {
-	unsigned long insn, fixup;
+	s32 addr, cont;
 };
 
 extern unsigned long search_exception_table(unsigned long);
--- 2010-12-23.orig/xen/include/asm-x86/x86_32/asm_defns.h
+++ 2010-12-23/xen/include/asm-x86/x86_32/asm_defns.h
@@ -153,4 +153,10 @@ STR(IRQ) #nr "_interrupt:\n\t"          
         GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg)     \
         movl (reg),reg;
 
+#ifdef __ASSEMBLY__
+# define _ASM_EX(p) p
+#else
+# define _ASM_EX(p) #p
+#endif
+
 #endif /* __X86_32_ASM_DEFNS_H__ */
--- 2010-12-23.orig/xen/include/asm-x86/x86_32/system.h
+++ 2010-12-23/xen/include/asm-x86/x86_32/system.h
@@ -49,10 +49,7 @@ static always_inline unsigned long long 
         "3:     movl $1,%1\n"                                           \
         "       jmp 2b\n"                                               \
         ".previous\n"                                                   \
-        ".section __ex_table,\"a\"\n"                                   \
-        "       .align 4\n"                                             \
-        "       .long 1b,3b\n"                                          \
-        ".previous"                                                     \
+        _ASM_EXTABLE(1b, 3b)                                            \
         : "=a" (_o), "=r" (_rc)                                         \
         : _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
         : "memory");
@@ -78,10 +75,7 @@ static always_inline unsigned long long 
             "3:     movl $1,%1\n"                                       \
             "       jmp 2b\n"                                           \
             ".previous\n"                                               \
-            ".section __ex_table,\"a\"\n"                               \
-            "       .align 4\n"                                         \
-            "       .long 1b,3b\n"                                      \
-            ".previous"                                                 \
+            _ASM_EXTABLE(1b, 3b)                                        \
             : "=A" (_o), "=r" (_rc)                                     \
             : "c" ((u32)((u64)(_n)>>32)), "b" ((u32)(_n)),              \
               "m" (*__xg((volatile void *)(_p))), "0" (_o), "1" (0)     \
--- 2010-12-23.orig/xen/include/asm-x86/x86_32/uaccess.h
+++ 2010-12-23/xen/include/asm-x86/x86_32/uaccess.h
@@ -33,11 +33,8 @@ extern void __uaccess_var_not_u64(void);
 		"4:	movl %3,%0\n"				\
 		"	jmp 3b\n"				\
 		".previous\n"					\
-		".section __ex_table,\"a\"\n"			\
-		"	.align 4\n"				\
-		"	.long 1b,4b\n"				\
-		"	.long 2b,4b\n"				\
-		".previous"					\
+		_ASM_EXTABLE(1b, 4b)				\
+		_ASM_EXTABLE(2b, 4b)				\
 		: "=r"(retval)					\
 		: "A" (x), "r" (addr), "i"(errret), "0"(retval))
 
@@ -65,11 +62,8 @@ do {									\
 		"	xorl %%edx,%%edx\n"			\
 		"	jmp 3b\n"				\
 		".previous\n"					\
-		".section __ex_table,\"a\"\n"			\
-		"	.align 4\n"				\
-		"	.long 1b,4b\n"				\
-		"	.long 2b,4b\n"				\
-		".previous"					\
+		_ASM_EXTABLE(1b, 4b)				\
+		_ASM_EXTABLE(2b, 4b)				\
 		: "=r" (retval), "=&A" (x)			\
 		: "r" (addr), "i"(errret), "0"(retval))
 
--- 2010-12-23.orig/xen/include/asm-x86/x86_64/asm_defns.h
+++ 2010-12-23/xen/include/asm-x86/x86_64/asm_defns.h
@@ -130,4 +130,10 @@ STR(IRQ) #nr "_interrupt:\n\t"          
         GET_CPUINFO_FIELD(CPUINFO_current_vcpu,reg)     \
         movq (reg),reg;
 
+#ifdef __ASSEMBLY__
+# define _ASM_EX(p) p-.
+#else
+# define _ASM_EX(p) #p "-."
+#endif
+
 #endif /* __X86_64_ASM_DEFNS_H__ */
--- 2010-12-23.orig/xen/include/asm-x86/x86_64/system.h
+++ 2010-12-23/xen/include/asm-x86/x86_64/system.h
@@ -19,10 +19,7 @@
         "3:     movl $1,%1\n"                                           \
         "       jmp 2b\n"                                               \
         ".previous\n"                                                   \
-        ".section __ex_table,\"a\"\n"                                   \
-        "       .align 8\n"                                             \
-        "       .quad 1b,3b\n"                                          \
-        ".previous"                                                     \
+        _ASM_EXTABLE(1b, 3b)                                            \
         : "=a" (_o), "=r" (_rc)                                         \
         : _regtype (_n), "m" (*__xg((volatile void *)_p)), "0" (_o), "1" (0) \
         : "memory");


