Format cleanup for readability. Tab out backslashes.
author     Matthew Dillon <dillon@dragonflybsd.org>
Sat, 28 Jun 2003 07:00:58 +0000 (07:00 +0000)
committer  Matthew Dillon <dillon@dragonflybsd.org>
Sat, 28 Jun 2003 07:00:58 +0000 (07:00 +0000)
sys/i386/icu/icu_vector.s
sys/i386/isa/icu_vector.s
sys/platform/pc32/icu/icu_vector.s
sys/platform/pc32/isa/icu_vector.s

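The change below is whitespace-only: in the multi-line FAST_INTR and INTR assembler macros, the line-continuation backslashes are moved from directly after each statement out to a common tab-aligned column, so they form a straight edge down the right-hand side of every macro. A representative before/after pair, taken from the first hunk (no instructions are added, removed, or reordered):

    old:       pushl   %ecx ; \
    new:       pushl   %ecx ;                                                  \
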
diff --git a/sys/i386/icu/icu_vector.s b/sys/i386/icu/icu_vector.s
index 5c15eaf..dd2ded2 100644
@@ -1,7 +1,7 @@
 /*
  *     from: vector.s, 386BSD 0.1 unknown origin
  * $FreeBSD: src/sys/i386/isa/icu_vector.s,v 1.14.2.2 2000/07/18 21:12:42 dfr Exp $
- * $DragonFly: src/sys/i386/icu/Attic/icu_vector.s,v 1.5 2003/06/28 02:09:52 dillon Exp $
+ * $DragonFly: src/sys/i386/icu/Attic/icu_vector.s,v 1.6 2003/06/28 07:00:58 dillon Exp $
  */
 
 /*
  * Macros for interrupt interrupt entry, call to handler, and exit.
  */
 
-#define        FAST_INTR(irq_num, vec_name, enable_icus) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   %eax ;          /* save only call-used registers */ \
-       pushl   %ecx ; \
-       pushl   %edx ; \
-       pushl   %ds ; \
-       MAYBE_PUSHL_ES ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%ds ; \
-       MAYBE_MOVW_AX_ES ; \
-       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
+#define        FAST_INTR(irq_num, vec_name, enable_icus)                       \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   %eax ;          /* save only call-used registers */     \
+       pushl   %ecx ;                                                  \
+       pushl   %edx ;                                                  \
+       pushl   %ds ;                                                   \
+       MAYBE_PUSHL_ES ;                                                \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%ds ;                                               \
+       MAYBE_MOVW_AX_ES ;                                              \
+       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ;                      \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
        call    *_intr_handler + (irq_num) * 4 ; /* do the work ASAP */ \
        enable_icus ;           /* (re)enable ASAP (helps edge trigger?) */ \
-       addl    $4,%esp ; \
-       incl    _cnt+V_INTR ;   /* book-keeping can wait */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
+       addl    $4,%esp ;                                               \
+       incl    _cnt+V_INTR ;   /* book-keeping can wait */             \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
        movl    _curthread, %ecx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;      \
-       jge     1f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;                              \
+       jge     1f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ecx),%eax ; /* unmasking pending ints? */ \
-       notl    %eax ; \
-       andl    _ipending,%eax ; \
-       jne     2f ;            /* yes, maybe handle them */ \
-1: ; \
-       MEXITCOUNT ; \
-       MAYBE_POPL_ES ; \
-       popl    %ds ; \
-       popl    %edx ; \
-       popl    %ecx ; \
-       popl    %eax ; \
-       iret ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
+       notl    %eax ;                                                  \
+       andl    _ipending,%eax ;                                        \
+       jne     2f ;            /* yes, maybe handle them */            \
+1: ;                                                                   \
+       MEXITCOUNT ;                                                    \
+       MAYBE_POPL_ES ;                                                 \
+       popl    %ds ;                                                   \
+       popl    %edx ;                                                  \
+       popl    %ecx ;                                                  \
+       popl    %eax ;                                                  \
+       iret ;                                                          \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
        cmpb    $3,_intr_nesting_level ;        /* is there enough stack? */ \
-       jae     1b ;            /* no, return */ \
-       movl    TD_MACH+MTD_CPL(%ecx),%eax ;    \
-       /* XXX next line is probably unnecessary now. */ \
+       jae     1b ;            /* no, return */                        \
+       movl    TD_MACH+MTD_CPL(%ecx),%eax ;                            \
+       /* XXX next line is probably unnecessary now. */                \
        movl    $HWI_MASK|SWI_MASK,TD_MACH+MTD_CPL(%ecx) ; /* limit nesting ... */ \
-       incb    _intr_nesting_level ;   /* ... really limit it ... */ \
+       incb    _intr_nesting_level ;   /* ... really limit it ... */   \
        sti ;                   /* ... to do this as early as possible */ \
-       MAYBE_POPL_ES ;         /* discard most of thin frame ... */ \
-       popl    %ecx ;          /* ... original %ds ... */ \
-       popl    %edx ; \
-       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */ \
-       pushal ;                /* build fat frame (grrr) ... */ \
-       pushl   %ecx ;          /* ... actually %ds ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
+       MAYBE_POPL_ES ;         /* discard most of thin frame ... */    \
+       popl    %ecx ;          /* ... original %ds ... */              \
+       popl    %edx ;                                                  \
+       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */               \
+       pushal ;                /* build fat frame (grrr) ... */        \
+       pushl   %ecx ;          /* ... actually %ds ... */              \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
        movl    (3+8+0)*4(%esp),%ecx ;  /* ... %ecx from thin frame ... */ \
-       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */ \
-       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */ \
-       pushl   %eax ; \
-       subl    $4,%esp ;       /* junk for unit number */ \
-       MEXITCOUNT ; \
+       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */      \
+       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */   \
+       pushl   %eax ;                                                  \
+       subl    $4,%esp ;       /* junk for unit number */              \
+       MEXITCOUNT ;                                                    \
        jmp     _doreti
 
 #define        INTR(irq_num, vec_name, icu, enable_icus, reg, maybe_extra_ipending) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   $0 ;            /* dummy error code */ \
-       pushl   $0 ;            /* dummy trap type */ \
-       pushal ; \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   $0 ;            /* dummy error code */                  \
+       pushl   $0 ;            /* dummy trap type */                   \
+       pushal ;                                                        \
        pushl   %ds ;           /* save our data and extra segments ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
        mov     $KDSEL,%ax ;    /* ... and reload with kernel's own ... */ \
-       mov     %ax,%ds ;       /* ... early for obsolete reasons */ \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
-       maybe_extra_ipending ; \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       orb     $IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       enable_icus ; \
+       mov     %ax,%ds ;       /* ... early for obsolete reasons */    \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
+       maybe_extra_ipending ;                                          \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       orb     $IRQ_BIT(irq_num),%al ;                                 \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       enable_icus ;                                                   \
        movl    _curthread, %ebx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;      \
-       jge     2f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;                              \
+       jge     2f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ebx),%eax ; /* is this interrupt masked by the cpl? */ \
-       testb   $IRQ_BIT(irq_num),%reg ; \
-       jne     2f ; \
-       incb    _intr_nesting_level ; \
-__CONCAT(Xresume,irq_num): ; \
+       testb   $IRQ_BIT(irq_num),%reg ;                                \
+       jne     2f ;                                                    \
+       incb    _intr_nesting_level ;                                   \
+__CONCAT(Xresume,irq_num): ;                                           \
        FAKE_MCOUNT(13*4(%esp)) ;       /* XXX late to avoid double count */ \
-       incl    _cnt+V_INTR ;   /* tally interrupts */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
-       movl    TD_MACH+MTD_CPL(%ebx),%eax ; \
-       pushl   %eax ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
-       orl     _intr_mask + (irq_num) * 4,%eax ; \
-       movl    %eax,TD_MACH+MTD_CPL(%ebx) ; \
-       sti ; \
-       call    *_intr_handler + (irq_num) * 4 ; \
+       incl    _cnt+V_INTR ;   /* tally interrupts */                  \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
+       movl    TD_MACH+MTD_CPL(%ebx),%eax ;                            \
+       pushl   %eax ;                                                  \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
+       orl     _intr_mask + (irq_num) * 4,%eax ;                       \
+       movl    %eax,TD_MACH+MTD_CPL(%ebx) ;                            \
+       sti ;                                                           \
+       call    *_intr_handler + (irq_num) * 4 ;                        \
        cli ;                   /* must unmask _imen and icu atomically */ \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       andb    $~IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       sti ;                   /* XXX _doreti repeats the cli/sti */ \
-       MEXITCOUNT ; \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       andb    $~IRQ_BIT(irq_num),%al ;                                \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       sti ;                   /* XXX _doreti repeats the cli/sti */   \
+       MEXITCOUNT ;                                                    \
        /* We could usually avoid the following jmp by inlining some of */ \
-       /* _doreti, but it's probably better to use less cache. */ \
-       jmp     _doreti ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
-       /* XXX skip mcounting here to avoid double count */ \
-       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ; \
-       movl    $TDPRI_CRIT,_reqpri ; \
-       popl    %fs ; \
-       popl    %es ; \
-       popl    %ds ; \
-       popal ; \
-       addl    $4+4,%esp ; \
+       /* _doreti, but it's probably better to use less cache. */      \
+       jmp     _doreti ;                                               \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
+       /* XXX skip mcounting here to avoid double count */             \
+       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ;       \
+       movl    $TDPRI_CRIT,_reqpri ;                                   \
+       popl    %fs ;                                                   \
+       popl    %es ;                                                   \
+       popl    %ds ;                                                   \
+       popal ;                                                         \
+       addl    $4+4,%esp ;                                             \
        iret
 
 MCOUNT_LABEL(bintr)
diff --git a/sys/i386/isa/icu_vector.s b/sys/i386/isa/icu_vector.s
index a4ec0df..8667a5e 100644
@@ -1,7 +1,7 @@
 /*
  *     from: vector.s, 386BSD 0.1 unknown origin
  * $FreeBSD: src/sys/i386/isa/icu_vector.s,v 1.14.2.2 2000/07/18 21:12:42 dfr Exp $
- * $DragonFly: src/sys/i386/isa/Attic/icu_vector.s,v 1.5 2003/06/28 02:09:52 dillon Exp $
+ * $DragonFly: src/sys/i386/isa/Attic/icu_vector.s,v 1.6 2003/06/28 07:00:58 dillon Exp $
  */
 
 /*
  * Macros for interrupt interrupt entry, call to handler, and exit.
  */
 
-#define        FAST_INTR(irq_num, vec_name, enable_icus) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   %eax ;          /* save only call-used registers */ \
-       pushl   %ecx ; \
-       pushl   %edx ; \
-       pushl   %ds ; \
-       MAYBE_PUSHL_ES ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%ds ; \
-       MAYBE_MOVW_AX_ES ; \
-       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
+#define        FAST_INTR(irq_num, vec_name, enable_icus)                       \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   %eax ;          /* save only call-used registers */     \
+       pushl   %ecx ;                                                  \
+       pushl   %edx ;                                                  \
+       pushl   %ds ;                                                   \
+       MAYBE_PUSHL_ES ;                                                \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%ds ;                                               \
+       MAYBE_MOVW_AX_ES ;                                              \
+       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ;                      \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
        call    *_intr_handler + (irq_num) * 4 ; /* do the work ASAP */ \
        enable_icus ;           /* (re)enable ASAP (helps edge trigger?) */ \
-       addl    $4,%esp ; \
-       incl    _cnt+V_INTR ;   /* book-keeping can wait */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
+       addl    $4,%esp ;                                               \
+       incl    _cnt+V_INTR ;   /* book-keeping can wait */             \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
        movl    _curthread, %ecx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;      \
-       jge     1f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;                              \
+       jge     1f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ecx),%eax ; /* unmasking pending ints? */ \
-       notl    %eax ; \
-       andl    _ipending,%eax ; \
-       jne     2f ;            /* yes, maybe handle them */ \
-1: ; \
-       MEXITCOUNT ; \
-       MAYBE_POPL_ES ; \
-       popl    %ds ; \
-       popl    %edx ; \
-       popl    %ecx ; \
-       popl    %eax ; \
-       iret ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
+       notl    %eax ;                                                  \
+       andl    _ipending,%eax ;                                        \
+       jne     2f ;            /* yes, maybe handle them */            \
+1: ;                                                                   \
+       MEXITCOUNT ;                                                    \
+       MAYBE_POPL_ES ;                                                 \
+       popl    %ds ;                                                   \
+       popl    %edx ;                                                  \
+       popl    %ecx ;                                                  \
+       popl    %eax ;                                                  \
+       iret ;                                                          \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
        cmpb    $3,_intr_nesting_level ;        /* is there enough stack? */ \
-       jae     1b ;            /* no, return */ \
-       movl    TD_MACH+MTD_CPL(%ecx),%eax ;    \
-       /* XXX next line is probably unnecessary now. */ \
+       jae     1b ;            /* no, return */                        \
+       movl    TD_MACH+MTD_CPL(%ecx),%eax ;                            \
+       /* XXX next line is probably unnecessary now. */                \
        movl    $HWI_MASK|SWI_MASK,TD_MACH+MTD_CPL(%ecx) ; /* limit nesting ... */ \
-       incb    _intr_nesting_level ;   /* ... really limit it ... */ \
+       incb    _intr_nesting_level ;   /* ... really limit it ... */   \
        sti ;                   /* ... to do this as early as possible */ \
-       MAYBE_POPL_ES ;         /* discard most of thin frame ... */ \
-       popl    %ecx ;          /* ... original %ds ... */ \
-       popl    %edx ; \
-       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */ \
-       pushal ;                /* build fat frame (grrr) ... */ \
-       pushl   %ecx ;          /* ... actually %ds ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
+       MAYBE_POPL_ES ;         /* discard most of thin frame ... */    \
+       popl    %ecx ;          /* ... original %ds ... */              \
+       popl    %edx ;                                                  \
+       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */               \
+       pushal ;                /* build fat frame (grrr) ... */        \
+       pushl   %ecx ;          /* ... actually %ds ... */              \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
        movl    (3+8+0)*4(%esp),%ecx ;  /* ... %ecx from thin frame ... */ \
-       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */ \
-       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */ \
-       pushl   %eax ; \
-       subl    $4,%esp ;       /* junk for unit number */ \
-       MEXITCOUNT ; \
+       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */      \
+       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */   \
+       pushl   %eax ;                                                  \
+       subl    $4,%esp ;       /* junk for unit number */              \
+       MEXITCOUNT ;                                                    \
        jmp     _doreti
 
 #define        INTR(irq_num, vec_name, icu, enable_icus, reg, maybe_extra_ipending) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   $0 ;            /* dummy error code */ \
-       pushl   $0 ;            /* dummy trap type */ \
-       pushal ; \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   $0 ;            /* dummy error code */                  \
+       pushl   $0 ;            /* dummy trap type */                   \
+       pushal ;                                                        \
        pushl   %ds ;           /* save our data and extra segments ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
        mov     $KDSEL,%ax ;    /* ... and reload with kernel's own ... */ \
-       mov     %ax,%ds ;       /* ... early for obsolete reasons */ \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
-       maybe_extra_ipending ; \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       orb     $IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       enable_icus ; \
+       mov     %ax,%ds ;       /* ... early for obsolete reasons */    \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
+       maybe_extra_ipending ;                                          \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       orb     $IRQ_BIT(irq_num),%al ;                                 \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       enable_icus ;                                                   \
        movl    _curthread, %ebx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;      \
-       jge     2f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;                              \
+       jge     2f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ebx),%eax ; /* is this interrupt masked by the cpl? */ \
-       testb   $IRQ_BIT(irq_num),%reg ; \
-       jne     2f ; \
-       incb    _intr_nesting_level ; \
-__CONCAT(Xresume,irq_num): ; \
+       testb   $IRQ_BIT(irq_num),%reg ;                                \
+       jne     2f ;                                                    \
+       incb    _intr_nesting_level ;                                   \
+__CONCAT(Xresume,irq_num): ;                                           \
        FAKE_MCOUNT(13*4(%esp)) ;       /* XXX late to avoid double count */ \
-       incl    _cnt+V_INTR ;   /* tally interrupts */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
-       movl    TD_MACH+MTD_CPL(%ebx),%eax ; \
-       pushl   %eax ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
-       orl     _intr_mask + (irq_num) * 4,%eax ; \
-       movl    %eax,TD_MACH+MTD_CPL(%ebx) ; \
-       sti ; \
-       call    *_intr_handler + (irq_num) * 4 ; \
+       incl    _cnt+V_INTR ;   /* tally interrupts */                  \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
+       movl    TD_MACH+MTD_CPL(%ebx),%eax ;                            \
+       pushl   %eax ;                                                  \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
+       orl     _intr_mask + (irq_num) * 4,%eax ;                       \
+       movl    %eax,TD_MACH+MTD_CPL(%ebx) ;                            \
+       sti ;                                                           \
+       call    *_intr_handler + (irq_num) * 4 ;                        \
        cli ;                   /* must unmask _imen and icu atomically */ \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       andb    $~IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       sti ;                   /* XXX _doreti repeats the cli/sti */ \
-       MEXITCOUNT ; \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       andb    $~IRQ_BIT(irq_num),%al ;                                \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       sti ;                   /* XXX _doreti repeats the cli/sti */   \
+       MEXITCOUNT ;                                                    \
        /* We could usually avoid the following jmp by inlining some of */ \
-       /* _doreti, but it's probably better to use less cache. */ \
-       jmp     _doreti ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
-       /* XXX skip mcounting here to avoid double count */ \
-       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ; \
-       movl    $TDPRI_CRIT,_reqpri ; \
-       popl    %fs ; \
-       popl    %es ; \
-       popl    %ds ; \
-       popal ; \
-       addl    $4+4,%esp ; \
+       /* _doreti, but it's probably better to use less cache. */      \
+       jmp     _doreti ;                                               \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
+       /* XXX skip mcounting here to avoid double count */             \
+       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ;       \
+       movl    $TDPRI_CRIT,_reqpri ;                                   \
+       popl    %fs ;                                                   \
+       popl    %es ;                                                   \
+       popl    %ds ;                                                   \
+       popal ;                                                         \
+       addl    $4+4,%esp ;                                             \
        iret
 
 MCOUNT_LABEL(bintr)
diff --git a/sys/platform/pc32/icu/icu_vector.s b/sys/platform/pc32/icu/icu_vector.s
index 77cecc3..218b1f7 100644
@@ -1,7 +1,7 @@
 /*
  *     from: vector.s, 386BSD 0.1 unknown origin
  * $FreeBSD: src/sys/i386/isa/icu_vector.s,v 1.14.2.2 2000/07/18 21:12:42 dfr Exp $
- * $DragonFly: src/sys/platform/pc32/icu/icu_vector.s,v 1.5 2003/06/28 02:09:52 dillon Exp $
+ * $DragonFly: src/sys/platform/pc32/icu/icu_vector.s,v 1.6 2003/06/28 07:00:58 dillon Exp $
  */
 
 /*
  * Macros for interrupt interrupt entry, call to handler, and exit.
  */
 
-#define        FAST_INTR(irq_num, vec_name, enable_icus) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   %eax ;          /* save only call-used registers */ \
-       pushl   %ecx ; \
-       pushl   %edx ; \
-       pushl   %ds ; \
-       MAYBE_PUSHL_ES ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%ds ; \
-       MAYBE_MOVW_AX_ES ; \
-       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
+#define        FAST_INTR(irq_num, vec_name, enable_icus)                       \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   %eax ;          /* save only call-used registers */     \
+       pushl   %ecx ;                                                  \
+       pushl   %edx ;                                                  \
+       pushl   %ds ;                                                   \
+       MAYBE_PUSHL_ES ;                                                \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%ds ;                                               \
+       MAYBE_MOVW_AX_ES ;                                              \
+       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ;                      \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
        call    *_intr_handler + (irq_num) * 4 ; /* do the work ASAP */ \
        enable_icus ;           /* (re)enable ASAP (helps edge trigger?) */ \
-       addl    $4,%esp ; \
-       incl    _cnt+V_INTR ;   /* book-keeping can wait */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
+       addl    $4,%esp ;                                               \
+       incl    _cnt+V_INTR ;   /* book-keeping can wait */             \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
        movl    _curthread, %ecx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;      \
-       jge     1f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;                              \
+       jge     1f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ecx),%eax ; /* unmasking pending ints? */ \
-       notl    %eax ; \
-       andl    _ipending,%eax ; \
-       jne     2f ;            /* yes, maybe handle them */ \
-1: ; \
-       MEXITCOUNT ; \
-       MAYBE_POPL_ES ; \
-       popl    %ds ; \
-       popl    %edx ; \
-       popl    %ecx ; \
-       popl    %eax ; \
-       iret ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
+       notl    %eax ;                                                  \
+       andl    _ipending,%eax ;                                        \
+       jne     2f ;            /* yes, maybe handle them */            \
+1: ;                                                                   \
+       MEXITCOUNT ;                                                    \
+       MAYBE_POPL_ES ;                                                 \
+       popl    %ds ;                                                   \
+       popl    %edx ;                                                  \
+       popl    %ecx ;                                                  \
+       popl    %eax ;                                                  \
+       iret ;                                                          \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
        cmpb    $3,_intr_nesting_level ;        /* is there enough stack? */ \
-       jae     1b ;            /* no, return */ \
-       movl    TD_MACH+MTD_CPL(%ecx),%eax ;    \
-       /* XXX next line is probably unnecessary now. */ \
+       jae     1b ;            /* no, return */                        \
+       movl    TD_MACH+MTD_CPL(%ecx),%eax ;                            \
+       /* XXX next line is probably unnecessary now. */                \
        movl    $HWI_MASK|SWI_MASK,TD_MACH+MTD_CPL(%ecx) ; /* limit nesting ... */ \
-       incb    _intr_nesting_level ;   /* ... really limit it ... */ \
+       incb    _intr_nesting_level ;   /* ... really limit it ... */   \
        sti ;                   /* ... to do this as early as possible */ \
-       MAYBE_POPL_ES ;         /* discard most of thin frame ... */ \
-       popl    %ecx ;          /* ... original %ds ... */ \
-       popl    %edx ; \
-       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */ \
-       pushal ;                /* build fat frame (grrr) ... */ \
-       pushl   %ecx ;          /* ... actually %ds ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
+       MAYBE_POPL_ES ;         /* discard most of thin frame ... */    \
+       popl    %ecx ;          /* ... original %ds ... */              \
+       popl    %edx ;                                                  \
+       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */               \
+       pushal ;                /* build fat frame (grrr) ... */        \
+       pushl   %ecx ;          /* ... actually %ds ... */              \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
        movl    (3+8+0)*4(%esp),%ecx ;  /* ... %ecx from thin frame ... */ \
-       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */ \
-       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */ \
-       pushl   %eax ; \
-       subl    $4,%esp ;       /* junk for unit number */ \
-       MEXITCOUNT ; \
+       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */      \
+       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */   \
+       pushl   %eax ;                                                  \
+       subl    $4,%esp ;       /* junk for unit number */              \
+       MEXITCOUNT ;                                                    \
        jmp     _doreti
 
 #define        INTR(irq_num, vec_name, icu, enable_icus, reg, maybe_extra_ipending) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   $0 ;            /* dummy error code */ \
-       pushl   $0 ;            /* dummy trap type */ \
-       pushal ; \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   $0 ;            /* dummy error code */                  \
+       pushl   $0 ;            /* dummy trap type */                   \
+       pushal ;                                                        \
        pushl   %ds ;           /* save our data and extra segments ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
        mov     $KDSEL,%ax ;    /* ... and reload with kernel's own ... */ \
-       mov     %ax,%ds ;       /* ... early for obsolete reasons */ \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
-       maybe_extra_ipending ; \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       orb     $IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       enable_icus ; \
+       mov     %ax,%ds ;       /* ... early for obsolete reasons */    \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
+       maybe_extra_ipending ;                                          \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       orb     $IRQ_BIT(irq_num),%al ;                                 \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       enable_icus ;                                                   \
        movl    _curthread, %ebx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;      \
-       jge     2f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;                              \
+       jge     2f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ebx),%eax ; /* is this interrupt masked by the cpl? */ \
-       testb   $IRQ_BIT(irq_num),%reg ; \
-       jne     2f ; \
-       incb    _intr_nesting_level ; \
-__CONCAT(Xresume,irq_num): ; \
+       testb   $IRQ_BIT(irq_num),%reg ;                                \
+       jne     2f ;                                                    \
+       incb    _intr_nesting_level ;                                   \
+__CONCAT(Xresume,irq_num): ;                                           \
        FAKE_MCOUNT(13*4(%esp)) ;       /* XXX late to avoid double count */ \
-       incl    _cnt+V_INTR ;   /* tally interrupts */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
-       movl    TD_MACH+MTD_CPL(%ebx),%eax ; \
-       pushl   %eax ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
-       orl     _intr_mask + (irq_num) * 4,%eax ; \
-       movl    %eax,TD_MACH+MTD_CPL(%ebx) ; \
-       sti ; \
-       call    *_intr_handler + (irq_num) * 4 ; \
+       incl    _cnt+V_INTR ;   /* tally interrupts */                  \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
+       movl    TD_MACH+MTD_CPL(%ebx),%eax ;                            \
+       pushl   %eax ;                                                  \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
+       orl     _intr_mask + (irq_num) * 4,%eax ;                       \
+       movl    %eax,TD_MACH+MTD_CPL(%ebx) ;                            \
+       sti ;                                                           \
+       call    *_intr_handler + (irq_num) * 4 ;                        \
        cli ;                   /* must unmask _imen and icu atomically */ \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       andb    $~IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       sti ;                   /* XXX _doreti repeats the cli/sti */ \
-       MEXITCOUNT ; \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       andb    $~IRQ_BIT(irq_num),%al ;                                \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       sti ;                   /* XXX _doreti repeats the cli/sti */   \
+       MEXITCOUNT ;                                                    \
        /* We could usually avoid the following jmp by inlining some of */ \
-       /* _doreti, but it's probably better to use less cache. */ \
-       jmp     _doreti ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
-       /* XXX skip mcounting here to avoid double count */ \
-       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ; \
-       movl    $TDPRI_CRIT,_reqpri ; \
-       popl    %fs ; \
-       popl    %es ; \
-       popl    %ds ; \
-       popal ; \
-       addl    $4+4,%esp ; \
+       /* _doreti, but it's probably better to use less cache. */      \
+       jmp     _doreti ;                                               \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
+       /* XXX skip mcounting here to avoid double count */             \
+       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ;       \
+       movl    $TDPRI_CRIT,_reqpri ;                                   \
+       popl    %fs ;                                                   \
+       popl    %es ;                                                   \
+       popl    %ds ;                                                   \
+       popal ;                                                         \
+       addl    $4+4,%esp ;                                             \
        iret
 
 MCOUNT_LABEL(bintr)
diff --git a/sys/platform/pc32/isa/icu_vector.s b/sys/platform/pc32/isa/icu_vector.s
index b2bdf4f..f73614c 100644
@@ -1,7 +1,7 @@
 /*
  *     from: vector.s, 386BSD 0.1 unknown origin
  * $FreeBSD: src/sys/i386/isa/icu_vector.s,v 1.14.2.2 2000/07/18 21:12:42 dfr Exp $
- * $DragonFly: src/sys/platform/pc32/isa/Attic/icu_vector.s,v 1.5 2003/06/28 02:09:52 dillon Exp $
+ * $DragonFly: src/sys/platform/pc32/isa/Attic/icu_vector.s,v 1.6 2003/06/28 07:00:58 dillon Exp $
  */
 
 /*
  * Macros for interrupt interrupt entry, call to handler, and exit.
  */
 
-#define        FAST_INTR(irq_num, vec_name, enable_icus) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   %eax ;          /* save only call-used registers */ \
-       pushl   %ecx ; \
-       pushl   %edx ; \
-       pushl   %ds ; \
-       MAYBE_PUSHL_ES ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%ds ; \
-       MAYBE_MOVW_AX_ES ; \
-       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
+#define        FAST_INTR(irq_num, vec_name, enable_icus)                       \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   %eax ;          /* save only call-used registers */     \
+       pushl   %ecx ;                                                  \
+       pushl   %edx ;                                                  \
+       pushl   %ds ;                                                   \
+       MAYBE_PUSHL_ES ;                                                \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%ds ;                                               \
+       MAYBE_MOVW_AX_ES ;                                              \
+       FAKE_MCOUNT((4+ACTUALLY_PUSHED)*4(%esp)) ;                      \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
        call    *_intr_handler + (irq_num) * 4 ; /* do the work ASAP */ \
        enable_icus ;           /* (re)enable ASAP (helps edge trigger?) */ \
-       addl    $4,%esp ; \
-       incl    _cnt+V_INTR ;   /* book-keeping can wait */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
+       addl    $4,%esp ;                                               \
+       incl    _cnt+V_INTR ;   /* book-keeping can wait */             \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
        movl    _curthread, %ecx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;      \
-       jge     1f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ecx) ;                              \
+       jge     1f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ecx),%eax ; /* unmasking pending ints? */ \
-       notl    %eax ; \
-       andl    _ipending,%eax ; \
-       jne     2f ;            /* yes, maybe handle them */ \
-1: ; \
-       MEXITCOUNT ; \
-       MAYBE_POPL_ES ; \
-       popl    %ds ; \
-       popl    %edx ; \
-       popl    %ecx ; \
-       popl    %eax ; \
-       iret ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
+       notl    %eax ;                                                  \
+       andl    _ipending,%eax ;                                        \
+       jne     2f ;            /* yes, maybe handle them */            \
+1: ;                                                                   \
+       MEXITCOUNT ;                                                    \
+       MAYBE_POPL_ES ;                                                 \
+       popl    %ds ;                                                   \
+       popl    %edx ;                                                  \
+       popl    %ecx ;                                                  \
+       popl    %eax ;                                                  \
+       iret ;                                                          \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
        cmpb    $3,_intr_nesting_level ;        /* is there enough stack? */ \
-       jae     1b ;            /* no, return */ \
-       movl    TD_MACH+MTD_CPL(%ecx),%eax ;    \
-       /* XXX next line is probably unnecessary now. */ \
+       jae     1b ;            /* no, return */                        \
+       movl    TD_MACH+MTD_CPL(%ecx),%eax ;                            \
+       /* XXX next line is probably unnecessary now. */                \
        movl    $HWI_MASK|SWI_MASK,TD_MACH+MTD_CPL(%ecx) ; /* limit nesting ... */ \
-       incb    _intr_nesting_level ;   /* ... really limit it ... */ \
+       incb    _intr_nesting_level ;   /* ... really limit it ... */   \
        sti ;                   /* ... to do this as early as possible */ \
-       MAYBE_POPL_ES ;         /* discard most of thin frame ... */ \
-       popl    %ecx ;          /* ... original %ds ... */ \
-       popl    %edx ; \
-       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */ \
-       pushal ;                /* build fat frame (grrr) ... */ \
-       pushl   %ecx ;          /* ... actually %ds ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
-       mov     $KDSEL,%ax ; \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
+       MAYBE_POPL_ES ;         /* discard most of thin frame ... */    \
+       popl    %ecx ;          /* ... original %ds ... */              \
+       popl    %edx ;                                                  \
+       xchgl   %eax,4(%esp) ;  /* orig %eax; save cpl */               \
+       pushal ;                /* build fat frame (grrr) ... */        \
+       pushl   %ecx ;          /* ... actually %ds ... */              \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
+       mov     $KDSEL,%ax ;                                            \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
        movl    (3+8+0)*4(%esp),%ecx ;  /* ... %ecx from thin frame ... */ \
-       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */ \
-       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */ \
-       pushl   %eax ; \
-       subl    $4,%esp ;       /* junk for unit number */ \
-       MEXITCOUNT ; \
+       movl    %ecx,(3+6)*4(%esp) ;    /* ... to fat frame ... */      \
+       movl    (3+8+1)*4(%esp),%eax ;  /* ... cpl from thin frame */   \
+       pushl   %eax ;                                                  \
+       subl    $4,%esp ;       /* junk for unit number */              \
+       MEXITCOUNT ;                                                    \
        jmp     _doreti
 
 #define        INTR(irq_num, vec_name, icu, enable_icus, reg, maybe_extra_ipending) \
-       .text ; \
-       SUPERALIGN_TEXT ; \
-IDTVEC(vec_name) ; \
-       pushl   $0 ;            /* dummy error code */ \
-       pushl   $0 ;            /* dummy trap type */ \
-       pushal ; \
+       .text ;                                                         \
+       SUPERALIGN_TEXT ;                                               \
+IDTVEC(vec_name) ;                                                     \
+       pushl   $0 ;            /* dummy error code */                  \
+       pushl   $0 ;            /* dummy trap type */                   \
+       pushal ;                                                        \
        pushl   %ds ;           /* save our data and extra segments ... */ \
-       pushl   %es ; \
-       pushl   %fs ; \
+       pushl   %es ;                                                   \
+       pushl   %fs ;                                                   \
        mov     $KDSEL,%ax ;    /* ... and reload with kernel's own ... */ \
-       mov     %ax,%ds ;       /* ... early for obsolete reasons */ \
-       mov     %ax,%es ; \
-       mov     $KPSEL,%ax ; \
-       mov     %ax,%fs ; \
-       maybe_extra_ipending ; \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       orb     $IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       enable_icus ; \
+       mov     %ax,%ds ;       /* ... early for obsolete reasons */    \
+       mov     %ax,%es ;                                               \
+       mov     $KPSEL,%ax ;                                            \
+       mov     %ax,%fs ;                                               \
+       maybe_extra_ipending ;                                          \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       orb     $IRQ_BIT(irq_num),%al ;                                 \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       enable_icus ;                                                   \
        movl    _curthread, %ebx ; /* are we in a critical section? */  \
-       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;      \
-       jge     2f ;    \
+       cmpl    $TDPRI_CRIT,TD_PRI(%ebx) ;                              \
+       jge     2f ;                                                    \
        movl    TD_MACH+MTD_CPL(%ebx),%eax ; /* is this interrupt masked by the cpl? */ \
-       testb   $IRQ_BIT(irq_num),%reg ; \
-       jne     2f ; \
-       incb    _intr_nesting_level ; \
-__CONCAT(Xresume,irq_num): ; \
+       testb   $IRQ_BIT(irq_num),%reg ;                                \
+       jne     2f ;                                                    \
+       incb    _intr_nesting_level ;                                   \
+__CONCAT(Xresume,irq_num): ;                                           \
        FAKE_MCOUNT(13*4(%esp)) ;       /* XXX late to avoid double count */ \
-       incl    _cnt+V_INTR ;   /* tally interrupts */ \
-       movl    _intr_countp + (irq_num) * 4,%eax ; \
-       incl    (%eax) ; \
-       movl    TD_MACH+MTD_CPL(%ebx),%eax ; \
-       pushl   %eax ; \
-       pushl   _intr_unit + (irq_num) * 4 ; \
-       orl     _intr_mask + (irq_num) * 4,%eax ; \
-       movl    %eax,TD_MACH+MTD_CPL(%ebx) ; \
-       sti ; \
-       call    *_intr_handler + (irq_num) * 4 ; \
+       incl    _cnt+V_INTR ;   /* tally interrupts */                  \
+       movl    _intr_countp + (irq_num) * 4,%eax ;                     \
+       incl    (%eax) ;                                                \
+       movl    TD_MACH+MTD_CPL(%ebx),%eax ;                            \
+       pushl   %eax ;                                                  \
+       pushl   _intr_unit + (irq_num) * 4 ;                            \
+       orl     _intr_mask + (irq_num) * 4,%eax ;                       \
+       movl    %eax,TD_MACH+MTD_CPL(%ebx) ;                            \
+       sti ;                                                           \
+       call    *_intr_handler + (irq_num) * 4 ;                        \
        cli ;                   /* must unmask _imen and icu atomically */ \
-       movb    _imen + IRQ_BYTE(irq_num),%al ; \
-       andb    $~IRQ_BIT(irq_num),%al ; \
-       movb    %al,_imen + IRQ_BYTE(irq_num) ; \
-       outb    %al,$icu+ICU_IMR_OFFSET ; \
-       sti ;                   /* XXX _doreti repeats the cli/sti */ \
-       MEXITCOUNT ; \
+       movb    _imen + IRQ_BYTE(irq_num),%al ;                         \
+       andb    $~IRQ_BIT(irq_num),%al ;                                \
+       movb    %al,_imen + IRQ_BYTE(irq_num) ;                         \
+       outb    %al,$icu+ICU_IMR_OFFSET ;                               \
+       sti ;                   /* XXX _doreti repeats the cli/sti */   \
+       MEXITCOUNT ;                                                    \
        /* We could usually avoid the following jmp by inlining some of */ \
-       /* _doreti, but it's probably better to use less cache. */ \
-       jmp     _doreti ; \
-; \
-       ALIGN_TEXT ; \
-2: ; \
-       /* XXX skip mcounting here to avoid double count */ \
-       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ; \
-       movl    $TDPRI_CRIT,_reqpri ; \
-       popl    %fs ; \
-       popl    %es ; \
-       popl    %ds ; \
-       popal ; \
-       addl    $4+4,%esp ; \
+       /* _doreti, but it's probably better to use less cache. */      \
+       jmp     _doreti ;                                               \
+;                                                                      \
+       ALIGN_TEXT ;                                                    \
+2: ;                                                                   \
+       /* XXX skip mcounting here to avoid double count */             \
+       orb     $IRQ_BIT(irq_num),_ipending + IRQ_BYTE(irq_num) ;       \
+       movl    $TDPRI_CRIT,_reqpri ;                                   \
+       popl    %fs ;                                                   \
+       popl    %es ;                                                   \
+       popl    %ds ;                                                   \
+       popal ;                                                         \
+       addl    $4+4,%esp ;                                             \
        iret
 
 MCOUNT_LABEL(bintr)