ChangeSet 1.2216, 2005/03/26 13:30:45-08:00, [EMAIL PROTECTED]

        [PATCH] m68knommu: cleanup ColdFire specific trap handling asm code
        
        A couple of fixes to the ColdFire specific trap handling code:
        
        . do not clear the stack alignment bits in the exception frame (in Lreturn);
          clearing them breaks the return stack badly if applications run with an
          unaligned stack
        . use the generated asm-offsets instead of local offset definitions
          (see the first sketch after this list)
        . use the THREAD_SIZE definition instead of hard coding the stack size
          (this is needed to support 4k stacks; see the second sketch after this list)
        . some instruction optimization (use instructions that encode to fewer bytes)
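
        For reference, the PT_* constants used in the diff below come from the
        kernel's generated asm-offsets header rather than hand-maintained defines.
        A minimal sketch of the usual asm-offsets.c pattern is shown here for
        illustration only; the DEFINE macro is the conventional one and the
        pt_regs field names are assumptions, not copied from this tree:

        /* asm-offsets.c sketch: the compiler evaluates the offsetof() expressions
         * and a build step turns the emitted markers into "#define PT_D0 ..."
         * lines in asm-offsets.h, so the assembly always matches the C struct
         * layout instead of relying on hand-kept offsets.
         */
        #include <linux/stddef.h>
        #include <asm/ptrace.h>

        #define DEFINE(sym, val) \
                asm volatile("\n->" #sym " %0 " #val : : "i" (val))

        int main(void)
        {
                DEFINE(PT_D0,      offsetof(struct pt_regs, d0));
                DEFINE(PT_ORIG_D0, offsetof(struct pt_regs, orig_d0));
                DEFINE(PT_SR,      offsetof(struct pt_regs, sr));
                DEFINE(PT_PC,      offsetof(struct pt_regs, pc));
                return 0;
        }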
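
        Likewise, the "andl #-THREAD_SIZE,%dN" sequences in the diff work because
        the kernel stack is THREAD_SIZE aligned, so masking the stack pointer with
        -THREAD_SIZE lands on the stack base where thread_info lives.  A standalone
        C sketch of just that arithmetic (thread_info_base() is an illustrative
        name, not a kernel helper):

        #include <stdint.h>

        #define THREAD_SIZE 8192        /* example value; the patch lets this shrink to 4096 */

        /* For a power-of-two THREAD_SIZE, -THREAD_SIZE == ~(THREAD_SIZE - 1),
         * so the AND clears the low-order bits and keeps the aligned stack base,
         * exactly what "andl #-THREAD_SIZE,%d2" does in the assembly.
         */
        static inline uintptr_t thread_info_base(uintptr_t sp)
        {
                return sp & ~(uintptr_t)(THREAD_SIZE - 1);
        }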
        
        Patches originally from Philippe De Muyter <[EMAIL PROTECTED]>
        
        Signed-off-by: Greg Ungerer <[EMAIL PROTECTED]>
        Signed-off-by: Linus Torvalds <[EMAIL PROTECTED]>



 entry.S |   76 +++++++++++++++++++++++++++-------------------------------------
 1 files changed, 33 insertions(+), 43 deletions(-)


diff -Nru a/arch/m68knommu/platform/5307/entry.S b/arch/m68knommu/platform/5307/entry.S
--- a/arch/m68knommu/platform/5307/entry.S      2005-03-26 17:29:58 -08:00
+++ b/arch/m68knommu/platform/5307/entry.S      2005-03-26 17:29:58 -08:00
@@ -5,6 +5,7 @@
  *  Copyright (C) 1998  D. Jeff Dionne <[EMAIL PROTECTED]>,
  *                      Kenneth Albanowski <[EMAIL PROTECTED]>,
  *  Copyright (C) 2000  Lineo Inc. (www.lineo.com) 
+ *  Copyright (C) 2004  Macq Electronique SA. (www.macqel.com)
  *
  * Based on:
  *
@@ -22,6 +23,7 @@
  * ColdFire support by Greg Ungerer ([EMAIL PROTECTED])
  * 5307 fixes by David W. Miller
  * linux 2.4 support David McCullough <[EMAIL PROTECTED]>
+ * Bug, speed and maintainability fixes by Philippe De Muyter <[EMAIL PROTECTED]>
  */
 
 #include <linux/config.h>
@@ -35,7 +37,7 @@
 #include <asm/asm-offsets.h>
 #include <asm/entry.h>
 
-.data
+.bss
 
 sw_ksp:
 .long  0
@@ -59,7 +61,7 @@
        move    #0x2000,%sr             /* enable intrs again */
 
        movel   #-LENOSYS,%d2
-       movel   %d2,LD0(%sp)            /* default return value in d0 */
+       movel   %d2,PT_D0(%sp)          /* default return value in d0 */
                                        /* original D0 is in orig_d0 */
        movel   %d0,%d2
 
@@ -77,14 +79,14 @@
        lsrl    #2,%d2
 
        movel   %sp,%d2                 /* get thread_info pointer */
-       andl    #0xffffe000,%d2         /* at start of 8k kernel stack */
+       andl    #-THREAD_SIZE,%d2       /* at start of kernel stack */
        movel   %d2,%a0
        btst    #TIF_SYSCALL_TRACE,%a0@(TI_FLAGS)
        bnes    1f
 
        movel   %d3,%a0
        jbsr    %a0@
-       movel   %d0,%sp@(LD0)           /* save the return value */
+       movel   %d0,%sp@(PT_D0)         /* save the return value */
        jra     ret_from_exception
 1:
        subql   #4,%sp
@@ -94,7 +96,7 @@
        addql   #4,%sp
        movel   %d3,%a0
        jbsr    %a0@
-       movel   %d0,%sp@(LD0)           /* save the return value */
+       movel   %d0,%sp@(PT_D0)         /* save the return value */
        subql   #4,%sp                  /* dummy return address */
        SAVE_SWITCH_STACK
        jbsr    syscall_trace
@@ -104,20 +106,20 @@
        addql   #4,%sp
 
 ret_from_exception:
-       btst    #5,%sp@(LSR)            /* check if returning to kernel */
+       btst    #5,%sp@(PT_SR)          /* check if returning to kernel */
        jeq     Luser_return            /* if so, skip resched, signals */
 
 Lkernel_return:
-       moveml  %sp@,%d1-%d5/%a0-%a2
-       addl    #32,%sp                 /* space for 8 regs */
-       movel   %sp@+,%d0
-       addql   #4,%sp                  /* orig d0 */
-       addl    %sp@+,%sp               /* stk adj */
+       moveml  %sp@,%d1-%d5/%a0-%a2
+       lea     %sp@(32),%sp            /* space for 8 regs */
+       movel   %sp@+,%d0
+       addql   #4,%sp                  /* orig d0 */
+       addl    %sp@+,%sp               /* stk adj */
        rte
 
 Luser_return:
        movel   %sp,%d1                 /* get thread_info pointer */
-       andl    #0xffffe000,%d1         /* at base of 8k kernel stack */
+       andl    #-THREAD_SIZE,%d1       /* at base of kernel stack */
        movel   %d1,%a0
        movel   %a0@(TI_FLAGS),%d1      /* get thread_info->flags */
        andl    #_TIF_WORK_MASK,%d1
@@ -126,27 +128,17 @@
 Lreturn:
        move    #0x2700,%sr             /* disable intrs */
        movel   sw_usp,%a0              /* get usp */
-       moveml  %sp@(LFORMATVEC),%d1-%d2 /* copy exception */
-       moveml  %d1-%d2,%a0@(-8)
-       bclr    #5,%a0@(-8)             /* clear format byte, bit 5 to make
-                                        * stack appear modulo 4 which it WILL
-                                        * be when we do the rte because it was
-                                        * generated in setup_frame
-                                        */
-       bclr    #4,%a0@(-8)             /* clear format byte, bit 4 to make 
-                                        * stack appear modulo 4 which it WILL
-                                        * be when we do the rte because it was
-                                        * generated in setup_frame
-                                        */
-       moveml  %sp@,%d1-%d5/%a0-%a2
-       addl    #32,%sp                 /* space for 8 regs */
-       movel   %sp@+,%d0
-       addql   #4,%sp                  /* orig d0 */
-       addl    %sp@+,%sp               /* stk adj */
-       addql   #8,%sp                  /* remove exception */
-       movel   %sp,sw_ksp              /* save ksp */
-       movel   sw_usp,%sp              /* restore usp */
-       subql   #8,%sp                  /* set exception */
+       movel   %sp@(PT_PC),%a0@-       /* copy exception program counter */
+       movel   %sp@(PT_FORMATVEC),%a0@- /* copy exception format/vector/sr */
+       moveml  %sp@,%d1-%d5/%a0-%a2
+       lea     %sp@(32),%sp            /* space for 8 regs */
+       movel   %sp@+,%d0
+       addql   #4,%sp                  /* orig d0 */
+       addl    %sp@+,%sp               /* stk adj */
+       addql   #8,%sp                  /* remove exception */
+       movel   %sp,sw_ksp              /* save ksp */
+       subql   #8,sw_usp               /* set exception */
+       movel   sw_usp,%sp              /* restore usp */
        rte
 
 Lwork_to_do:
@@ -160,8 +152,7 @@
        subql   #4,%sp                  /* dummy return address */
        SAVE_SWITCH_STACK
        pea     %sp@(SWITCH_STACK_SIZE)
-       clr     %d1
-       movel   %d1,%sp@-
+       clrl    %sp@-
        jsr     do_signal
        addql   #8,%sp
        RESTORE_SWITCH_STACK
@@ -176,10 +167,10 @@
 ENTRY(inthandler)
        SAVE_ALL
        moveq   #-1,%d0
-       movel   %d0,%sp@(LORIG_D0)
+       movel   %d0,%sp@(PT_ORIG_D0)
        addql   #1,local_irq_count
 
-       movew   %sp@(LFORMATVEC),%d0    /* put exception # in d0 */
+       movew   %sp@(PT_FORMATVEC),%d0  /* put exception # in d0 */
        andl    #0x03fc,%d0             /* mask out vector only */
 
        leal    per_cpu__kstat+STAT_IRQ,%a0
@@ -197,7 +188,7 @@
 
        movel   %a0@,%a0                /* get function to call */
        jbsr    %a0@                    /* call vector handler */
-       addl    #12,%sp                 /* pop parameters off stack */
+       lea     %sp@(12),%sp            /* pop parameters off stack */
 
        bra     ret_from_interrupt      /* this was fallthrough */
 
@@ -210,15 +201,14 @@
 ENTRY(fasthandler)
        SAVE_LOCAL
 
-       movew   %sp@(LFORMATVEC),%d0
+       movew   %sp@(PT_FORMATVEC),%d0
        andl    #0x03fc,%d0             /* mask out vector only */
 
        leal    per_cpu__kstat+STAT_IRQ,%a0
        addql   #1,%a0@(%d0)
 
        movel   %sp,%sp@-               /* push regs arg onto stack */
-       clrl    %d1
-       movel   %d1,%sp@-               /* push devid arg */
+       clrl    %sp@-                   /* push devid arg */
        lsrl    #2,%d0                  /* calculate real vector # */
        movel   %d0,%sp@-               /* push vector # on stack */
 
@@ -226,7 +216,7 @@
        lea     irq_list,%a0
        movel   %a0@(%d0),%a0           /* get function to call */
        jbsr    %a0@                    /* call vector handler */
-       addl    #12,%sp                 /* pop parameters off stack */
+       lea     %sp@(12),%sp            /* pop parameters off stack */
 
        RESTORE_LOCAL
 
@@ -236,7 +226,7 @@
 1:
        RESTORE_ALL
 2:
-       moveb   %sp@(LSR),%d0
+       moveb   %sp@(PT_SR),%d0
        andl    #0x7,%d0
        jhi     1b
 