We can't emulate stwu directly, since the store may land inside the
current exception frame and corrupt the exception stack. So the real
store operation has to be done in the exception return code instead.

First we allocate a trampoline exception frame below the kprobed
function's stack and copy the current exception frame into it. Then we
can do the real store operation to complete the 'stwu', and reroute r1
to the trampoline frame to finish the exception migration.
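
Roughly, in pseudo-C (illustrative only; the pt_regs access is
schematic, not the actual code):

	if (test_thread_flag(TIF_EMULATE_STACK_STORE)) {
		old_sp = (unsigned long)regs + INT_FRAME_SIZE;	/* pre-stwu r1 */
		new_sp = regs->gpr[1];				/* post-stwu r1 */
		tramp  = new_sp - INT_FRAME_SIZE;
		memcpy((void *)tramp, regs, INT_FRAME_SIZE);	/* migrate frame */
		regs = (struct pt_regs *)tramp;			/* reroute r1 */
		*(unsigned long *)new_sp = old_sp;		/* deferred store */
		clear_thread_flag(TIF_EMULATE_STACK_STORE);
	}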

Signed-off-by: Tiejun Chen <tiejun.c...@windriver.com>
---
v4:

* improve register usage
* use copy_and_flush() to replace memcpy(), since it only clobbers r0,
  r6 and LR explicitly
* retest
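
For reviewers, here is a minimal standalone userspace model of the
migration (purely illustrative; the names, sizes and layout below are
stand-ins, not kernel code or kernel API):

	#include <stdio.h>
	#include <string.h>
	#include <stdint.h>

	#define INT_FRAME_SIZE	64	/* stand-in for the real value */

	int main(void)
	{
		static uint8_t stack[1024];

		/* SP values around a kprobed 'stwu r1,-96(r1)' */
		uint8_t *pre_stwu_sp  = stack + 1024 - 128;
		uint8_t *post_stwu_sp = pre_stwu_sp - 96;

		/* Exception frame pushed below the pre-stwu SP */
		uint8_t *exc_frame = pre_stwu_sp - INT_FRAME_SIZE;
		memset(exc_frame, 0xee, INT_FRAME_SIZE);

		/* 1. Allocate the trampoline below the kprobed stack */
		uint8_t *tramp = post_stwu_sp - INT_FRAME_SIZE;

		/* 2. Copy the exception frame to the trampoline
		 * (memmove: the two regions may overlap for small
		 * stack frames) */
		memmove(tramp, exc_frame, INT_FRAME_SIZE);

		/* 3. Reroute "r1" to the trampoline */
		exc_frame = tramp;

		/* 4. Deferred store: write the pre-stwu SP at the new
		 * SP, i.e. the back chain stwu would have created */
		memcpy(post_stwu_sp, &pre_stwu_sp, sizeof pre_stwu_sp);

		printf("frame at %p, back chain %p -> %p\n",
		       (void *)exc_frame, (void *)post_stwu_sp,
		       (void *)pre_stwu_sp);
		return 0;
	}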

 arch/powerpc/kernel/entry_32.S |   44 ++++++++++++++++++++++++++++++++++------
 arch/powerpc/kernel/entry_64.S |   32 +++++++++++++++++++++++++++++
 2 files changed, 70 insertions(+), 6 deletions(-)

diff --git a/arch/powerpc/kernel/entry_32.S b/arch/powerpc/kernel/entry_32.S
index 1536d2c..b18e494 100644
--- a/arch/powerpc/kernel/entry_32.S
+++ b/arch/powerpc/kernel/entry_32.S
@@ -832,19 +832,53 @@ restore_user:
        bnel-   load_dbcr0
 #endif
 
-#ifdef CONFIG_PREEMPT
        b       restore
 
 /* N.B. the only way to get here is from the beq following ret_from_except. */
 resume_kernel:
-       /* check current_thread_info->preempt_count */
+       /* check current_thread_info, _TIF_EMULATE_STACK_STORE */
        CURRENT_THREAD_INFO(r9, r1)
+       lwz     r8,TI_FLAGS(r9)
+       andis.  r0,r8,_TIF_EMULATE_STACK_STORE@h  /* keep TI_FLAGS in r8 */
+       beq+    1f
+
+       addi    r8,r1,INT_FRAME_SIZE    /* Get the kprobed function entry (pre-stwu SP) */
+
+       lwz     r3,GPR1(r1)
+       subi    r3,r3,INT_FRAME_SIZE    /* dst: Allocate a trampoline exception frame */
+       mr      r4,r1                   /* src:  current exception frame */
+       li      r5,INT_FRAME_SIZE       /* size: INT_FRAME_SIZE */
+       li      r6,0                    /* start offset: 0 */
+       mr      r1,r3                   /* Reroute the trampoline frame to r1 */
+       /* Copy from the original to the trampoline.
+        * It's safe since copy_and_flush *only* clobbers r0, r6 and LR.
+        */
+       bl      copy_and_flush
+
+       /* Do real store operation to complete stwu */
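+       /* GPR1 in the migrated frame is the post-stwu SP; storing the
+        * pre-stwu SP (r8) there recreates the back chain stwu would
+        * have written.
+        */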
+       lwz     r5,GPR1(r1)
+       stw     r8,0(r5)
+
+       /* Clear _TIF_EMULATE_STACK_STORE flag */
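+       /* (atomically: other TIF bits may be updated concurrently) */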
+       lis     r11,_TIF_EMULATE_STACK_STORE@h
+       addi    r5,r9,TI_FLAGS
+0:     lwarx   r8,0,r5
+       andc    r8,r8,r11
+#ifdef CONFIG_IBM405_ERR77
+       dcbt    0,r5
+#endif
+       stwcx.  r8,0,r5
+       bne-    0b
+1:
+
+#ifdef CONFIG_PREEMPT
+       /* check current_thread_info->preempt_count */
        lwz     r0,TI_PREEMPT(r9)
        cmpwi   0,r0,0          /* if non-zero, just restore regs and return */
        bne     restore
-       lwz     r0,TI_FLAGS(r9)
-       andi.   r0,r0,_TIF_NEED_RESCHED
+       andi.   r8,r8,_TIF_NEED_RESCHED
        beq+    restore
+       lwz     r3,_MSR(r1)
        andi.   r0,r3,MSR_EE    /* interrupts off? */
        beq     restore         /* don't schedule if so */
 #ifdef CONFIG_TRACE_IRQFLAGS
@@ -865,8 +899,6 @@ resume_kernel:
         */
        bl      trace_hardirqs_on
 #endif
-#else
-resume_kernel:
 #endif /* CONFIG_PREEMPT */
 
        /* interrupts are hard-disabled at this point */
diff --git a/arch/powerpc/kernel/entry_64.S b/arch/powerpc/kernel/entry_64.S
index b40e0b4..d060f4d 100644
--- a/arch/powerpc/kernel/entry_64.S
+++ b/arch/powerpc/kernel/entry_64.S
@@ -593,6 +593,38 @@ _GLOBAL(ret_from_except_lite)
        b       .ret_from_except
 
 resume_kernel:
+       /* check current_thread_info, _TIF_EMULATE_STACK_STORE */
+       CURRENT_THREAD_INFO(r9, r1)
+       ld      r8,TI_FLAGS(r9)
+       andis.  r8,r8,_TIF_EMULATE_STACK_STORE@h
+       beq+    1f
+
+       addi    r8,r1,INT_FRAME_SIZE    /* Get the kprobed function entry (pre-stdu SP) */
+
+       ld      r3,GPR1(r1)             /* GPR1 is a 64-bit slot here */
+       subi    r3,r3,INT_FRAME_SIZE    /* dst: Allocate a trampoline exception frame */
+       mr      r4,r1                   /* src:  current exception frame */
+       li      r5,INT_FRAME_SIZE       /* size: INT_FRAME_SIZE */
+       li      r6,0                    /* start offset: 0 */
+       mr      r1,r3                   /* Reroute the trampoline frame to r1 */
+       /* Copy from the original to the trampoline.
+        * It's safe since copy_and_flush *only* clobbers r0, r6 and LR.
+        */
+       bl      .copy_and_flush
+
+       /* Do real store operation to complete stwu (stdu on 64-bit) */
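+       /* As on 32-bit: GPR1 holds the post-stdu SP; storing the
+        * pre-stdu SP (r8) there recreates the back chain.
+        */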
+       ld      r5,GPR1(r1)
+       std     r8,0(r5)
+
+       /* Clear _TIF_EMULATE_STACK_STORE flag */
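+       /* (atomically: other TIF bits may be updated concurrently) */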
+       lis     r11,_TIF_EMULATE_STACK_STORE@h
+       addi    r5,r9,TI_FLAGS
+0:     ldarx   r4,0,r5
+       andc    r4,r4,r11
+       stdcx.  r4,0,r5
+       bne-    0b
+1:
+
 #ifdef CONFIG_PREEMPT
        /* Check if we need to preempt */
        andi.   r0,r4,_TIF_NEED_RESCHED
-- 
1.7.9.5
