powerpc: Avoid link stack corruption for MMU on exceptions
author Michael Neuling <mikey@neuling.org>
Tue, 13 Aug 2013 05:54:52 +0000 (15:54 +1000)
committer Benjamin Herrenschmidt <benh@kernel.crashing.org>
Wed, 14 Aug 2013 05:33:18 +0000 (15:33 +1000)
When we have MMU on exceptions (POWER8) and a relocatable kernel, we
need to branch from the initial exception vectors at 0x0 up to the high
address where the relocated kernel might be located.  Currently we do
this using the link register.

Unfortunately this corrupts the link stack, so we should use the count
register instead.  We did this for the syscall entry path in:
  6a40480 powerpc: Avoid link stack corruption in MMU on syscall entry path
but I stupidly forgot to do the same for other exceptions.
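
For context, the link stack is the CPU's return address predictor: each
bl pushes the address of the following instruction and each blr pops one
to predict the return target.  Using mtlr/blr as a plain branch pops an
entry that belongs to a real pending return, so later genuine returns
mispredict.  The toy model below is purely illustrative (a simplified
push-on-bl/pop-on-blr scheme with made-up addresses), not code from this
patch:

  /* Simplified model of the hardware link stack (return address
   * predictor).  Shows why an unpaired blr hurts later returns. */
  #include <stdio.h>

  #define DEPTH 16
  static unsigned long stack[DEPTH];
  static int top, mispredicts;

  /* bl: the hardware pushes the call's return address */
  static void bl(unsigned long return_addr)
  {
          if (top < DEPTH)
                  stack[top++] = return_addr;
  }

  /* blr: the hardware pops a prediction; count a mispredict if it
   * differs from where the branch really went */
  static void blr(unsigned long actual_target)
  {
          unsigned long predicted = top ? stack[--top] : 0;

          if (predicted != actual_target)
                  mispredicts++;
  }

  int main(void)
  {
          bl(0x2000);                    /* caller: bl read_pvr          */
          blr(0xc000000000001234UL);     /* old prolog: mtlr/blr up high */
          blr(0x2000);                   /* read_pvr's real return       */

          printf("mispredicted returns: %d\n", mispredicts);
          return 0;
  }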

This patch changes the initial exception vectors to use the count
register instead of the link register when we need to branch up to the
relocated kernel.

I have a dodgy userspace test which loops calling a function that reads
the PVR (mfpvr from userspace is emulated by the kernel via the program
check exception).  On POWER8 with CONFIG_RELOCATABLE=y, this patch gives
me a ~10% performance improvement on that test.
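
The test is roughly of this shape (a sketch only: the function name and
iteration count are made up, not the original test).  Each mfspr of the
PVR from userspace traps and is emulated by the kernel through the
program check exception, exercising the exception entry path changed
here; run it under time(1) to compare before/after:

  /* Sketch of a PVR-read micro-benchmark: every PVR read from
   * userspace is emulated by the kernel. */
  #include <stdio.h>

  #define ITERATIONS 1000000

  static unsigned long read_pvr(void)
  {
          unsigned long pvr;

          /* mfpvr rD is the extended mnemonic for mfspr rD,287 */
          asm volatile("mfspr %0,287" : "=r" (pvr));
          return pvr;
  }

  int main(void)
  {
          unsigned long pvr = 0;
          int i;

          for (i = 0; i < ITERATIONS; i++)
                  pvr = read_pvr();

          printf("PVR: 0x%lx\n", pvr);
          return 0;
  }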

Signed-off-by: Michael Neuling <mikey@neuling.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
arch/powerpc/include/asm/exception-64s.h
arch/powerpc/include/asm/paca.h

index 4834a6d5a75cc4e71005ce366e8cb82b5820625b..cca12f08484201aaf86a16920a2edcf7921a6fd5 100644
 #define EX_LR          72
 #define EX_CFAR                80
 #define EX_PPR         88      /* SMT thread status register (priority) */
+#define EX_CTR         96
 
 #ifdef CONFIG_RELOCATABLE
 #define __EXCEPTION_RELON_PROLOG_PSERIES_1(label, h)                   \
        ld      r12,PACAKBASE(r13);     /* get high part of &label */   \
        mfspr   r11,SPRN_##h##SRR0;     /* save SRR0 */                 \
        LOAD_HANDLER(r12,label);                                        \
-       mtlr    r12;                                                    \
+       mtctr   r12;                                                    \
        mfspr   r12,SPRN_##h##SRR1;     /* and SRR1 */                  \
        li      r10,MSR_RI;                                             \
        mtmsrd  r10,1;                  /* Set RI (EE=0) */             \
-       blr;
+       bctr;
 #else
 /* If not relocatable, we can jump directly -- and save messing with LR */
 #define __EXCEPTION_RELON_PROLOG_PSERIES_1(label, h)                   \
 
 #if defined(CONFIG_RELOCATABLE)
 /*
- * If we support interrupts with relocation on AND we're a relocatable
- * kernel, we need to use LR to get to the 2nd level handler.  So, save/restore
- * it when required.
+ * If we support interrupts with relocation on AND we're a relocatable kernel,
+ * we need to use CTR to get to the 2nd level handler.  So, save/restore it
+ * when required.
  */
-#define SAVE_LR(reg, area)     mflr    reg ;   std     reg,area+EX_LR(r13)
-#define GET_LR(reg, area)                      ld      reg,area+EX_LR(r13)
-#define RESTORE_LR(reg, area)  ld      reg,area+EX_LR(r13) ; mtlr reg
+#define SAVE_CTR(reg, area)    mfctr   reg ;   std     reg,area+EX_CTR(r13)
+#define GET_CTR(reg, area)                     ld      reg,area+EX_CTR(r13)
+#define RESTORE_CTR(reg, area) ld      reg,area+EX_CTR(r13) ; mtctr reg
 #else
-/* ...else LR is unused and in register. */
-#define SAVE_LR(reg, area)
-#define GET_LR(reg, area)      mflr    reg
-#define RESTORE_LR(reg, area)
+/* ...else CTR is unused and in register. */
+#define SAVE_CTR(reg, area)
+#define GET_CTR(reg, area)     mfctr   reg
+#define RESTORE_CTR(reg, area)
 #endif
 
 /*
@@ -164,7 +165,7 @@ END_FTR_SECTION_NESTED(ftr,ftr,943)
 #define __EXCEPTION_PROLOG_1(area, extra, vec)                         \
        OPT_SAVE_REG_TO_PACA(area+EX_PPR, r9, CPU_FTR_HAS_PPR);         \
        OPT_SAVE_REG_TO_PACA(area+EX_CFAR, r10, CPU_FTR_CFAR);          \
-       SAVE_LR(r10, area);                                             \
+       SAVE_CTR(r10, area);                                            \
        mfcr    r9;                                                     \
        extra(vec);                                                     \
        std     r11,area+EX_R11(r13);                                   \
@@ -270,7 +271,7 @@ do_kvm_##n:                                                         \
        sth     r1,PACA_TRAP_SAVE(r13);                                    \
        std     r3,area+EX_R3(r13);                                        \
        addi    r3,r13,area;            /* r3 -> where regs are saved*/    \
-       RESTORE_LR(r1, area);                                              \
+       RESTORE_CTR(r1, area);                                             \
        b       bad_stack;                                                 \
 3:     std     r9,_CCR(r1);            /* save CR in stackframe        */ \
        std     r11,_NIP(r1);           /* save SRR0 in stackframe      */ \
@@ -298,10 +299,10 @@ do_kvm_##n:                                                               \
        ld      r10,area+EX_CFAR(r13);                                     \
        std     r10,ORIG_GPR3(r1);                                         \
        END_FTR_SECTION_NESTED(CPU_FTR_CFAR, CPU_FTR_CFAR, 66);            \
-       GET_LR(r9,area);                /* Get LR, later save to stack  */ \
+       mflr    r9;                     /* Get LR, later save to stack  */ \
        ld      r2,PACATOC(r13);        /* get kernel TOC into r2       */ \
        std     r9,_LINK(r1);                                              \
-       mfctr   r10;                    /* save CTR in stackframe       */ \
+       GET_CTR(r10, area);                                                \
        std     r10,_CTR(r1);                                              \
        lbz     r10,PACASOFTIRQEN(r13);                            \
        mfspr   r11,SPRN_XER;           /* save XER in stackframe       */ \
index 77c91e74b612675cd7312ea1ada7dfba7b997e9c..17d40a2a00c443fba6b6e59356d65094f48d7693 100644
@@ -93,9 +93,9 @@ struct paca_struct {
         * Now, starting in cacheline 2, the exception save areas
         */
        /* used for most interrupts/exceptions */
-       u64 exgen[12] __attribute__((aligned(0x80)));
-       u64 exmc[12];           /* used for machine checks */
-       u64 exslb[12];          /* used for SLB/segment table misses
+       u64 exgen[13] __attribute__((aligned(0x80)));
+       u64 exmc[13];           /* used for machine checks */
+       u64 exslb[13];          /* used for SLB/segment table misses
                                 * on the linear mapping */
        /* SLB related definitions */
        u16 vmalloc_sllp;