[PATCH 18/18] powerpc: enforce usage of RA 0-R31 where possible

2012-06-25 Thread Michael Neuling
Some macros use RA in a context where RA=R0 means the value 0 rather than
GPR r0, so make that the enforced mnemonic in the macro.

Idea suggested by Andreas Schwab.

Signed-off-by: Michael Neuling mi...@neuling.org
---

 arch/powerpc/include/asm/ppc-opcode.h |   14 +++++++-------
 arch/powerpc/kernel/cpu_setup_a2.S    |    2 +-
 arch/powerpc/kernel/exceptions-64e.S  |    8 ++++----
 arch/powerpc/mm/tlb_low_64e.S         |   10 +++++-----
 arch/powerpc/mm/tlb_nohash_low.S      |   16 ++++++++--------
 5 files changed, 25 insertions(+), 25 deletions(-)

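The enforcement relies on token pasting in the register-field helpers
(presumably introduced earlier in this series): __PPC_RA() only accepts the
R0-R31 mnemonics, while __PPC_RA0() accepts a literal 0 or R1-R31, so spelling
the "value 0" case as R0 now breaks the build instead of silently encoding
GPR 0. The following is a minimal standalone sketch of the idea, not the
verbatim kernel definitions; the __REG_*/__REGA0_* values are simplified and
only a few registers are shown.

/* ra0-sketch.c: illustrative only, not the kernel's actual header. */
#include <stdio.h>

#define ___PPC_RA(a)    (((a) & 0x1f) << 16)   /* RA field of the opcode */

/* Plain GPR operand: every register mnemonic is legal. */
#define __REG_R0        0
#define __REG_R1        1
#define __REG_R16       16
#define __PPC_RA(a)     ___PPC_RA(__REG_##a)

/* "RA or literal 0" operand: 0 and R1..R31 are defined, but __REGA0_R0
 * deliberately is not, so __PPC_RA0(R0) pastes into an undefined
 * identifier and compilation fails instead of quietly encoding r0 where
 * the hardware treats the field as the value 0. */
#define __REGA0_0       0
#define __REGA0_R1      1
#define __REGA0_R16     16
#define __PPC_RA0(a)    ___PPC_RA(__REGA0_##a)

int main(void)
{
        printf("0x%08x\n", (unsigned int)__PPC_RA0(0));   /* RA field = 0 */
        printf("0x%08x\n", (unsigned int)__PPC_RA0(R16)); /* RA field = GPR 16 */
        /* __PPC_RA0(R0) would not compile -- that is the point. */
        return 0;
}

That is why the call sites below only change the arguments that really mean
"no register", e.g. PPC_ERATILX(0,R0,R0) becoming PPC_ERATILX(0,0,R0), while
RB keeps its register mnemonic.
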
Index: b/arch/powerpc/include/asm/ppc-opcode.h
===
--- a/arch/powerpc/include/asm/ppc-opcode.h
+++ b/arch/powerpc/include/asm/ppc-opcode.h
@@ -231,7 +231,7 @@
 #define PPC_RFDI   stringify_in_c(.long PPC_INST_RFDI)
 #define PPC_RFMCI  stringify_in_c(.long PPC_INST_RFMCI)
 #define PPC_TLBILX(t, a, b)stringify_in_c(.long PPC_INST_TLBILX | \
-   __PPC_T_TLB(t) | __PPC_RA(a) | __PPC_RB(b))
+   __PPC_T_TLB(t) | __PPC_RA0(a) | __PPC_RB(b))
 #define PPC_TLBILX_ALL(a, b)   PPC_TLBILX(0, a, b)
 #define PPC_TLBILX_PID(a, b)   PPC_TLBILX(1, a, b)
 #define PPC_TLBILX_VA(a, b)PPC_TLBILX(3, a, b)
@@ -240,23 +240,23 @@
 #define PPC_TLBIE(lp,a)stringify_in_c(.long PPC_INST_TLBIE | \
   ___PPC_RB(a) | ___PPC_RS(lp))
 #define PPC_TLBSRX_DOT(a,b)stringify_in_c(.long PPC_INST_TLBSRX_DOT | \
-   __PPC_RA(a) | __PPC_RB(b))
+   __PPC_RA0(a) | __PPC_RB(b))
 #define PPC_TLBIVAX(a,b)   stringify_in_c(.long PPC_INST_TLBIVAX | \
-   __PPC_RA(a) | __PPC_RB(b))
+   __PPC_RA0(a) | __PPC_RB(b))
 
 #define PPC_ERATWE(s, a, w)stringify_in_c(.long PPC_INST_ERATWE | \
__PPC_RS(s) | __PPC_RA(a) | __PPC_WS(w))
 #define PPC_ERATRE(s, a, w)stringify_in_c(.long PPC_INST_ERATRE | \
__PPC_RS(s) | __PPC_RA(a) | __PPC_WS(w))
 #define PPC_ERATILX(t, a, b)   stringify_in_c(.long PPC_INST_ERATILX | \
-   __PPC_T_TLB(t) | __PPC_RA(a) | \
+   __PPC_T_TLB(t) | __PPC_RA0(a) | \
__PPC_RB(b))
 #define PPC_ERATIVAX(s, a, b)  stringify_in_c(.long PPC_INST_ERATIVAX | \
-   __PPC_RS(s) | __PPC_RA(a) | __PPC_RB(b))
+   __PPC_RS(s) | __PPC_RA0(a) | __PPC_RB(b))
 #define PPC_ERATSX(t, a, w)stringify_in_c(.long PPC_INST_ERATSX | \
-   __PPC_RS(t) | __PPC_RA(a) | __PPC_RB(b))
+   __PPC_RS(t) | __PPC_RA0(a) | __PPC_RB(b))
 #define PPC_ERATSX_DOT(t, a, w)stringify_in_c(.long PPC_INST_ERATSX_DOT | \
-   __PPC_RS(t) | __PPC_RA(a) | __PPC_RB(b))
+   __PPC_RS(t) | __PPC_RA0(a) | __PPC_RB(b))
 #define PPC_SLBFEE_DOT(t, b)   stringify_in_c(.long PPC_INST_SLBFEE | \
__PPC_RT(t) | __PPC_RB(b))
 /* PASemi instructions */
Index: b/arch/powerpc/kernel/cpu_setup_a2.S
===
--- a/arch/powerpc/kernel/cpu_setup_a2.S
+++ b/arch/powerpc/kernel/cpu_setup_a2.S
@@ -112,7 +112,7 @@ _icswx_skip_guest:
 * a bolted entry though it will be in LRU and so will go away eventually
 * but let's not bother for now
 */
-   PPC_ERATILX(0,R0,R0)
+   PPC_ERATILX(0,0,R0)
 1:
blr
 
Index: b/arch/powerpc/kernel/exceptions-64e.S
===
--- a/arch/powerpc/kernel/exceptions-64e.S
+++ b/arch/powerpc/kernel/exceptions-64e.S
@@ -903,7 +903,7 @@ skpinv: addi    r6,r6,1 /* Increment */
bne 1b  /* If not, repeat */
 
/* Invalidate all TLBs */
-   PPC_TLBILX_ALL(R0,R0)
+   PPC_TLBILX_ALL(0,R0)
sync
isync
 
@@ -961,7 +961,7 @@ skpinv: addi    r6,r6,1 /* Increment */
tlbwe
 
/* Invalidate TLB1 */
-   PPC_TLBILX_ALL(R0,R0)
+   PPC_TLBILX_ALL(0,R0)
sync
isync
 
@@ -1020,7 +1020,7 @@ skpinv: addi    r6,r6,1 /* Increment */
tlbwe
 
/* Invalidate TLB1 */
-   PPC_TLBILX_ALL(R0,R0)
+   PPC_TLBILX_ALL(0,R0)
sync
isync
 
@@ -1138,7 +1138,7 @@ a2_tlbinit_after_iprot_flush:
tlbwe
 #endif /* CONFIG_PPC_EARLY_DEBUG_WSP */
 
-   PPC_TLBILX(0,R0,R0)
+   PPC_TLBILX(0,0,R0)
sync
isync
 
Index: b/arch/powerpc/mm/tlb_low_64e.S
===
--- a/arch/powerpc/mm/tlb_low_64e.S
+++ b/arch/powerpc/mm/tlb_low_64e.S
@@ -126,7 +126,7 @@ BEGIN_MMU_FTR_SECTION
/* Set the TLB reservation and search for existing entry. Then load
 * the entry.
 */
-   PPC_TLBSRX_DOT(R0,R16)
+   PPC_TLBSRX_DOT(0,R16)
ldx 
