Author: Craig Topper
Date: 2020-11-26T02:03:06-08:00
New Revision: d9500c2e230e9cc68d3d647864fa824cc3c06b3f

URL: 
https://github.com/llvm/llvm-project/commit/d9500c2e230e9cc68d3d647864fa824cc3c06b3f
DIFF: 
https://github.com/llvm/llvm-project/commit/d9500c2e230e9cc68d3d647864fa824cc3c06b3f.diff

LOG: [RISCV] Add isel patterns for sbsetw/sbclrw/sbinvw with sext_inreg as the 
root.

This handles cases where the input isn't known to be sign extended.

Added: 
    

Modified: 
    llvm/lib/Target/RISCV/RISCVInstrInfoB.td
    llvm/test/CodeGen/RISCV/rv64Zbs.ll

Removed: 
    


################################################################################
diff  --git a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td 
b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
index 29ccffb05cfd..e80ef9987b45 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
@@ -893,10 +893,16 @@ def : Pat<(riscv_rolw GPR:$rs1, uimm5:$rs2),
 let Predicates = [HasStdExtZbs, IsRV64] in {
 def : Pat<(and (not (riscv_sllw 1, GPR:$rs2)), (assertsexti32 GPR:$rs1)),
           (SBCLRW GPR:$rs1, GPR:$rs2)>;
+def : Pat<(sext_inreg (and (not (riscv_sllw 1, GPR:$rs2)), GPR:$rs1), i32),
+          (SBCLRW GPR:$rs1, GPR:$rs2)>;
 def : Pat<(or (riscv_sllw 1, GPR:$rs2), (assertsexti32 GPR:$rs1)),
           (SBSETW GPR:$rs1, GPR:$rs2)>;
+def : Pat<(sext_inreg (or (riscv_sllw 1, GPR:$rs2), GPR:$rs1), i32),
+          (SBSETW GPR:$rs1, GPR:$rs2)>;
 def : Pat<(xor (riscv_sllw 1, GPR:$rs2), (assertsexti32 GPR:$rs1)),
           (SBINVW GPR:$rs1, GPR:$rs2)>;
+def : Pat<(sext_inreg (xor (riscv_sllw 1, GPR:$rs2), GPR:$rs1), i32),
+          (SBINVW GPR:$rs1, GPR:$rs2)>;
 def : Pat<(and (riscv_srlw GPR:$rs1, GPR:$rs2), 1),
           (SBEXTW GPR:$rs1, GPR:$rs2)>;
 } // Predicates = [HasStdExtZbs, IsRV64]

diff  --git a/llvm/test/CodeGen/RISCV/rv64Zbs.ll 
b/llvm/test/CodeGen/RISCV/rv64Zbs.ll
index be6b5ad54004..85d1bd5b3c2f 100644
--- a/llvm/test/CodeGen/RISCV/rv64Zbs.ll
+++ b/llvm/test/CodeGen/RISCV/rv64Zbs.ll
@@ -69,20 +69,13 @@ define signext i32 @sbclr_i32_load(i32* %p, i32 signext %b) 
nounwind {
 ; RV64IB-LABEL: sbclr_i32_load:
 ; RV64IB:       # %bb.0:
 ; RV64IB-NEXT:    lw a0, 0(a0)
-; RV64IB-NEXT:    addi a2, zero, 1
-; RV64IB-NEXT:    sllw a1, a2, a1
-; RV64IB-NEXT:    andn a0, a0, a1
-; RV64IB-NEXT:    sext.w a0, a0
+; RV64IB-NEXT:    sbclrw a0, a0, a1
 ; RV64IB-NEXT:    ret
 ;
 ; RV64IBS-LABEL: sbclr_i32_load:
 ; RV64IBS:       # %bb.0:
 ; RV64IBS-NEXT:    lw a0, 0(a0)
-; RV64IBS-NEXT:    addi a2, zero, 1
-; RV64IBS-NEXT:    sllw a1, a2, a1
-; RV64IBS-NEXT:    not a1, a1
-; RV64IBS-NEXT:    and a0, a1, a0
-; RV64IBS-NEXT:    sext.w a0, a0
+; RV64IBS-NEXT:    sbclrw a0, a0, a1
 ; RV64IBS-NEXT:    ret
   %a = load i32, i32* %p
   %shl = shl i32 1, %b
@@ -198,19 +191,13 @@ define signext i32 @sbset_i32_load(i32* %p, i32 signext 
%b) nounwind {
 ; RV64IB-LABEL: sbset_i32_load:
 ; RV64IB:       # %bb.0:
 ; RV64IB-NEXT:    lw a0, 0(a0)
-; RV64IB-NEXT:    addi a2, zero, 1
-; RV64IB-NEXT:    sllw a1, a2, a1
-; RV64IB-NEXT:    or a0, a1, a0
-; RV64IB-NEXT:    sext.w a0, a0
+; RV64IB-NEXT:    sbsetw a0, a0, a1
 ; RV64IB-NEXT:    ret
 ;
 ; RV64IBS-LABEL: sbset_i32_load:
 ; RV64IBS:       # %bb.0:
 ; RV64IBS-NEXT:    lw a0, 0(a0)
-; RV64IBS-NEXT:    addi a2, zero, 1
-; RV64IBS-NEXT:    sllw a1, a2, a1
-; RV64IBS-NEXT:    or a0, a1, a0
-; RV64IBS-NEXT:    sext.w a0, a0
+; RV64IBS-NEXT:    sbsetw a0, a0, a1
 ; RV64IBS-NEXT:    ret
   %a = load i32, i32* %p
   %shl = shl i32 1, %b
@@ -321,19 +308,13 @@ define signext i32 @sbinv_i32_load(i32* %p, i32 signext 
%b) nounwind {
 ; RV64IB-LABEL: sbinv_i32_load:
 ; RV64IB:       # %bb.0:
 ; RV64IB-NEXT:    lw a0, 0(a0)
-; RV64IB-NEXT:    addi a2, zero, 1
-; RV64IB-NEXT:    sllw a1, a2, a1
-; RV64IB-NEXT:    xor a0, a1, a0
-; RV64IB-NEXT:    sext.w a0, a0
+; RV64IB-NEXT:    sbinvw a0, a0, a1
 ; RV64IB-NEXT:    ret
 ;
 ; RV64IBS-LABEL: sbinv_i32_load:
 ; RV64IBS:       # %bb.0:
 ; RV64IBS-NEXT:    lw a0, 0(a0)
-; RV64IBS-NEXT:    addi a2, zero, 1
-; RV64IBS-NEXT:    sllw a1, a2, a1
-; RV64IBS-NEXT:    xor a0, a1, a0
-; RV64IBS-NEXT:    sext.w a0, a0
+; RV64IBS-NEXT:    sbinvw a0, a0, a1
 ; RV64IBS-NEXT:    ret
   %a = load i32, i32* %p
   %shl = shl i32 1, %b


        
_______________________________________________
llvm-branch-commits mailing list
llvm-branch-commits@lists.llvm.org
https://lists.llvm.org/cgi-bin/mailman/listinfo/llvm-branch-commits

Reply via email to