https://gcc.gnu.org/g:9f9faebb8ebfc0103461641cc49ba0b21877b2b1

commit r15-2069-g9f9faebb8ebfc0103461641cc49ba0b21877b2b1
Author: Richard Sandiford <richard.sandif...@arm.com>
Date:   Tue Jul 16 15:31:17 2024 +0100

    rtl-ssa: Enforce earlyclobbers on hard-coded clobbers [PR115891]
    
    The asm in the testcase has a memory operand and also clobbers ax.
    The clobber means that ax cannot be used to hold inputs, which
    extends to the address of the memory.
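    
    For reference, the problematic shape is the one in the new testcase
    (reproduced here): with regparm(1) on ia32 the pointer argument
    arrives in %eax, so the address of the "=m" operand must not be
    formed in %eax, because the asm clobbers "ax":
    
      void __attribute__((regparm(1)))
      f (int *ptr)
      {
        /* The address of *ptr must not be held in %eax: "ax" is clobbered.  */
        asm volatile ("mem = %0" : "=m" (*ptr) :: "ax");
      }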
    
    I think I had an implicit assumption that constrain_operands
    would enforce this, but in hindsight, that clearly wasn't going
    to be true.  constrain_operands only looks at constraints, and
    these clobbers are by definition outside the constraint system.
    (And that's why they have to be handled conservatively, since there's
    no way to distinguish the earlyclobber and non-earlyclobber cases.)
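    
    To illustrate the distinction (a hypothetical example; the insn
    strings and function name are made up): a use of %eax expressed
    through a constraint is visible to constrain_operands, including any
    earlyclobber marker, whereas an "ax" entry in an asm clobber list
    appears in the pattern only as a bare clobber:
    
      int
      g (int *ptr, int y)
      {
        int x;
        /* The use of %eax is expressed through the earlyclobber
           constraint "=&a", so constrain_operands can check it.  */
        asm ("insn1 %0, %1" : "=&a" (x) : "r" (y));
        /* The use of %eax is a hard-coded clobber, represented in the
           pattern roughly as (clobber (reg:SI ax)), outside the
           constraint system.  */
        asm ("insn2 %0" : "=m" (*ptr) : : "ax");
        return x;
      }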
    
    The semantics of hard-coded clobbers are generic enough that I think
    they should be handled directly by rtl-ssa, rather than by consumers.
    And in the context of rtl-ssa, the easiest way to check for a clash is
    to walk the list of input registers, which we already have to hand.
    It therefore seemed better not to push this down to a more generic
    rtl helper.
    
    The patch detects hard-coded clobbers in the same way as regrename:
    by temporarily stubbing out the operands with pc_rtx.
    
    gcc/
            PR rtl-optimization/115891
            * rtl-ssa/changes.cc (find_clobbered_access): New function.
            (recog_level2): Use it to check for overlap between input
            registers and hard-coded clobbers.  Conditionally reset
            recog_data.insn after changing the insn code.
    
    gcc/testsuite/
            PR rtl-optimization/115891
            * gcc.target/i386/pr115891.c: New test.

Diff:
---
 gcc/rtl-ssa/changes.cc                   | 60 +++++++++++++++++++++++++++++++-
 gcc/testsuite/gcc.target/i386/pr115891.c | 10 ++++++
 2 files changed, 69 insertions(+), 1 deletion(-)

diff --git a/gcc/rtl-ssa/changes.cc b/gcc/rtl-ssa/changes.cc
index 6b6f7cd5d3ab..43c7b8e1e605 100644
--- a/gcc/rtl-ssa/changes.cc
+++ b/gcc/rtl-ssa/changes.cc
@@ -944,6 +944,25 @@ add_clobber (insn_change &change, add_regno_clobber_fn add_regno_clobber,
   return true;
 }
 
+// See if PARALLEL pattern PAT clobbers any of the registers in ACCESSES.
+// Return one such access if so, otherwise return null.
+static access_info *
+find_clobbered_access (access_array accesses, rtx pat)
+{
+  rtx subpat;
+  for (int i = 0; i < XVECLEN (pat, 0); ++i)
+    if (GET_CODE (subpat = XVECEXP (pat, 0, i)) == CLOBBER)
+      {
+       rtx x = XEXP (subpat, 0);
+       if (REG_P (x))
+         for (auto *access : accesses)
+           if (access->regno () >= REGNO (x)
+               && access->regno () < END_REGNO (x))
+             return access;
+      }
+  return nullptr;
+}
+
 // Try to recognize the new form of the insn associated with CHANGE,
 // adding any clobbers that are necessary to make the instruction match
 // an .md pattern.  Return true on success.
@@ -1035,9 +1054,48 @@ recog_level2 (insn_change &change, add_regno_clobber_fn add_regno_clobber)
       pat = newpat;
     }
 
+  INSN_CODE (rtl) = icode;
+  if (recog_data.insn == rtl)
+    recog_data.insn = nullptr;
+
+  // See if the pattern contains any hard-coded clobbers of registers
+  // that are also inputs to the instruction.  The standard rtl semantics
+  // treat such clobbers as earlyclobbers, since there is no way of proving
+  // which clobbers conflict with the inputs and which don't.
+  //
+  // (Non-hard-coded clobbers are handled by constraint satisfaction instead.)
+  rtx subpat;
+  if (GET_CODE (pat) == PARALLEL)
+    for (int i = 0; i < XVECLEN (pat, 0); ++i)
+      if (GET_CODE (subpat = XVECEXP (pat, 0, i)) == CLOBBER
+         && REG_P (XEXP (subpat, 0)))
+       {
+         // Stub out all operands, so that we can tell which registers
+         // are hard-coded.
+         extract_insn (rtl);
+         for (int j = 0; j < recog_data.n_operands; ++j)
+           *recog_data.operand_loc[j] = pc_rtx;
+
+         auto *use = find_clobbered_access (change.new_uses, pat);
+
+         // Restore the operands.
+         for (int j = 0; j < recog_data.n_operands; ++j)
+           *recog_data.operand_loc[j] = recog_data.operand[j];
+
+         if (use)
+           {
+             if (dump_file && (dump_flags & TDF_DETAILS))
+               {
+                 fprintf (dump_file, "register %d is both clobbered"
+                          " and used as an input:\n", use->regno ());
+                 print_rtl_single (dump_file, pat);
+               }
+             return false;
+           }
+       }
+
   // check_asm_operands checks the constraints after RA, so we don't
   // need to do it again.
-  INSN_CODE (rtl) = icode;
   if (reload_completed && !asm_p)
     {
       extract_insn (rtl);
diff --git a/gcc/testsuite/gcc.target/i386/pr115891.c b/gcc/testsuite/gcc.target/i386/pr115891.c
new file mode 100644
index 000000000000..b1a1b159c5d2
--- /dev/null
+++ b/gcc/testsuite/gcc.target/i386/pr115891.c
@@ -0,0 +1,10 @@
+/* { dg-require-effective-target ia32 } */
+/* { dg-options "-O2" } */
+
+void __attribute__((regparm(1)))
+f (int *ptr)
+{
+  asm volatile ("mem = %0" : "=m" (*ptr) :: "ax");
+}
+
+/* { dg-final { scan-assembler-not {mem = [^\n]*%eax} } } */
