OK, I fixed the issues in your last email.  I initially found one
regression while testing.  In lra_create_live_ranges_1 I had removed
the 'call_p = false' statement but did not replace it with anything.
This resulted in no regressions on aarch64 but caused a single
regression on x86 (gcc.target/i386/pr87759.c).  I replaced the
line with 'call_insn = NULL' and the regression went away so I
have clean bootstraps and no regressions on aarch64 and x86 now.

If this looks good to you can I go ahead and check it in?  I know
we are in Stage 3 now, but my recollection is that patches that were
initially submitted during Stage 1 could go ahead once approved.

Steve Ellcey
sell...@marvell.com



2019-01-10  Steve Ellcey  <sell...@marvell.com>

        * config/aarch64/aarch64.c (aarch64_simd_call_p): New function.
        (aarch64_hard_regno_call_part_clobbered): Add insn argument.
        (aarch64_return_call_with_max_clobbers): New function.
        (TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): New macro.
        * config/avr/avr.c (avr_hard_regno_call_part_clobbered): Add insn
        argument.
        * config/i386/i386.c (ix86_hard_regno_call_part_clobbered): Ditto.
        * config/mips/mips.c (mips_hard_regno_call_part_clobbered): Ditto.
        * config/rs6000/rs6000.c (rs6000_hard_regno_call_part_clobbered): Ditto.
        * config/s390/s390.c (s390_hard_regno_call_part_clobbered): Ditto.
        * cselib.c (cselib_process_insn): Add argument to
        targetm.hard_regno_call_part_clobbered call.
        * ira-conflicts.c (ira_build_conflicts): Ditto.
        * ira-costs.c (ira_tune_allocno_costs): Ditto.
        * lra-constraints.c (inherit_reload_reg): Ditto.
        * lra-int.h (struct lra_reg): Add call_insn field, remove call_p field.
        * lra-lives.c (check_pseudos_live_through_calls): Add call_insn
        argument.  Call targetm.return_call_with_max_clobbers.
        Add argument to targetm.hard_regno_call_part_clobbered call.
        (calls_have_same_clobbers_p): New function.
        (process_bb_lives): Add call_insn and last_call_insn variables.
        Pass call_insn to check_pseudos_live_through_calls.
        Modify if stmt to check targetm.return_call_with_max_clobbers.
        Update setting of flush variable.
        (lra_create_live_ranges_1): Set call_insn to NULL instead of call_p
        to false.
        * lra.c (initialize_lra_reg_info_element): Set call_insn to NULL.
        * regcprop.c (copyprop_hardreg_forward_1): Add argument to
        targetm.hard_regno_call_part_clobbered call.
        * reginfo.c (choose_hard_reg_mode): Ditto.
        * regrename.c (check_new_reg_p): Ditto.
        * reload.c (find_equiv_reg): Ditto.
        * reload1.c (emit_reload_insns): Ditto.
        * sched-deps.c (deps_analyze_insn): Ditto.
        * sel-sched.c (init_regs_for_mode): Ditto.
        (mark_unavailable_hard_regs): Ditto.
        * targhooks.c (default_dwarf_frame_reg_mode): Ditto.
        * target.def (hard_regno_call_part_clobbered): Add insn argument.
        (return_call_with_max_clobbers): New target function.
        * doc/tm.texi: Regenerate.
        * doc/tm.texi.in (TARGET_RETURN_CALL_WITH_MAX_CLOBBERS): New hook.
        * hooks.c (hook_bool_uint_mode_false): Change to
        hook_bool_insn_uint_mode_false.
        * hooks.h (hook_bool_uint_mode_false): Ditto.
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index 1c300af..7a1f838 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -1655,14 +1655,53 @@ aarch64_reg_save_mode (tree fndecl, unsigned regno)
 	   : (aarch64_simd_decl_p (fndecl) ? E_TFmode : E_DFmode);
 }
 
+/* Return true if the instruction is a call to a SIMD function, false
+   if it is not a SIMD function or if we do not know anything about
+   the function.  */
+
+static bool
+aarch64_simd_call_p (rtx_insn *insn)
+{
+  rtx symbol;
+  rtx call;
+  tree fndecl;
+
+  gcc_assert (CALL_P (insn));
+  call = get_call_rtx_from (insn);
+  symbol = XEXP (XEXP (call, 0), 0);
+  if (GET_CODE (symbol) != SYMBOL_REF)
+    return false;
+  fndecl = SYMBOL_REF_DECL (symbol);
+  if (!fndecl)
+    return false;
+
+  return aarch64_simd_decl_p (fndecl);
+}
+
 /* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED.  The callee only saves
    the lower 64 bits of a 128-bit register.  Tell the compiler the callee
    clobbers the top 64 bits when restoring the bottom 64 bits.  */
 
 static bool
-aarch64_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+aarch64_hard_regno_call_part_clobbered (rtx_insn *insn, unsigned int regno,
+					machine_mode mode)
+{
+  bool simd_p = insn && CALL_P (insn) && aarch64_simd_call_p (insn);
+  return FP_REGNUM_P (regno)
+	 && maybe_gt (GET_MODE_SIZE (mode), simd_p ? 16 : 8);
+}
+
+/* Implement TARGET_RETURN_CALL_WITH_MAX_CLOBBERS.  */
+
+rtx_insn *
+aarch64_return_call_with_max_clobbers (rtx_insn *call_1, rtx_insn *call_2)
 {
-  return FP_REGNUM_P (regno) && maybe_gt (GET_MODE_SIZE (mode), 8);
+  gcc_assert (CALL_P (call_1) && CALL_P (call_2));
+
+  if (!aarch64_simd_call_p (call_1) || aarch64_simd_call_p (call_2))
+    return call_1;
+  else
+    return call_2;
 }
 
 /* Implement REGMODE_NATURAL_SIZE.  */
@@ -18825,6 +18864,10 @@ aarch64_libgcc_floating_mode_supported_p
 #define TARGET_HARD_REGNO_CALL_PART_CLOBBERED \
   aarch64_hard_regno_call_part_clobbered
 
+#undef TARGET_RETURN_CALL_WITH_MAX_CLOBBERS
+#define TARGET_RETURN_CALL_WITH_MAX_CLOBBERS \
+  aarch64_return_call_with_max_clobbers
+
 #undef TARGET_CONSTANT_ALIGNMENT
 #define TARGET_CONSTANT_ALIGNMENT aarch64_constant_alignment
 
diff --git a/gcc/config/avr/avr.c b/gcc/config/avr/avr.c
index 023308b..a53b909 100644
--- a/gcc/config/avr/avr.c
+++ b/gcc/config/avr/avr.c
@@ -12181,7 +12181,8 @@ avr_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
 /* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED.  */
 
 static bool
-avr_hard_regno_call_part_clobbered (unsigned regno, machine_mode mode)
+avr_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+				    unsigned regno, machine_mode mode)
 {
   /* FIXME: This hook gets called with MODE:REGNO combinations that don't
         represent valid hard registers like, e.g. HI:29.  Returning TRUE
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 0e23eaa..1bb535a 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -40216,7 +40216,8 @@ ix86_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
    the low 16 bytes are saved.  */
 
 static bool
-ix86_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+ix86_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+				     unsigned int regno, machine_mode mode)
 {
   return SSE_REGNO_P (regno) && GET_MODE_SIZE (mode) > 16;
 }
diff --git a/gcc/config/mips/mips.c b/gcc/config/mips/mips.c
index 95dc946..a8022b8 100644
--- a/gcc/config/mips/mips.c
+++ b/gcc/config/mips/mips.c
@@ -12906,7 +12906,8 @@ mips_hard_regno_scratch_ok (unsigned int regno)
    registers with MODE > 64 bits are part clobbered too.  */
 
 static bool
-mips_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+mips_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+				     unsigned int regno, machine_mode mode)
 {
   if (TARGET_FLOATXX
       && hard_regno_nregs (regno, mode) == 1
diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c
index 0357dc8..3330b68 100644
--- a/gcc/config/rs6000/rs6000.c
+++ b/gcc/config/rs6000/rs6000.c
@@ -2197,7 +2197,8 @@ rs6000_modes_tieable_p (machine_mode mode1, machine_mode mode2)
 /* Implement TARGET_HARD_REGNO_CALL_PART_CLOBBERED.  */
 
 static bool
-rs6000_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+rs6000_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+				       unsigned int regno, machine_mode mode)
 {
   if (TARGET_32BIT
       && TARGET_POWERPC64
diff --git a/gcc/config/s390/s390.c b/gcc/config/s390/s390.c
index ea2be10..6a571a3 100644
--- a/gcc/config/s390/s390.c
+++ b/gcc/config/s390/s390.c
@@ -10098,7 +10098,8 @@ s390_hard_regno_scratch_ok (unsigned int regno)
    bytes are saved across calls, however.  */
 
 static bool
-s390_hard_regno_call_part_clobbered (unsigned int regno, machine_mode mode)
+s390_hard_regno_call_part_clobbered (rtx_insn *insn ATTRIBUTE_UNUSED,
+				     unsigned int regno, machine_mode mode)
 {
   if (!TARGET_64BIT
       && TARGET_ZARCH
diff --git a/gcc/cselib.c b/gcc/cselib.c
index cef4bc0..84c17c2 100644
--- a/gcc/cselib.c
+++ b/gcc/cselib.c
@@ -2770,7 +2770,7 @@ cselib_process_insn (rtx_insn *insn)
 	if (call_used_regs[i]
 	    || (REG_VALUES (i) && REG_VALUES (i)->elt
 		&& (targetm.hard_regno_call_part_clobbered
-		    (i, GET_MODE (REG_VALUES (i)->elt->val_rtx)))))
+		    (insn, i, GET_MODE (REG_VALUES (i)->elt->val_rtx)))))
 	  cselib_invalidate_regno (i, reg_raw_mode[i]);
 
       /* Since it is not clear how cselib is going to be used, be
diff --git a/gcc/doc/tm.texi.in b/gcc/doc/tm.texi.in
index 976a700..97a2ade 100644
--- a/gcc/doc/tm.texi.in
+++ b/gcc/doc/tm.texi.in
@@ -1707,6 +1707,8 @@ of @code{CALL_USED_REGISTERS}.
 @cindex call-saved register
 @hook TARGET_HARD_REGNO_CALL_PART_CLOBBERED
 
+@hook TARGET_RETURN_CALL_WITH_MAX_CLOBBERS
+
 @findex fixed_regs
 @findex call_used_regs
 @findex global_regs
diff --git a/gcc/hooks.c b/gcc/hooks.c
index bbc35fc..f95659b 100644
--- a/gcc/hooks.c
+++ b/gcc/hooks.c
@@ -142,7 +142,7 @@ hook_bool_puint64_puint64_true (poly_uint64, poly_uint64)
 
 /* Generic hook that takes (unsigned int, machine_mode) and returns false.  */
 bool
-hook_bool_uint_mode_false (unsigned int, machine_mode)
+hook_bool_insn_uint_mode_false (rtx_insn *, unsigned int, machine_mode)
 {
   return false;
 }
diff --git a/gcc/hooks.h b/gcc/hooks.h
index 9e4bc29..0bc8117 100644
--- a/gcc/hooks.h
+++ b/gcc/hooks.h
@@ -40,7 +40,8 @@ extern bool hook_bool_const_rtx_insn_const_rtx_insn_true (const rtx_insn *,
 extern bool hook_bool_mode_uhwi_false (machine_mode,
 				       unsigned HOST_WIDE_INT);
 extern bool hook_bool_puint64_puint64_true (poly_uint64, poly_uint64);
-extern bool hook_bool_uint_mode_false (unsigned int, machine_mode);
+extern bool hook_bool_insn_uint_mode_false (rtx_insn *, unsigned int,
+					    machine_mode);
 extern bool hook_bool_uint_mode_true (unsigned int, machine_mode);
 extern bool hook_bool_tree_false (tree);
 extern bool hook_bool_const_tree_false (const_tree);
diff --git a/gcc/ira-conflicts.c b/gcc/ira-conflicts.c
index 7ec709d..5bd6c0c 100644
--- a/gcc/ira-conflicts.c
+++ b/gcc/ira-conflicts.c
@@ -808,7 +808,7 @@ ira_build_conflicts (void)
 		 regs must conflict with them.  */
 	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 		if (!TEST_HARD_REG_BIT (call_used_reg_set, regno)
-		    && targetm.hard_regno_call_part_clobbered (regno,
+		    && targetm.hard_regno_call_part_clobbered (NULL, regno,
 							       obj_mode))
 		  {
 		    SET_HARD_REG_BIT (OBJECT_CONFLICT_HARD_REGS (obj), regno);
diff --git a/gcc/ira-costs.c b/gcc/ira-costs.c
index 0ca70a0..a17dae3 100644
--- a/gcc/ira-costs.c
+++ b/gcc/ira-costs.c
@@ -2379,7 +2379,7 @@ ira_tune_allocno_costs (void)
 						   *crossed_calls_clobber_regs)
 		  && (ira_hard_reg_set_intersection_p (regno, mode,
 						       call_used_reg_set)
-		      || targetm.hard_regno_call_part_clobbered (regno,
+		      || targetm.hard_regno_call_part_clobbered (NULL, regno,
 								 mode)))
 		cost += (ALLOCNO_CALL_FREQ (a)
 			 * (ira_memory_move_cost[mode][rclass][0]
diff --git a/gcc/lra-assigns.c b/gcc/lra-assigns.c
index 1312804..cb4a873 100644
--- a/gcc/lra-assigns.c
+++ b/gcc/lra-assigns.c
@@ -1621,7 +1621,7 @@ lra_assign (bool &fails_p)
        asm is removed and it can result in incorrect allocation.  */
     for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
       if (lra_reg_info[i].nrefs != 0 && reg_renumber[i] >= 0
-	  && lra_reg_info[i].call_p
+	  && lra_reg_info[i].call_insn
 	  && overlaps_hard_reg_set_p (call_used_reg_set,
 				      PSEUDO_REGNO_MODE (i), reg_renumber[i]))
 	gcc_unreachable ();
diff --git a/gcc/lra-constraints.c b/gcc/lra-constraints.c
index 4f434e5..1440451 100644
--- a/gcc/lra-constraints.c
+++ b/gcc/lra-constraints.c
@@ -5377,7 +5377,8 @@ need_for_call_save_p (int regno)
 	       : call_used_reg_set,
 	       PSEUDO_REGNO_MODE (regno), reg_renumber[regno])
 	      || (targetm.hard_regno_call_part_clobbered
-		  (reg_renumber[regno], PSEUDO_REGNO_MODE (regno)))));
+		  (lra_reg_info[regno].call_insn,
+		   reg_renumber[regno], PSEUDO_REGNO_MODE (regno)))));
 }
 
 /* Global registers occurring in the current EBB.  */
diff --git a/gcc/lra-int.h b/gcc/lra-int.h
index 9d9e81d..d0a8fac 100644
--- a/gcc/lra-int.h
+++ b/gcc/lra-int.h
@@ -91,10 +91,6 @@ struct lra_reg
   /* True if the pseudo should not be assigned to a stack register.  */
   bool no_stack_p;
 #endif
-  /* True if the pseudo crosses a call.	 It is setup in lra-lives.c
-     and used to check that the pseudo crossing a call did not get a
-     call used hard register.  */
-  bool call_p;
   /* Number of references and execution frequencies of the register in
      *non-debug* insns.	 */
   int nrefs, freq;
@@ -107,6 +103,8 @@ struct lra_reg
   int val;
   /* Offset from relative eliminate register to pesudo reg.  */
   poly_int64 offset;
+  /* Call instruction, if any, that may affect this pseudo reg.  */
+  rtx_insn *call_insn;
   /* These members are set up in lra-lives.c and updated in
      lra-coalesce.c.  */
   /* The biggest size mode in which each pseudo reg is referred in
diff --git a/gcc/lra-lives.c b/gcc/lra-lives.c
index a00ec38..b77b675 100644
--- a/gcc/lra-lives.c
+++ b/gcc/lra-lives.c
@@ -576,25 +576,39 @@ lra_setup_reload_pseudo_preferenced_hard_reg (int regno,
 
 /* Check that REGNO living through calls and setjumps, set up conflict
    regs using LAST_CALL_USED_REG_SET, and clear corresponding bits in
-   PSEUDOS_LIVE_THROUGH_CALLS and PSEUDOS_LIVE_THROUGH_SETJUMPS.  */
+   PSEUDOS_LIVE_THROUGH_CALLS and PSEUDOS_LIVE_THROUGH_SETJUMPS.
+   CALL_INSN may be the specific call we want to check that REGNO lives
+   through or a call that is guaranteed to clobber REGNO if any call
+   in the current block clobbers REGNO.  */
+
 static inline void
 check_pseudos_live_through_calls (int regno,
-				  HARD_REG_SET last_call_used_reg_set)
+				  HARD_REG_SET last_call_used_reg_set,
+				  rtx_insn *call_insn)
 {
   int hr;
+  rtx_insn *old_call_insn;
 
   if (! sparseset_bit_p (pseudos_live_through_calls, regno))
     return;
+
+  gcc_assert (call_insn && CALL_P (call_insn));
+  old_call_insn = lra_reg_info[regno].call_insn;
+  if (!old_call_insn
+      || (targetm.return_call_with_max_clobbers
+	  && targetm.return_call_with_max_clobbers (old_call_insn, call_insn)
+	     == call_insn))
+    lra_reg_info[regno].call_insn = call_insn;
+
   sparseset_clear_bit (pseudos_live_through_calls, regno);
   IOR_HARD_REG_SET (lra_reg_info[regno].conflict_hard_regs,
 		    last_call_used_reg_set);
 
   for (hr = 0; HARD_REGISTER_NUM_P (hr); hr++)
-    if (targetm.hard_regno_call_part_clobbered (hr,
+    if (targetm.hard_regno_call_part_clobbered (call_insn, hr,
 						PSEUDO_REGNO_MODE (regno)))
       add_to_hard_reg_set (&lra_reg_info[regno].conflict_hard_regs,
 			   PSEUDO_REGNO_MODE (regno), hr);
-  lra_reg_info[regno].call_p = true;
   if (! sparseset_bit_p (pseudos_live_through_setjumps, regno))
     return;
   sparseset_clear_bit (pseudos_live_through_setjumps, regno);
@@ -615,6 +629,19 @@ reg_early_clobber_p (const struct lra_insn_reg *reg, int n_alt)
 		  && TEST_BIT (reg->early_clobber_alts, n_alt))));
 }
 
+/* Return true if call instructions CALL1 and CALL2 use ABIs that
+   preserve the same set of registers.  */
+
+static bool
+calls_have_same_clobbers_p (rtx_insn *call1, rtx_insn *call2)
+{
+  if (!targetm.return_call_with_max_clobbers)
+    return false;
+
+  return (targetm.return_call_with_max_clobbers (call1, call2) == call1
+          && targetm.return_call_with_max_clobbers (call2, call1) == call2);
+}
+
 /* Process insns of the basic block BB to update pseudo live ranges,
    pseudo hard register conflicts, and insn notes.  We do it on
    backward scan of BB insns.  CURR_POINT is the program point where
@@ -635,6 +662,8 @@ process_bb_lives (basic_block bb, int &curr_point, bool dead_insn_p)
   rtx link, *link_loc;
   bool need_curr_point_incr;
   HARD_REG_SET last_call_used_reg_set;
+  rtx_insn *call_insn = NULL;
+  rtx_insn *last_call_insn = NULL;
 
   reg_live_out = df_get_live_out (bb);
   sparseset_clear (pseudos_live);
@@ -847,7 +876,8 @@ process_bb_lives (basic_block bb, int &curr_point, bool dead_insn_p)
 	      update_pseudo_point (reg->regno, curr_point, USE_POINT);
 	      mark_regno_live (reg->regno, reg->biggest_mode);
 	      check_pseudos_live_through_calls (reg->regno,
-						last_call_used_reg_set);
+						last_call_used_reg_set,
+						call_insn);
 	    }
 
 	  if (!HARD_REGISTER_NUM_P (reg->regno))
@@ -896,7 +926,8 @@ process_bb_lives (basic_block bb, int &curr_point, bool dead_insn_p)
 
       if (call_p)
 	{
-	  if (! flag_ipa_ra)
+	  call_insn = curr_insn;
+	  if (! flag_ipa_ra && ! targetm.return_call_with_max_clobbers)
 	    COPY_HARD_REG_SET(last_call_used_reg_set, call_used_reg_set);
 	  else
 	    {
@@ -905,18 +936,24 @@ process_bb_lives (basic_block bb, int &curr_point, bool dead_insn_p)
 				      call_used_reg_set);
 
 	      bool flush = (! hard_reg_set_empty_p (last_call_used_reg_set)
-			    && ! hard_reg_set_equal_p (last_call_used_reg_set,
-						       this_call_used_reg_set));
+			    && ( ! hard_reg_set_equal_p (last_call_used_reg_set,
+						       this_call_used_reg_set)))
+			   || (last_call_insn && ! calls_have_same_clobbers_p
+						     (call_insn,
+						      last_call_insn));
 
 	      EXECUTE_IF_SET_IN_SPARSESET (pseudos_live, j)
 		{
 		  IOR_HARD_REG_SET (lra_reg_info[j].actual_call_used_reg_set,
 				    this_call_used_reg_set);
+
 		  if (flush)
-		    check_pseudos_live_through_calls
-		      (j, last_call_used_reg_set);
+		    check_pseudos_live_through_calls (j,
+						      last_call_used_reg_set,
+						      last_call_insn);
 		}
 	      COPY_HARD_REG_SET(last_call_used_reg_set, this_call_used_reg_set);
+	      last_call_insn = call_insn;
 	    }
 
 	  sparseset_ior (pseudos_live_through_calls,
@@ -956,7 +993,8 @@ process_bb_lives (basic_block bb, int &curr_point, bool dead_insn_p)
 	      update_pseudo_point (reg->regno, curr_point, USE_POINT);
 	    mark_regno_live (reg->regno, reg->biggest_mode);
 	    check_pseudos_live_through_calls (reg->regno,
-					      last_call_used_reg_set);
+					      last_call_used_reg_set,
+					      call_insn);
 	  }
 
       for (reg = curr_static_id->hard_regs; reg != NULL; reg = reg->next)
@@ -1125,7 +1163,7 @@ process_bb_lives (basic_block bb, int &curr_point, bool dead_insn_p)
       if (sparseset_cardinality (pseudos_live_through_calls) == 0)
 	break;
       if (sparseset_bit_p (pseudos_live_through_calls, j))
-	check_pseudos_live_through_calls (j, last_call_used_reg_set);
+	check_pseudos_live_through_calls (j, last_call_used_reg_set, call_insn);
     }
 
   for (i = 0; HARD_REGISTER_NUM_P (i); ++i)
@@ -1359,7 +1397,7 @@ lra_create_live_ranges_1 (bool all_p, bool dead_insn_p)
 	lra_reg_info[i].biggest_mode = GET_MODE (regno_reg_rtx[i]);
       else
 	lra_reg_info[i].biggest_mode = VOIDmode;
-      lra_reg_info[i].call_p = false;
+      lra_reg_info[i].call_insn = NULL;
       if (!HARD_REGISTER_NUM_P (i)
 	  && lra_reg_info[i].nrefs != 0)
 	{
diff --git a/gcc/lra.c b/gcc/lra.c
index 592b990..e00e6e7 100644
--- a/gcc/lra.c
+++ b/gcc/lra.c
@@ -1344,6 +1344,7 @@ initialize_lra_reg_info_element (int i)
   lra_reg_info[i].val = get_new_reg_value ();
   lra_reg_info[i].offset = 0;
   lra_reg_info[i].copies = NULL;
+  lra_reg_info[i].call_insn = NULL;
 }
 
 /* Initialize common reg info and copies.  */
diff --git a/gcc/regcprop.c b/gcc/regcprop.c
index b107ea2..e6bdeb0 100644
--- a/gcc/regcprop.c
+++ b/gcc/regcprop.c
@@ -1054,7 +1054,7 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd)
 	  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 	    if ((TEST_HARD_REG_BIT (regs_invalidated_by_this_call, regno)
 		 || (targetm.hard_regno_call_part_clobbered
-		     (regno, vd->e[regno].mode)))
+		     (insn, regno, vd->e[regno].mode)))
 		&& (regno < set_regno || regno >= set_regno + set_nregs))
 	      kill_value_regno (regno, 1, vd);
 
diff --git a/gcc/reginfo.c b/gcc/reginfo.c
index 7a7fa4d..315c5ec 100644
--- a/gcc/reginfo.c
+++ b/gcc/reginfo.c
@@ -639,7 +639,7 @@ choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
     if (hard_regno_nregs (regno, mode) == nregs
 	&& targetm.hard_regno_mode_ok (regno, mode)
 	&& (!call_saved
-	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
+	    || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
 	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
       found_mode = mode;
 
@@ -647,7 +647,7 @@ choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
     if (hard_regno_nregs (regno, mode) == nregs
 	&& targetm.hard_regno_mode_ok (regno, mode)
 	&& (!call_saved
-	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
+	    || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
 	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
       found_mode = mode;
 
@@ -655,7 +655,7 @@ choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
     if (hard_regno_nregs (regno, mode) == nregs
 	&& targetm.hard_regno_mode_ok (regno, mode)
 	&& (!call_saved
-	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
+	    || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
 	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
       found_mode = mode;
 
@@ -663,7 +663,7 @@ choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
     if (hard_regno_nregs (regno, mode) == nregs
 	&& targetm.hard_regno_mode_ok (regno, mode)
 	&& (!call_saved
-	    || !targetm.hard_regno_call_part_clobbered (regno, mode))
+	    || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
 	&& maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
       found_mode = mode;
 
@@ -677,7 +677,7 @@ choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
       if (hard_regno_nregs (regno, mode) == nregs
 	  && targetm.hard_regno_mode_ok (regno, mode)
 	  && (!call_saved
-	      || !targetm.hard_regno_call_part_clobbered (regno, mode)))
+	      || !targetm.hard_regno_call_part_clobbered (NULL, regno, mode)))
 	return mode;
     }
 
diff --git a/gcc/regrename.c b/gcc/regrename.c
index a180ced..637b3cb 100644
--- a/gcc/regrename.c
+++ b/gcc/regrename.c
@@ -339,9 +339,9 @@ check_new_reg_p (int reg ATTRIBUTE_UNUSED, int new_reg,
 	 && ! DEBUG_INSN_P (tmp->insn))
 	|| (this_head->need_caller_save_reg
 	    && ! (targetm.hard_regno_call_part_clobbered
-		  (reg, GET_MODE (*tmp->loc)))
+		  (NULL, reg, GET_MODE (*tmp->loc)))
 	    && (targetm.hard_regno_call_part_clobbered
-		(new_reg, GET_MODE (*tmp->loc)))))
+		(NULL, new_reg, GET_MODE (*tmp->loc)))))
       return false;
 
   return true;
diff --git a/gcc/reload.c b/gcc/reload.c
index 3ad11a8..72cc38a 100644
--- a/gcc/reload.c
+++ b/gcc/reload.c
@@ -6912,13 +6912,14 @@ find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
 	  if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
 	    for (i = 0; i < nregs; ++i)
 	      if (call_used_regs[regno + i]
-		  || targetm.hard_regno_call_part_clobbered (regno + i, mode))
+		  || targetm.hard_regno_call_part_clobbered (NULL, regno + i,
+							     mode))
 		return 0;
 
 	  if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
 	    for (i = 0; i < valuenregs; ++i)
 	      if (call_used_regs[valueno + i]
-		  || targetm.hard_regno_call_part_clobbered (valueno + i,
+		  || targetm.hard_regno_call_part_clobbered (NULL, valueno + i,
 							     mode))
 		return 0;
 	}
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 42012e4..bb112d8 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -8289,7 +8289,8 @@ emit_reload_insns (struct insn_chain *chain)
 			   : out_regno + k);
 		      reg_reloaded_insn[regno + k] = insn;
 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
-		      if (targetm.hard_regno_call_part_clobbered (regno + k,
+		      if (targetm.hard_regno_call_part_clobbered (NULL,
+								  regno + k,
 								  mode))
 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
 					  regno + k);
@@ -8369,7 +8370,8 @@ emit_reload_insns (struct insn_chain *chain)
 			   : in_regno + k);
 		      reg_reloaded_insn[regno + k] = insn;
 		      SET_HARD_REG_BIT (reg_reloaded_valid, regno + k);
-		      if (targetm.hard_regno_call_part_clobbered (regno + k,
+		      if (targetm.hard_regno_call_part_clobbered (NULL,
+								  regno + k,
 								  mode))
 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
 					  regno + k);
@@ -8485,7 +8487,7 @@ emit_reload_insns (struct insn_chain *chain)
 		      CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + k);
 		      SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + k);
 		      if (targetm.hard_regno_call_part_clobbered
-			  (src_regno + k, mode))
+			  (NULL, src_regno + k, mode))
 			SET_HARD_REG_BIT (reg_reloaded_call_part_clobbered,
 					  src_regno + k);
 		      else
diff --git a/gcc/sched-deps.c b/gcc/sched-deps.c
index a9e934d..6cf4caf 100644
--- a/gcc/sched-deps.c
+++ b/gcc/sched-deps.c
@@ -3728,7 +3728,7 @@ deps_analyze_insn (struct deps_desc *deps, rtx_insn *insn)
              Since we only have a choice between 'might be clobbered'
              and 'definitely not clobbered', we must include all
              partly call-clobbered registers here.  */
-	    else if (targetm.hard_regno_call_part_clobbered (i,
+	    else if (targetm.hard_regno_call_part_clobbered (insn, i,
 							     reg_raw_mode[i])
                      || TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
               SET_REGNO_REG_SET (reg_pending_clobbers, i);
diff --git a/gcc/sel-sched.c b/gcc/sel-sched.c
index bf4b2dd..315f2c0 100644
--- a/gcc/sel-sched.c
+++ b/gcc/sel-sched.c
@@ -1102,7 +1102,7 @@ init_regs_for_mode (machine_mode mode)
       if (i >= 0)
         continue;
 
-      if (targetm.hard_regno_call_part_clobbered (cur_reg, mode))
+      if (targetm.hard_regno_call_part_clobbered (NULL, cur_reg, mode))
         SET_HARD_REG_BIT (sel_hrd.regs_for_call_clobbered[mode],
                           cur_reg);
 
@@ -1251,7 +1251,7 @@ mark_unavailable_hard_regs (def_t def, struct reg_rename *reg_rename_p,
 
   /* Exclude registers that are partially call clobbered.  */
   if (def->crosses_call
-      && !targetm.hard_regno_call_part_clobbered (regno, mode))
+      && !targetm.hard_regno_call_part_clobbered (NULL, regno, mode))
     AND_COMPL_HARD_REG_SET (reg_rename_p->available_for_renaming,
                             sel_hrd.regs_for_call_clobbered[mode]);
 
diff --git a/gcc/target.def b/gcc/target.def
index 2aeb1ff..7ebc90b 100644
--- a/gcc/target.def
+++ b/gcc/target.def
@@ -5766,14 +5766,30 @@ DEFHOOK
 (hard_regno_call_part_clobbered,
  "This hook should return true if @var{regno} is partly call-saved and\n\
 partly call-clobbered, and if a value of mode @var{mode} would be partly\n\
-clobbered by a call.  For example, if the low 32 bits of @var{regno} are\n\
-preserved across a call but higher bits are clobbered, this hook should\n\
-return true for a 64-bit mode but false for a 32-bit mode.\n\
+clobbered by call instruction @var{insn}.  If @var{insn} is NULL then it\n\
+should return true if any call could partly clobber the register.\n\
+For example, if the low 32 bits of @var{regno} are preserved across a call\n\
+but higher bits are clobbered, this hook should return true for a 64-bit\n\
+mode but false for a 32-bit mode.\n\
 \n\
 The default implementation returns false, which is correct\n\
 for targets that don't have partly call-clobbered registers.",
- bool, (unsigned int regno, machine_mode mode),
- hook_bool_uint_mode_false)
+ bool, (rtx_insn *insn, unsigned int regno, machine_mode mode),
+ hook_bool_insn_uint_mode_false)
+
+DEFHOOK
+(return_call_with_max_clobbers,
+ "This hook returns a pointer to the call that partially clobbers the\n\
+most registers.  If a platform supports multiple ABIs where the registers\n\
+that are partially clobbered may vary, this function compares two\n\
+calls and returns a pointer to the one that clobbers the most registers.\n\
+If both calls clobber the same registers, @var{call_1} must be returned.\n\
+\n\
+The registers clobbered in different ABIs must be a proper subset or\n\
+superset of those clobbered in all other ABIs.  @var{call_1} must always\n\
+be a call insn, @var{call_2} may be NULL or a call insn.",
+ rtx_insn *, (rtx_insn *call_1, rtx_insn *call_2),
+ NULL)
 
 /* Return the smallest number of different values for which it is best to
    use a jump-table instead of a tree of conditional branches.  */
diff --git a/gcc/targhooks.c b/gcc/targhooks.c
index 898848f..2cbdc4a 100644
--- a/gcc/targhooks.c
+++ b/gcc/targhooks.c
@@ -1930,7 +1930,7 @@ default_dwarf_frame_reg_mode (int regno)
 {
   machine_mode save_mode = reg_raw_mode[regno];
 
-  if (targetm.hard_regno_call_part_clobbered (regno, save_mode))
+  if (targetm.hard_regno_call_part_clobbered (NULL, regno, save_mode))
     save_mode = choose_hard_reg_mode (regno, 1, true);
   return save_mode;
 }

Reply via email to