The issue is exposed when building the Linux kernel for x86.

The root cause is that the current "single_set" ignores CLOBBER and USE,
while we need to take them into account when deciding whether an insn can
be moved during shrink-wrapping.
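
For example (an illustrative, made-up insn, not the exact one from the PR),
consider an insn whose pattern also clobbers the flags:

    /* Suppose INSN's pattern is
         (parallel [(set (reg:DI 0 ax) (reg:DI 5 di))
                    (clobber (reg:CC 17 flags))])  */
    rtx set = single_set (insn);
    /* single_set skips the CLOBBER and returns the inner SET, so
       move_insn_for_shrink_wrap sees what looks like a plain register
       copy even though the insn has an extra side effect.  */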

This patch adds a parameter to single_set_2 so that it can return NULL_RTX
when the caller wants to reject any pattern with such side effects, and adds
a new helper function "single_set_no_clobber_use" on top of it.

The patch passes an x86-64 bootstrap and check-gcc/check-g++, and I manually
verified that the issue reported in PR 63404 is gone.

There are also no regressions in the aarch64-none-elf regression tests.

Comments?

Thanks.

2014-09-26  Jiong Wang  <jiong.w...@arm.com>

        * rtl.h (single_set_no_clobber_use): New function.
        (single_set_2): Add new parameter "fail_on_clobber_use".
        (single_set): Update call to single_set_2.
        * config/ia64/ia64.c (ia64_single_set): Likewise.
        * rtlanal.c (single_set_2): Return NULL_RTX if fail_on_clobber_use
        is true and the pattern contains a USE or CLOBBER.
        * shrink-wrap.c (move_insn_for_shrink_wrap): Use
        single_set_no_clobber_use instead of single_set.

diff --git a/gcc/config/ia64/ia64.c b/gcc/config/ia64/ia64.c
index 9337be1..09d3c4a 100644
--- a/gcc/config/ia64/ia64.c
+++ b/gcc/config/ia64/ia64.c
@@ -7172,7 +7172,7 @@ ia64_single_set (rtx_insn *insn)
       break;
 
     default:
-      ret = single_set_2 (insn, x);
+      ret = single_set_2 (insn, x, false);
       break;
     }
 
diff --git a/gcc/rtl.h b/gcc/rtl.h
index e73f731..7c40d5a 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -2797,7 +2797,7 @@ extern void set_insn_deleted (rtx);
 
 /* Functions in rtlanal.c */
 
-extern rtx single_set_2 (const rtx_insn *, const_rtx);
+extern rtx single_set_2 (const rtx_insn *, const_rtx, bool fail_on_clobber_use);
 
 /* Handle the cheap and common cases inline for performance.  */
 
@@ -2810,7 +2810,22 @@ inline rtx single_set (const rtx_insn *insn)
     return PATTERN (insn);
 
   /* Defer to the more expensive case.  */
-  return single_set_2 (insn, PATTERN (insn));
+  return single_set_2 (insn, PATTERN (insn), false);
+}
+
+/* Like single_set, but also return NULL_RTX when the pattern contains
+   a USE or CLOBBER.  */
+inline rtx single_set_no_clobber_use (const rtx_insn *insn)
+{
+  if (!INSN_P (insn))
+    return NULL_RTX;
+
+  if (GET_CODE (PATTERN (insn)) == SET)
+    return PATTERN (insn);
+
+  /* Defer to the more expensive case, and return NULL_RTX if there is
+     USE or CLOBBER.  */
+  return single_set_2 (insn, PATTERN (insn), true);
 }
 
 extern enum machine_mode get_address_mode (rtx mem);
diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c
index 3063458..cb5e36a 100644
--- a/gcc/rtlanal.c
+++ b/gcc/rtlanal.c
@@ -1182,7 +1182,7 @@ record_hard_reg_uses (rtx *px, void *data)
    will not be used, which we ignore.  */
 
 rtx
-single_set_2 (const rtx_insn *insn, const_rtx pat)
+single_set_2 (const rtx_insn *insn, const_rtx pat, bool fail_on_clobber_use)
 {
   rtx set = NULL;
   int set_verified = 1;
@@ -1197,6 +1197,8 @@ single_set_2 (const rtx_insn *insn, const_rtx pat)
 	    {
 	    case USE:
 	    case CLOBBER:
+	      if (fail_on_clobber_use)
+		return NULL_RTX;
 	      break;
 
 	    case SET:
diff --git a/gcc/shrink-wrap.c b/gcc/shrink-wrap.c
index b1ff8a2..5624ef7 100644
--- a/gcc/shrink-wrap.c
+++ b/gcc/shrink-wrap.c
@@ -177,7 +177,7 @@ move_insn_for_shrink_wrap (basic_block bb, rtx_insn *insn,
   edge live_edge;
 
   /* Look for a simple register copy.  */
-  set = single_set (insn);
+  set = single_set_no_clobber_use (insn);
   if (!set)
     return false;
   src = SET_SRC (set);
