diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index c75b08e..59c8dd6 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,25 @@
+2019-06-18  Jiangning Liu  <jiangning@os.amperecomputing.com>
+
+	PR rtl-optimization/89430
+	* cse.c (fixed_base_plus_p): Move from here...
+	* rtlanal.c (fixed_base_plus_p): ...to here...
+	* rtl.h (fixed_base_plus_p): Add extern declaration.
+	* ifcvt.c
+	(noce_process_if_block): For the no else_bb and memory store ifcvt
+	case, exit early unless the store target is on the stack and no
+	stack address is taken in the current function.
+	(noce_try_cmove_arith): Do ifcvt for the no else_bb case, if the
+	variable is on the stack and a dominating access can prove the newly
+	introduced memory access is safe.
+	(find_all_must_be_sfp_insns): New function.
+	(find_all_may_be_sfp_insns): New function.
+	(no_stack_address_taken): New function.
+	(no_need_to_analyze_sfp): New function.
+	(noce_mem_is_on_stack): New function.
+	(noce_valid_for_dominating): New function.
+	* ifcvt.h: Add new fields.
+	* testsuite/gcc.dg/ifcvt-6.c: New.
+
 2019-06-17  Jakub Jelinek  <jakub@redhat.com>
 
 	* omp-low.c (struct omp_context): Add scan_inclusive field.
diff --git a/gcc/cse.c b/gcc/cse.c
index 35840a6d..a6fcf92 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -540,7 +540,6 @@ static bitmap cse_ebb_live_in, cse_ebb_live_out;
    already as part of an already processed extended basic block.  */
 static sbitmap cse_visited_basic_blocks;
 
-static bool fixed_base_plus_p (rtx x);
 static int notreg_cost (rtx, machine_mode, enum rtx_code, int);
 static int preferable (int, int, int, int);
 static void new_basic_block (void);
@@ -606,29 +605,6 @@ static machine_mode cse_cc_succs (basic_block, basic_block, rtx, rtx,
 
 static const struct rtl_hooks cse_rtl_hooks = RTL_HOOKS_INITIALIZER;
 
-/* Nonzero if X has the form (PLUS frame-pointer integer).  */
-
-static bool
-fixed_base_plus_p (rtx x)
-{
-  switch (GET_CODE (x))
-    {
-    case REG:
-      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
-	return true;
-      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
-	return true;
-      return false;
-
-    case PLUS:
-      if (!CONST_INT_P (XEXP (x, 1)))
-	return false;
-      return fixed_base_plus_p (XEXP (x, 0));
-
-    default:
-      return false;
-    }
-}
 
 /* Dump the expressions in the equivalence class indicated by CLASSP.
    This function is used only for debugging.  */
diff --git a/gcc/ifcvt.c b/gcc/ifcvt.c
index 7b2f6e6..d04040e 100644
--- a/gcc/ifcvt.c
+++ b/gcc/ifcvt.c
@@ -46,6 +46,7 @@
 #include "rtl-iter.h"
 #include "ifcvt.h"
 #include "params.h"
+#include "domwalk.h"
 
 #ifndef MAX_CONDITIONAL_EXECUTE
 #define MAX_CONDITIONAL_EXECUTE \
@@ -76,6 +77,18 @@ static int num_true_changes;
 /* Whether conditional execution changes were made.  */
 static int cond_exec_changed_p;
 
+/* REGNO bitmap for defs that may be stack frame address.  */
+static bitmap bba_sets_may_be_sfp;
+
+/* REGNO bitmap for defs that must be stack frame address.  */
+static bitmap bba_sets_must_be_sfp;
+
+/* true if current function doesn't have local address taken.  */
+static bool cfun_no_stack_address_taken;
+
+/* A flag to indicate if a pointer to stack is found.  */
+static bool sfp_found;
+
 /* Forward references.  */
 static int count_bb_insns (const_basic_block);
 static bool cheap_bb_rtx_cost_p (const_basic_block, profile_probability, int);
@@ -99,6 +112,14 @@ static int dead_or_predicable (basic_block, basic_block, basic_block,
 			       edge, int);
 static void noce_emit_move_insn (rtx, rtx);
 static rtx_insn *block_has_only_trap (basic_block);
+static bool noce_mem_is_on_stack (rtx_insn *a_insn, const_rtx x);
+static bool noce_valid_for_dominating (basic_block bb, rtx_insn *a_insn,
+				       const_rtx x, bool is_store);
+static bool noce_mem_maybe_invalid_p (struct noce_if_info *if_info);
+static bool no_need_to_analyze_sfp (const_rtx set);
+static void find_all_must_be_sfp_insns (void);
+static void find_all_may_be_sfp_insns (void);
+static bool no_stack_address_taken (void);
 
 /* Count the number of non-jump active insns in BB.  */
 
@@ -2029,6 +2050,94 @@ noce_emit_bb (rtx last_insn, basic_block bb, bool simple)
   return true;
 }
 
+/* Return true if X, a MEM expression, is on the stack.  A_INSN contains
+   X if A_INSN exists.  */
+
+static bool
+noce_mem_is_on_stack (rtx_insn *a_insn, const_rtx x)
+{
+  df_ref use;
+
+  gcc_assert (x);
+  gcc_assert (MEM_P (x));
+
+  if (!a_insn)
+    return false;
+
+  /* Exit early if the base address is a stack register.  */
+  rtx a = XEXP (x, 0);
+  if (fixed_base_plus_p (a))
+    return true;
+
+  if (!reg_mentioned_p (x, a_insn))
+    return false;
+
+  /* Check if x is on stack.  Assume a mem expression using registers
+     related to stack register is always on stack.  */
+  FOR_EACH_INSN_USE (use, a_insn)
+    if (reg_mentioned_p (DF_REF_REG (use), x)
+	&& bitmap_bit_p (bba_sets_must_be_sfp, DF_REF_REGNO (use)))
+      return true;
+
+  return false;
+}
+
+/* Always return true if there is a dominating store.
+
+   When there is a dominating load from memory on stack,
+   1) if A_INSN is a memory read, return true.
+   2) if A_INSN is a memory write, return true if the memory is on stack.
+      This is to guarantee the memory is *not* readonly.  */
+
+static bool
+noce_valid_for_dominating (basic_block bb, rtx_insn *a_insn,
+			   const_rtx x, bool is_store)
+{
+  rtx_insn *insn;
+  rtx set;
+
+  gcc_assert (MEM_P (x));
+
+  FOR_BB_INSNS (bb, insn)
+    {
+      set = single_set (insn);
+      if (!set)
+	continue;
+
+      /* Dominating store.  */
+      if (rtx_equal_p (x, SET_DEST (set)))
+	return true;
+
+      /* Dominating load.  */
+      if (rtx_equal_p (x, SET_SRC (set)))
+	if (is_store && noce_mem_is_on_stack (a_insn, x))
+	  return true;
+    }
+
+  return false;
+}
+
+/* Return true if a memory access to A or B may be invalid (trap or fault).
+   This function must be called before the latent swap of A and B.  */
+
+static bool
+noce_mem_maybe_invalid_p (struct noce_if_info *if_info)
+{
+  if (!if_info->set_b && MEM_P (if_info->orig_x))
+    {
+      if (!if_info->else_bb && MEM_P (if_info->b))
+	return !noce_valid_for_dominating (if_info->test_bb,
+					   if_info->insn_a,
+					   if_info->b, true);
+    }
+
+  /* ??? We could handle this if we knew that a load from A or B could
+     not trap or fault.  This is also true if we've already loaded
+     from the address along the path from ENTRY.  */
+  return (may_trap_or_fault_p (if_info->a)
+	  || may_trap_or_fault_p (if_info->b));
+}
+
 /* Try more complex cases involving conditional_move.  */
 
 static int
@@ -2065,10 +2174,7 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
       is_mem = 1;
     }
 
-  /* ??? We could handle this if we knew that a load from A or B could
-     not trap or fault.  This is also true if we've already loaded
-     from the address along the path from ENTRY.  */
-  else if (may_trap_or_fault_p (a) || may_trap_or_fault_p (b))
+  else if (noce_mem_maybe_invalid_p (if_info))
     return FALSE;
 
   /* if (test) x = a + b; else x = c - d;
@@ -2234,7 +2340,7 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
   /* If insn to set up A clobbers any registers B depends on, try to
      swap insn that sets up A with the one that sets up B.  If even
      that doesn't help, punt.  */
-  if (modified_in_a && !modified_in_b)
+  if (!modified_in_a && modified_in_b)
     {
       if (!noce_emit_bb (emit_b, else_bb, b_simple))
 	goto end_seq_and_fail;
@@ -2242,7 +2348,7 @@ noce_try_cmove_arith (struct noce_if_info *if_info)
       if (!noce_emit_bb (emit_a, then_bb, a_simple))
 	goto end_seq_and_fail;
     }
-  else if (!modified_in_a)
+  else if (!modified_in_b)
     {
       if (!noce_emit_bb (emit_a, then_bb, a_simple))
 	goto end_seq_and_fail;
@@ -3563,12 +3669,27 @@ noce_process_if_block (struct noce_if_info *if_info)
     }
 
   if (!set_b && MEM_P (orig_x))
-    /* We want to avoid store speculation to avoid cases like
-	 if (pthread_mutex_trylock(mutex))
-	   ++global_variable;
-       Rather than go to much effort here, we rely on the SSA optimizers,
-       which do a good enough job these days.  */
-    return FALSE;
+    {
+      /* We want to avoid store speculation to avoid cases like
+	   if (pthread_mutex_trylock (mutex))
+	     ++global_variable;
+	 Tree if conversion cannot handle this case well, and it intends to
+	 help vectorization for loops only.  */
+      if (!noce_mem_is_on_stack (insn_a, orig_x))
+	return FALSE;
+
+      /* For case like,
+	   if (pthread_mutex_trylock (mutex))
+	     ++local_variable;
+	 If any stack variable address is taken, potentially this local
+	 variable could be modified by other threads and introduce store
+	 speculation.  */
+      if (!cfun_no_stack_address_taken)
+	return FALSE;
+    }
+
+  if_info->set_a = set_a;
+  if_info->set_b = set_b;
 
   if (noce_try_move (if_info))
     goto success;
@@ -5347,6 +5468,297 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
 
   return FALSE;
 }
+
+class must_be_sfp_dom_walker : public dom_walker
+{
+public:
+  must_be_sfp_dom_walker (cdi_direction direction) : dom_walker (direction)
+    {}
+
+  virtual edge before_dom_children (basic_block);
+};
+
+edge
+must_be_sfp_dom_walker::before_dom_children (basic_block bb)
+{
+  rtx_insn *a_insn;
+
+  FOR_BB_INSNS (bb, a_insn)
+    {
+      df_ref def, use;
+
+      rtx sset_a = single_set (a_insn);
+      if (!sset_a)
+	continue;
+      rtx src = SET_SRC (sset_a);
+      rtx dest = SET_DEST (sset_a);
+
+      if (!REG_P (dest))
+	continue;
+
+      /* For the case below,
+	   Control flow: B1->B3, B2->B3
+	   B1: p = &local_var
+	   B2: p = &global_var
+	   B3: ... = *p
+	 pointer p is an address for either a local or a global variable,
+	 so we don't treat p as a stack address pointer.  To keep the
+	 algorithm simple, we ignore all non-SSA cases.  */
+      bool skip_insn = false;
+      unsigned int dest_regno = 0;
+      FOR_EACH_INSN_DEF (def, a_insn)
+	{
+	  dest_regno = DF_REF_REGNO (def);
+	  /* Skip current insn if
+	     1) it is already marked as a pointer to the stack.
+	     2) or we see multiple definition points.  */
+	  if (bitmap_bit_p (bba_sets_must_be_sfp, dest_regno)
+	      || REG_N_SETS (dest_regno) > 1)
+	    {
+	      skip_insn = true;
+	      break;
+	    }
+	}
+      if (skip_insn)
+	continue;
+
+      /* Handle case like "x1 = sp + offset".  */
+      if (fixed_base_plus_p (src))
+	{
+	  bitmap_set_bit (bba_sets_must_be_sfp, dest_regno);
+	  sfp_found = true;
+	  continue;
+	}
+
+      /* Handle case like "x2 = x1 + offset", in which x1 is already
+	 identified as a pointer to the stack.  */
+      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
+	  && CONST_INT_P (XEXP (src, 1)))
+	{
+	  rtx x1 = XEXP (src, 0);
+	  if (!REG_P (x1))
+	    continue;
+
+	  FOR_EACH_INSN_USE (use, a_insn)
+	    {
+	      if (!rtx_equal_p (x1, DF_REF_REG (use)))
+		continue;
+
+	      if (bitmap_bit_p (bba_sets_must_be_sfp, DF_REF_REGNO (use)))
+		{
+		  bitmap_set_bit (bba_sets_must_be_sfp, dest_regno);
+		  sfp_found = true;
+		  break;
+		}
+	    }
+	}
+    }
+  return NULL;
+}
+
+/* Find all insns that must be stack address and store REGNO into
+   bitmap BBA_SETS_MUST_BE_SFP.  */
+
+static void
+find_all_must_be_sfp_insns (void)
+{
+  do {
+    sfp_found = false;
+    must_be_sfp_dom_walker (CDI_DOMINATORS)
+      .walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+  } while (sfp_found);
+}
+
+class may_be_sfp_dom_walker : public dom_walker
+{
+public:
+  may_be_sfp_dom_walker (cdi_direction direction) : dom_walker (direction)
+    {}
+
+  virtual edge before_dom_children (basic_block);
+};
+
+/* Return true if we are sure SET doesn't need to be further analyzed
+   to calculate BBA_SETS_MAY_BE_SFP.  */
+
+static bool no_need_to_analyze_sfp (const_rtx set)
+{
+  rtx src = SET_SRC (set);
+  rtx dest = SET_DEST (set);
+
+  /* Skip insn that is already identified as a pointer to the stack.  */
+  unsigned int dest_regno = REGNO (dest);
+  if (bitmap_bit_p (bba_sets_may_be_sfp, dest_regno))
+    return true;
+
+  /* If we see "hard_register = ...", which potentially implies passing
+     an address out of the current frame, we don't collect this case.
+     It can be treated as address taken in no_stack_address_taken.  */
+  if (HARD_REGISTER_P (dest))
+    return true;
+
+  /* A memory load or store does not generate a pointer to the stack.  */
+  if (MEM_P (src) || MEM_P (dest))
+    return true;
+
+  return false;
+}
+
+edge
+may_be_sfp_dom_walker::before_dom_children (basic_block bb)
+{
+  rtx_insn *a_insn;
+  const_rtx pat;
+
+  FOR_BB_INSNS (bb, a_insn)
+    {
+      if (!INSN_P (a_insn))
+	continue;
+
+      pat = PATTERN (a_insn);
+      if (GET_CODE (pat) == SET)
+	{
+	  if (no_need_to_analyze_sfp (pat))
+	    continue;
+
+	  /* Collect all latent insns that generate pointers to the stack.  */
+	  df_ref use;
+    	  FOR_EACH_INSN_USE (use, a_insn)
+	    {
+	      rtx x = DF_REF_REG (use);
+	      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
+		  || bitmap_bit_p (bba_sets_may_be_sfp, DF_REF_REGNO (use)))
+		{
+		  bitmap_set_bit (bba_sets_may_be_sfp, REGNO (SET_DEST (pat)));
+		  sfp_found = true;
+		  break;
+		}
+	    }
+	}
+      else if (GET_CODE (pat) == PARALLEL)
+	{
+	  for (int i = 0; i < XVECLEN (pat, 0); i++)
+	    {
+	      rtx sub = XVECEXP (pat, 0, i);
+	      if (GET_CODE (sub) == SET)
+		{
+		  if (no_need_to_analyze_sfp (sub))
+		    continue;
+
+		  /* Aggressively mark it as a pointer to the stack to
+		     avoid any local stack address to escape out of current
+		     frame.  */
+		  bitmap_set_bit (bba_sets_may_be_sfp,
+				  REGNO (SET_DEST (sub)));
+		  sfp_found = true;
+		}
+	    }
+	}
+    }
+
+  return NULL;
+}
+
+/* Find all insns that may be pointers to the stack and store REGNO into
+   bitmap BBA_SETS_MAY_BE_SFP.  We iterate all insns in current func
+   until no more latent insns generating stack address are found.  The
+   collection of pointers to the stack BBA_SETS_MAY_BE_SFP will be used
+   to help analyze local stack variable address taken.  A stack variable
+   address can be passed out of the current frame only if a pointer to the
+   stack is passed to a hard register or stored into memory.  */
+
+static void
+find_all_may_be_sfp_insns (void)
+{
+  do {
+    sfp_found = false;
+    may_be_sfp_dom_walker (CDI_DOMINATORS)
+      .walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
+  } while (sfp_found);
+}
+
+/* Return true if current function doesn't pass stack address out of frame.  */
+
+static bool
+no_stack_address_taken (void)
+{
+  basic_block bb;
+  rtx_insn *a_insn;
+  const_rtx pat;
+  df_ref use;
+  rtx src, dest;
+
+  FOR_ALL_BB_FN (bb, cfun)
+    FOR_BB_INSNS (bb, a_insn)
+      {
+	if (!INSN_P (a_insn))
+	  continue;
+
+	pat = PATTERN (a_insn);
+	if (GET_CODE (pat) == SET)
+	  {
+	    src = SET_SRC (pat);
+	    dest = SET_DEST (pat);
+
+	    /* Skip if it is already identified as pointers to the stack.  */
+	    if (bitmap_bit_p (bba_sets_may_be_sfp, REGNO (dest)))
+	      continue;
+
+	    /* Load cannot introduce address taken.  */
+	    if (MEM_P (src))
+	      continue;
+
+	    FOR_EACH_INSN_USE (use, a_insn)
+	      {
+		/* Skip if it doesn't use any pointers to the stack at all.  */
+		if (!bitmap_bit_p (bba_sets_may_be_sfp, DF_REF_REGNO (use)))
+		  continue;
+
+		/* Store is safe if src doesn't contain any pointers to the
+		   stack.  */
+		if (MEM_P (dest)
+		    && !reg_mentioned_p (DF_REF_REG (use), src))
+		  continue;
+
+		/* All other cases potentially introduce address taken.  */
+		return false;
+	      }
+	  }
+	else if (GET_CODE (pat) == PARALLEL)
+	  {
+	    for (int i = 0; i < XVECLEN (pat, 0); i++)
+	      {
+		rtx sub = XVECEXP (pat, 0, i);
+		src = SET_SRC (sub);
+		dest = SET_DEST (sub);
+
+		if (GET_CODE (sub) == SET)
+		  {
+	    	    /* Skip if it is already identified as pointers to
+		       the stack.  */
+		    if (bitmap_bit_p (bba_sets_may_be_sfp, REGNO (dest)))
+		      continue;
+
+		    /* Load cannot introduce address taken.  */
+		    if (MEM_P (src))
+		      continue;
+
+		    /* For all other cases, conservatively treat it as
+		       address taken whenever a pointer to the stack is
+		       used.  */
+		    FOR_EACH_INSN_USE (use, a_insn)
+		      {
+			if (bitmap_bit_p (bba_sets_may_be_sfp,
+					  DF_REF_REGNO (use)))
+			  return false;
+		      }
+		  }
+	      }
+	  }
+      }
+  return true;
+}
+
 
 /* Main entry point for all if-conversion.  AFTER_COMBINE is true if
    we are after combine pass.  */
@@ -5381,6 +5793,18 @@ if_convert (bool after_combine)
 
   df_set_flags (DF_LR_RUN_DCE);
 
+  bba_sets_must_be_sfp = BITMAP_ALLOC (&reg_obstack);
+  bba_sets_may_be_sfp = BITMAP_ALLOC (&reg_obstack);
+
+  /* Prepare for stack variable analysis.  */
+  regstat_init_n_sets_and_refs ();
+  calculate_dominance_info (CDI_DOMINATORS);
+  find_all_must_be_sfp_insns ();
+  find_all_may_be_sfp_insns ();
+  cfun_no_stack_address_taken = no_stack_address_taken ();
+  free_dominance_info (CDI_DOMINATORS);
+  regstat_free_n_sets_and_refs ();
+
   /* Go through each of the basic blocks looking for things to convert.  If we
      have conditional execution, we make multiple passes to allow us to handle
      IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
@@ -5413,6 +5837,9 @@ if_convert (bool after_combine)
     }
   while (cond_exec_changed_p);
 
+  BITMAP_FREE (bba_sets_may_be_sfp);
+  BITMAP_FREE (bba_sets_must_be_sfp);
+
 #ifdef IFCVT_MULTIPLE_DUMPS
   if (dump_file)
     fprintf (dump_file, "\n\n========== no more changes\n");
diff --git a/gcc/ifcvt.h b/gcc/ifcvt.h
index 153ad96..46a32ad 100644
--- a/gcc/ifcvt.h
+++ b/gcc/ifcvt.h
@@ -73,6 +73,8 @@ struct noce_if_info
 
   /* The SET_SRC of INSN_A and INSN_B.  */
   rtx a, b;
+  /* The SET of INSN_A and INSN_B.  */
+  rtx set_a, set_b;
 
   /* The SET_DEST of INSN_A.  */
   rtx x;
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 31fba82..5c5e018 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -3754,6 +3754,8 @@ extern struct target_rtl *this_target_rtl;
 #define hard_frame_pointer_rtx	(global_rtl[GR_HARD_FRAME_POINTER])
 #define arg_pointer_rtx		(global_rtl[GR_ARG_POINTER])
 
+extern bool fixed_base_plus_p (rtx x);
+
 #ifndef GENERATOR_FILE
 /* Return the attributes of a MEM rtx.  */
 static inline const struct mem_attrs *
diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c
index 268a387..48c5c2f 100644
--- a/gcc/rtlanal.c
+++ b/gcc/rtlanal.c
@@ -341,6 +341,30 @@ rtx_varies_p (const_rtx x, bool for_alias)
   return 0;
 }
 
+/* Nonzero if X has the form (PLUS frame-pointer integer).  */
+
+bool
+fixed_base_plus_p (rtx x)
+{
+  switch (GET_CODE (x))
+    {
+    case REG:
+      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx)
+	return true;
+      if (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
+	return true;
+      return false;
+
+    case PLUS:
+      if (!CONST_INT_P (XEXP (x, 1)))
+	return false;
+      return fixed_base_plus_p (XEXP (x, 0));
+
+    default:
+      return false;
+    }
+}
+
 /* Compute an approximation for the offset between the register
    FROM and TO for the current function, as it was at the start
    of the routine.  */
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 27a522e..c50cbce 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,7 @@
+2019-06-18  Jiangning Liu  <jiangning.liu@os.amperecomputing.com>
+
+	* gcc.dg/ifcvt-6.c: New test.
+
 2019-06-17  Jakub Jelinek  <jakub@redhat.com>
 
 	* gcc.dg/vect/vect-simd-8.c: New test.
diff --git a/gcc/testsuite/gcc.dg/ifcvt-6.c b/gcc/testsuite/gcc.dg/ifcvt-6.c
new file mode 100644
index 0000000..0c9dac42
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/ifcvt-6.c
@@ -0,0 +1,12 @@
+/* { dg-do compile { target { aarch64*-*-* } } } */
+/* { dg-options "-fdump-rtl-ce1 -O2" } */
+
+unsigned test_ifcvt (unsigned k, unsigned b) {
+        unsigned a[2];
+        if (b < a[k]) {
+                a[k] = b;
+        }
+        return a[0]+a[1];
+}
+
+/* { dg-final { scan-rtl-dump "if-conversion succeeded through noce_try_cmove_arith" "ce1" } } */
