On Tue, 26 Feb 2019, Jakub Jelinek wrote:

> On Tue, Feb 26, 2019 at 05:18:17PM +0100, Jakub Jelinek wrote:
> > > > Shall I modify the patch for that?
> > > 
> > > Might it even simplify the patch?  If not, the only comment on the
> > > original patch is that it would be nice to test it on an SJLJ EH
> > > target ...
> > 
> >  1 file changed, 29 insertions(+), 16 deletions(-)
> > so it doesn't really simplify the patch, but it isn't terrible either.
> > 
> > Here is the incremental (untested) diff that handles that.
> > 
> > I don't have access to any standard SJLJ EH targets, but will try
> > --enable-sjlj-exceptions on x86_64-linux to see how far I get with that.
> 
> Full updated patch that passed normal bootstrap/regtest on x86_64-linux and
> i686-linux.
> 
> --enable-sjlj-exceptions bootstrap on x86_64-linux failed miserably;
> some entry points are removed from libgcc_s.so in that case and the build
> fails with:
> make: relocation error: /lib64/libgc.so.1: symbol __gcc_personality_v0 version GCC_3.3.1 not defined in file libgcc_s.so.1 with link time reference
> On the other hand, even without SJLJ EH the testsuite coverage is quite
> good: during development of the patch I made several mistakes, and each
> time there were dozens to hundreds of failing tests in the testsuite,
> including ones exercising __builtin_setjmp, non-local goto, etc.
> 
> That said, if anybody is able to test this on some SJLJ setup, it would be
> greatly appreciated.

Patch is OK.  I suppose auto-testers will pick up fallout and we
can always revert...

Richard.
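
For context, here is a minimal standalone sketch of the __builtin_setjmp /
__builtin_longjmp round trip that the new cleanup logic has to leave intact.
It is not part of the patch; the buffer and helper names (jmp_buffer,
do_longjmp) and the messages are made up for illustration.

/* Illustrative only: __builtin_setjmp takes a buffer of five words (the new
   pr89280.c testcase below uses void *buf[5] the same way) and returns 0 on
   the normal path; __builtin_longjmp must be passed the constant 1 and
   transfers control back through an abnormal edge.  */
#include <stdio.h>

static void *jmp_buffer[5];

static void
do_longjmp (void)
{
  __builtin_longjmp (jmp_buffer, 1);
}

int
main (void)
{
  if (__builtin_setjmp (jmp_buffer) == 0)
    {
      puts ("first, normal return");
      do_longjmp ();
    }
  else
    puts ("second, abnormal return");
  return 0;
}

The patch's concern is the opposite situation, exercised by the new
pr89280.c testcase below: a returns_twice call or __builtin_setjmp whose
block is reachable only through such an abnormal edge and never normally,
which makes it effectively dead.
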

> 2019-02-26  Jakub Jelinek  <ja...@redhat.com>
> 
>       PR tree-optimization/89280
>       * tree-cfgcleanup.c (maybe_dead_abnormal_edge_p,
>       builtin_setjmp_setup_bb): New functions.
>       (cleanup_control_flow_pre): Ignore maybe_dead_abnormal_edge_p edges.
>       When visiting __builtin_setjmp_setup block, queue in special
>       setjmp_vec vector edges from .ABNORMAL_DISPATCHER to corresponding
>       __builtin_setjmp_receiver.  Remove .ABNORMAL_DISPATCHER basic blocks
>       from visited after the loop if they don't have any visited successor
>       blocks.
> 
>       * gcc.c-torture/compile/pr89280.c: New test.
>       * gcc.dg/torture/pr57147-2.c: Don't expect a setjmp after noreturn
>       function.  Skip the test for -O0.
> 
> --- gcc/tree-cfgcleanup.c.jj  2019-02-23 01:14:03.032266203 +0100
> +++ gcc/tree-cfgcleanup.c     2019-02-23 01:40:03.589681687 +0100
> @@ -723,6 +723,97 @@ cleanup_tree_cfg_bb (basic_block bb)
>    return false;
>  }
>  
> +/* Return true if E is an EDGE_ABNORMAL edge for returns_twice calls,
> +   i.e. one going from .ABNORMAL_DISPATCHER to basic block which doesn't
> +   start with a forced or nonlocal label.  Calls which return twice can return
> +   the second time only if they are called normally the first time, so basic
> +   blocks which can only be entered through these abnormal edges but not
> +   normally are effectively unreachable as well.  Additionally ignore
> +   __builtin_setjmp_receiver starting blocks, which have one FORCED_LABEL
> +   and which are always only reachable through an EDGE_ABNORMAL edge.  They
> +   are handled in cleanup_control_flow_pre.  */
> +
> +static bool
> +maybe_dead_abnormal_edge_p (edge e)
> +{
> +  if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL)
> +    return false;
> +
> +  gimple_stmt_iterator gsi = gsi_start_nondebug_after_labels_bb (e->src);
> +  gimple *g = gsi_stmt (gsi);
> +  if (!g || !gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
> +    return false;
> +
> +  tree target = NULL_TREE;
> +  for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
> +    if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
> +      {
> +     tree this_target = gimple_label_label (label_stmt);
> +     if (DECL_NONLOCAL (this_target))
> +       return false;
> +     if (FORCED_LABEL (this_target))
> +       {
> +         if (target)
> +           return false;
> +         target = this_target;
> +       }
> +      }
> +    else
> +      break;
> +
> +  if (target)
> +    {
> +      /* If there was a single FORCED_LABEL, check for
> +      __builtin_setjmp_receiver with address of that label.  */
> +      if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
> +     gsi_next_nondebug (&gsi);
> +      if (gsi_end_p (gsi))
> +     return false;
> +      if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_RECEIVER))
> +     return false;
> +
> +      tree arg = gimple_call_arg (gsi_stmt (gsi), 0);
> +      if (TREE_CODE (arg) != ADDR_EXPR || TREE_OPERAND (arg, 0) != target)
> +     return false;
> +    }
> +  return true;
> +}
> +
> +/* If BB is a basic block ending with __builtin_setjmp_setup, return edge
> +   from .ABNORMAL_DISPATCHER basic block to corresponding
> +   __builtin_setjmp_receiver basic block, otherwise return NULL.  */
> +static edge
> +builtin_setjmp_setup_bb (basic_block bb)
> +{
> +  if (EDGE_COUNT (bb->succs) != 2
> +      || ((EDGE_SUCC (bb, 0)->flags
> +        & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
> +       && (EDGE_SUCC (bb, 1)->flags
> +           & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL))
> +    return NULL;
> +
> +  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
> +  if (gsi_end_p (gsi)
> +      || !gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_SETJMP_SETUP))
> +    return NULL;
> +
> +  tree arg = gimple_call_arg (gsi_stmt (gsi), 1);
> +  if (TREE_CODE (arg) != ADDR_EXPR
> +      || TREE_CODE (TREE_OPERAND (arg, 0)) != LABEL_DECL)
> +    return NULL;
> +
> +  basic_block recv_bb = label_to_block (cfun, TREE_OPERAND (arg, 0));
> +  if (EDGE_COUNT (recv_bb->preds) != 1
> +      || (EDGE_PRED (recv_bb, 0)->flags
> +       & (EDGE_ABNORMAL | EDGE_EH)) != EDGE_ABNORMAL
> +      || (EDGE_SUCC (bb, 0)->dest != EDGE_PRED (recv_bb, 0)->src
> +       && EDGE_SUCC (bb, 1)->dest != EDGE_PRED (recv_bb, 0)->src))
> +    return NULL;
> +
> +  /* EDGE_PRED (recv_bb, 0)->src should be the .ABNORMAL_DISPATCHER bb.  */
> +  return EDGE_PRED (recv_bb, 0);
> +}
> +
>  /* Do cleanup_control_flow_bb in PRE order.  */
>  
>  static bool
> @@ -736,10 +827,13 @@ cleanup_control_flow_pre ()
>    dom_state saved_state = dom_info_state (CDI_DOMINATORS);
>    set_dom_info_availability (CDI_DOMINATORS, DOM_NONE);
>  
> -  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
> +  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 2);
>    auto_sbitmap visited (last_basic_block_for_fn (cfun));
>    bitmap_clear (visited);
>  
> +  vec<edge, va_gc> *setjmp_vec = NULL;
> +  auto_vec<basic_block, 4> abnormal_dispatchers;
> +
>    stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));
>  
>    while (! stack.is_empty ())
> @@ -749,12 +843,37 @@ cleanup_control_flow_pre ()
>        basic_block dest = ei_edge (ei)->dest;
>  
>        if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
> -       && ! bitmap_bit_p (visited, dest->index))
> +       && !bitmap_bit_p (visited, dest->index)
> +       && (ei_container (ei) == setjmp_vec
> +           || !maybe_dead_abnormal_edge_p (ei_edge (ei))))
>       {
>         bitmap_set_bit (visited, dest->index);
>         /* We only possibly remove edges from DEST here, leaving
>            possibly unreachable code in the IL.  */
>         retval |= cleanup_control_flow_bb (dest);
> +
> +       /* Check for __builtin_setjmp_setup.  Edges from .ABNORMAL_DISPATCHER
> +          to __builtin_setjmp_receiver will normally be ignored by
> +          maybe_dead_abnormal_edge_p.  If DEST is a visited
> +          __builtin_setjmp_setup block, queue edge from .ABNORMAL_DISPATCHER
> +          to __builtin_setjmp_receiver, so that it will be visited too.  */
> +       if (edge e = builtin_setjmp_setup_bb (dest))
> +         {
> +           vec_safe_push (setjmp_vec, e);
> +           if (vec_safe_length (setjmp_vec) == 1)
> +             stack.quick_push (ei_start (setjmp_vec));
> +         }
> +
> +       if ((ei_edge (ei)->flags
> +            & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
> +         {
> +           gimple_stmt_iterator gsi
> +             = gsi_start_nondebug_after_labels_bb (dest);
> +           gimple *g = gsi_stmt (gsi);
> +           if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
> +             abnormal_dispatchers.safe_push (dest);
> +         }
> +
>         if (EDGE_COUNT (dest->succs) > 0)
>           stack.quick_push (ei_start (dest->succs));
>       }
> @@ -763,10 +882,32 @@ cleanup_control_flow_pre ()
>         if (!ei_one_before_end_p (ei))
>           ei_next (&stack.last ());
>         else
> -         stack.pop ();
> +         {
> +           if (ei_container (ei) == setjmp_vec)
> +             vec_safe_truncate (setjmp_vec, 0);
> +           stack.pop ();
> +         }
>       }
>      }
>  
> +  vec_free (setjmp_vec);
> +
> +  /* If we've marked .ABNORMAL_DISPATCHER basic block(s) as visited
> +     above, but haven't marked any of their successors as visited,
> +     unmark them now, so that they can be removed as useless.  */
> +  basic_block dispatcher_bb;
> +  unsigned int k;
> +  FOR_EACH_VEC_ELT (abnormal_dispatchers, k, dispatcher_bb)
> +    {
> +      edge e;
> +      edge_iterator ei;
> +      FOR_EACH_EDGE (e, ei, dispatcher_bb->succs)
> +     if (bitmap_bit_p (visited, e->dest->index))
> +       break;
> +      if (e == NULL)
> +     bitmap_clear_bit (visited, dispatcher_bb->index);
> +    }
> +
>    set_dom_info_availability (CDI_DOMINATORS, saved_state);
>  
>    /* We are deleting BBs in non-reverse dominator order, make sure
> --- gcc/testsuite/gcc.c-torture/compile/pr89280.c.jj  2019-02-23 01:28:52.633838814 +0100
> +++ gcc/testsuite/gcc.c-torture/compile/pr89280.c     2019-02-23 01:28:52.632838830 +0100
> @@ -0,0 +1,48 @@
> +/* PR tree-optimization/89280 */
> +
> +int a;
> +void foo (void);
> +__attribute__ ((returns_twice)) int bar (void);
> +void baz (int, int);
> +void *buf[5];
> +
> +static inline void
> +inl (int x)
> +{
> +  while (x)
> +    foo ();
> +}
> +
> +void
> +test1 (void)
> +{
> +  for (;;)
> +    foo ();
> +  baz (bar (), a);
> +}
> +
> +void
> +test2 (void)
> +{
> +  for (;;)
> +    foo ();
> +  baz (__builtin_setjmp (buf), a);
> +  if (a)
> +    __builtin_longjmp (buf, 1);
> +}
> +
> +void
> +test3 (void)
> +{
> +  inl (1);
> +  baz (bar (), a);
> +}
> +
> +void
> +test4 (void)
> +{
> +  inl (2);
> +  baz (__builtin_setjmp (buf), a);
> +  if (a)
> +    __builtin_longjmp (buf, 1);
> +}
> --- gcc/testsuite/gcc.dg/torture/pr57147-2.c.jj       2019-02-23 01:14:03.218263169 +0100
> +++ gcc/testsuite/gcc.dg/torture/pr57147-2.c  2019-02-23 01:42:41.529079696 +0100
> @@ -1,6 +1,7 @@
>  /* { dg-do compile } */
>  /* { dg-options "-fdump-tree-optimized" } */
>  /* { dg-skip-if "" { *-*-* } { "-fno-fat-lto-objects" } { "" } } */
> +/* { dg-skip-if "" { *-*-* } { "-O0" } { "" } } */
>  /* { dg-require-effective-target indirect_jumps } */
>  
>  struct __jmp_buf_tag {};
> @@ -19,4 +20,4 @@ void TestSyscall(void)
>    _setjmp (g_return_jmp_buf);
>  }
>  
> -/* { dg-final { scan-tree-dump "setjmp" "optimized" } } */
> +/* { dg-final { scan-tree-dump-not "setjmp" "optimized" } } */
> 
> 
>       Jakub
> 
> 
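
Since non-local gotos came up as part of the testsuite coverage above, and
since the new maybe_dead_abnormal_edge_p deliberately returns false as soon
as it sees a DECL_NONLOCAL label, here is a small GNU C sketch of such a
construct.  It is not part of the patch; the names (outer, inner, bail_out)
are invented for illustration.

/* Illustrative only: a nested function jumping to a local label of its
   containing function (GNU C extension).  The bail_out block is entered
   through an abnormal edge, which the cleanup above must not treat as
   dead.  */
#include <stdio.h>

static int
outer (int n)
{
  __label__ bail_out;		/* local label, visible to the nested function */

  void inner (int i)
  {
    if (i < 0)
      goto bail_out;		/* non-local goto back into outer */
  }

  inner (n);
  return 0;

 bail_out:
  puts ("left outer via a non-local goto");
  return 1;
}

int
main (void)
{
  return outer (-1) == 1 ? 0 : 1;
}

In this sketch the bail_out block is reachable only through the abnormal
edge created for the non-local goto; because its label is nonlocal,
maybe_dead_abnormal_edge_p above returns false for that edge and the block
is kept.
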

-- 
Richard Biener <rguent...@suse.de>
SUSE LINUX GmbH, GF: Felix Imendoerffer, Jane Smithard, Graham Norton, HRB 21284 (AG Nuernberg)
